diff --git a/dist/node_modules/.bin/semver b/dist/node_modules/.bin/semver new file mode 100644 index 0000000..77443e7 --- /dev/null +++ b/dist/node_modules/.bin/semver @@ -0,0 +1,12 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@" +else + exec node "$basedir/../semver/bin/semver.js" "$@" +fi diff --git a/dist/node_modules/.bin/semver.cmd b/dist/node_modules/.bin/semver.cmd new file mode 100644 index 0000000..9913fa9 --- /dev/null +++ b/dist/node_modules/.bin/semver.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver.js" %* diff --git a/dist/node_modules/.bin/semver.ps1 b/dist/node_modules/.bin/semver.ps1 new file mode 100644 index 0000000..314717a --- /dev/null +++ b/dist/node_modules/.bin/semver.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args + } else { + & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../semver/bin/semver.js" $args + } else { + & "node$exe" "$basedir/../semver/bin/semver.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/dist/node_modules/.bin/uuid b/dist/node_modules/.bin/uuid new file mode 100644 index 0000000..c3ec003 --- /dev/null +++ b/dist/node_modules/.bin/uuid @@ -0,0 +1,12 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../uuid/dist/bin/uuid" "$@" +else + exec node "$basedir/../uuid/dist/bin/uuid" "$@" +fi diff --git a/dist/node_modules/.bin/uuid.cmd b/dist/node_modules/.bin/uuid.cmd new file mode 100644 index 0000000..0f2376e --- /dev/null +++ b/dist/node_modules/.bin/uuid.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\uuid\dist\bin\uuid" %* diff --git a/dist/node_modules/.bin/uuid.ps1 b/dist/node_modules/.bin/uuid.ps1 new file mode 100644 index 0000000..7804628 --- /dev/null +++ b/dist/node_modules/.bin/uuid.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input 
+ if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../uuid/dist/bin/uuid" $args + } else { + & "$basedir/node$exe" "$basedir/../uuid/dist/bin/uuid" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../uuid/dist/bin/uuid" $args + } else { + & "node$exe" "$basedir/../uuid/dist/bin/uuid" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/dist/node_modules/.package-lock.json b/dist/node_modules/.package-lock.json new file mode 100644 index 0000000..3ec2a23 --- /dev/null +++ b/dist/node_modules/.package-lock.json @@ -0,0 +1,730 @@ +{ + "name": "autotag-action", + "version": "1.1.3", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/@actions/core": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.1.tgz", + "integrity": "sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==", + "dependencies": { + "@actions/http-client": "^2.0.1", + "uuid": "^8.3.2" + } + }, + "node_modules/@actions/http-client": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.1.tgz", + "integrity": "sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==", + "dependencies": { + "tunnel": "^0.0.6", + "undici": "^5.25.4" + } + }, + "node_modules/@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@octokit/app": { + "version": "13.1.8", + "resolved": "https://registry.npmjs.org/@octokit/app/-/app-13.1.8.tgz", + "integrity": "sha512-bCncePMguVyFpdBbnceFKfmPOuUD94T189GuQ0l00ZcQ+mX4hyPqnaWJlsXE2HSdA71eV7p8GPDZ+ErplTkzow==", + "dependencies": { + "@octokit/auth-app": "^4.0.13", + "@octokit/auth-unauthenticated": "^3.0.0", + "@octokit/core": "^4.0.0", + "@octokit/oauth-app": "^4.0.7", + "@octokit/plugin-paginate-rest": "^6.0.0", + "@octokit/types": "^9.0.0", + "@octokit/webhooks": "^10.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/auth-app": { + "version": "4.0.13", + "resolved": "https://registry.npmjs.org/@octokit/auth-app/-/auth-app-4.0.13.tgz", + "integrity": "sha512-NBQkmR/Zsc+8fWcVIFrwDgNXS7f4XDrkd9LHdi9DPQw1NdGHLviLzRO2ZBwTtepnwHXW5VTrVU9eFGijMUqllg==", + "dependencies": { + "@octokit/auth-oauth-app": "^5.0.0", + "@octokit/auth-oauth-user": "^2.0.0", + "@octokit/request": "^6.0.0", + "@octokit/request-error": "^3.0.0", + "@octokit/types": "^9.0.0", + "deprecation": "^2.3.1", + "lru-cache": "^9.0.0", + "universal-github-app-jwt": "^1.1.1", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/auth-oauth-app": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-app/-/auth-oauth-app-5.0.6.tgz", + "integrity": "sha512-SxyfIBfeFcWd9Z/m1xa4LENTQ3l1y6Nrg31k2Dcb1jS5ov7pmwMJZ6OGX8q3K9slRgVpeAjNA1ipOAMHkieqyw==", + "dependencies": { + "@octokit/auth-oauth-device": "^4.0.0", + "@octokit/auth-oauth-user": "^2.0.0", + "@octokit/request": "^6.0.0", + "@octokit/types": "^9.0.0", + "@types/btoa-lite": "^1.0.0", + "btoa-lite": "^1.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/auth-oauth-device": { + 
"version": "4.0.5", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-device/-/auth-oauth-device-4.0.5.tgz", + "integrity": "sha512-XyhoWRTzf2ZX0aZ52a6Ew5S5VBAfwwx1QnC2Np6Et3MWQpZjlREIcbcvVZtkNuXp6Z9EeiSLSDUqm3C+aMEHzQ==", + "dependencies": { + "@octokit/oauth-methods": "^2.0.0", + "@octokit/request": "^6.0.0", + "@octokit/types": "^9.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/auth-oauth-user": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-user/-/auth-oauth-user-2.1.2.tgz", + "integrity": "sha512-kkRqNmFe7s5GQcojE3nSlF+AzYPpPv7kvP/xYEnE57584pixaFBH8Vovt+w5Y3E4zWUEOxjdLItmBTFAWECPAg==", + "dependencies": { + "@octokit/auth-oauth-device": "^4.0.0", + "@octokit/oauth-methods": "^2.0.0", + "@octokit/request": "^6.0.0", + "@octokit/types": "^9.0.0", + "btoa-lite": "^1.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/auth-token": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.4.tgz", + "integrity": "sha512-TWFX7cZF2LXoCvdmJWY7XVPi74aSY0+FfBZNSXEXFkMpjcqsQwDSYVv5FhRFaI0V1ECnwbz4j59T/G+rXNWaIQ==", + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/auth-unauthenticated": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@octokit/auth-unauthenticated/-/auth-unauthenticated-3.0.5.tgz", + "integrity": "sha512-yH2GPFcjrTvDWPwJWWCh0tPPtTL5SMgivgKPA+6v/XmYN6hGQkAto8JtZibSKOpf8ipmeYhLNWQ2UgW0GYILCw==", + "dependencies": { + "@octokit/request-error": "^3.0.0", + "@octokit/types": "^9.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/core": { + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-4.2.4.tgz", + "integrity": "sha512-rYKilwgzQ7/imScn3M9/pFfUf4I1AZEH3KhyJmtPdE2zfaXAn2mFfUy4FbKewzc2We5y/LlKLj36fWJLKC2SIQ==", + "dependencies": { + "@octokit/auth-token": "^3.0.0", + "@octokit/graphql": "^5.0.0", + "@octokit/request": "^6.0.0", + "@octokit/request-error": "^3.0.0", + "@octokit/types": "^9.0.0", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/endpoint": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-7.0.6.tgz", + "integrity": "sha512-5L4fseVRUsDFGR00tMWD/Trdeeihn999rTMGRMC1G/Ldi1uWlWJzI98H4Iak5DB/RVvQuyMYKqSK/R6mbSOQyg==", + "dependencies": { + "@octokit/types": "^9.0.0", + "is-plain-object": "^5.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/graphql": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-5.0.6.tgz", + "integrity": "sha512-Fxyxdy/JH0MnIB5h+UQ3yCoh1FG4kWXfFKkpWqjZHw/p+Kc8Y44Hu/kCgNBT6nU1shNumEchmW/sUO1JuQnPcw==", + "dependencies": { + "@octokit/request": "^6.0.0", + "@octokit/types": "^9.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/oauth-app": { + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/@octokit/oauth-app/-/oauth-app-4.2.4.tgz", + "integrity": "sha512-iuOVFrmm5ZKNavRtYu5bZTtmlKLc5uVgpqTfMEqYYf2OkieV6VdxKZAb5qLVdEPL8LU2lMWcGpavPBV835cgoA==", + "dependencies": { + "@octokit/auth-oauth-app": "^5.0.0", + "@octokit/auth-oauth-user": "^2.0.0", + "@octokit/auth-unauthenticated": "^3.0.0", + "@octokit/core": "^4.0.0", + 
"@octokit/oauth-authorization-url": "^5.0.0", + "@octokit/oauth-methods": "^2.0.0", + "@types/aws-lambda": "^8.10.83", + "fromentries": "^1.3.1", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/oauth-authorization-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@octokit/oauth-authorization-url/-/oauth-authorization-url-5.0.0.tgz", + "integrity": "sha512-y1WhN+ERDZTh0qZ4SR+zotgsQUE1ysKnvBt1hvDRB2WRzYtVKQjn97HEPzoehh66Fj9LwNdlZh+p6TJatT0zzg==", + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/oauth-methods": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@octokit/oauth-methods/-/oauth-methods-2.0.6.tgz", + "integrity": "sha512-l9Uml2iGN2aTWLZcm8hV+neBiFXAQ9+3sKiQe/sgumHlL6HDg0AQ8/l16xX/5jJvfxueqTW5CWbzd0MjnlfHZw==", + "dependencies": { + "@octokit/oauth-authorization-url": "^5.0.0", + "@octokit/request": "^6.2.3", + "@octokit/request-error": "^3.0.3", + "@octokit/types": "^9.0.0", + "btoa-lite": "^1.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "18.1.1", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-18.1.1.tgz", + "integrity": "sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw==" + }, + "node_modules/@octokit/plugin-paginate-rest": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-6.1.2.tgz", + "integrity": "sha512-qhrmtQeHU/IivxucOV1bbI/xZyC/iOBhclokv7Sut5vnejAIAEXVcGQeRpQlU39E0WwK9lNvJHphHri/DB6lbQ==", + "dependencies": { + "@octokit/tsconfig": "^1.0.2", + "@octokit/types": "^9.2.3" + }, + "engines": { + "node": ">= 14" + }, + "peerDependencies": { + "@octokit/core": ">=4" + } + }, + "node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-7.2.3.tgz", + "integrity": "sha512-I5Gml6kTAkzVlN7KCtjOM+Ruwe/rQppp0QU372K1GP7kNOYEKe8Xn5BW4sE62JAHdwpq95OQK/qGNyKQMUzVgA==", + "dependencies": { + "@octokit/types": "^10.0.0" + }, + "engines": { + "node": ">= 14" + }, + "peerDependencies": { + "@octokit/core": ">=3" + } + }, + "node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-10.0.0.tgz", + "integrity": "sha512-Vm8IddVmhCgU1fxC1eyinpwqzXPEYu0NrYzD3YZjlGjyftdLBTeqNblRC0jmJmgxbJIsQlyogVeGnrNaaMVzIg==", + "dependencies": { + "@octokit/openapi-types": "^18.0.0" + } + }, + "node_modules/@octokit/plugin-retry": { + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-4.1.6.tgz", + "integrity": "sha512-obkYzIgEC75r8+9Pnfiiqy3y/x1bc3QLE5B7qvv9wi9Kj0R5tGQFC6QMBg1154WQ9lAVypuQDGyp3hNpp15gQQ==", + "dependencies": { + "@octokit/types": "^9.0.0", + "bottleneck": "^2.15.3" + }, + "engines": { + "node": ">= 14" + }, + "peerDependencies": { + "@octokit/core": ">=3" + } + }, + "node_modules/@octokit/plugin-throttling": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/@octokit/plugin-throttling/-/plugin-throttling-5.2.3.tgz", + "integrity": "sha512-C9CFg9mrf6cugneKiaI841iG8DOv6P5XXkjmiNNut+swePxQ7RWEdAZRp5rJoE1hjsIqiYcKa/ZkOQ+ujPI39Q==", + "dependencies": { + "@octokit/types": "^9.0.0", + "bottleneck": "^2.15.3" + }, + "engines": { + "node": ">= 14" + }, + "peerDependencies": { + 
"@octokit/core": "^4.0.0" + } + }, + "node_modules/@octokit/request": { + "version": "6.2.8", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-6.2.8.tgz", + "integrity": "sha512-ow4+pkVQ+6XVVsekSYBzJC0VTVvh/FCTUUgTsboGq+DTeWdyIFV8WSCdo0RIxk6wSkBTHqIK1mYuY7nOBXOchw==", + "dependencies": { + "@octokit/endpoint": "^7.0.0", + "@octokit/request-error": "^3.0.0", + "@octokit/types": "^9.0.0", + "is-plain-object": "^5.0.0", + "node-fetch": "^2.6.7", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/request-error": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.3.tgz", + "integrity": "sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ==", + "dependencies": { + "@octokit/types": "^9.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/tsconfig": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@octokit/tsconfig/-/tsconfig-1.0.2.tgz", + "integrity": "sha512-I0vDR0rdtP8p2lGMzvsJzbhdOWy405HcGovrspJ8RRibHnyRgggUSNO5AIox5LmqiwmatHKYsvj6VGFHkqS7lA==" + }, + "node_modules/@octokit/types": { + "version": "9.3.2", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz", + "integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==", + "dependencies": { + "@octokit/openapi-types": "^18.0.0" + } + }, + "node_modules/@octokit/webhooks": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.9.2.tgz", + "integrity": "sha512-hFVF/szz4l/Y/GQdKxNmQjUke0XJXK986p+ucIlubTGVPVtVtup5G1jarQfvCMBs9Fvlf9dvH8K83E4lefmofQ==", + "dependencies": { + "@octokit/request-error": "^3.0.0", + "@octokit/webhooks-methods": "^3.0.0", + "@octokit/webhooks-types": "6.11.0", + "aggregate-error": "^3.1.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/webhooks-methods": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.3.tgz", + "integrity": "sha512-2vM+DCNTJ5vL62O5LagMru6XnYhV4fJslK+5YUkTa6rWlW2S+Tqs1lF9Wr9OGqHfVwpBj3TeztWfVON/eUoW1Q==", + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/webhooks-types": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.11.0.tgz", + "integrity": "sha512-AanzbulOHljrku1NGfafxdpTCfw2ENaWzH01N2vqQM+cUFbk868Cgh0xylz0JIM9BoKbfI++bdD6EYX0Q/UTEw==" + }, + "node_modules/@types/aws-lambda": { + "version": "8.10.136", + "resolved": "https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.136.tgz", + "integrity": "sha512-cmmgqxdVGhxYK9lZMYYXYRJk6twBo53ivtXjIUEFZxfxe4TkZTZBK3RRWrY2HjJcUIix0mdifn15yjOAat5lTA==" + }, + "node_modules/@types/btoa-lite": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@types/btoa-lite/-/btoa-lite-1.0.2.tgz", + "integrity": "sha512-ZYbcE2x7yrvNFJiU7xJGrpF/ihpkM7zKgw8bha3LNJSesvTtUNxbpzaT7WXBIryf6jovisrxTBvymxMeLLj1Mg==" + }, + "node_modules/@types/jsonwebtoken": { + "version": "9.0.6", + "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.6.tgz", + "integrity": "sha512-/5hndP5dCjloafCXns6SZyESp3Ldq7YjH3zwzwczYnjxIT0Fqzk5ROSYVGfFyczIue7IUEj8hkvLbPoLQ18vQw==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/node": { + "version": "20.11.25", + "resolved": 
"https://registry.npmjs.org/@types/node/-/node-20.11.25.tgz", + "integrity": "sha512-TBHyJxk2b7HceLVGFcpAUjsa5zIdsPWlR6XHfyGzd0SFu+/NFgQgMAl96MSDZgQDvJAvV6BKsFOrt6zIL09JDw==", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/before-after-hook": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" + }, + "node_modules/bottleneck": { + "version": "2.19.5", + "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", + "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==" + }, + "node_modules/btoa-lite": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/btoa-lite/-/btoa-lite-1.0.0.tgz", + "integrity": "sha512-gvW7InbIyF8AicrqWoptdW08pUxuhq8BEgowNajy9RhiE86fmGAGl+bLKo6oB8QP0CkqHLowfN0oJdKC/J6LbA==" + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==" + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/fromentries": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz", + "integrity": "sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-plain-object": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", + "integrity": 
"sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jsonwebtoken": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, + "node_modules/jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==" + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==" + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==" + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" + }, + "node_modules/lru-cache": { + "version": "9.1.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-9.1.2.tgz", + "integrity": "sha512-ERJq3FOzJTxBbFjZ7iDs+NiK4VI9Wz+RdrrAB8dio1oV+YvdPzUEE4QNiT2VD51DkIbCYRUUzCRkssXCHqSnKQ==", + "engines": { + "node": "14 || >=16.14" + } + 
}, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/octokit": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/octokit/-/octokit-2.1.0.tgz", + "integrity": "sha512-Pxi6uKTjBRZWgAwsw1NgHdRlL+QASCN35OYS7X79o7PtBME0CLXEroZmPtEwlWZbPTP+iDbEy2wCbSOgm0uGIQ==", + "dependencies": { + "@octokit/app": "^13.1.5", + "@octokit/core": "^4.2.1", + "@octokit/oauth-app": "^4.2.1", + "@octokit/plugin-paginate-rest": "^6.1.0", + "@octokit/plugin-rest-endpoint-methods": "^7.1.1", + "@octokit/plugin-retry": "^4.1.3", + "@octokit/plugin-throttling": "^5.2.2", + "@octokit/request-error": "^v3.0.3", + "@octokit/types": "^9.2.2" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, + "node_modules/tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "engines": { + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + } + }, + "node_modules/undici": { + "version": "5.28.3", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", + "integrity": 
"sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", + "dependencies": { + "@fastify/busboy": "^2.0.0" + }, + "engines": { + "node": ">=14.0" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + }, + "node_modules/universal-github-app-jwt": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/universal-github-app-jwt/-/universal-github-app-jwt-1.1.2.tgz", + "integrity": "sha512-t1iB2FmLFE+yyJY9+3wMx0ejB+MQpEVkH0gQv7dR6FZyltyq+ZZO0uDpbopxhrZ3SLEO4dCEkIujOMldEQ2iOA==", + "dependencies": { + "@types/jsonwebtoken": "^9.0.0", + "jsonwebtoken": "^9.0.2" + } + }, + "node_modules/universal-user-agent": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", + "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==" + }, + "node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } +} diff --git a/dist/node_modules/@actions/core/LICENSE.md b/dist/node_modules/@actions/core/LICENSE.md new file mode 100644 index 0000000..dbae2ed --- /dev/null +++ b/dist/node_modules/@actions/core/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/dist/node_modules/@actions/core/README.md b/dist/node_modules/@actions/core/README.md new file mode 100644 index 0000000..8a47143 --- /dev/null +++ b/dist/node_modules/@actions/core/README.md @@ -0,0 +1,335 @@ +# `@actions/core` + +> Core functions for setting results, logging, registering secrets and exporting variables across actions + +## Usage + +### Import the package + +```js +// javascript +const core = require('@actions/core'); + +// typescript +import * as core from '@actions/core'; +``` + +#### Inputs/Outputs + +Action inputs can be read with `getInput` which returns a `string` or `getBooleanInput` which parses a boolean based on the [yaml 1.2 specification](https://yaml.org/spec/1.2/spec.html#id2804923). If `required` set to be false, the input should have a default value in `action.yml`. + +Outputs can be set with `setOutput` which makes them available to be mapped into inputs of other actions to ensure they are decoupled. + +```js +const myInput = core.getInput('inputName', { required: true }); +const myBooleanInput = core.getBooleanInput('booleanInputName', { required: true }); +const myMultilineInput = core.getMultilineInput('multilineInputName', { required: true }); +core.setOutput('outputKey', 'outputVal'); +``` + +#### Exporting variables + +Since each step runs in a separate process, you can use `exportVariable` to add it to this step and future steps environment blocks. + +```js +core.exportVariable('envVar', 'Val'); +``` + +#### Setting a secret + +Setting a secret registers the secret with the runner to ensure it is masked in logs. + +```js +core.setSecret('myPassword'); +``` + +#### PATH Manipulation + +To make a tool's path available in the path for the remainder of the job (without altering the machine or containers state), use `addPath`. The runner will prepend the path given to the jobs PATH. + +```js +core.addPath('/path/to/mytool'); +``` + +#### Exit codes + +You should use this library to set the failing exit code for your action. If status is not set and the script runs to completion, that will lead to a success. + +```js +const core = require('@actions/core'); + +try { + // Do stuff +} +catch (err) { + // setFailed logs the message and sets a failing exit code + core.setFailed(`Action failed with error ${err}`); +} +``` + +Note that `setNeutral` is not yet implemented in actions V2 but equivalent functionality is being planned. + +#### Logging + +Finally, this library provides some utilities for logging. Note that debug logging is hidden from the logs by default. This behavior can be toggled by enabling the [Step Debug Logs](../../docs/action-debugging.md#step-debug-logs). + +```js +const core = require('@actions/core'); + +const myInput = core.getInput('input'); +try { + core.debug('Inside try block'); + + if (!myInput) { + core.warning('myInput was not set'); + } + + if (core.isDebug()) { + // curl -v https://github.com + } else { + // curl https://github.com + } + + // Do stuff + core.info('Output to the actions build log') + + core.notice('This is a message that will also emit an annotation') +} +catch (err) { + core.error(`Error ${err}, action may still succeed though`); +} +``` + +This library can also wrap chunks of output in foldable groups. 
+ +```js +const core = require('@actions/core') + +// Manually wrap output +core.startGroup('Do some function') +doSomeFunction() +core.endGroup() + +// Wrap an asynchronous function call +const result = await core.group('Do something async', async () => { + const response = await doSomeHTTPRequest() + return response +}) +``` + +#### Annotations + +This library has 3 methods that will produce [annotations](https://docs.github.com/en/rest/reference/checks#create-a-check-run). +```js +core.error('This is a bad error, action may still succeed though.') + +core.warning('Something went wrong, but it\'s not bad enough to fail the build.') + +core.notice('Something happened that you might want to know about.') +``` + +These will surface to the UI in the Actions page and on Pull Requests. They look something like this: + +![Annotations Image](../../docs/assets/annotations.png) + +These annotations can also be attached to particular lines and columns of your source files to show exactly where a problem is occuring. + +These options are: +```typescript +export interface AnnotationProperties { + /** + * A title for the annotation. + */ + title?: string + + /** + * The name of the file for which the annotation should be created. + */ + file?: string + + /** + * The start line for the annotation. + */ + startLine?: number + + /** + * The end line for the annotation. Defaults to `startLine` when `startLine` is provided. + */ + endLine?: number + + /** + * The start column for the annotation. Cannot be sent when `startLine` and `endLine` are different values. + */ + startColumn?: number + + /** + * The end column for the annotation. Cannot be sent when `startLine` and `endLine` are different values. + * Defaults to `startColumn` when `startColumn` is provided. + */ + endColumn?: number +} +``` + +#### Styling output + +Colored output is supported in the Action logs via standard [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code). 3/4 bit, 8 bit and 24 bit colors are all supported. + +Foreground colors: + +```js +// 3/4 bit +core.info('\u001b[35mThis foreground will be magenta') + +// 8 bit +core.info('\u001b[38;5;6mThis foreground will be cyan') + +// 24 bit +core.info('\u001b[38;2;255;0;0mThis foreground will be bright red') +``` + +Background colors: + +```js +// 3/4 bit +core.info('\u001b[43mThis background will be yellow'); + +// 8 bit +core.info('\u001b[48;5;6mThis background will be cyan') + +// 24 bit +core.info('\u001b[48;2;255;0;0mThis background will be bright red') +``` + +Special styles: + +```js +core.info('\u001b[1mBold text') +core.info('\u001b[3mItalic text') +core.info('\u001b[4mUnderlined text') +``` + +ANSI escape codes can be combined with one another: + +```js +core.info('\u001b[31;46mRed foreground with a cyan background and \u001b[1mbold text at the end'); +``` + +> Note: Escape codes reset at the start of each line + +```js +core.info('\u001b[35mThis foreground will be magenta') +core.info('This foreground will reset to the default') +``` + +Manually typing escape codes can be a little difficult, but you can use third party modules such as [ansi-styles](https://github.com/chalk/ansi-styles). 
+ +```js +const style = require('ansi-styles'); +core.info(style.color.ansi16m.hex('#abcdef') + 'Hello world!') +``` + +#### Action state + +You can use this library to save state and get state for sharing information between a given wrapper action: + +**action.yml**: + +```yaml +name: 'Wrapper action sample' +inputs: + name: + default: 'GitHub' +runs: + using: 'node12' + main: 'main.js' + post: 'cleanup.js' +``` + +In action's `main.js`: + +```js +const core = require('@actions/core'); + +core.saveState("pidToKill", 12345); +``` + +In action's `cleanup.js`: + +```js +const core = require('@actions/core'); + +var pid = core.getState("pidToKill"); + +process.kill(pid); +``` + +#### OIDC Token + +You can use these methods to interact with the GitHub OIDC provider and get a JWT ID token which would help to get access token from third party cloud providers. + +**Method Name**: getIDToken() + +**Inputs** + +audience : optional + +**Outputs** + +A [JWT](https://jwt.io/) ID Token + +In action's `main.ts`: +```js +const core = require('@actions/core'); +async function getIDTokenAction(): Promise { + + const audience = core.getInput('audience', {required: false}) + + const id_token1 = await core.getIDToken() // ID Token with default audience + const id_token2 = await core.getIDToken(audience) // ID token with custom audience + + // this id_token can be used to get access token from third party cloud providers +} +getIDTokenAction() +``` + +In action's `actions.yml`: + +```yaml +name: 'GetIDToken' +description: 'Get ID token from Github OIDC provider' +inputs: + audience: + description: 'Audience for which the ID token is intended for' + required: false +outputs: + id_token1: + description: 'ID token obtained from OIDC provider' + id_token2: + description: 'ID token obtained from OIDC provider' +runs: + using: 'node12' + main: 'dist/index.js' +``` + +#### Filesystem path helpers + +You can use these methods to manipulate file paths across operating systems. + +The `toPosixPath` function converts input paths to Posix-style (Linux) paths. +The `toWin32Path` function converts input paths to Windows-style paths. These +functions work independently of the underlying runner operating system. + +```js +toPosixPath('\\foo\\bar') // => /foo/bar +toWin32Path('/foo/bar') // => \foo\bar +``` + +The `toPlatformPath` function converts input paths to the expected value on the runner's operating system. + +```js +// On a Windows runner. +toPlatformPath('/foo/bar') // => \foo\bar + +// On a Linux runner. 
+toPlatformPath('\\foo\\bar') // => /foo/bar +``` diff --git a/dist/node_modules/@actions/core/lib/command.d.ts b/dist/node_modules/@actions/core/lib/command.d.ts new file mode 100644 index 0000000..53f8f4b --- /dev/null +++ b/dist/node_modules/@actions/core/lib/command.d.ts @@ -0,0 +1,15 @@ +export interface CommandProperties { + [key: string]: any; +} +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +export declare function issueCommand(command: string, properties: CommandProperties, message: any): void; +export declare function issue(name: string, message?: string): void; diff --git a/dist/node_modules/@actions/core/lib/command.js b/dist/node_modules/@actions/core/lib/command.js new file mode 100644 index 0000000..0b28c66 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/command.js @@ -0,0 +1,92 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(require("os")); +const utils_1 = require("./utils"); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# 
sourceMappingURL=command.js.map \ No newline at end of file diff --git a/dist/node_modules/@actions/core/lib/command.js.map b/dist/node_modules/@actions/core/lib/command.js.map new file mode 100644 index 0000000..51c7c63 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/command.js.map @@ -0,0 +1 @@ +{"version":3,"file":"command.js","sourceRoot":"","sources":["../src/command.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,mCAAsC;AAWtC;;;;;;;;;GASG;AACH,SAAgB,YAAY,CAC1B,OAAe,EACf,UAA6B,EAC7B,OAAY;IAEZ,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;IACrD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AAC/C,CAAC;AAPD,oCAOC;AAED,SAAgB,KAAK,CAAC,IAAY,EAAE,OAAO,GAAG,EAAE;IAC9C,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACjC,CAAC;AAFD,sBAEC;AAED,MAAM,UAAU,GAAG,IAAI,CAAA;AAEvB,MAAM,OAAO;IAKX,YAAY,OAAe,EAAE,UAA6B,EAAE,OAAe;QACzE,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,iBAAiB,CAAA;SAC5B;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAED,QAAQ;QACN,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,OAAO,CAAA;QAEtC,IAAI,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YAC9D,MAAM,IAAI,GAAG,CAAA;YACb,IAAI,KAAK,GAAG,IAAI,CAAA;YAChB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,UAAU,EAAE;gBACjC,IAAI,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;oBACvC,MAAM,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;oBAChC,IAAI,GAAG,EAAE;wBACP,IAAI,KAAK,EAAE;4BACT,KAAK,GAAG,KAAK,CAAA;yBACd;6BAAM;4BACL,MAAM,IAAI,GAAG,CAAA;yBACd;wBAED,MAAM,IAAI,GAAG,GAAG,IAAI,cAAc,CAAC,GAAG,CAAC,EAAE,CAAA;qBAC1C;iBACF;aACF;SACF;QAED,MAAM,IAAI,GAAG,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAA;QACpD,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAED,SAAS,UAAU,CAAC,CAAM;IACxB,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;AAC1B,CAAC;AAED,SAAS,cAAc,CAAC,CAAM;IAC5B,OAAO,sBAAc,CAAC,CAAC,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;AACzB,CAAC"} \ No newline at end of file diff --git a/dist/node_modules/@actions/core/lib/core.d.ts b/dist/node_modules/@actions/core/lib/core.d.ts new file mode 100644 index 0000000..ceecdd3 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/core.d.ts @@ -0,0 +1,198 @@ +/** + * Interface for getInput options + */ +export interface InputOptions { + /** Optional. Whether the input is required. If required and not present, will throw. Defaults to false */ + required?: boolean; + /** Optional. Whether leading/trailing whitespace will be trimmed for the input. Defaults to true */ + trimWhitespace?: boolean; +} +/** + * The code to exit an action + */ +export declare enum ExitCode { + /** + * A code indicating that the action was successful + */ + Success = 0, + /** + * A code indicating that the action was a failure + */ + Failure = 1 +} +/** + * Optional properties that can be sent with annotation commands (notice, error, and warning) + * See: https://docs.github.com/en/rest/reference/checks#create-a-check-run for more information about annotations. + */ +export interface AnnotationProperties { + /** + * A title for the annotation. + */ + title?: string; + /** + * The path of the file for which the annotation should be created. + */ + file?: string; + /** + * The start line for the annotation. 
+ */ + startLine?: number; + /** + * The end line for the annotation. Defaults to `startLine` when `startLine` is provided. + */ + endLine?: number; + /** + * The start column for the annotation. Cannot be sent when `startLine` and `endLine` are different values. + */ + startColumn?: number; + /** + * The end column for the annotation. Cannot be sent when `startLine` and `endLine` are different values. + * Defaults to `startColumn` when `startColumn` is provided. + */ + endColumn?: number; +} +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +export declare function exportVariable(name: string, val: any): void; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +export declare function setSecret(secret: string): void; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +export declare function addPath(inputPath: string): void; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +export declare function getInput(name: string, options?: InputOptions): string; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +export declare function getMultilineInput(name: string, options?: InputOptions): string[]; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +export declare function getBooleanInput(name: string, options?: InputOptions): boolean; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +export declare function setOutput(name: string, value: any): void; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +export declare function setCommandEcho(enabled: boolean): void; +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +export declare function setFailed(message: string | Error): void; +/** + * Gets whether Actions Step Debug is on or not + */ +export declare function isDebug(): boolean; +/** + * Writes debug message to user log + * @param message debug message + */ +export declare function debug(message: string): void; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. 
+ */ +export declare function error(message: string | Error, properties?: AnnotationProperties): void; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +export declare function warning(message: string | Error, properties?: AnnotationProperties): void; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +export declare function notice(message: string | Error, properties?: AnnotationProperties): void; +/** + * Writes info to log with console.log. + * @param message info message + */ +export declare function info(message: string): void; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +export declare function startGroup(name: string): void; +/** + * End an output group. + */ +export declare function endGroup(): void; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +export declare function group(name: string, fn: () => Promise): Promise; +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +export declare function saveState(name: string, value: any): void; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +export declare function getState(name: string): string; +export declare function getIDToken(aud?: string): Promise; +/** + * Summary exports + */ +export { summary } from './summary'; +/** + * @deprecated use core.summary + */ +export { markdownSummary } from './summary'; +/** + * Path exports + */ +export { toPosixPath, toWin32Path, toPlatformPath } from './path-utils'; diff --git a/dist/node_modules/@actions/core/lib/core.js b/dist/node_modules/@actions/core/lib/core.js new file mode 100644 index 0000000..48df6ad --- /dev/null +++ b/dist/node_modules/@actions/core/lib/core.js @@ -0,0 +1,336 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; +const command_1 = require("./command"); +const file_command_1 = require("./file-command"); +const utils_1 = require("./utils"); +const os = __importStar(require("os")); +const path = __importStar(require("path")); +const oidc_utils_1 = require("./oidc-utils"); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = utils_1.toCommandValue(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); + } + command_1.issueCommand('set-env', { name }, convertedVal); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + file_command_1.issueFileCommand('PATH', inputPath); + } + else { + command_1.issueCommand('add-path', {}, inputPath); + } + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. + * + * @param name name of the input to get + * @param options optional. See InputOptions. 
+ * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + if (options && options.trimWhitespace === false) { + return val; + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map(input => input.trim()); +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); + } + process.stdout.write(os.EOL); + command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. 
+ * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function error(message, properties = {}) { + command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function warning(message, properties = {}) { + command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); + } + command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = require("./summary"); +Object.defineProperty(exports, "summary", { enumerable: true, get: function () { return summary_1.summary; } }); +/** + * @deprecated use core.summary + */ +var summary_2 = require("./summary"); +Object.defineProperty(exports, "markdownSummary", { enumerable: true, get: function () { return summary_2.markdownSummary; } }); +/** + * Path exports + */ +var path_utils_1 = require("./path-utils"); +Object.defineProperty(exports, "toPosixPath", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } }); +Object.defineProperty(exports, "toWin32Path", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } }); +Object.defineProperty(exports, "toPlatformPath", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }); +//# sourceMappingURL=core.js.map \ No newline at end of file diff --git a/dist/node_modules/@actions/core/lib/core.js.map b/dist/node_modules/@actions/core/lib/core.js.map new file mode 100644 index 0000000..99f7fd8 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/core.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"core.js","sourceRoot":"","sources":["../src/core.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAA6C;AAC7C,iDAAuE;AACvE,mCAA2D;AAE3D,uCAAwB;AACxB,2CAA4B;AAE5B,6CAAuC;AAavC;;GAEG;AACH,IAAY,QAUX;AAVD,WAAY,QAAQ;IAClB;;OAEG;IACH,6CAAW,CAAA;IAEX;;OAEG;IACH,6CAAW,CAAA;AACb,CAAC,EAVW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAUnB;AAuCD,yEAAyE;AACzE,YAAY;AACZ,yEAAyE;AAEzE;;;;GAIG;AACH,8DAA8D;AAC9D,SAAgB,cAAc,CAAC,IAAY,EAAE,GAAQ;IACnD,MAAM,YAAY,GAAG,sBAAc,CAAC,GAAG,CAAC,CAAA;IACxC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,YAAY,CAAA;IAEhC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE,CAAA;IAChD,IAAI,QAAQ,EAAE;QACZ,OAAO,+BAAgB,CAAC,KAAK,EAAE,qCAAsB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;KAClE;IAED,sBAAY,CAAC,SAAS,EAAE,EAAC,IAAI,EAAC,EAAE,YAAY,CAAC,CAAA;AAC/C,CAAC;AAVD,wCAUC;AAED;;;GAGG;AACH,SAAgB,SAAS,CAAC,MAAc;IACtC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,MAAM,CAAC,CAAA;AACtC,CAAC;AAFD,8BAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,SAAiB;IACvC,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;IACjD,IAAI,QAAQ,EAAE;QACZ,+BAAgB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAA;KACpC;SAAM;QACL,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,SAAS,CAAC,CAAA;KACxC;IACD,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,GAAG,SAAS,GAAG,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAA;AAC7E,CAAC;AARD,0BAQC;AAED;;;;;;;;GAQG;AACH,SAAgB,QAAQ,CAAC,IAAY,EAAE,OAAsB;IAC3D,MAAM,GAAG,GACP,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC,IAAI,EAAE,CAAA;IACrE,IAAI,OAAO,IAAI,OAAO,CAAC,QAAQ,IAAI,CAAC,GAAG,EAAE;QACvC,MAAM,IAAI,KAAK,CAAC,oCAAoC,IAAI,EAAE,CAAC,CAAA;KAC5D;IAED,IAAI,OAAO,IAAI,OAAO,CAAC,cAAc,KAAK,KAAK,EAAE;QAC/C,OAAO,GAAG,CAAA;KACX;IAED,OAAO,GAAG,CAAC,IAAI,EAAE,CAAA;AACnB,CAAC;AAZD,4BAYC;AAED;;;;;;;GAOG;AACH,SAAgB,iBAAiB,CAC/B,IAAY,EACZ,OAAsB;IAEtB,MAAM,MAAM,GAAa,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC;SAC7C,KAAK,CAAC,IAAI,CAAC;SACX,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IAExB,IAAI,OAAO,IAAI,OAAO,CAAC,cAAc,KAAK,KAAK,EAAE;QAC/C,OAAO,MAAM,CAAA;KACd;IAED,OAAO,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAA;AAC1C,CAAC;AAbD,8CAaC;AAED;;;;;;;;;GASG;AACH,SAAgB,eAAe,CAAC,IAAY,EAAE,OAAsB;IAClE,MAAM,SAAS,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAA;IAC1C,MAAM,UAAU,GAAG,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IAC9C,MAAM,GAAG,GAAG,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACnC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,IAAI,CAAA;IACxC,IAAI,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC;QAAE,OAAO,KAAK,CAAA;IAC1C,MAAM,IAAI,SAAS,CACjB,6DAA6D,IAAI,IAAI;QACnE,4EAA4E,CAC/E,CAAA;AACH,CAAC;AAVD,0CAUC;AAED;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,EAAE,CAAA;IACnD,IAAI,QAAQ,EAAE;QACZ,OAAO,+BAAgB,CAAC,QAAQ,EAAE,qCAAsB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,CAAA;KACvE;IAED,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,CAAC,CAAA;IAC5B,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,sBAAc,CAAC,KAAK,CAAC,CAAC,CAAA;AAC3D,CAAC;AARD,8BAQC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,OAAgB;IAC7C,eAAK,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAA;AACvC,CAAC;AAFD,wCAEC;AAED,yEAAyE;AACzE,UAAU;AACV,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,SAAS,CAAC,OAAuB;IAC/C,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAA;IAEnC,KAAK,CAAC,OAAO,CAAC,CAAA;AAChB,CAAC;AAJD,8BAIC;AAED,yEAAyE;AACzE,mBAAmB;AACnB,yEAAyE;AAEzE;;GAEG;AACH,SAAgB,OAAO;IACrB,OAAO,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,GAAG,CAAA;AAC5C,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,sBAAY,CAAC,OAAO,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACpC,CAAC;AAFD,sBAEC;AAED;;;;GAIG;AACH,SAAgB,KAAK,CACnB,OAAuB,EACvB,aAAmC,EAAE;IAErC
,sBAAY,CACV,OAAO,EACP,2BAAmB,CAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,sBASC;AAED;;;;GAIG;AACH,SAAgB,OAAO,CACrB,OAAuB,EACvB,aAAmC,EAAE;IAErC,sBAAY,CACV,SAAS,EACT,2BAAmB,CAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,0BASC;AAED;;;;GAIG;AACH,SAAgB,MAAM,CACpB,OAAuB,EACvB,aAAmC,EAAE;IAErC,sBAAY,CACV,QAAQ,EACR,2BAAmB,CAAC,UAAU,CAAC,EAC/B,OAAO,YAAY,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CACxD,CAAA;AACH,CAAC;AATD,wBASC;AAED;;;GAGG;AACH,SAAgB,IAAI,CAAC,OAAe;IAClC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,oBAEC;AAED;;;;;;GAMG;AACH,SAAgB,UAAU,CAAC,IAAY;IACrC,eAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;AACtB,CAAC;AAFD,gCAEC;AAED;;GAEG;AACH,SAAgB,QAAQ;IACtB,eAAK,CAAC,UAAU,CAAC,CAAA;AACnB,CAAC;AAFD,4BAEC;AAED;;;;;;;GAOG;AACH,SAAsB,KAAK,CAAI,IAAY,EAAE,EAAoB;;QAC/D,UAAU,CAAC,IAAI,CAAC,CAAA;QAEhB,IAAI,MAAS,CAAA;QAEb,IAAI;YACF,MAAM,GAAG,MAAM,EAAE,EAAE,CAAA;SACpB;gBAAS;YACR,QAAQ,EAAE,CAAA;SACX;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CAAA;AAZD,sBAYC;AAED,yEAAyE;AACzE,uBAAuB;AACvB,yEAAyE;AAEzE;;;;;GAKG;AACH,8DAA8D;AAC9D,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAU;IAChD,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAA;IAClD,IAAI,QAAQ,EAAE;QACZ,OAAO,+BAAgB,CAAC,OAAO,EAAE,qCAAsB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,CAAA;KACtE;IAED,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,sBAAc,CAAC,KAAK,CAAC,CAAC,CAAA;AAC3D,CAAC;AAPD,8BAOC;AAED;;;;;GAKG;AACH,SAAgB,QAAQ,CAAC,IAAY;IACnC,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,EAAE,CAAC,IAAI,EAAE,CAAA;AAC3C,CAAC;AAFD,4BAEC;AAED,SAAsB,UAAU,CAAC,GAAY;;QAC3C,OAAO,MAAM,uBAAU,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;IACzC,CAAC;CAAA;AAFD,gCAEC;AAED;;GAEG;AACH,qCAAiC;AAAzB,kGAAA,OAAO,OAAA;AAEf;;GAEG;AACH,qCAAyC;AAAjC,0GAAA,eAAe,OAAA;AAEvB;;GAEG;AACH,2CAAqE;AAA7D,yGAAA,WAAW,OAAA;AAAE,yGAAA,WAAW,OAAA;AAAE,4GAAA,cAAc,OAAA"} \ No newline at end of file diff --git a/dist/node_modules/@actions/core/lib/file-command.d.ts b/dist/node_modules/@actions/core/lib/file-command.d.ts new file mode 100644 index 0000000..2d1f2f4 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/file-command.d.ts @@ -0,0 +1,2 @@ +export declare function issueFileCommand(command: string, message: any): void; +export declare function prepareKeyValueMessage(key: string, value: any): string; diff --git a/dist/node_modules/@actions/core/lib/file-command.js b/dist/node_modules/@actions/core/lib/file-command.js new file mode 100644 index 0000000..2d0d738 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/file-command.js @@ -0,0 +1,58 @@ +"use strict"; +// For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(require("fs")); +const os = __importStar(require("os")); +const uuid_1 = require("uuid"); +const utils_1 = require("./utils"); +function issueFileCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + const convertedValue = utils_1.toCommandValue(value); + // These should realistically never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. + if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; +} +exports.prepareKeyValueMessage = prepareKeyValueMessage; +//# sourceMappingURL=file-command.js.map \ No newline at end of file diff --git a/dist/node_modules/@actions/core/lib/file-command.js.map b/dist/node_modules/@actions/core/lib/file-command.js.map new file mode 100644 index 0000000..b1a9d54 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/file-command.js.map @@ -0,0 +1 @@ +{"version":3,"file":"file-command.js","sourceRoot":"","sources":["../src/file-command.ts"],"names":[],"mappings":";AAAA,uCAAuC;;;;;;;;;;;;;;;;;;;;;;AAEvC,mCAAmC;AACnC,uDAAuD;AAEvD,uCAAwB;AACxB,uCAAwB;AACxB,+BAAiC;AACjC,mCAAsC;AAEtC,SAAgB,gBAAgB,CAAC,OAAe,EAAE,OAAY;IAC5D,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,OAAO,EAAE,CAAC,CAAA;IACjD,IAAI,CAAC,QAAQ,EAAE;QACb,MAAM,IAAI,KAAK,CACb,wDAAwD,OAAO,EAAE,CAClE,CAAA;KACF;IACD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;QAC5B,MAAM,IAAI,KAAK,CAAC,yBAAyB,QAAQ,EAAE,CAAC,CAAA;KACrD;IAED,EAAE,CAAC,cAAc,CAAC,QAAQ,EAAE,GAAG,sBAAc,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,EAAE;QACjE,QAAQ,EAAE,MAAM;KACjB,CAAC,CAAA;AACJ,CAAC;AAdD,4CAcC;AAED,SAAgB,sBAAsB,CAAC,GAAW,EAAE,KAAU;IAC5D,MAAM,SAAS,GAAG,gBAAgB,SAAM,EAAE,EAAE,CAAA;IAC5C,MAAM,cAAc,GAAG,sBAAc,CAAC,KAAK,CAAC,CAAA;IAE5C,4EAA4E;IAC5E,6EAA6E;IAC7E,iBAAiB;IACjB,IAAI,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QAC3B,MAAM,IAAI,KAAK,CACb,4DAA4D,SAAS,GAAG,CACzE,CAAA;KACF;IAED,IAAI,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CACb,6DAA6D,SAAS,GAAG,CAC1E,CAAA;KACF;IAED,OAAO,GAAG,GAAG,KAAK,SAAS,GAAG,EAAE,CAAC,GAAG,GAAG,cAAc,GAAG,EAAE,CAAC,GAAG,GAAG,SAAS,EAAE,CAAA;AAC9E,CAAC;AApBD,wDAoBC"} \ No 
newline at end of file diff --git a/dist/node_modules/@actions/core/lib/oidc-utils.d.ts b/dist/node_modules/@actions/core/lib/oidc-utils.d.ts new file mode 100644 index 0000000..657c7f4 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/oidc-utils.d.ts @@ -0,0 +1,7 @@ +export declare class OidcClient { + private static createHttpClient; + private static getRequestToken; + private static getIDTokenUrl; + private static getCall; + static getIDToken(audience?: string): Promise; +} diff --git a/dist/node_modules/@actions/core/lib/oidc-utils.js b/dist/node_modules/@actions/core/lib/oidc-utils.js new file mode 100644 index 0000000..092e93d --- /dev/null +++ b/dist/node_modules/@actions/core/lib/oidc-utils.js @@ -0,0 +1,77 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.OidcClient = void 0; +const http_client_1 = require("@actions/http-client"); +const auth_1 = require("@actions/http-client/lib/auth"); +const core_1 = require("./core"); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); + } + static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); + } + return token; + } + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + } + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. \n + Error Code : ${error.statusCode}\n + Error Message: ${error.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? 
void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + core_1.debug(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + core_1.setSecret(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); + } +} +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map \ No newline at end of file diff --git a/dist/node_modules/@actions/core/lib/oidc-utils.js.map b/dist/node_modules/@actions/core/lib/oidc-utils.js.map new file mode 100644 index 0000000..22506b8 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/oidc-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"oidc-utils.js","sourceRoot":"","sources":["../src/oidc-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;AAGA,sDAA+C;AAC/C,wDAAqE;AACrE,iCAAuC;AAKvC,MAAa,UAAU;IACb,MAAM,CAAC,gBAAgB,CAC7B,UAAU,GAAG,IAAI,EACjB,QAAQ,GAAG,EAAE;QAEb,MAAM,cAAc,GAAmB;YACrC,YAAY,EAAE,UAAU;YACxB,UAAU,EAAE,QAAQ;SACrB,CAAA;QAED,OAAO,IAAI,wBAAU,CACnB,qBAAqB,EACrB,CAAC,IAAI,8BAAuB,CAAC,UAAU,CAAC,eAAe,EAAE,CAAC,CAAC,EAC3D,cAAc,CACf,CAAA;IACH,CAAC;IAEO,MAAM,CAAC,eAAe;QAC5B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAA;QAC3D,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CACb,2DAA2D,CAC5D,CAAA;SACF;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,MAAM,CAAC,aAAa;QAC1B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,8BAA8B,CAAC,CAAA;QAC9D,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;SAC3E;QACD,OAAO,UAAU,CAAA;IACnB,CAAC;IAEO,MAAM,CAAO,OAAO,CAAC,YAAoB;;;YAC/C,MAAM,UAAU,GAAG,UAAU,CAAC,gBAAgB,EAAE,CAAA;YAEhD,MAAM,GAAG,GAAG,MAAM,UAAU;iBACzB,OAAO,CAAgB,YAAY,CAAC;iBACpC,KAAK,CAAC,KAAK,CAAC,EAAE;gBACb,MAAM,IAAI,KAAK,CACb;uBACa,KAAK,CAAC,UAAU;yBACd,KAAK,CAAC,OAAO,EAAE,CAC/B,CAAA;YACH,CAAC,CAAC,CAAA;YAEJ,MAAM,QAAQ,SAAG,GAAG,CAAC,MAAM,0CAAE,KAAK,CAAA;YAClC,IAAI,CAAC,QAAQ,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;aACjE;YACD,OAAO,QAAQ,CAAA;;KAChB;IAED,MAAM,CAAO,UAAU,CAAC,QAAiB;;YACvC,IAAI;gBACF,gDAAgD;gBAChD,IAAI,YAAY,GAAW,UAAU,CAAC,aAAa,EAAE,CAAA;gBACrD,IAAI,QAAQ,EAAE;oBACZ,MAAM,eAAe,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAA;oBACpD,YAAY,GAAG,GAAG,YAAY,aAAa,eAAe,EAAE,CAAA;iBAC7D;gBAED,YAAK,CAAC,mBAAmB,YAAY,EAAE,CAAC,CAAA;gBAExC,MAAM,QAAQ,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;gBACvD,gBAAS,CAAC,QAAQ,CAAC,CAAA;gBACnB,OAAO,QAAQ,CAAA;aAChB;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,kBAAkB,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACnD;QACH,CAAC;KAAA;CACF;AAzED,gCAyEC"} \ No newline at end of file diff --git a/dist/node_modules/@actions/core/lib/path-utils.d.ts b/dist/node_modules/@actions/core/lib/path-utils.d.ts new file mode 100644 index 0000000..1fee9f3 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/path-utils.d.ts @@ -0,0 +1,25 @@ +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +export declare function toPosixPath(pth: string): string; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. 
+ */ +export declare function toWin32Path(pth: string): string; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +export declare function toPlatformPath(pth: string): string; diff --git a/dist/node_modules/@actions/core/lib/path-utils.js b/dist/node_modules/@actions/core/lib/path-utils.js new file mode 100644 index 0000000..7251c82 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/path-utils.js @@ -0,0 +1,58 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(require("path")); +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); +} +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. 
+ */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map \ No newline at end of file diff --git a/dist/node_modules/@actions/core/lib/path-utils.js.map b/dist/node_modules/@actions/core/lib/path-utils.js.map new file mode 100644 index 0000000..7ab1cac --- /dev/null +++ b/dist/node_modules/@actions/core/lib/path-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"path-utils.js","sourceRoot":"","sources":["../src/path-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAE5B;;;;;;GAMG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,OAAO,CAAC,OAAO,EAAE,GAAG,CAAC,CAAA;AAClC,CAAC;AAFD,kCAEC;AAED;;;;;;GAMG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,GAAG,CAAC,OAAO,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;AAClC,CAAC;AAFD,kCAEC;AAED;;;;;;;GAOG;AACH,SAAgB,cAAc,CAAC,GAAW;IACxC,OAAO,GAAG,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,wCAEC"} \ No newline at end of file diff --git a/dist/node_modules/@actions/core/lib/summary.d.ts b/dist/node_modules/@actions/core/lib/summary.d.ts new file mode 100644 index 0000000..bb79255 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/summary.d.ts @@ -0,0 +1,202 @@ +export declare const SUMMARY_ENV_VAR = "GITHUB_STEP_SUMMARY"; +export declare const SUMMARY_DOCS_URL = "https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary"; +export declare type SummaryTableRow = (SummaryTableCell | string)[]; +export interface SummaryTableCell { + /** + * Cell content + */ + data: string; + /** + * Render cell as header + * (optional) default: false + */ + header?: boolean; + /** + * Number of columns the cell extends + * (optional) default: '1' + */ + colspan?: string; + /** + * Number of rows the cell extends + * (optional) default: '1' + */ + rowspan?: string; +} +export interface SummaryImageOptions { + /** + * The width of the image in pixels. Must be an integer without a unit. + * (optional) + */ + width?: string; + /** + * The height of the image in pixels. Must be an integer without a unit. + * (optional) + */ + height?: string; +} +export interface SummaryWriteOptions { + /** + * Replace all existing content in summary file with buffer contents + * (optional) default: false + */ + overwrite?: boolean; +} +declare class Summary { + private _buffer; + private _filePath?; + constructor(); + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + private filePath; + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + private wrap; + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. 
+ * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options?: SummaryWriteOptions): Promise; + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear(): Promise; + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify(): string; + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer(): boolean; + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer(): Summary; + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text: string, addEOL?: boolean): Summary; + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL(): Summary; + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code: string, lang?: string): Summary; + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items: string[], ordered?: boolean): Summary; + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows: SummaryTableRow[]): Summary; + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label: string, content: string): Summary; + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src: string, alt: string, options?: SummaryImageOptions): Summary; + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text: string, level?: number | string): Summary; + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator(): Summary; + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak(): Summary; + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text: string, cite?: string): Summary; + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text: string, href: string): Summary; +} +/** + * @deprecated use `core.summary` + */ +export declare const markdownSummary: Summary; +export declare const summary: Summary; +export {}; diff --git a/dist/node_modules/@actions/core/lib/summary.js b/dist/node_modules/@actions/core/lib/summary.js new file mode 100644 index 0000000..04a335b --- /dev/null +++ b/dist/node_modules/@actions/core/lib/summary.js @@ -0,0 +1,283 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = require("os"); +const fs_1 = require("fs"); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 
'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map \ No newline at end of file diff --git a/dist/node_modules/@actions/core/lib/summary.js.map b/dist/node_modules/@actions/core/lib/summary.js.map new file mode 100644 index 0000000..d598f26 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/summary.js.map @@ -0,0 +1 @@ +{"version":3,"file":"summary.js","sourceRoot":"","sources":["../src/summary.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,2BAAsB;AACtB,2BAAsC;AACtC,MAAM,EAAC,MAAM,EAAE,UAAU,EAAE,SAAS,EAAC,GAAG,aAAQ,CAAA;AAEnC,QAAA,eAAe,GAAG,qBAAqB,CAAA;AACvC,QAAA,gBAAgB,GAC3B,2GAA2G,CAAA;AA+C7G,MAAM,OAAO;IAIX;QACE,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;IACnB,CAAC;IAED;;;;;OAKG;IACW,QAAQ;;YACpB,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,OAAO,IAAI,CAAC,SAAS,CAAA;aACtB;YAED,MAAM,WAAW,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAe,CAAC,CAAA;YAChD,IAAI,CAAC,WAAW,EAAE;gBAChB,MAAM,IAAI,KAAK,CACb,4CAA4C,uBAAe,6DAA6D,CACzH,CAAA;aACF;YAED,IAAI;gBACF,MAAM,MAAM,CAAC,WAAW,EAAE,cAAS,CAAC,IAAI,GAAG,cAAS,CAAC,IAAI,CAAC,CAAA;aAC3D;YAAC,WAAM;gBACN,MAAM,IAAI,KAAK,CACb,mCAAmC,WAAW,0DAA0D,CACzG,CAAA;aACF;YAED,IAAI,CAAC,SAAS,GAAG,WAAW,CAAA;YAC5B,OAAO,IAAI,CAAC,SAAS,CAAA;QACvB,CAAC;KAAA;IAED;;;;;;;;OAQG;IACK,IAAI,CACV,GAAW,EACX,OAAsB,EACtB,QAAuC,EAAE;QAEzC,MAAM,SAAS,GAAG,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC;aACpC,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,IAAI,GAAG,KAAK,KAAK,GAAG,CAAC;aAC3C,IAAI,CAAC,EAAE,CAAC,CAAA;QAEX,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,IAAI,GAAG,GAAG,SAAS,GAAG,CAAA;SAC9B;QAED,OAAO,IAAI,GAAG,GAAG,SAAS,IAAI,OAAO,KAAK,GAAG,GAAG,CAAA;IAClD,CAAC;IAED;;;;;;OAMG;IACG,KAAK,CAAC,OAA6B;;YACvC,MAAM,SAAS,GAAG,CAAC,EAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,CAAA,CAAA;YACtC,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,EAAE,CAAA;YACtC,MAAM,SAAS,GAAG,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,CAAA;YACpD,MAAM,SAAS,CAAC,QAAQ,EAAE,IAAI,CAAC,OAAO,EAAE,EAAC,QAAQ,EAAE,MAAM,EAAC,CAAC,CAAA;YAC3D,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;QAC3B,CAAC;KAAA;IAED;;;;OAIG;IACG,KAAK;;YACT,OAAO,IAAI,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,EAAC,SAAS,EAAE,IAAI,EAAC,CAAC,CAAA;QACpD,CAAC;KAAA;IAED;;;;OAIG;IACH,SAAS;QACP,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;IAED;;;;OAIG;IACH,aAAa;QACX,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,CAAA;IAClC,CAAC;IAED;;;;OAIG;IACH,WAAW;QACT,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;QACjB,OAAO,IAAI,CAAA;IACb,CAAC;IAED;;;;;;;OAOG;IACH,MAAM,CAAC,IAAY,EAAE,MAAM,GAAG,KAAK;QACjC,IAAI,CAAC,OAAO,IAAI,IAAI,CAAA;QACpB,OAAO,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,IAAI,CAAA;IACtC,CAAC;IAED;;;;OAIG;IACH,MAAM;QACJ,OAAO,IAAI,CAAC,MAAM,CAAC,QAAG,CAAC,CAAA;IACzB,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,IAAY,EAAE,IAAa;QACtC,
MAAM,KAAK,qBACN,CAAC,IAAI,IAAI,EAAC,IAAI,EAAC,CAAC,CACpB,CAAA;QACD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;QAChE,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,OAAO,CAAC,KAAe,EAAE,OAAO,GAAG,KAAK;QACtC,MAAM,GAAG,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAA;QACjC,MAAM,SAAS,GAAG,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;QACnE,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,SAAS,CAAC,CAAA;QACzC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;OAMG;IACH,QAAQ,CAAC,IAAuB;QAC9B,MAAM,SAAS,GAAG,IAAI;aACnB,GAAG,CAAC,GAAG,CAAC,EAAE;YACT,MAAM,KAAK,GAAG,GAAG;iBACd,GAAG,CAAC,IAAI,CAAC,EAAE;gBACV,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;oBAC5B,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;iBAC7B;gBAED,MAAM,EAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAC,GAAG,IAAI,CAAA;gBAC7C,MAAM,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAA;gBAChC,MAAM,KAAK,mCACN,CAAC,OAAO,IAAI,EAAC,OAAO,EAAC,CAAC,GACtB,CAAC,OAAO,IAAI,EAAC,OAAO,EAAC,CAAC,CAC1B,CAAA;gBAED,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;YACpC,CAAC,CAAC;iBACD,IAAI,CAAC,EAAE,CAAC,CAAA;YAEX,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;QAC/B,CAAC,CAAC;aACD,IAAI,CAAC,EAAE,CAAC,CAAA;QAEX,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,SAAS,CAAC,CAAA;QAC7C,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,UAAU,CAAC,KAAa,EAAE,OAAe;QACvC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,GAAG,OAAO,CAAC,CAAA;QAC3E,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;;OAQG;IACH,QAAQ,CAAC,GAAW,EAAE,GAAW,EAAE,OAA6B;QAC9D,MAAM,EAAC,KAAK,EAAE,MAAM,EAAC,GAAG,OAAO,IAAI,EAAE,CAAA;QACrC,MAAM,KAAK,mCACN,CAAC,KAAK,IAAI,EAAC,KAAK,EAAC,CAAC,GAClB,CAAC,MAAM,IAAI,EAAC,MAAM,EAAC,CAAC,CACxB,CAAA;QAED,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,kBAAG,GAAG,EAAE,GAAG,IAAK,KAAK,EAAE,CAAA;QAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,UAAU,CAAC,IAAY,EAAE,KAAuB;QAC9C,MAAM,GAAG,GAAG,IAAI,KAAK,EAAE,CAAA;QACvB,MAAM,UAAU,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC;YACnE,CAAC,CAAC,GAAG;YACL,CAAC,CAAC,IAAI,CAAA;QACR,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAA;QAC3C,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;OAIG;IACH,YAAY;QACV,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QACrC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;OAIG;IACH,QAAQ;QACN,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QACrC,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,QAAQ,CAAC,IAAY,EAAE,IAAa;QAClC,MAAM,KAAK,qBACN,CAAC,IAAI,IAAI,EAAC,IAAI,EAAC,CAAC,CACpB,CAAA;QACD,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;QACpD,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,OAAO,CAAC,IAAY,EAAE,IAAY;QAChC,MAAM,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,EAAE,EAAC,IAAI,EAAC,CAAC,CAAA;QAC5C,OAAO,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;IACtC,CAAC;CACF;AAED,MAAM,QAAQ,GAAG,IAAI,OAAO,EAAE,CAAA;AAE9B;;GAEG;AACU,QAAA,eAAe,GAAG,QAAQ,CAAA;AAC1B,QAAA,OAAO,GAAG,QAAQ,CAAA"} \ No newline at end of file diff --git a/dist/node_modules/@actions/core/lib/utils.d.ts b/dist/node_modules/@actions/core/lib/utils.d.ts new 
file mode 100644 index 0000000..3b9e28d --- /dev/null +++ b/dist/node_modules/@actions/core/lib/utils.d.ts @@ -0,0 +1,14 @@ +import { AnnotationProperties } from './core'; +import { CommandProperties } from './command'; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +export declare function toCommandValue(input: any): string; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +export declare function toCommandProperties(annotationProperties: AnnotationProperties): CommandProperties; diff --git a/dist/node_modules/@actions/core/lib/utils.js b/dist/node_modules/@actions/core/lib/utils.js new file mode 100644 index 0000000..9b5ca44 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/utils.js @@ -0,0 +1,40 @@ +"use strict"; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toCommandProperties = exports.toCommandValue = void 0; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/dist/node_modules/@actions/core/lib/utils.js.map b/dist/node_modules/@actions/core/lib/utils.js.map new file mode 100644 index 0000000..8211bb7 --- /dev/null +++ b/dist/node_modules/@actions/core/lib/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":";AAAA,mCAAmC;AACnC,uDAAuD;;;AAKvD;;;GAGG;AACH,SAAgB,cAAc,CAAC,KAAU;IACvC,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,OAAO,EAAE,CAAA;KACV;SAAM,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,YAAY,MAAM,EAAE;QAC/D,OAAO,KAAe,CAAA;KACvB;IACD,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAA;AAC9B,CAAC;AAPD,wCAOC;AAED;;;;;GAKG;AACH,SAAgB,mBAAmB,CACjC,oBAA0C;IAE1C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC,MAAM,EAAE;QAC7C,OAAO,EAAE,CAAA;KACV;IAED,OAAO;QACL,KAAK,EAAE,oBAAoB,CAAC,KAAK;QACjC,IAAI,EAAE,oBAAoB,CAAC,IAAI;QAC/B,IAAI,EAAE,oBAAoB,CAAC,SAAS;QACpC,OAAO,EAAE,oBAAoB,CAAC,OAAO;QACrC,GAAG,EAAE,oBAAoB,CAAC,WAAW;QACrC,SAAS,EAAE,oBAAoB,CAAC,SAAS;KAC1C,CAAA;AACH,CAAC;AAfD,kDAeC"} \ No newline at end of file diff --git a/dist/node_modules/@actions/core/package.json b/dist/node_modules/@actions/core/package.json new file mode 100644 
index 0000000..1558268 --- /dev/null +++ b/dist/node_modules/@actions/core/package.json @@ -0,0 +1,46 @@ +{ + "name": "@actions/core", + "version": "1.10.1", + "description": "Actions core lib", + "keywords": [ + "github", + "actions", + "core" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/core", + "license": "MIT", + "main": "lib/core.js", + "types": "lib/core.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/core" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc -p tsconfig.json" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/http-client": "^2.0.1", + "uuid": "^8.3.2" + }, + "devDependencies": { + "@types/node": "^12.0.2", + "@types/uuid": "^8.3.4" + } +} \ No newline at end of file diff --git a/dist/node_modules/@actions/http-client/LICENSE b/dist/node_modules/@actions/http-client/LICENSE new file mode 100644 index 0000000..5823a51 --- /dev/null +++ b/dist/node_modules/@actions/http-client/LICENSE @@ -0,0 +1,21 @@ +Actions Http Client for Node.js + +Copyright (c) GitHub, Inc. + +All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +associated documentation files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/dist/node_modules/@actions/http-client/README.md b/dist/node_modules/@actions/http-client/README.md new file mode 100644 index 0000000..7e06ade --- /dev/null +++ b/dist/node_modules/@actions/http-client/README.md @@ -0,0 +1,73 @@ +# `@actions/http-client` + +A lightweight HTTP client optimized for building actions. + +## Features + + - HTTP client with TypeScript generics and async/await/Promises + - Typings included! + - [Proxy support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners#using-a-proxy-server-with-self-hosted-runners) just works with actions and the runner + - Targets ES2019 (runner runs actions with node 12+). Only supported on node 12+. + - Basic, Bearer and PAT Support out of the box. Extensible handlers for others. + - Redirects supported + +Features and releases [here](./RELEASES.md) + +## Install + +``` +npm install @actions/http-client --save +``` + +## Samples + +See the [tests](./__tests__) for detailed examples. 
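
For orientation, a minimal sketch of how the bundled client is typically called (this sample is editorial and not part of the vendored README; the user agent string and request URL are illustrative placeholders). The same `getJson` helper is what `@actions/core`'s OIDC client in this bundle relies on:

```js
// Minimal sketch, assuming @actions/http-client v2 as vendored in dist/node_modules.
const httpm = require('@actions/http-client');

async function main() {
  // The user agent is an arbitrary identifier chosen by the caller.
  const client = new httpm.HttpClient('example-user-agent');

  // getJson() performs a GET, parses the JSON body, and resolves to
  // { statusCode, result, headers }; result is null when there is no body.
  const res = await client.getJson('https://api.github.com/repos/actions/toolkit');
  if (res.statusCode !== 200 || res.result === null) {
    throw new Error(`Request failed with status ${res.statusCode}`);
  }
  console.log(res.result.full_name);
}

main();
```

For non-JSON payloads, the lower-level `get()` method returns a response object whose body can be read with `readBody()` instead.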
+ +## Errors + +### HTTP + +The HTTP client does not throw unless truly exceptional. + +* A request that successfully executes resulting in a 404, 500 etc... will return a response object with a status code and a body. +* Redirects (3xx) will be followed by default. + +See the [tests](./__tests__) for detailed examples. + +## Debugging + +To enable detailed console logging of all HTTP requests and responses, set the NODE_DEBUG environment varible: + +```shell +export NODE_DEBUG=http +``` + +## Node support + +The http-client is built using the latest LTS version of Node 12. It may work on previous node LTS versions but it's tested and officially supported on Node12+. + +## Support and Versioning + +We follow semver and will hold compatibility between major versions and increment the minor version with new features and capabilities (while holding compat). + +## Contributing + +We welcome PRs. Please create an issue and if applicable, a design before proceeding with code. + +once: + +``` +npm install +``` + +To build: + +``` +npm run build +``` + +To run all tests: + +``` +npm test +``` diff --git a/dist/node_modules/@actions/http-client/lib/auth.d.ts b/dist/node_modules/@actions/http-client/lib/auth.d.ts new file mode 100644 index 0000000..8cc9fc3 --- /dev/null +++ b/dist/node_modules/@actions/http-client/lib/auth.d.ts @@ -0,0 +1,26 @@ +/// +import * as http from 'http'; +import * as ifm from './interfaces'; +import { HttpClientResponse } from './index'; +export declare class BasicCredentialHandler implements ifm.RequestHandler { + username: string; + password: string; + constructor(username: string, password: string); + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(): boolean; + handleAuthentication(): Promise; +} +export declare class BearerCredentialHandler implements ifm.RequestHandler { + token: string; + constructor(token: string); + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(): boolean; + handleAuthentication(): Promise; +} +export declare class PersonalAccessTokenCredentialHandler implements ifm.RequestHandler { + token: string; + constructor(token: string); + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(): boolean; + handleAuthentication(): Promise; +} diff --git a/dist/node_modules/@actions/http-client/lib/auth.js b/dist/node_modules/@actions/http-client/lib/auth.js new file mode 100644 index 0000000..2c150a3 --- /dev/null +++ b/dist/node_modules/@actions/http-client/lib/auth.js @@ -0,0 +1,81 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map \ No newline at end of file diff --git a/dist/node_modules/@actions/http-client/lib/auth.js.map b/dist/node_modules/@actions/http-client/lib/auth.js.map new file mode 100644 index 0000000..62cc16b --- /dev/null +++ b/dist/node_modules/@actions/http-client/lib/auth.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;;AAIA,MAAa,sBAAsB;IAIjC,YAAY,QAAgB,EAAE,QAAgB;QAC5C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;IAC1B,CAAC;IAED,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CACpC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA1BD,wDA0BC;AAED,MAAa,uBAAuB;IAGlC,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,IAAI,CAAC,KAAK,EAAE,CAAA;IAC3D,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AAxBD,0DAwBC;AAED,MAAa,oCAAoC;IAK/C,YAAY,KAAa;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IAED,yCAAyC;IACzC,sDAAsD;IACtD,cAAc,CAAC,OAA4B;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,MAAM,KAAK,CAAC,4BAA4B,CAAC,CAAA;SAC1C;QACD,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACrD,OAAO,IAAI,CAAC,KAAK,EAAE,CACpB,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAA;IACxB,CAAC;IAED,iCAAiC;IACjC,uBAAuB;QACrB,OAAO,KAAK,CAAA;IACd,CAAC;IAEK,oBAAoB;;YACxB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAA;QACpC,CAAC;KAAA;CACF;AA5BD,oFA4BC"} \ No newline at end of file diff --git a/dist/node_modules/@actions/http-client/lib/index.d.ts b/dist/node_modules/@actions/http-client/lib/index.d.ts new file mode 100644 index 0000000..38db700 --- /dev/null +++ b/dist/node_modules/@actions/http-client/lib/index.d.ts @@ -0,0 +1,130 @@ +/// +/// +/// +import * as http from 'http'; +import * as ifm from './interfaces'; +import { ProxyAgent } from 'undici'; +export declare enum HttpCodes { + OK = 200, + MultipleChoices = 300, + MovedPermanently = 301, + ResourceMoved = 302, + SeeOther = 303, + NotModified = 304, + UseProxy = 305, + SwitchProxy = 306, + TemporaryRedirect = 307, + PermanentRedirect = 308, + BadRequest = 400, + Unauthorized = 401, + PaymentRequired = 402, + Forbidden = 403, + NotFound = 404, + MethodNotAllowed = 405, + NotAcceptable = 406, + ProxyAuthenticationRequired = 407, + RequestTimeout = 408, + Conflict = 409, + Gone = 410, + TooManyRequests = 429, + InternalServerError = 500, + NotImplemented = 501, + BadGateway = 502, + ServiceUnavailable = 503, + GatewayTimeout = 504 +} +export declare enum Headers { + Accept = "accept", + ContentType = "content-type" +} +export declare enum MediaTypes { + ApplicationJson = "application/json" +} +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com + */ +export declare function getProxyUrl(serverUrl: string): string; +export declare class HttpClientError extends Error { + constructor(message: string, statusCode: number); + statusCode: number; + result?: any; +} +export declare class HttpClientResponse { + constructor(message: http.IncomingMessage); + message: http.IncomingMessage; + readBody(): Promise; + readBodyBuffer?(): Promise; +} +export declare function isHttps(requestUrl: string): boolean; +export declare class HttpClient { + userAgent: string | undefined; + handlers: ifm.RequestHandler[]; + requestOptions: ifm.RequestOptions | undefined; + private _ignoreSslError; + private _socketTimeout; + private _allowRedirects; + private _allowRedirectDowngrade; + private _maxRedirects; + private _allowRetries; + private _maxRetries; + private _agent; + private _proxyAgent; + private _proxyAgentDispatcher; + private _keepAlive; + private _disposed; + constructor(userAgent?: string, handlers?: ifm.RequestHandler[], requestOptions?: ifm.RequestOptions); + options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + head(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise>; + postJson(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise>; + putJson(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise>; + patchJson(requestUrl: string, obj: any, additionalHeaders?: http.OutgoingHttpHeaders): Promise>; + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream | null, headers?: http.OutgoingHttpHeaders): Promise; + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose(): void; + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null): Promise; + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info: ifm.RequestInfo, data: string | NodeJS.ReadableStream | null, onResult: (err?: Error, res?: HttpClientResponse) => void): void; + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com + */ + getAgent(serverUrl: string): http.Agent; + getAgentDispatcher(serverUrl: string): ProxyAgent | undefined; + private _prepareRequest; + private _mergeHeaders; + private _getExistingOrDefaultHeader; + private _getAgent; + private _getProxyAgentDispatcher; + private _performExponentialBackoff; + private _processResponse; +} diff --git a/dist/node_modules/@actions/http-client/lib/index.js b/dist/node_modules/@actions/http-client/lib/index.js new file mode 100644 index 0000000..f627c6a --- /dev/null +++ b/dist/node_modules/@actions/http-client/lib/index.js @@ -0,0 +1,652 @@ +"use strict"; +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(require("http")); +const https = __importStar(require("https")); +const pm = __importStar(require("./proxy")); +const tunnel = __importStar(require("tunnel")); +const undici_1 = require("undici"); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers || (exports.Headers = Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); + } + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); + } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, 
data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. 
+ * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. 
+ reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + getAgentDispatcher(serverUrl) { + const parsedUrl = new URL(serverUrl); + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (!useProxy) { + return; + } + return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? 
parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (!useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if tunneling agent isn't assigned create a new agent + if (!agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _getProxyAgentDispatcher(parsedUrl, proxyUrl) { + let proxyAgent; + if (this._keepAlive) { + proxyAgent = this._proxyAgentDispatcher; + } + // if agent is already assigned use that agent. + if (proxyAgent) { + return proxyAgent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { + token: `${proxyUrl.username}:${proxyUrl.password}` + }))); + this._proxyAgentDispatcher = proxyAgent; + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { + rejectUnauthorized: false + }); + } + return proxyAgent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/node_modules/@actions/http-client/lib/index.js.map b/dist/node_modules/@actions/http-client/lib/index.js.map new file mode 100644 index 0000000..1fce6fc --- /dev/null +++ b/dist/node_modules/@actions/http-client/lib/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAAA,uDAAuD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEvD,2CAA4B;AAC5B,6CAA8B;AAG9B,4CAA6B;AAC7B,+CAAgC;AAChC,mCAAiC;AAEjC,IAAY,SA4BX;AA5BD,WAAY,SAAS;IACnB,uCAAQ,CAAA;IACR,iEAAqB,CAAA;IACrB,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,mDAAc,CAAA;IACd,yDAAiB,CAAA;IACjB,qEAAuB,CAAA;IACvB,qEAAuB,CAAA;IACvB,uDAAgB,CAAA;IAChB,2DAAkB,CAAA;IAClB,iEAAqB,CAAA;IACrB,qDAAe,CAAA;IACf,mDAAc,CAAA;IACd,mEAAsB,CAAA;IACtB,6DAAmB,CAAA;IACnB,yFAAiC,CAAA;IACjC,+DAAoB,CAAA;IACpB,mDAAc,CAAA;IACd,2CAAU,CAAA;IACV,iEAAqB,CAAA;IACrB,yEAAyB,CAAA;IACzB,+DAAoB,CAAA;IACpB,uDAAgB,CAAA;IAChB,uEAAwB,CAAA;IACxB,+DAAoB,CAAA;AACtB,CAAC,EA5BW,SAAS,yBAAT,SAAS,QA4BpB;AAED,IAAY,OAGX;AAHD,WAAY,OAAO;IACjB,4BAAiB,CAAA;IACjB,uCAA4B,CAAA;AAC9B,CAAC,EAHW,OAAO,uBAAP,OAAO,QAGlB;AAED,IAAY,UAEX;AAFD,WAAY,UAAU;IACpB,kDAAoC,CAAA;AACtC,CAAC,EAFW,UAAU,0BAAV,UAAU,QAErB;AAED;;;GAGG;AACH,SAAgB,WAAW,CAAC,SAAiB;IAC3C,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,IAAI,GAAG,CAAC,SAAS,CAAC,CAAC,CAAA;IACnD,OAAO,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAA;AACtC,CAAC;AAHD,kCAGC;AAED,MAAM,iBAAiB,GAAa;IAClC,SAAS,CAAC,gBAAgB;IAC1B,SAAS,CAAC,aAAa;IACvB,SAAS,CAAC,QAAQ;IAClB,SAAS,CAAC,iBAAiB;IAC3B,SAAS,CAAC,iBAAiB;CAC5B,CAAA;AACD,MAAM,sBAAsB,GAAa;IACvC,SAAS,CAAC,UAAU;IACpB,SAAS,CAAC,kBAAkB;IAC5B,SAAS,CAAC,cAAc;CACzB,CAAA;AACD,MAAM,kBAAkB,GAAa,CAAC,SAAS,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAA;AACzE,MAAM,yBAAyB,GAAG,EAAE,CAAA;AACpC,MAAM,2BAA2B,GAAG,CAAC,CAAA;AAErC,MAAa,eAAgB,SAAQ,KAAK;IACxC,YAAY,OAAe,EAAE,UAAkB;QAC7C,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,iBAAiB,CAAA;QAC7B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;CAIF;AAVD,0CAUC;AAED,MAAa,kBAAkB;IAC7B,YAAY,OAA6B;QACvC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAGK,QAAQ;;YACZ,OAAO,IAAI,OAAO,CAAS,CAAM,OAAO,EAAC,EAAE;gBACzC,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;gBAE5B,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAa,EAAE,EAAE;oBACxC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC,CAAA;gBACzC,CAAC,CAAC,CAAA;gBAEF,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBAC1B,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAA;gBAC5B,CAAC,CAAC,CAAA;YACJ,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;IAEK,cAAc;;YAClB,OAAO,IAAI,OAAO,CAAS,CAAM,OAAO,EAAC,EAAE;gBACzC,MAAM,MAAM,GAAa,EAAE,CAAA;gBAE3B,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAa,EAAE,EAAE;oBACxC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;gBACpB,CAAC,CAAC,CAAA;gBAEF,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBAC
1B,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAA;gBAChC,CAAC,CAAC,CAAA;YACJ,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AAjCD,gDAiCC;AAED,SAAgB,OAAO,CAAC,UAAkB;IACxC,MAAM,SAAS,GAAQ,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;IAC1C,OAAO,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;AACxC,CAAC;AAHD,0BAGC;AAED,MAAa,UAAU;IAkBrB,YACE,SAAkB,EAClB,QAA+B,EAC/B,cAAmC;QAhB7B,oBAAe,GAAG,KAAK,CAAA;QAEvB,oBAAe,GAAG,IAAI,CAAA;QACtB,4BAAuB,GAAG,KAAK,CAAA;QAC/B,kBAAa,GAAG,EAAE,CAAA;QAClB,kBAAa,GAAG,KAAK,CAAA;QACrB,gBAAW,GAAG,CAAC,CAAA;QAIf,eAAU,GAAG,KAAK,CAAA;QAClB,cAAS,GAAG,KAAK,CAAA;QAOvB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;QAC1B,IAAI,CAAC,QAAQ,GAAG,QAAQ,IAAI,EAAE,CAAA;QAC9B,IAAI,CAAC,cAAc,GAAG,cAAc,CAAA;QACpC,IAAI,cAAc,EAAE;YAClB,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,CAAC,cAAc,GAAG,cAAc,CAAC,aAAa,CAAA;YAElD,IAAI,cAAc,CAAC,cAAc,IAAI,IAAI,EAAE;gBACzC,IAAI,CAAC,eAAe,GAAG,cAAc,CAAC,cAAc,CAAA;aACrD;YAED,IAAI,cAAc,CAAC,sBAAsB,IAAI,IAAI,EAAE;gBACjD,IAAI,CAAC,uBAAuB,GAAG,cAAc,CAAC,sBAAsB,CAAA;aACrE;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,YAAY,EAAE,CAAC,CAAC,CAAA;aAC9D;YAED,IAAI,cAAc,CAAC,SAAS,IAAI,IAAI,EAAE;gBACpC,IAAI,CAAC,UAAU,GAAG,cAAc,CAAC,SAAS,CAAA;aAC3C;YAED,IAAI,cAAc,CAAC,YAAY,IAAI,IAAI,EAAE;gBACvC,IAAI,CAAC,aAAa,GAAG,cAAc,CAAC,YAAY,CAAA;aACjD;YAED,IAAI,cAAc,CAAC,UAAU,IAAI,IAAI,EAAE;gBACrC,IAAI,CAAC,WAAW,GAAG,cAAc,CAAC,UAAU,CAAA;aAC7C;SACF;IACH,CAAC;IAEK,OAAO,CACX,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC3E,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QAC1E,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,KAAK,CACT,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACzE,CAAC;KAAA;IAEK,GAAG,CACP,UAAkB,EAClB,IAAY,EACZ,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACvE,CAAC;KAAA;IAEK,IAAI,CACR,UAAkB,EAClB,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,UAAU,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,CAAC,CAAA;QACxE,CAAC;KAAA;IAEK,UAAU,CACd,IAAY,EACZ,UAAkB,EAClB,MAA6B,EAC7B,iBAA4C;;YAE5C,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,iBAAiB,CAAC,CAAA;QAClE,CAAC;KAAA;IAED;;;OAGG;IACG,OAAO,CACX,UAAkB,EAClB,oBAA8C,EAAE;;YAEhD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,QAAQ,CACZ,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,IAAI,CAC7C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,OAAO,CACX,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SA
AS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,GAAG,CAC5C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAEK,SAAS,CACb,UAAkB,EAClB,GAAQ,EACR,oBAA8C,EAAE;;YAEhD,MAAM,IAAI,GAAW,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;YACjD,iBAAiB,CAAC,OAAO,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,2BAA2B,CAClE,iBAAiB,EACjB,OAAO,CAAC,MAAM,EACd,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,iBAAiB,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,2BAA2B,CACvE,iBAAiB,EACjB,OAAO,CAAC,WAAW,EACnB,UAAU,CAAC,eAAe,CAC3B,CAAA;YACD,MAAM,GAAG,GAAuB,MAAM,IAAI,CAAC,KAAK,CAC9C,UAAU,EACV,IAAI,EACJ,iBAAiB,CAClB,CAAA;YACD,OAAO,IAAI,CAAC,gBAAgB,CAAI,GAAG,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;QAC3D,CAAC;KAAA;IAED;;;;OAIG;IACG,OAAO,CACX,IAAY,EACZ,UAAkB,EAClB,IAA2C,EAC3C,OAAkC;;YAElC,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;aACrD;YAED,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,CAAA;YACrC,IAAI,IAAI,GAAoB,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,SAAS,EAAE,OAAO,CAAC,CAAA;YAE1E,oEAAoE;YACpE,MAAM,QAAQ,GACZ,IAAI,CAAC,aAAa,IAAI,kBAAkB,CAAC,QAAQ,CAAC,IAAI,CAAC;gBACrD,CAAC,CAAC,IAAI,CAAC,WAAW,GAAG,CAAC;gBACtB,CAAC,CAAC,CAAC,CAAA;YACP,IAAI,QAAQ,GAAG,CAAC,CAAA;YAEhB,IAAI,QAAwC,CAAA;YAC5C,GAAG;gBACD,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;gBAE5C,4CAA4C;gBAC5C,IACE,QAAQ;oBACR,QAAQ,CAAC,OAAO;oBAChB,QAAQ,CAAC,OAAO,CAAC,UAAU,KAAK,SAAS,CAAC,YAAY,EACtD;oBACA,IAAI,qBAAqD,CAAA;oBAEzD,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;wBACnC,IAAI,OAAO,CAAC,uBAAuB,CAAC,QAAQ,CAAC,EAAE;4BAC7C,qBAAqB,GAAG,OAAO,CAAA;4BAC/B,MAAK;yBACN;qBACF;oBAED,IAAI,qBAAqB,EAAE;wBACzB,OAAO,qBAAqB,CAAC,oBAAoB,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;qBACpE;yBAAM;wBACL,+EAA+E;wBAC/E,yCAAyC;wBACzC,OAAO,QAAQ,CAAA;qBAChB;iBACF;gBAED,IAAI,kBAAkB,GAAW,IAAI,CAAC,aAAa,CAAA;gBACnD,OACE,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC3B,iBAAiB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC;oBACvD,IAAI,CAAC,eAAe;oBACpB,kBAAkB,GAAG,CAAC,EACtB;oBACA,MAAM,WAAW,GACf,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,CAAA;oBACtC,IAAI,CAAC,WAAW,EAAE;wBAChB,kDAAkD;wBAClD,MAAK;qBACN;oBACD,MAAM,iBAAiB,GAAG,IAAI,GAAG,CAAC,WAAW,CAAC,CAAA;oBAC9C,IACE,SAAS,CAAC,QAAQ,KAAK,QAAQ;wBAC/B,SAAS,CAAC,QAAQ,KAAK,iBAAiB,CAAC,QAAQ;wBACjD,CAAC,IAAI,CAAC,uBAAuB,EAC7B;wBACA,MAAM,IAAI,KAAK,CACb,8KAA8K,CAC/K,CAAA;qBACF;oBAED,qEAAqE;oBACrE,mCAAmC;oBACnC,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBAEzB,mEAAmE;oBACnE,IAAI,iBAAiB,CAAC,QAAQ,KAAK,SAAS,CAAC,QAAQ,EAAE;wBACrD,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE;4BAC5B,oCAAoC;4BACpC,IAAI,MAAM,CAAC,WAAW,EAAE,KAAK,eAAe,EAAE;gCAC5C,OAAO,OAAO,CAAC,MAAM,CAAC,CAAA;6BACvB;yBACF;qBACF;oBAED,kDAAkD;oBAClD,IAAI,GAAG,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,iBAAiB,EAAE,OAAO,CAAC,CAAA;oBAC7D,QAAQ,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;oBAC5C,kBAAkB,EAAE,CAAA;iBACrB;gBAED,IACE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU;oBAC5B,CAAC,sBAAsB,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,EAC7D;oBACA,8DAA8D;oBAC9D,OAAO,QAAQ,CAAA;iBAChB;gBAED,QAAQ,IAAI,CAAC,CAAA;gBAEb,IAAI,QAAQ,GAAG,QAAQ,EAAE;oBACvB,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBACzB,MAAM,IAAI,CAAC,0BAA0B,CAAC,QAAQ,CAAC,CAAA;iBAChD;aACF,QAAQ,QAAQ,GAAG,QAAQ,EAAC;YAE7B,OAAO,QAAQ,CAAA;QACjB,CAAC;KAAA;IAED;;OAEG;IACH,OAAO;QACL,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;SACtB;QAED,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;IACvB,CAAC;IAED;;;;OAIG;IACG,U
AAU,CACd,IAAqB,EACrB,IAA2C;;YAE3C,OAAO,IAAI,OAAO,CAAqB,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACzD,SAAS,iBAAiB,CAAC,GAAW,EAAE,GAAwB;oBAC9D,IAAI,GAAG,EAAE;wBACP,MAAM,CAAC,GAAG,CAAC,CAAA;qBACZ;yBAAM,IAAI,CAAC,GAAG,EAAE;wBACf,qDAAqD;wBACrD,MAAM,CAAC,IAAI,KAAK,CAAC,eAAe,CAAC,CAAC,CAAA;qBACnC;yBAAM;wBACL,OAAO,CAAC,GAAG,CAAC,CAAA;qBACb;gBACH,CAAC;gBAED,IAAI,CAAC,sBAAsB,CAAC,IAAI,EAAE,IAAI,EAAE,iBAAiB,CAAC,CAAA;YAC5D,CAAC,CAAC,CAAA;QACJ,CAAC;KAAA;IAED;;;;;OAKG;IACH,sBAAsB,CACpB,IAAqB,EACrB,IAA2C,EAC3C,QAAyD;QAEzD,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACzB,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,EAAE,CAAA;aAC1B;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,GAAG,MAAM,CAAC,UAAU,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACzE;QAED,IAAI,cAAc,GAAG,KAAK,CAAA;QAC1B,SAAS,YAAY,CAAC,GAAW,EAAE,GAAwB;YACzD,IAAI,CAAC,cAAc,EAAE;gBACnB,cAAc,GAAG,IAAI,CAAA;gBACrB,QAAQ,CAAC,GAAG,EAAE,GAAG,CAAC,CAAA;aACnB;QACH,CAAC;QAED,MAAM,GAAG,GAAuB,IAAI,CAAC,UAAU,CAAC,OAAO,CACrD,IAAI,CAAC,OAAO,EACZ,CAAC,GAAyB,EAAE,EAAE;YAC5B,MAAM,GAAG,GAAuB,IAAI,kBAAkB,CAAC,GAAG,CAAC,CAAA;YAC3D,YAAY,CAAC,SAAS,EAAE,GAAG,CAAC,CAAA;QAC9B,CAAC,CACF,CAAA;QAED,IAAI,MAAkB,CAAA;QACtB,GAAG,CAAC,EAAE,CAAC,QAAQ,EAAE,IAAI,CAAC,EAAE;YACtB,MAAM,GAAG,IAAI,CAAA;QACf,CAAC,CAAC,CAAA;QAEF,wEAAwE;QACxE,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC,cAAc,IAAI,CAAC,GAAG,KAAK,EAAE,GAAG,EAAE;YACpD,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,GAAG,EAAE,CAAA;aACb;YACD,YAAY,CAAC,IAAI,KAAK,CAAC,oBAAoB,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;QAClE,CAAC,CAAC,CAAA;QAEF,GAAG,CAAC,EAAE,CAAC,OAAO,EAAE,UAAU,GAAG;YAC3B,8BAA8B;YAC9B,0BAA0B;YAC1B,YAAY,CAAC,GAAG,CAAC,CAAA;QACnB,CAAC,CAAC,CAAA;QAEF,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,GAAG,CAAC,KAAK,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;SACxB;QAED,IAAI,IAAI,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACpC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE;gBACf,GAAG,CAAC,GAAG,EAAE,CAAA;YACX,CAAC,CAAC,CAAA;YAEF,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACf;aAAM;YACL,GAAG,CAAC,GAAG,EAAE,CAAA;SACV;IACH,CAAC;IAED;;;;OAIG;IACH,QAAQ,CAAC,SAAiB;QACxB,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAA;QACpC,OAAO,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC,CAAA;IAClC,CAAC;IAED,kBAAkB,CAAC,SAAiB;QAClC,MAAM,SAAS,GAAG,IAAI,GAAG,CAAC,SAAS,CAAC,CAAA;QACpC,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;QAC1C,MAAM,QAAQ,GAAG,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAA;QAC9C,IAAI,CAAC,QAAQ,EAAE;YACb,OAAM;SACP;QAED,OAAO,IAAI,CAAC,wBAAwB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAA;IAC3D,CAAC;IAEO,eAAe,CACrB,MAAc,EACd,UAAe,EACf,OAAkC;QAElC,MAAM,IAAI,GAAqC,EAAE,CAAA;QAEjD,IAAI,CAAC,SAAS,GAAG,UAAU,CAAA;QAC3B,MAAM,QAAQ,GAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAC9D,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAA;QACzC,MAAM,WAAW,GAAW,QAAQ,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAA;QAE/C,IAAI,CAAC,OAAO,GAAwB,EAAE,CAAA;QACtC,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAA;QAC3C,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI;YACrC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;YAC/B,CAAC,CAAC,WAAW,CAAA;QACf,IAAI,CAAC,OAAO,CAAC,IAAI;YACf,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,IAAI,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,IAAI,EAAE,CAAC,CAAA;QACjE,IAAI,CAAC,OAAO,CAAC,MAAM,GAAG,MAAM,CAAA;QAC5B,IAAI,CAAC,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QAClD,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,EAAE;YAC1B,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;SACpD;QAED,IAAI,CAAC,OAAO,CAAC,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAEnD,+CAA+C;QAC/C,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACnC,OAAO,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACrC;SACF;QAED,OAAO,IAAI
,CAAA;IACb,CAAC;IAEO,aAAa,CACnB,OAAkC;QAElC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,OAAO,MAAM,CAAC,MAAM,CAClB,EAAE,EACF,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,EAC1C,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAC7B,CAAA;SACF;QAED,OAAO,aAAa,CAAC,OAAO,IAAI,EAAE,CAAC,CAAA;IACrC,CAAC;IAEO,2BAA2B,CACjC,iBAA2C,EAC3C,MAAc,EACd,QAAgB;QAEhB,IAAI,YAAgC,CAAA;QACpC,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE;YACtD,YAAY,GAAG,aAAa,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAA;SAClE;QACD,OAAO,iBAAiB,CAAC,MAAM,CAAC,IAAI,YAAY,IAAI,QAAQ,CAAA;IAC9D,CAAC;IAEO,SAAS,CAAC,SAAc;QAC9B,IAAI,KAAK,CAAA;QACT,MAAM,QAAQ,GAAG,EAAE,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;QAC1C,MAAM,QAAQ,GAAG,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAA;QAE9C,IAAI,IAAI,CAAC,UAAU,IAAI,QAAQ,EAAE;YAC/B,KAAK,GAAG,IAAI,CAAC,WAAW,CAAA;SACzB;QAED,IAAI,CAAC,QAAQ,EAAE;YACb,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;SACpB;QAED,+CAA+C;QAC/C,IAAI,KAAK,EAAE;YACT,OAAO,KAAK,CAAA;SACb;QAED,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAChD,IAAI,UAAU,GAAG,GAAG,CAAA;QACpB,IAAI,IAAI,CAAC,cAAc,EAAE;YACvB,UAAU,GAAG,IAAI,CAAC,cAAc,CAAC,UAAU,IAAI,IAAI,CAAC,WAAW,CAAC,UAAU,CAAA;SAC3E;QAED,sGAAsG;QACtG,IAAI,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;YACjC,MAAM,YAAY,GAAG;gBACnB,UAAU;gBACV,SAAS,EAAE,IAAI,CAAC,UAAU;gBAC1B,KAAK,kCACA,CAAC,CAAC,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAC,IAAI;oBAC9C,SAAS,EAAE,GAAG,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;iBACvD,CAAC,KACF,IAAI,EAAE,QAAQ,CAAC,QAAQ,EACvB,IAAI,EAAE,QAAQ,CAAC,IAAI,GACpB;aACF,CAAA;YAED,IAAI,WAAqB,CAAA;YACzB,MAAM,SAAS,GAAG,QAAQ,CAAC,QAAQ,KAAK,QAAQ,CAAA;YAChD,IAAI,QAAQ,EAAE;gBACZ,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAA;aACvE;iBAAM;gBACL,WAAW,GAAG,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,MAAM,CAAC,YAAY,CAAA;aACrE;YAED,KAAK,GAAG,WAAW,CAAC,YAAY,CAAC,CAAA;YACjC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAA;SACzB;QAED,uDAAuD;QACvD,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,OAAO,GAAG,EAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,UAAU,EAAC,CAAA;YACxD,KAAK,GAAG,QAAQ,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;YACrE,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;SACpB;QAED,IAAI,QAAQ,IAAI,IAAI,CAAC,eAAe,EAAE;YACpC,wGAAwG;YACxG,kFAAkF;YAClF,mDAAmD;YACnD,KAAK,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,IAAI,EAAE,EAAE;gBACjD,kBAAkB,EAAE,KAAK;aAC1B,CAAC,CAAA;SACH;QAED,OAAO,KAAK,CAAA;IACd,CAAC;IAEO,wBAAwB,CAAC,SAAc,EAAE,QAAa;QAC5D,IAAI,UAAU,CAAA;QAEd,IAAI,IAAI,CAAC,UAAU,EAAE;YACnB,UAAU,GAAG,IAAI,CAAC,qBAAqB,CAAA;SACxC;QAED,+CAA+C;QAC/C,IAAI,UAAU,EAAE;YACd,OAAO,UAAU,CAAA;SAClB;QAED,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,KAAK,QAAQ,CAAA;QAChD,UAAU,GAAG,IAAI,mBAAU,iBACzB,GAAG,EAAE,QAAQ,CAAC,IAAI,EAClB,UAAU,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IACjC,CAAC,CAAC,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,CAAC,IAAI;YAC9C,KAAK,EAAE,GAAG,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,QAAQ,EAAE;SACnD,CAAC,EACF,CAAA;QACF,IAAI,CAAC,qBAAqB,GAAG,UAAU,CAAA;QAEvC,IAAI,QAAQ,IAAI,IAAI,CAAC,eAAe,EAAE;YACpC,wGAAwG;YACxG,kFAAkF;YAClF,mDAAmD;YACnD,UAAU,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,UAAU,IAAI,EAAE,EAAE;gBACtE,kBAAkB,EAAE,KAAK;aAC1B,CAAC,CAAA;SACH;QAED,OAAO,UAAU,CAAA;IACnB,CAAC;IAEa,0BAA0B,CAAC,WAAmB;;YAC1D,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,yBAAyB,EAAE,WAAW,CAAC,CAAA;YAC9D,MAAM,EAAE,GAAW,2BAA2B,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,WAAW,CAAC,CAAA;YACzE,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;QAChE,CAAC;KAAA;IAEa,gBAAgB,CAC5B,GAAuB,EACvB,OAA4B;;YAE5B,OAAO,IAAI,OAAO,CAAuB,CAAO,OAAO,EAAE,MAAM,EAAE,EAAE;gBACjE,MAAM,UAAU,GAAG,GAAG,CAAC,OAAO,CAAC,UAAU,IAAI,
CAAC,CAAA;gBAE9C,MAAM,QAAQ,GAAyB;oBACrC,UAAU;oBACV,MAAM,EAAE,IAAI;oBACZ,OAAO,EAAE,EAAE;iBACZ,CAAA;gBAED,uCAAuC;gBACvC,IAAI,UAAU,KAAK,SAAS,CAAC,QAAQ,EAAE;oBACrC,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;gBAED,+BAA+B;gBAE/B,SAAS,oBAAoB,CAAC,GAAQ,EAAE,KAAU;oBAChD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;wBAC7B,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAA;wBACzB,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,EAAE;4BACvB,OAAO,CAAC,CAAA;yBACT;qBACF;oBAED,OAAO,KAAK,CAAA;gBACd,CAAC;gBAED,IAAI,GAAQ,CAAA;gBACZ,IAAI,QAA4B,CAAA;gBAEhC,IAAI;oBACF,QAAQ,GAAG,MAAM,GAAG,CAAC,QAAQ,EAAE,CAAA;oBAC/B,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBACnC,IAAI,OAAO,IAAI,OAAO,CAAC,gBAAgB,EAAE;4BACvC,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE,oBAAoB,CAAC,CAAA;yBACjD;6BAAM;4BACL,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;yBAC3B;wBAED,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAA;qBACtB;oBAED,QAAQ,CAAC,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,OAAO,CAAA;iBACvC;gBAAC,OAAO,GAAG,EAAE;oBACZ,iEAAiE;iBAClE;gBAED,yDAAyD;gBACzD,IAAI,UAAU,GAAG,GAAG,EAAE;oBACpB,IAAI,GAAW,CAAA;oBAEf,0DAA0D;oBAC1D,IAAI,GAAG,IAAI,GAAG,CAAC,OAAO,EAAE;wBACtB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAA;qBAClB;yBAAM,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;wBAC1C,yEAAyE;wBACzE,GAAG,GAAG,QAAQ,CAAA;qBACf;yBAAM;wBACL,GAAG,GAAG,oBAAoB,UAAU,GAAG,CAAA;qBACxC;oBAED,MAAM,GAAG,GAAG,IAAI,eAAe,CAAC,GAAG,EAAE,UAAU,CAAC,CAAA;oBAChD,GAAG,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAA;oBAE5B,MAAM,CAAC,GAAG,CAAC,CAAA;iBACZ;qBAAM;oBACL,OAAO,CAAC,QAAQ,CAAC,CAAA;iBAClB;YACH,CAAC,CAAA,CAAC,CAAA;QACJ,CAAC;KAAA;CACF;AA3rBD,gCA2rBC;AAED,MAAM,aAAa,GAAG,CAAC,GAA2B,EAAO,EAAE,CACzD,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAM,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA"} \ No newline at end of file diff --git a/dist/node_modules/@actions/http-client/lib/interfaces.d.ts b/dist/node_modules/@actions/http-client/lib/interfaces.d.ts new file mode 100644 index 0000000..775ced9 --- /dev/null +++ b/dist/node_modules/@actions/http-client/lib/interfaces.d.ts @@ -0,0 +1,46 @@ +/// +/// +/// +import * as http from 'http'; +import * as https from 'https'; +import { HttpClientResponse } from './index'; +export interface HttpClient { + options(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + get(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + del(requestUrl: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + post(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + patch(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + put(requestUrl: string, data: string, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: http.OutgoingHttpHeaders): Promise; + request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: http.OutgoingHttpHeaders): Promise; + requestRaw(info: RequestInfo, data: string | NodeJS.ReadableStream): Promise; + requestRawWithCallback(info: RequestInfo, data: string | NodeJS.ReadableStream, onResult: (err?: Error, res?: HttpClientResponse) => void): void; +} +export interface RequestHandler { + prepareRequest(options: http.RequestOptions): void; + canHandleAuthentication(response: HttpClientResponse): boolean; + handleAuthentication(httpClient: HttpClient, requestInfo: RequestInfo, data: string | NodeJS.ReadableStream | null): Promise; +} +export interface RequestInfo { + 
options: http.RequestOptions; + parsedUrl: URL; + httpModule: typeof http | typeof https; +} +export interface RequestOptions { + headers?: http.OutgoingHttpHeaders; + socketTimeout?: number; + ignoreSslError?: boolean; + allowRedirects?: boolean; + allowRedirectDowngrade?: boolean; + maxRedirects?: number; + maxSockets?: number; + keepAlive?: boolean; + deserializeDates?: boolean; + allowRetries?: boolean; + maxRetries?: number; +} +export interface TypedResponse { + statusCode: number; + result: T | null; + headers: http.IncomingHttpHeaders; +} diff --git a/dist/node_modules/@actions/http-client/lib/interfaces.js b/dist/node_modules/@actions/http-client/lib/interfaces.js new file mode 100644 index 0000000..db91911 --- /dev/null +++ b/dist/node_modules/@actions/http-client/lib/interfaces.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/dist/node_modules/@actions/http-client/lib/interfaces.js.map b/dist/node_modules/@actions/http-client/lib/interfaces.js.map new file mode 100644 index 0000000..8fb5f7d --- /dev/null +++ b/dist/node_modules/@actions/http-client/lib/interfaces.js.map @@ -0,0 +1 @@ +{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../src/interfaces.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/dist/node_modules/@actions/http-client/lib/proxy.d.ts b/dist/node_modules/@actions/http-client/lib/proxy.d.ts new file mode 100644 index 0000000..4599865 --- /dev/null +++ b/dist/node_modules/@actions/http-client/lib/proxy.d.ts @@ -0,0 +1,2 @@ +export declare function getProxyUrl(reqUrl: URL): URL | undefined; +export declare function checkBypass(reqUrl: URL): boolean; diff --git a/dist/node_modules/@actions/http-client/lib/proxy.js b/dist/node_modules/@actions/http-client/lib/proxy.js new file mode 100644 index 0000000..d9c43ad --- /dev/null +++ b/dist/node_modules/@actions/http-client/lib/proxy.js @@ -0,0 +1,82 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem 
of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} +//# sourceMappingURL=proxy.js.map \ No newline at end of file diff --git a/dist/node_modules/@actions/http-client/lib/proxy.js.map b/dist/node_modules/@actions/http-client/lib/proxy.js.map new file mode 100644 index 0000000..585c17d --- /dev/null +++ b/dist/node_modules/@actions/http-client/lib/proxy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":";;;AAAA,SAAgB,WAAW,CAAC,MAAW;IACrC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,KAAK,QAAQ,CAAA;IAE7C,IAAI,WAAW,CAAC,MAAM,CAAC,EAAE;QACvB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,QAAQ,GAAG,CAAC,GAAG,EAAE;QACrB,IAAI,QAAQ,EAAE;YACZ,OAAO,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAA;SAChE;aAAM;YACL,OAAO,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAA;SAC9D;IACH,CAAC,CAAC,EAAE,CAAA;IAEJ,IAAI,QAAQ,EAAE;QACZ,IAAI;YACF,OAAO,IAAI,GAAG,CAAC,QAAQ,CAAC,CAAA;SACzB;QAAC,WAAM;YACN,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,UAAU,CAAC;gBACrE,OAAO,IAAI,GAAG,CAAC,UAAU,QAAQ,EAAE,CAAC,CAAA;SACvC;KACF;SAAM;QACL,OAAO,SAAS,CAAA;KACjB;AACH,CAAC;AAzBD,kCAyBC;AAED,SAAgB,WAAW,CAAC,MAAW;IACrC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACpB,OAAO,KAAK,CAAA;KACb;IAED,MAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAA;IAC/B,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;QAC9B,OAAO,IAAI,CAAA;KACZ;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,EAAE,CAAA;IACxE,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,KAAK,CAAA;KACb;IAED,6BAA6B;IAC7B,IAAI,OAA2B,CAAA;IAC/B,IAAI,MAAM,CAAC,IAAI,EAAE;QACf,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;KAC9B;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,OAAO,EAAE;QACtC,OAAO,GAAG,EAAE,CAAA;KACb;SAAM,IAAI,MAAM,CAAC,QAAQ,KAAK,QAAQ,EAAE;QACvC,OAAO,GAAG,GAAG,CAAA;KACd;IAED,qDAAqD;IACrD,MAAM,aAAa,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;IACrD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,aAAa,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,OAAO,EAAE,CAAC,CAAA;KACrD;IAED,uCAAuC;IACvC,KAAK,MAAM,gBAAgB,IAAI,OAAO;SACnC,KAAK,CAAC,GAAG,CAAC;SACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;SAChC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QACjB,IACE,gBAAgB,KAAK,GAAG;YACxB,aAAa,CAAC,IAAI,CAChB,CAAC,CAAC,EAAE,CACF,CAAC,KAAK,gBAAgB;gBACtB,CAAC,CAAC,QAAQ,CAAC,IAAI,gBAAgB,EAAE,CAAC;gBAClC,CAAC,gBAAgB,CAAC,UAAU,CAAC,GAAG,CAAC;oBAC/B,CAAC,CAAC,QAAQ,CAAC,GAAG,gBAAgB,EAAE,CAAC,CAAC,CACvC,EACD;YACA,OAAO,IAAI,CAAA;SACZ;KACF;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAnDD,kCAmDC;AAED,SAAS,iBAAiB,CAAC,IAAY;IACrC,MAAM,SAAS,GAAG,IAAI,CAAC,WAAW,EAAE,CAAA;IACpC,OAAO,CACL,SAAS,KAAK,WAAW;QACzB,SAAS,CAAC,UAAU,CAAC,MAAM,CAAC;QAC5B,SAAS,CAAC,UAAU,CAAC,OAAO,CAAC;QAC7B,SAAS,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAC1C,CAAA;AACH,CAAC"} \ No newline at end of file diff --git a/dist/node_modules/@actions/http-client/package.json b/dist/node_modules/@actions/http-client/package.json new file mode 100644 index 0000000..0ae89c3 --- /dev/null +++ b/dist/node_modules/@actions/http-client/package.json @@ -0,0 +1,51 @@ +{ 
+ "name": "@actions/http-client", + "version": "2.2.1", + "description": "Actions Http Client", + "keywords": [ + "github", + "actions", + "http" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/http-client", + "license": "MIT", + "main": "lib/index.js", + "types": "lib/index.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/http-client" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "build": "tsc", + "format": "prettier --write **/*.ts", + "format-check": "prettier --check **/*.ts", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "devDependencies": { + "@types/node": "20.7.1", + "@types/tunnel": "0.0.3", + "proxy": "^2.1.1", + "@types/proxy": "^1.0.1" + }, + "dependencies": { + "tunnel": "^0.0.6", + "undici": "^5.25.4" + } +} \ No newline at end of file diff --git a/dist/node_modules/@fastify/busboy/LICENSE b/dist/node_modules/@fastify/busboy/LICENSE new file mode 100644 index 0000000..290762e --- /dev/null +++ b/dist/node_modules/@fastify/busboy/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. \ No newline at end of file diff --git a/dist/node_modules/@fastify/busboy/README.md b/dist/node_modules/@fastify/busboy/README.md new file mode 100644 index 0000000..ece3cc8 --- /dev/null +++ b/dist/node_modules/@fastify/busboy/README.md @@ -0,0 +1,271 @@ +# busboy + +
+ +[![Build Status](https://github.com/fastify/busboy/actions/workflows/ci.yml/badge.svg)](https://github.com/fastify/busboy/actions) +[![Coverage Status](https://coveralls.io/repos/fastify/busboy/badge.svg?branch=master)](https://coveralls.io/r/fastify/busboy?branch=master) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/) +[![Security Responsible Disclosure](https://img.shields.io/badge/Security-Responsible%20Disclosure-yellow.svg)](https://github.com/fastify/.github/blob/main/SECURITY.md) + +
+ +
+ +[![NPM version](https://img.shields.io/npm/v/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy) +[![NPM downloads](https://img.shields.io/npm/dm/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy) + +
+ +Description +=========== + +A Node.js module for parsing incoming HTML form data. + +This is an officially supported fork by [fastify](https://github.com/fastify/) organization of the amazing library [originally created](https://github.com/mscdex/busboy) by Brian White, +aimed at addressing long-standing issues with it. + +Benchmark (Mean time for 500 Kb payload, 2000 cycles, 1000 cycle warmup): + +| Library | Version | Mean time in nanoseconds (less is better) | +|-----------------------|---------|-------------------------------------------| +| busboy | 0.3.1 | `340114` | +| @fastify/busboy | 1.0.0 | `270984` | + +[Changelog](https://github.com/fastify/busboy/blob/master/CHANGELOG.md) since busboy 0.31. + +Requirements +============ + +* [Node.js](http://nodejs.org/) 10+ + + +Install +======= + + npm i @fastify/busboy + + +Examples +======== + +* Parsing (multipart) with default options: + +```javascript +const http = require('node:http'); +const { inspect } = require('node:util'); +const Busboy = require('busboy'); + +http.createServer((req, res) => { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', (fieldname, file, filename, encoding, mimetype) => { + console.log(`File [${fieldname}]: filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`); + file.on('data', data => { + console.log(`File [${fieldname}] got ${data.length} bytes`); + }); + file.on('end', () => { + console.log(`File [${fieldname}] Finished`); + }); + }); + busboy.on('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => { + console.log(`Field [${fieldname}]: value: ${inspect(val)}`); + }); + busboy.on('finish', () => { + console.log('Done parsing form!'); + res.writeHead(303, { Connection: 'close', Location: '/' }); + res.end(); + }); + req.pipe(busboy); + } else if (req.method === 'GET') { + res.writeHead(200, { Connection: 'close' }); + res.end(` +
+      <html>
+        <head></head>
+        <body>
+          <form method="POST" enctype="multipart/form-data">
+            <input type="file" name="filefield"><br />
+            <input type="text" name="textfield"><br />
+            <input type="submit">
+          </form>
+        </body>
+      </html>
+ `); + } +}).listen(8000, () => { + console.log('Listening for requests'); +}); + +// Example output, using http://nodejs.org/images/ryan-speaker.jpg as the file: +// +// Listening for requests +// File [filefield]: filename: ryan-speaker.jpg, encoding: binary +// File [filefield] got 11971 bytes +// Field [textfield]: value: 'testing! :-)' +// File [filefield] Finished +// Done parsing form! +``` + +* Save all incoming files to disk: + +```javascript +const http = require('node:http'); +const path = require('node:path'); +const os = require('node:os'); +const fs = require('node:fs'); + +const Busboy = require('busboy'); + +http.createServer(function(req, res) { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', function(fieldname, file, filename, encoding, mimetype) { + var saveTo = path.join(os.tmpdir(), path.basename(fieldname)); + file.pipe(fs.createWriteStream(saveTo)); + }); + busboy.on('finish', function() { + res.writeHead(200, { 'Connection': 'close' }); + res.end("That's all folks!"); + }); + return req.pipe(busboy); + } + res.writeHead(404); + res.end(); +}).listen(8000, function() { + console.log('Listening for requests'); +}); +``` + +* Parsing (urlencoded) with default options: + +```javascript +const http = require('node:http'); +const { inspect } = require('node:util'); + +const Busboy = require('busboy'); + +http.createServer(function(req, res) { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', function(fieldname, file, filename, encoding, mimetype) { + console.log('File [' + fieldname + ']: filename: ' + filename); + file.on('data', function(data) { + console.log('File [' + fieldname + '] got ' + data.length + ' bytes'); + }); + file.on('end', function() { + console.log('File [' + fieldname + '] Finished'); + }); + }); + busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) { + console.log('Field [' + fieldname + ']: value: ' + inspect(val)); + }); + busboy.on('finish', function() { + console.log('Done parsing form!'); + res.writeHead(303, { Connection: 'close', Location: '/' }); + res.end(); + }); + req.pipe(busboy); + } else if (req.method === 'GET') { + res.writeHead(200, { Connection: 'close' }); + res.end('\ +
+               <form method="POST">\
+                <input type="text" name="textfield"><br />\
+                <select name="selectfield">\
+                 <option value="1">1</option>\
+                 <option value="10">10</option>\
+                 <option value="100">100</option>\
+                 <option value="9001">9001</option>\
+                </select><br />\
+                <input type="checkbox" name="checkfield">Node.js rules!<br />\
+                <input type="submit">\
+               </form>
\ + '); + } +}).listen(8000, function() { + console.log('Listening for requests'); +}); + +// Example output: +// +// Listening for requests +// Field [textfield]: value: 'testing! :-)' +// Field [selectfield]: value: '9001' +// Field [checkfield]: value: 'on' +// Done parsing form! +``` + + +API +=== + +_Busboy_ is a _Writable_ stream + +Busboy (special) events +----------------------- + +* **file**(< _string_ >fieldname, < _ReadableStream_ >stream, < _string_ >filename, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new file form field found. `transferEncoding` contains the 'Content-Transfer-Encoding' value for the file stream. `mimeType` contains the 'Content-Type' value for the file stream. + * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents), otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically and safely discarded (these discarded files do still count towards `files` and `parts` limits). + * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens. + * The property `bytesRead` informs about the number of bytes that have been read so far. + +* **field**(< _string_ >fieldname, < _string_ >value, < _boolean_ >fieldnameTruncated, < _boolean_ >valueTruncated, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new non-file field found. + +* **partsLimit**() - Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted. + +* **filesLimit**() - Emitted when specified `files` limit has been reached. No more 'file' events will be emitted. + +* **fieldsLimit**() - Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted. + + +Busboy methods +-------------- + +* **(constructor)**(< _object_ >config) - Creates and returns a new Busboy instance. + + * The constructor takes the following valid `config` settings: + + * **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers. + + * **autoDestroy** - _boolean_ - Whether this stream should automatically call .destroy() on itself after ending. (Default: false). + + * **highWaterMark** - _integer_ - highWaterMark to use for this Busboy instance (Default: WritableStream default). + + * **fileHwm** - _integer_ - highWaterMark to use for file streams (Default: ReadableStream default). + + * **defCharset** - _string_ - Default character set to use when one isn't defined (Default: 'utf8'). + + * **preservePath** - _boolean_ - If paths in the multipart 'filename' field shall be preserved. (Default: false). + + * **isPartAFile** - __function__ - Use this function to override the default file detection functionality. It has following parameters: + + * fieldName - __string__ The name of the field. + + * contentType - __string__ The content-type of the part, e.g. `text/plain`, `image/jpeg`, `application/octet-stream` + + * fileName - __string__ The name of a file supplied by the part. 
+ + (Default: `(fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)`) + + * **limits** - _object_ - Various limits on incoming data. Valid properties are: + + * **fieldNameSize** - _integer_ - Max field name size (in bytes) (Default: 100 bytes). + + * **fieldSize** - _integer_ - Max field value size (in bytes) (Default: 1 MiB, which is 1024 x 1024 bytes). + + * **fields** - _integer_ - Max number of non-file fields (Default: Infinity). + + * **fileSize** - _integer_ - For multipart forms, the max file size (in bytes) (Default: Infinity). + + * **files** - _integer_ - For multipart forms, the max number of file fields (Default: Infinity). + + * **parts** - _integer_ - For multipart forms, the max number of parts (fields + files) (Default: Infinity). + + * **headerPairs** - _integer_ - For multipart forms, the max number of header key=>value pairs to parse **Default:** 2000 + + * **headerSize** - _integer_ - For multipart forms, the max size of a multipart header **Default:** 81920. + + * The constructor can throw errors: + + * **Busboy expected an options-Object.** - Busboy expected an Object as first parameters. + + * **Busboy expected an options-Object with headers-attribute.** - The first parameter is lacking of a headers-attribute. + + * **Limit $limit is not a valid number** - Busboy expected the desired limit to be of type number. Busboy throws this Error to prevent a potential security issue by falling silently back to the Busboy-defaults. Potential source for this Error can be the direct use of environment variables without transforming them to the type number. + + * **Unsupported Content-Type.** - The `Content-Type` isn't one Busboy can parse. + + * **Missing Content-Type-header.** - The provided headers don't include `Content-Type` at all. diff --git a/dist/node_modules/@fastify/busboy/deps/dicer/LICENSE b/dist/node_modules/@fastify/busboy/deps/dicer/LICENSE new file mode 100644 index 0000000..290762e --- /dev/null +++ b/dist/node_modules/@fastify/busboy/deps/dicer/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
\ No newline at end of file diff --git a/dist/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js b/dist/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js new file mode 100644 index 0000000..3c8c081 --- /dev/null +++ b/dist/node_modules/@fastify/busboy/deps/dicer/lib/Dicer.js @@ -0,0 +1,213 @@ +'use strict' + +const WritableStream = require('node:stream').Writable +const inherits = require('node:util').inherits + +const StreamSearch = require('../../streamsearch/sbmh') + +const PartStream = require('./PartStream') +const HeaderParser = require('./HeaderParser') + +const DASH = 45 +const B_ONEDASH = Buffer.from('-') +const B_CRLF = Buffer.from('\r\n') +const EMPTY_FN = function () {} + +function Dicer (cfg) { + if (!(this instanceof Dicer)) { return new Dicer(cfg) } + WritableStream.call(this, cfg) + + if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } + + if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } + + this._headerFirst = cfg.headerFirst + + this._dashes = 0 + this._parts = 0 + this._finished = false + this._realFinish = false + this._isPreamble = true + this._justMatched = false + this._firstWrite = true + this._inHeader = true + this._part = undefined + this._cb = undefined + this._ignoreData = false + this._partOpts = { highWaterMark: cfg.partHwm } + this._pause = false + + const self = this + this._hparser = new HeaderParser(cfg) + this._hparser.on('header', function (header) { + self._inHeader = false + self._part.emit('header', header) + }) +} +inherits(Dicer, WritableStream) + +Dicer.prototype.emit = function (ev) { + if (ev === 'finish' && !this._realFinish) { + if (!this._finished) { + const self = this + process.nextTick(function () { + self.emit('error', new Error('Unexpected end of multipart data')) + if (self._part && !self._ignoreData) { + const type = (self._isPreamble ? 'Preamble' : 'Part') + self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) + self._part.push(null) + process.nextTick(function () { + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + return + } + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + } + } else { WritableStream.prototype.emit.apply(this, arguments) } +} + +Dicer.prototype._write = function (data, encoding, cb) { + // ignore unexpected data (e.g. 
extra trailer data after finished) + if (!this._hparser && !this._bparser) { return cb() } + + if (this._headerFirst && this._isPreamble) { + if (!this._part) { + this._part = new PartStream(this._partOpts) + if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() } + } + const r = this._hparser.push(data) + if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } + } + + // allows for "easier" testing + if (this._firstWrite) { + this._bparser.push(B_CRLF) + this._firstWrite = false + } + + this._bparser.push(data) + + if (this._pause) { this._cb = cb } else { cb() } +} + +Dicer.prototype.reset = function () { + this._part = undefined + this._bparser = undefined + this._hparser = undefined +} + +Dicer.prototype.setBoundary = function (boundary) { + const self = this + this._bparser = new StreamSearch('\r\n--' + boundary) + this._bparser.on('info', function (isMatch, data, start, end) { + self._oninfo(isMatch, data, start, end) + }) +} + +Dicer.prototype._ignore = function () { + if (this._part && !this._ignoreData) { + this._ignoreData = true + this._part.on('error', EMPTY_FN) + // we must perform some kind of read on the stream even though we are + // ignoring the data, otherwise node's Readable stream will not emit 'end' + // after pushing null to the stream + this._part.resume() + } +} + +Dicer.prototype._oninfo = function (isMatch, data, start, end) { + let buf; const self = this; let i = 0; let r; let shouldWriteMore = true + + if (!this._part && this._justMatched && data) { + while (this._dashes < 2 && (start + i) < end) { + if (data[start + i] === DASH) { + ++i + ++this._dashes + } else { + if (this._dashes) { buf = B_ONEDASH } + this._dashes = 0 + break + } + } + if (this._dashes === 2) { + if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) } + this.reset() + this._finished = true + // no more parts will be added + if (self._parts === 0) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } + } + if (this._dashes) { return } + } + if (this._justMatched) { this._justMatched = false } + if (!this._part) { + this._part = new PartStream(this._partOpts) + this._part._read = function (n) { + self._unpause() + } + if (this._isPreamble && this.listenerCount('preamble') !== 0) { + this.emit('preamble', this._part) + } else if (this._isPreamble !== true && this.listenerCount('part') !== 0) { + this.emit('part', this._part) + } else { + this._ignore() + } + if (!this._isPreamble) { this._inHeader = true } + } + if (data && start < end && !this._ignoreData) { + if (this._isPreamble || !this._inHeader) { + if (buf) { shouldWriteMore = this._part.push(buf) } + shouldWriteMore = this._part.push(data.slice(start, end)) + if (!shouldWriteMore) { this._pause = true } + } else if (!this._isPreamble && this._inHeader) { + if (buf) { this._hparser.push(buf) } + r = this._hparser.push(data.slice(start, end)) + if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } + } + } + if (isMatch) { + this._hparser.reset() + if (this._isPreamble) { this._isPreamble = false } else { + if (start !== end) { + ++this._parts + this._part.on('end', function () { + if (--self._parts === 0) { + if (self._finished) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } else { + self._unpause() + } + } + }) + } + } + this._part.push(null) + this._part = undefined + 
this._ignoreData = false + this._justMatched = true + this._dashes = 0 + } +} + +Dicer.prototype._unpause = function () { + if (!this._pause) { return } + + this._pause = false + if (this._cb) { + const cb = this._cb + this._cb = undefined + cb() + } +} + +module.exports = Dicer diff --git a/dist/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js b/dist/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js new file mode 100644 index 0000000..65f667b --- /dev/null +++ b/dist/node_modules/@fastify/busboy/deps/dicer/lib/HeaderParser.js @@ -0,0 +1,100 @@ +'use strict' + +const EventEmitter = require('node:events').EventEmitter +const inherits = require('node:util').inherits +const getLimit = require('../../../lib/utils/getLimit') + +const StreamSearch = require('../../streamsearch/sbmh') + +const B_DCRLF = Buffer.from('\r\n\r\n') +const RE_CRLF = /\r\n/g +const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex + +function HeaderParser (cfg) { + EventEmitter.call(this) + + cfg = cfg || {} + const self = this + this.nread = 0 + this.maxed = false + this.npairs = 0 + this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) + this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) + this.buffer = '' + this.header = {} + this.finished = false + this.ss = new StreamSearch(B_DCRLF) + this.ss.on('info', function (isMatch, data, start, end) { + if (data && !self.maxed) { + if (self.nread + end - start >= self.maxHeaderSize) { + end = self.maxHeaderSize - self.nread + start + self.nread = self.maxHeaderSize + self.maxed = true + } else { self.nread += (end - start) } + + self.buffer += data.toString('binary', start, end) + } + if (isMatch) { self._finish() } + }) +} +inherits(HeaderParser, EventEmitter) + +HeaderParser.prototype.push = function (data) { + const r = this.ss.push(data) + if (this.finished) { return r } +} + +HeaderParser.prototype.reset = function () { + this.finished = false + this.buffer = '' + this.header = {} + this.ss.reset() +} + +HeaderParser.prototype._finish = function () { + if (this.buffer) { this._parseHeader() } + this.ss.matches = this.ss.maxMatches + const header = this.header + this.header = {} + this.buffer = '' + this.finished = true + this.nread = this.npairs = 0 + this.maxed = false + this.emit('header', header) +} + +HeaderParser.prototype._parseHeader = function () { + if (this.npairs === this.maxHeaderPairs) { return } + + const lines = this.buffer.split(RE_CRLF) + const len = lines.length + let m, h + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (lines[i].length === 0) { continue } + if (lines[i][0] === '\t' || lines[i][0] === ' ') { + // folded header content + // RFC2822 says to just remove the CRLF and not the whitespace following + // it, so we follow the RFC and include the leading whitespace ... 
+ if (h) { + this.header[h][this.header[h].length - 1] += lines[i] + continue + } + } + + const posColon = lines[i].indexOf(':') + if ( + posColon === -1 || + posColon === 0 + ) { + return + } + m = RE_HDR.exec(lines[i]) + h = m[1].toLowerCase() + this.header[h] = this.header[h] || [] + this.header[h].push((m[2] || '')) + if (++this.npairs === this.maxHeaderPairs) { break } + } +} + +module.exports = HeaderParser diff --git a/dist/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js b/dist/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js new file mode 100644 index 0000000..c91da1c --- /dev/null +++ b/dist/node_modules/@fastify/busboy/deps/dicer/lib/PartStream.js @@ -0,0 +1,13 @@ +'use strict' + +const inherits = require('node:util').inherits +const ReadableStream = require('node:stream').Readable + +function PartStream (opts) { + ReadableStream.call(this, opts) +} +inherits(PartStream, ReadableStream) + +PartStream.prototype._read = function (n) {} + +module.exports = PartStream diff --git a/dist/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts b/dist/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts new file mode 100644 index 0000000..3c5b896 --- /dev/null +++ b/dist/node_modules/@fastify/busboy/deps/dicer/lib/dicer.d.ts @@ -0,0 +1,164 @@ +// Type definitions for dicer 0.2 +// Project: https://github.com/mscdex/dicer +// Definitions by: BendingBender +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped +// TypeScript Version: 2.2 +/// + +import stream = require("stream"); + +// tslint:disable:unified-signatures + +/** + * A very fast streaming multipart parser for node.js. + * Dicer is a WritableStream + * + * Dicer (special) events: + * - on('finish', ()) - Emitted when all parts have been parsed and the Dicer instance has been ended. + * - on('part', (stream: PartStream)) - Emitted when a new part has been found. + * - on('preamble', (stream: PartStream)) - Emitted for preamble if you should happen to need it (can usually be ignored). + * - on('trailer', (data: Buffer)) - Emitted when trailing data was found after the terminating boundary (as with the preamble, this can usually be ignored too). + */ +export class Dicer extends stream.Writable { + /** + * Creates and returns a new Dicer instance with the following valid config settings: + * + * @param config The configuration to use + */ + constructor(config: Dicer.Config); + /** + * Sets the boundary to use for parsing and performs some initialization needed for parsing. + * You should only need to use this if you set headerFirst to true in the constructor and are parsing the boundary from the preamble header. 
+ * + * @param boundary The boundary to use + */ + setBoundary(boundary: string): void; + addListener(event: "finish", listener: () => void): this; + addListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + addListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + addListener(event: "trailer", listener: (data: Buffer) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "part", listener: (stream: Dicer.PartStream) => void): this; + on(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + on(event: "trailer", listener: (data: Buffer) => void): this; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "part", listener: (stream: Dicer.PartStream) => void): this; + once(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + once(event: "trailer", listener: (data: Buffer) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + prependListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + prependListener(event: "trailer", listener: (data: Buffer) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + prependOnceListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + prependOnceListener(event: "trailer", listener: (data: Buffer) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): 
this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + removeListener(event: "finish", listener: () => void): this; + removeListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + removeListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + removeListener(event: "trailer", listener: (data: Buffer) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "drain", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "pipe", listener: (src: stream.Readable) => void): this; + removeListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + removeListener(event: string, listener: (...args: any[]) => void): this; +} + +declare namespace Dicer { + interface Config { + /** + * This is the boundary used to detect the beginning of a new part. + */ + boundary?: string | undefined; + /** + * If true, preamble header parsing will be performed first. + */ + headerFirst?: boolean | undefined; + /** + * The maximum number of header key=>value pairs to parse Default: 2000 (same as node's http). + */ + maxHeaderPairs?: number | undefined; + } + + /** + * PartStream is a _ReadableStream_ + * + * PartStream (special) events: + * - on('header', (header: object)) - An object containing the header for this particular part. Each property value is an array of one or more string values. + */ + interface PartStream extends stream.Readable { + addListener(event: "header", listener: (header: object) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + on(event: "header", listener: (header: object) => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "end", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: "header", listener: (header: object) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "end", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "header", listener: (header: object) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "header", listener: (header: object) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", 
listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + removeListener(event: "header", listener: (header: object) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "data", listener: (chunk: Buffer | string) => void): this; + removeListener(event: "end", listener: () => void): this; + removeListener(event: "readable", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: string, listener: (...args: any[]) => void): this; + } +} \ No newline at end of file diff --git a/dist/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js b/dist/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js new file mode 100644 index 0000000..b90c0e8 --- /dev/null +++ b/dist/node_modules/@fastify/busboy/deps/streamsearch/sbmh.js @@ -0,0 +1,228 @@ +'use strict' + +/** + * Copyright Brian White. All rights reserved. + * + * @see https://github.com/mscdex/streamsearch + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool + */ +const EventEmitter = require('node:events').EventEmitter +const inherits = require('node:util').inherits + +function SBMH (needle) { + if (typeof needle === 'string') { + needle = Buffer.from(needle) + } + + if (!Buffer.isBuffer(needle)) { + throw new TypeError('The needle has to be a String or a Buffer.') + } + + const needleLength = needle.length + + if (needleLength === 0) { + throw new Error('The needle cannot be an empty String/Buffer.') + } + + if (needleLength > 256) { + throw new Error('The needle cannot have a length bigger than 256.') + } + + this.maxMatches = Infinity + this.matches = 0 + + this._occ = new Array(256) + .fill(needleLength) // Initialize occurrence table. + this._lookbehind_size = 0 + this._needle = needle + this._bufpos = 0 + + this._lookbehind = Buffer.alloc(needleLength) + + // Populate occurrence table with analysis of the needle, + // ignoring last letter. 
+ for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var + this._occ[needle[i]] = needleLength - 1 - i + } +} +inherits(SBMH, EventEmitter) + +SBMH.prototype.reset = function () { + this._lookbehind_size = 0 + this.matches = 0 + this._bufpos = 0 +} + +SBMH.prototype.push = function (chunk, pos) { + if (!Buffer.isBuffer(chunk)) { + chunk = Buffer.from(chunk, 'binary') + } + const chlen = chunk.length + this._bufpos = pos || 0 + let r + while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } + return r +} + +SBMH.prototype._sbmh_feed = function (data) { + const len = data.length + const needle = this._needle + const needleLength = needle.length + const lastNeedleChar = needle[needleLength - 1] + + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehind_size - 2] + let pos = -this._lookbehind_size + let ch + + if (pos < 0) { + // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. + // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. + while (pos < 0 && pos <= len - needleLength) { + ch = this._sbmh_lookup_char(data, pos + needleLength - 1) + + if ( + ch === lastNeedleChar && + this._sbmh_memcmp(data, pos, needleLength - 1) + ) { + this._lookbehind_size = 0 + ++this.matches + this.emit('info', true) + + return (this._bufpos = pos + needleLength) + } + pos += this._occ[ch] + } + + // No match. + + if (pos < 0) { + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } + } + + if (pos >= 0) { + // Discard lookbehind buffer. + this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) + this._lookbehind_size = 0 + } else { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + const bytesToCutOff = this._lookbehind_size + pos + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. + this.emit('info', false, this._lookbehind, 0, bytesToCutOff) + } + + this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, + this._lookbehind_size - bytesToCutOff) + this._lookbehind_size -= bytesToCutOff + + data.copy(this._lookbehind, this._lookbehind_size) + this._lookbehind_size += len + + this._bufpos = len + return len + } + } + + pos += (pos >= 0) * this._bufpos + + // Lookbehind buffer is now empty. We only need to check if the + // needle is in the haystack. + if (data.indexOf(needle, pos) !== -1) { + pos = data.indexOf(needle, pos) + ++this.matches + if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } + + return (this._bufpos = pos + needleLength) + } else { + pos = len - needleLength + } + + // There was no match. 
If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + while ( + pos < len && + ( + data[pos] !== needle[0] || + ( + (Buffer.compare( + data.subarray(pos, pos + len - pos), + needle.subarray(0, len - pos) + ) !== 0) + ) + ) + ) { + ++pos + } + if (pos < len) { + data.copy(this._lookbehind, 0, pos, pos + (len - pos)) + this._lookbehind_size = len - pos + } + + // Everything until pos is guaranteed not to contain needle data. + if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } + + this._bufpos = len + return len +} + +SBMH.prototype._sbmh_lookup_char = function (data, pos) { + return (pos < 0) + ? this._lookbehind[this._lookbehind_size + pos] + : data[pos] +} + +SBMH.prototype._sbmh_memcmp = function (data, pos, len) { + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } + } + return true +} + +module.exports = SBMH diff --git a/dist/node_modules/@fastify/busboy/lib/main.d.ts b/dist/node_modules/@fastify/busboy/lib/main.d.ts new file mode 100644 index 0000000..91b6448 --- /dev/null +++ b/dist/node_modules/@fastify/busboy/lib/main.d.ts @@ -0,0 +1,196 @@ +// Definitions by: Jacob Baskin +// BendingBender +// Igor Savin + +/// + +import * as http from 'http'; +import { Readable, Writable } from 'stream'; +export { Dicer } from "../deps/dicer/lib/dicer"; + +export const Busboy: BusboyConstructor; +export default Busboy; + +export interface BusboyConfig { + /** + * These are the HTTP headers of the incoming request, which are used by individual parsers. + */ + headers: BusboyHeaders; + /** + * `highWaterMark` to use for this Busboy instance. + * @default WritableStream default. + */ + highWaterMark?: number | undefined; + /** + * highWaterMark to use for file streams. + * @default ReadableStream default. + */ + fileHwm?: number | undefined; + /** + * Default character set to use when one isn't defined. + * @default 'utf8' + */ + defCharset?: string | undefined; + /** + * Detect if a Part is a file. + * + * By default a file is detected if contentType + * is application/octet-stream or fileName is not + * undefined. + * + * Modify this to handle e.g. Blobs. + */ + isPartAFile?: (fieldName: string | undefined, contentType: string | undefined, fileName: string | undefined) => boolean; + /** + * If paths in the multipart 'filename' field shall be preserved. + * @default false + */ + preservePath?: boolean | undefined; + /** + * Various limits on incoming data. 
+ */ + limits?: + | { + /** + * Max field name size (in bytes) + * @default 100 bytes + */ + fieldNameSize?: number | undefined; + /** + * Max field value size (in bytes) + * @default 1MB + */ + fieldSize?: number | undefined; + /** + * Max number of non-file fields + * @default Infinity + */ + fields?: number | undefined; + /** + * For multipart forms, the max file size (in bytes) + * @default Infinity + */ + fileSize?: number | undefined; + /** + * For multipart forms, the max number of file fields + * @default Infinity + */ + files?: number | undefined; + /** + * For multipart forms, the max number of parts (fields + files) + * @default Infinity + */ + parts?: number | undefined; + /** + * For multipart forms, the max number of header key=>value pairs to parse + * @default 2000 + */ + headerPairs?: number | undefined; + + /** + * For multipart forms, the max size of a header part + * @default 81920 + */ + headerSize?: number | undefined; + } + | undefined; +} + +export type BusboyHeaders = { 'content-type': string } & http.IncomingHttpHeaders; + +export interface BusboyFileStream extends + Readable { + + truncated: boolean; + + /** + * The number of bytes that have been read so far. + */ + bytesRead: number; +} + +export interface Busboy extends Writable { + addListener(event: Event, listener: BusboyEvents[Event]): this; + + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + + on(event: Event, listener: BusboyEvents[Event]): this; + + on(event: string | symbol, listener: (...args: any[]) => void): this; + + once(event: Event, listener: BusboyEvents[Event]): this; + + once(event: string | symbol, listener: (...args: any[]) => void): this; + + removeListener(event: Event, listener: BusboyEvents[Event]): this; + + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + + off(event: Event, listener: BusboyEvents[Event]): this; + + off(event: string | symbol, listener: (...args: any[]) => void): this; + + prependListener(event: Event, listener: BusboyEvents[Event]): this; + + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + + prependOnceListener(event: Event, listener: BusboyEvents[Event]): this; + + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; +} + +export interface BusboyEvents { + /** + * Emitted for each new file form field found. + * + * * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the + * file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents), + * otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** + * incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically + * and safely discarded (these discarded files do still count towards `files` and `parts` limits). + * * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` + * (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens. + * + * @param listener.transferEncoding Contains the 'Content-Transfer-Encoding' value for the file stream. + * @param listener.mimeType Contains the 'Content-Type' value for the file stream. 
+ */ + file: ( + fieldname: string, + stream: BusboyFileStream, + filename: string, + transferEncoding: string, + mimeType: string, + ) => void; + /** + * Emitted for each new non-file field found. + */ + field: ( + fieldname: string, + value: string, + fieldnameTruncated: boolean, + valueTruncated: boolean, + transferEncoding: string, + mimeType: string, + ) => void; + finish: () => void; + /** + * Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted. + */ + partsLimit: () => void; + /** + * Emitted when specified `files` limit has been reached. No more 'file' events will be emitted. + */ + filesLimit: () => void; + /** + * Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted. + */ + fieldsLimit: () => void; + error: (error: unknown) => void; +} + +export interface BusboyConstructor { + (options: BusboyConfig): Busboy; + + new(options: BusboyConfig): Busboy; +} + diff --git a/dist/node_modules/@fastify/busboy/lib/main.js b/dist/node_modules/@fastify/busboy/lib/main.js new file mode 100644 index 0000000..8794beb --- /dev/null +++ b/dist/node_modules/@fastify/busboy/lib/main.js @@ -0,0 +1,85 @@ +'use strict' + +const WritableStream = require('node:stream').Writable +const { inherits } = require('node:util') +const Dicer = require('../deps/dicer/lib/Dicer') + +const MultipartParser = require('./types/multipart') +const UrlencodedParser = require('./types/urlencoded') +const parseParams = require('./utils/parseParams') + +function Busboy (opts) { + if (!(this instanceof Busboy)) { return new Busboy(opts) } + + if (typeof opts !== 'object') { + throw new TypeError('Busboy expected an options-Object.') + } + if (typeof opts.headers !== 'object') { + throw new TypeError('Busboy expected an options-Object with headers-attribute.') + } + if (typeof opts.headers['content-type'] !== 'string') { + throw new TypeError('Missing Content-Type-header.') + } + + const { + headers, + ...streamOptions + } = opts + + this.opts = { + autoDestroy: false, + ...streamOptions + } + WritableStream.call(this, this.opts) + + this._done = false + this._parser = this.getParserByHeaders(headers) + this._finished = false +} +inherits(Busboy, WritableStream) + +Busboy.prototype.emit = function (ev) { + if (ev === 'finish') { + if (!this._done) { + this._parser?.end() + return + } else if (this._finished) { + return + } + this._finished = true + } + WritableStream.prototype.emit.apply(this, arguments) +} + +Busboy.prototype.getParserByHeaders = function (headers) { + const parsed = parseParams(headers['content-type']) + + const cfg = { + defCharset: this.opts.defCharset, + fileHwm: this.opts.fileHwm, + headers, + highWaterMark: this.opts.highWaterMark, + isPartAFile: this.opts.isPartAFile, + limits: this.opts.limits, + parsedConType: parsed, + preservePath: this.opts.preservePath + } + + if (MultipartParser.detect.test(parsed[0])) { + return new MultipartParser(this, cfg) + } + if (UrlencodedParser.detect.test(parsed[0])) { + return new UrlencodedParser(this, cfg) + } + throw new Error('Unsupported Content-Type.') +} + +Busboy.prototype._write = function (chunk, encoding, cb) { + this._parser.write(chunk, cb) +} + +module.exports = Busboy +module.exports.default = Busboy +module.exports.Busboy = Busboy + +module.exports.Dicer = Dicer diff --git a/dist/node_modules/@fastify/busboy/lib/types/multipart.js b/dist/node_modules/@fastify/busboy/lib/types/multipart.js new file mode 100644 index 0000000..d691eca --- /dev/null +++ 
b/dist/node_modules/@fastify/busboy/lib/types/multipart.js @@ -0,0 +1,306 @@ +'use strict' + +// TODO: +// * support 1 nested multipart level +// (see second multipart example here: +// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) +// * support limits.fieldNameSize +// -- this will require modifications to utils.parseParams + +const { Readable } = require('node:stream') +const { inherits } = require('node:util') + +const Dicer = require('../../deps/dicer/lib/Dicer') + +const parseParams = require('../utils/parseParams') +const decodeText = require('../utils/decodeText') +const basename = require('../utils/basename') +const getLimit = require('../utils/getLimit') + +const RE_BOUNDARY = /^boundary$/i +const RE_FIELD = /^form-data$/i +const RE_CHARSET = /^charset$/i +const RE_FILENAME = /^filename$/i +const RE_NAME = /^name$/i + +Multipart.detect = /^multipart\/form-data/i +function Multipart (boy, cfg) { + let i + let len + const self = this + let boundary + const limits = cfg.limits + const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) + const parsedConType = cfg.parsedConType || [] + const defCharset = cfg.defCharset || 'utf8' + const preservePath = cfg.preservePath + const fileOpts = { highWaterMark: cfg.fileHwm } + + for (i = 0, len = parsedConType.length; i < len; ++i) { + if (Array.isArray(parsedConType[i]) && + RE_BOUNDARY.test(parsedConType[i][0])) { + boundary = parsedConType[i][1] + break + } + } + + function checkFinished () { + if (nends === 0 && finished && !boy._done) { + finished = false + self.end() + } + } + + if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } + + const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) + const filesLimit = getLimit(limits, 'files', Infinity) + const fieldsLimit = getLimit(limits, 'fields', Infinity) + const partsLimit = getLimit(limits, 'parts', Infinity) + const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) + const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) + + let nfiles = 0 + let nfields = 0 + let nends = 0 + let curFile + let curField + let finished = false + + this._needDrain = false + this._pause = false + this._cb = undefined + this._nparts = 0 + this._boy = boy + + const parserCfg = { + boundary, + maxHeaderPairs: headerPairsLimit, + maxHeaderSize: headerSizeLimit, + partHwm: fileOpts.highWaterMark, + highWaterMark: cfg.highWaterMark + } + + this.parser = new Dicer(parserCfg) + this.parser.on('drain', function () { + self._needDrain = false + if (self._cb && !self._pause) { + const cb = self._cb + self._cb = undefined + cb() + } + }).on('part', function onPart (part) { + if (++self._nparts > partsLimit) { + self.parser.removeListener('part', onPart) + self.parser.on('part', skipPart) + boy.hitPartsLimit = true + boy.emit('partsLimit') + return skipPart(part) + } + + // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let + // us emit 'end' early since we know the part has ended if we are already + // seeing the next part + if (curField) { + const field = curField + field.emit('end') + field.removeAllListeners('end') + } + + part.on('header', function (header) { + let contype + let fieldname + let parsed + let charset + let encoding + let filename + let nsize = 0 + + if (header['content-type']) { + parsed = parseParams(header['content-type'][0]) + if 
(parsed[0]) { + contype = parsed[0].toLowerCase() + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_CHARSET.test(parsed[i][0])) { + charset = parsed[i][1].toLowerCase() + break + } + } + } + } + + if (contype === undefined) { contype = 'text/plain' } + if (charset === undefined) { charset = defCharset } + + if (header['content-disposition']) { + parsed = parseParams(header['content-disposition'][0]) + if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_NAME.test(parsed[i][0])) { + fieldname = parsed[i][1] + } else if (RE_FILENAME.test(parsed[i][0])) { + filename = parsed[i][1] + if (!preservePath) { filename = basename(filename) } + } + } + } else { return skipPart(part) } + + if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } + + let onData, + onEnd + + if (isPartAFile(fieldname, contype, filename)) { + // file/binary field + if (nfiles === filesLimit) { + if (!boy.hitFilesLimit) { + boy.hitFilesLimit = true + boy.emit('filesLimit') + } + return skipPart(part) + } + + ++nfiles + + if (boy.listenerCount('file') === 0) { + self.parser._ignore() + return + } + + ++nends + const file = new FileStream(fileOpts) + curFile = file + file.on('end', function () { + --nends + self._pause = false + checkFinished() + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + }) + file._read = function (n) { + if (!self._pause) { return } + self._pause = false + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + } + boy.emit('file', fieldname, file, filename, encoding, contype) + + onData = function (data) { + if ((nsize += data.length) > fileSizeLimit) { + const extralen = fileSizeLimit - nsize + data.length + if (extralen > 0) { file.push(data.slice(0, extralen)) } + file.truncated = true + file.bytesRead = fileSizeLimit + part.removeAllListeners('data') + file.emit('limit') + return + } else if (!file.push(data)) { self._pause = true } + + file.bytesRead = nsize + } + + onEnd = function () { + curFile = undefined + file.push(null) + } + } else { + // non-file field + if (nfields === fieldsLimit) { + if (!boy.hitFieldsLimit) { + boy.hitFieldsLimit = true + boy.emit('fieldsLimit') + } + return skipPart(part) + } + + ++nfields + ++nends + let buffer = '' + let truncated = false + curField = part + + onData = function (data) { + if ((nsize += data.length) > fieldSizeLimit) { + const extralen = (fieldSizeLimit - (nsize - data.length)) + buffer += data.toString('binary', 0, extralen) + truncated = true + part.removeAllListeners('data') + } else { buffer += data.toString('binary') } + } + + onEnd = function () { + curField = undefined + if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } + boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) + --nends + checkFinished() + } + } + + /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become + broken. Streams2/streams3 is a huge black box of confusion, but + somehow overriding the sync state seems to fix things again (and still + seems to work for previous node versions). 
+ */ + part._readableState.sync = false + + part.on('data', onData) + part.on('end', onEnd) + }).on('error', function (err) { + if (curFile) { curFile.emit('error', err) } + }) + }).on('error', function (err) { + boy.emit('error', err) + }).on('finish', function () { + finished = true + checkFinished() + }) +} + +Multipart.prototype.write = function (chunk, cb) { + const r = this.parser.write(chunk) + if (r && !this._pause) { + cb() + } else { + this._needDrain = !r + this._cb = cb + } +} + +Multipart.prototype.end = function () { + const self = this + + if (self.parser.writable) { + self.parser.end() + } else if (!self._boy._done) { + process.nextTick(function () { + self._boy._done = true + self._boy.emit('finish') + }) + } +} + +function skipPart (part) { + part.resume() +} + +function FileStream (opts) { + Readable.call(this, opts) + + this.bytesRead = 0 + + this.truncated = false +} + +inherits(FileStream, Readable) + +FileStream.prototype._read = function (n) {} + +module.exports = Multipart diff --git a/dist/node_modules/@fastify/busboy/lib/types/urlencoded.js b/dist/node_modules/@fastify/busboy/lib/types/urlencoded.js new file mode 100644 index 0000000..6f5f784 --- /dev/null +++ b/dist/node_modules/@fastify/busboy/lib/types/urlencoded.js @@ -0,0 +1,190 @@ +'use strict' + +const Decoder = require('../utils/Decoder') +const decodeText = require('../utils/decodeText') +const getLimit = require('../utils/getLimit') + +const RE_CHARSET = /^charset$/i + +UrlEncoded.detect = /^application\/x-www-form-urlencoded/i +function UrlEncoded (boy, cfg) { + const limits = cfg.limits + const parsedConType = cfg.parsedConType + this.boy = boy + + this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) + this.fieldsLimit = getLimit(limits, 'fields', Infinity) + + let charset + for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var + if (Array.isArray(parsedConType[i]) && + RE_CHARSET.test(parsedConType[i][0])) { + charset = parsedConType[i][1].toLowerCase() + break + } + } + + if (charset === undefined) { charset = cfg.defCharset || 'utf8' } + + this.decoder = new Decoder() + this.charset = charset + this._fields = 0 + this._state = 'key' + this._checkingBytes = true + this._bytesKey = 0 + this._bytesVal = 0 + this._key = '' + this._val = '' + this._keyTrunc = false + this._valTrunc = false + this._hitLimit = false +} + +UrlEncoded.prototype.write = function (data, cb) { + if (this._fields === this.fieldsLimit) { + if (!this.boy.hitFieldsLimit) { + this.boy.hitFieldsLimit = true + this.boy.emit('fieldsLimit') + } + return cb() + } + + let idxeq; let idxamp; let i; let p = 0; const len = data.length + + while (p < len) { + if (this._state === 'key') { + idxeq = idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x3D/* = */) { + idxeq = i + break + } else if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesKey } + } + + if (idxeq !== undefined) { + // key with assignment + if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } + this._state = 'val' + + this._hitLimit = false + this._checkingBytes = true + this._val = '' + this._bytesVal = 0 + this._valTrunc = false + this.decoder.reset() + + p = idxeq + 1 + } else if (idxamp !== undefined) { + // key 
with no assignment + ++this._fields + let key; const keyTrunc = this._keyTrunc + if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key } + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + if (key.length) { + this.boy.emit('field', decodeText(key, 'binary', this.charset), + '', + keyTrunc, + false) + } + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... + if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._keyTrunc = true + } + } else { + if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } + p = len + } + } else { + idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesVal } + } + + if (idxamp !== undefined) { + ++this._fields + if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + this._state = 'key' + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... 
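+ // (e.g. '%41%42' counts as 6 raw bytes in the scan above but decodes to just 'AB', so re-check the decoded value's length before treating the field as truncated)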
+ if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._val === '' && this.fieldSizeLimit === 0) || + (this._bytesVal = this._val.length) === this.fieldSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._valTrunc = true + } + } else { + if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } + p = len + } + } + } + cb() +} + +UrlEncoded.prototype.end = function () { + if (this.boy._done) { return } + + if (this._state === 'key' && this._key.length > 0) { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + '', + this._keyTrunc, + false) + } else if (this._state === 'val') { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + } + this.boy._done = true + this.boy.emit('finish') +} + +module.exports = UrlEncoded diff --git a/dist/node_modules/@fastify/busboy/lib/utils/Decoder.js b/dist/node_modules/@fastify/busboy/lib/utils/Decoder.js new file mode 100644 index 0000000..7917678 --- /dev/null +++ b/dist/node_modules/@fastify/busboy/lib/utils/Decoder.js @@ -0,0 +1,54 @@ +'use strict' + +const RE_PLUS = /\+/g + +const HEX = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +] + +function Decoder () { + this.buffer = undefined +} +Decoder.prototype.write = function (str) { + // Replace '+' with ' ' before decoding + str = str.replace(RE_PLUS, ' ') + let res = '' + let i = 0; let p = 0; const len = str.length + for (; i < len; ++i) { + if (this.buffer !== undefined) { + if (!HEX[str.charCodeAt(i)]) { + res += '%' + this.buffer + this.buffer = undefined + --i // retry character + } else { + this.buffer += str[i] + ++p + if (this.buffer.length === 2) { + res += String.fromCharCode(parseInt(this.buffer, 16)) + this.buffer = undefined + } + } + } else if (str[i] === '%') { + if (i > p) { + res += str.substring(p, i) + p = i + } + this.buffer = '' + ++p + } + } + if (p < len && this.buffer === undefined) { res += str.substring(p) } + return res +} +Decoder.prototype.reset = function () { + this.buffer = undefined +} + +module.exports = Decoder diff --git a/dist/node_modules/@fastify/busboy/lib/utils/basename.js b/dist/node_modules/@fastify/busboy/lib/utils/basename.js new file mode 100644 index 0000000..db58819 --- /dev/null +++ b/dist/node_modules/@fastify/busboy/lib/utils/basename.js @@ -0,0 +1,14 @@ +'use strict' + +module.exports = function basename (path) { + if (typeof path !== 'string') { return '' } + for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var + switch (path.charCodeAt(i)) { + case 0x2F: // '/' + case 0x5C: // '\' + path = path.slice(i + 1) + return (path === '..' || path === '.' ? '' : path) + } + } + return (path === '..' || path === '.' ? 
'' : path) +} diff --git a/dist/node_modules/@fastify/busboy/lib/utils/decodeText.js b/dist/node_modules/@fastify/busboy/lib/utils/decodeText.js new file mode 100644 index 0000000..eac7d35 --- /dev/null +++ b/dist/node_modules/@fastify/busboy/lib/utils/decodeText.js @@ -0,0 +1,114 @@ +'use strict' + +// Node has always utf-8 +const utf8Decoder = new TextDecoder('utf-8') +const textDecoders = new Map([ + ['utf-8', utf8Decoder], + ['utf8', utf8Decoder] +]) + +function getDecoder (charset) { + let lc + while (true) { + switch (charset) { + case 'utf-8': + case 'utf8': + return decoders.utf8 + case 'latin1': + case 'ascii': // TODO: Make these a separate, strict decoder? + case 'us-ascii': + case 'iso-8859-1': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'windows-1252': + case 'iso_8859-1:1987': + case 'cp1252': + case 'x-cp1252': + return decoders.latin1 + case 'utf16le': + case 'utf-16le': + case 'ucs2': + case 'ucs-2': + return decoders.utf16le + case 'base64': + return decoders.base64 + default: + if (lc === undefined) { + lc = true + charset = charset.toLowerCase() + continue + } + return decoders.other.bind(charset) + } + } +} + +const decoders = { + utf8: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.utf8Slice(0, data.length) + }, + + latin1: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + return data + } + return data.latin1Slice(0, data.length) + }, + + utf16le: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.ucs2Slice(0, data.length) + }, + + base64: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.base64Slice(0, data.length) + }, + + other: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + + if (textDecoders.has(this.toString())) { + try { + return textDecoders.get(this).decode(data) + } catch {} + } + return typeof data === 'string' + ? 
data + : data.toString() + } +} + +function decodeText (text, sourceEncoding, destEncoding) { + if (text) { + return getDecoder(destEncoding)(text, sourceEncoding) + } + return text +} + +module.exports = decodeText diff --git a/dist/node_modules/@fastify/busboy/lib/utils/getLimit.js b/dist/node_modules/@fastify/busboy/lib/utils/getLimit.js new file mode 100644 index 0000000..cb64fd6 --- /dev/null +++ b/dist/node_modules/@fastify/busboy/lib/utils/getLimit.js @@ -0,0 +1,16 @@ +'use strict' + +module.exports = function getLimit (limits, name, defaultLimit) { + if ( + !limits || + limits[name] === undefined || + limits[name] === null + ) { return defaultLimit } + + if ( + typeof limits[name] !== 'number' || + isNaN(limits[name]) + ) { throw new TypeError('Limit ' + name + ' is not a valid number') } + + return limits[name] +} diff --git a/dist/node_modules/@fastify/busboy/lib/utils/parseParams.js b/dist/node_modules/@fastify/busboy/lib/utils/parseParams.js new file mode 100644 index 0000000..1698e62 --- /dev/null +++ b/dist/node_modules/@fastify/busboy/lib/utils/parseParams.js @@ -0,0 +1,196 @@ +/* eslint-disable object-property-newline */ +'use strict' + +const decodeText = require('./decodeText') + +const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g + +const EncodedLookup = { + '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04', + '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09', + '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c', + '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e', + '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12', + '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17', + '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b', + '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d', + '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20', + '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25', + '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a', + '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c', + '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': '\x2f', + '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33', + '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38', + '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b', + '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e', + '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41', + '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46', + '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a', + '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d', + '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f', + '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54', + '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59', + '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c', + '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e', + '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62', + '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67', + '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b', + '%6B': '\x6b', '%6c': 
'\x6c', '%6C': '\x6c', '%6d': '\x6d', '%6D': '\x6d', + '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70', + '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75', + '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a', + '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c', + '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f', + '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83', + '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88', + '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b', + '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e', + '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91', + '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96', + '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a', + '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d', + '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f', + '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2', + '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4', + '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7', + '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9', + '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab', + '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac', + '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad', + '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae', + '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0', + '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2', + '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5', + '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7', + '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba', + '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb', + '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc', + '%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd', + '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf', + '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0', + '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3', + '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5', + '%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8', + '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca', + '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb', + '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc', + '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce', + '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf', + '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1', + '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3', + '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6', + '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8', + '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda', + '%DA': '\xda', '%db': '\xdb', 
'%Db': '\xdb', '%dB': '\xdb', '%DB': '\xdb', + '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd', + '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde', + '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf', + '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1', + '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4', + '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6', + '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9', + '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea', + '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec', + '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed', + '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee', + '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef', + '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2', + '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4', + '%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7', + '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9', + '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb', + '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc', + '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd', + '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe', + '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff' +} + +function encodedReplacer (match) { + return EncodedLookup[match] +} + +const STATE_KEY = 0 +const STATE_VALUE = 1 +const STATE_CHARSET = 2 +const STATE_LANG = 3 + +function parseParams (str) { + const res = [] + let state = STATE_KEY + let charset = '' + let inquote = false + let escaping = false + let p = 0 + let tmp = '' + const len = str.length + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + const char = str[i] + if (char === '\\' && inquote) { + if (escaping) { escaping = false } else { + escaping = true + continue + } + } else if (char === '"') { + if (!escaping) { + if (inquote) { + inquote = false + state = STATE_KEY + } else { inquote = true } + continue + } else { escaping = false } + } else { + if (escaping && inquote) { tmp += '\\' } + escaping = false + if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") { + if (state === STATE_CHARSET) { + state = STATE_LANG + charset = tmp.substring(1) + } else { state = STATE_VALUE } + tmp = '' + continue + } else if (state === STATE_KEY && + (char === '*' || char === '=') && + res.length) { + state = char === '*' + ? 
STATE_CHARSET + : STATE_VALUE + res[p] = [tmp, undefined] + tmp = '' + continue + } else if (!inquote && char === ';') { + state = STATE_KEY + if (charset) { + if (tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } + charset = '' + } else if (tmp.length) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } + tmp = '' + ++p + continue + } else if (!inquote && (char === ' ' || char === '\t')) { continue } + } + tmp += char + } + if (charset && tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } else if (tmp) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + + if (res[p] === undefined) { + if (tmp) { res[p] = tmp } + } else { res[p][1] = tmp } + + return res +} + +module.exports = parseParams diff --git a/dist/node_modules/@fastify/busboy/package.json b/dist/node_modules/@fastify/busboy/package.json new file mode 100644 index 0000000..83693ac --- /dev/null +++ b/dist/node_modules/@fastify/busboy/package.json @@ -0,0 +1,86 @@ +{ + "name": "@fastify/busboy", + "version": "2.1.1", + "private": false, + "author": "Brian White ", + "contributors": [ + { + "name": "Igor Savin", + "email": "kibertoad@gmail.com", + "url": "https://github.com/kibertoad" + }, + { + "name": "Aras Abbasi", + "email": "aras.abbasi@gmail.com", + "url": "https://github.com/uzlopak" + } + ], + "description": "A streaming parser for HTML form data for node.js", + "main": "lib/main", + "type": "commonjs", + "types": "lib/main.d.ts", + "scripts": { + "bench:busboy": "cd benchmarks && npm install && npm run benchmark-fastify", + "bench:dicer": "node bench/dicer/dicer-bench-multipart-parser.js", + "coveralls": "nyc report --reporter=lcov", + "lint": "npm run lint:standard", + "lint:everything": "npm run lint && npm run test:types", + "lint:fix": "standard --fix", + "lint:standard": "standard --verbose | snazzy", + "test:mocha": "tap", + "test:types": "tsd", + "test:coverage": "nyc npm run test", + "test": "npm run test:mocha" + }, + "engines": { + "node": ">=14" + }, + "devDependencies": { + "@types/node": "^20.1.0", + "busboy": "^1.0.0", + "photofinish": "^1.8.0", + "snazzy": "^9.0.0", + "standard": "^17.0.0", + "tap": "^16.3.8", + "tinybench": "^2.5.1", + "tsd": "^0.30.0", + "typescript": "^5.0.2" + }, + "keywords": [ + "uploads", + "forms", + "multipart", + "form-data" + ], + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/fastify/busboy.git" + }, + "tsd": { + "directory": "test/types", + "compilerOptions": { + "esModuleInterop": false, + "module": "commonjs", + "target": "ES2017" + } + }, + "standard": { + "globals": [ + "describe", + "it" + ], + "ignore": [ + "bench" + ] + }, + "files": [ + "README.md", + "LICENSE", + "lib/*", + "deps/encoding/*", + "deps/dicer/lib", + "deps/streamsearch/", + "deps/dicer/LICENSE" + ] +} diff --git a/dist/node_modules/@octokit/app/LICENSE b/dist/node_modules/@octokit/app/LICENSE new file mode 100644 index 0000000..af5366d --- /dev/null +++ b/dist/node_modules/@octokit/app/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to 
permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/dist/node_modules/@octokit/app/README.md b/dist/node_modules/@octokit/app/README.md new file mode 100644 index 0000000..565168a --- /dev/null +++ b/dist/node_modules/@octokit/app/README.md @@ -0,0 +1,431 @@ +# app.js + +> GitHub App toolset for Node.js + +[![@latest](https://img.shields.io/npm/v/@octokit/app.svg)](https://www.npmjs.com/package/@octokit/app) +[![Build Status](https://github.com/octokit/app.js/workflows/Test/badge.svg)](https://github.com/octokit/app.js/actions?workflow=Test) + + + +- [Usage](#usage) +- [`App.defaults(options)`](#appdefaultsoptions) +- [Constructor](#constructor) +- [API](#api) + - [`app.octokit`](#appoctokit) + - [`app.log`](#applog) + - [`app.getInstallationOctokit`](#appgetinstallationoctokit) + - [`app.eachInstallation`](#appeachinstallation) + - [`app.eachRepository`](#appeachrepository) + - [`app.webhooks`](#appwebhooks) + - [`app.oauth`](#appoauth) +- [Middlewares](#middlewares) + - [`createNodeMiddleware(app, options)`](#createnodemiddlewareapp-options) +- [Contributing](#contributing) +- [License](#license) + + + +## Usage + + + + + + +
+ +Browsers + + + +`@octokit/app` is not meant for browser usage. + +
+ +Node + + + +Install with `npm install @octokit/app` + +```js +const { App, createNodeMiddleware } = require("@octokit/app"); +``` + +
+ +```js +const app = new App({ + appId: 123, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + oauth: { + clientId: "0123", + clientSecret: "0123secret", + }, + webhooks: { + secret: "secret", + }, +}); + +const { data } = await app.octokit.request("/app"); +console.log("authenticated as %s", data.name); +for await (const { installation } of app.eachInstallation.iterator()) { + for await (const { octokit, repository } of app.eachRepository.iterator({ + installationId: installation.id, + })) { + await octokit.request("POST /repos/{owner}/{repo}/dispatches", { + owner: repository.owner.login, + repo: repository.name, + event_type: "my_event", + }); + } +} + +app.webhooks.on("issues.opened", async ({ octokit, payload }) => { + await octokit.request( + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments", + { + owner: payload.repository.owner.login, + repo: payload.repository.name, + issue_number: payload.issue.number, + body: "Hello World!", + } + ); +}); + +app.oauth.on("token", async ({ token, octokit }) => { + const { data } = await octokit.request("GET /user"); + console.log(`Token retrieved for ${data.login}`); +}); + +require("http").createServer(createNodeMiddleware(app)).listen(3000); +// can now receive requests at /api/github/* +``` + +## `App.defaults(options)` + +Create a new `App` with custom defaults for the [constructor options](#constructor-options) + +```js +const MyApp = App.defaults({ + Octokit: MyOctokit, +}); +const app = new MyApp({ clientId, clientSecret }); +// app.octokit is now an instance of MyOctokit +``` + +## Constructor + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ appId + + number + + Required. Find the App ID on the app’s about page in settings. +
+ privateKey + + string + + Required. Content of the *.pem file you downloaded from the app’s about page. You can generate a new private key if needed. +
+ Octokit + + Constructor + + +You can pass in your own Octokit constructor with custom defaults and plugins. Note that `authStrategy` will always be set to `createAppAuth` from [`@octokit/auth-app`](https://github.com/octokit/auth-app.js). + +For usage with GitHub Enterprise Server, set `baseUrl` to the hostname + `/api/v3`. Example: + +```js +const { Octokit } = require("@octokit/core"); +new App({ + appId: 123, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + oauth: { + clientId: 123, + clientSecret: "secret", + }, + webhooks: { + secret: "secret", + }, + Octokit: Octokit.defaults({ + baseUrl: "https://ghe.my-company.com/api/v3", + }), +}); +``` + +Defaults to [`@octokit/core`](https://github.com/octokit/core.js). + +
+ log + + object + + Used for internal logging. Defaults to console. +
+ webhooks.secret + + string + + Required. Secret as configured in the GitHub App's settings. +
+ webhooks.transform + + function + + Only relevant for `app.webhooks.on`. Transform emitted event before calling handlers. Can be asynchronous. +
+ oauth.clientId + + string + + Find the OAuth Client ID on the app’s about page in settings. +
+ oauth.clientSecret + + string + + Find the OAuth Client Secret on the app’s about page in settings. +
+ oauth.allowSignup + + boolean + + Sets the default value for app.oauth.getAuthorizationUrl(options). +
+ +## API + +### `app.octokit` + +Octokit instance. Uses the [`Octokit` constructor option](#constructor-option-octokit) if passed. + +### `app.log` + +See https://github.com/octokit/core.js#logging. Customize using the [`log` constructor option](#constructor-option-log). + +### `app.getInstallationOctokit` + +```js +const octokit = await app.getInstallationOctokit(123); +``` + +### `app.eachInstallation` + +```js +for await (const { octokit, installation } of app.eachInstallation.iterator()) { /* ... */ } +await app.eachInstallation(({ octokit, installation }) => /* ... */) +``` + +### `app.eachRepository` + +```js +for await (const { octokit, repository } of app.eachRepository.iterator()) { /* ... */ } +await app.eachRepository(({ octokit, repository }) => /* ... */) +``` + +Optionally pass installation ID to iterate through all repositories in one installation + +```js +for await (const { octokit, repository } of app.eachRepository.iterator({ installationId })) { /* ... */ } +await app.eachRepository({ installationId }, ({ octokit, repository }) => /* ... */) +``` + +### `app.webhooks` + +An [`@octokit/webhooks` instance](https://github.com/octokit/webhooks.js/#readme) + +### `app.oauth` + +An [`@octokit/oauth-app` instance](https://github.com/octokit/oauth-app.js/#readme) + +## Middlewares + +A middleware is a method or set of methods to handle requests for common environments. + +By default, all middlewares expose the following routes + +| Route | Route Description | +| -------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `POST /api/github/webhooks` | Endpoint to receive GitHub Webhook Event requests | +| `GET /api/github/oauth/login` | Redirects to GitHub's authorization endpoint. Accepts optional `?state` query parameter. | +| `GET /api/github/oauth/callback` | The client's redirect endpoint. This is where the `token` event gets triggered | +| `POST /api/github/oauth/token` | Exchange an authorization code for an OAuth Access token. If successful, the `token` event gets triggered. | +| `GET /api/github/oauth/token` | Check if token is valid. Must authenticate using token in `Authorization` header. Uses GitHub's [`POST /applications/{client_id}/token`](https://developer.github.com/v3/apps/oauth_applications/#check-a-token) endpoint | +| `PATCH /api/github/oauth/token` | Resets a token (invalidates current one, returns new token). Must authenticate using token in `Authorization` header. Uses GitHub's [`PATCH /applications/{client_id}/token`](https://developer.github.com/v3/apps/oauth_applications/#reset-a-token) endpoint. | +| `DELETE /api/github/oauth/token` | Invalidates current token, basically the equivalent of a logout. Must authenticate using token in `Authorization` header. | +| `DELETE /api/github/oauth/grant` | Revokes the user's grant, basically the equivalent of an uninstall. must authenticate using token in `Authorization` header. | + +### `createNodeMiddleware(app, options)` + +Middleware for Node's built in http server or [`express`](https://expressjs.com/). 
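In addition to the plain `http` server setup shown below, the middleware can be mounted in an Express app; requests outside the configured `pathPrefix` fall through to `next()`, so your other routes keep working. A minimal sketch, assuming `express` is installed alongside `@octokit/app` and using placeholder credentials:

```js
// Minimal Express sketch (assumption: `express` is installed; all credentials below are placeholders).
const express = require("express");
const { App, createNodeMiddleware } = require("@octokit/app");

const octokitApp = new App({
  appId: 123,
  privateKey: "-----BEGIN PRIVATE KEY-----\n...",
  oauth: { clientId: "0123", clientSecret: "0123secret" },
  webhooks: { secret: "secret" },
});

const server = express();

// Webhook and OAuth routes are served under the pathPrefix (default "/api/github");
// any other request is passed on to the remaining Express routes via next().
server.use(createNodeMiddleware(octokitApp, { pathPrefix: "/api/github" }));

server.get("/", (request, response) => response.send("ok"));

server.listen(3000);
```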
+ +```js +const { App, createNodeMiddleware } = require("@octokit/app"); +const app = new App({ + appId: 123, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + oauth: { + clientId: "0123", + clientSecret: "0123secret", + }, + webhooks: { + secret: "secret", + }, +}); + +const middleware = createNodeMiddleware(app); + +require("http").createServer(middleware).listen(3000); +// can now receive user authorization callbacks at /api/github/* +``` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ app + + App instance + + Required. +
+ options.pathPrefix + + string + + +All exposed paths will be prefixed with the provided prefix. Defaults to `"/api/github"` + +
+ options.log + + object + + + +Used for internal logging. Defaults to [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console) with `debug` and `info` doing nothing. + +
+ options.onUnhandledRequest + + function + + +Defaults to + +```js +function onUnhandledRequest(request, response) { + response.writeHead(404, { + "content-type": "application/json", + }); + response.end( + JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}`, + }) + ); +} +``` + +
+ +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/app/dist-node/index.js b/dist/node_modules/@octokit/app/dist-node/index.js new file mode 100644 index 0000000..8a3fc76 --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-node/index.js @@ -0,0 +1,337 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + App: () => App, + createNodeMiddleware: () => createNodeMiddleware +}); +module.exports = __toCommonJS(dist_src_exports); +var import_core5 = require("@octokit/core"); +var import_auth_app3 = require("@octokit/auth-app"); +var import_oauth_app2 = require("@octokit/oauth-app"); +var import_webhooks3 = require("@octokit/webhooks"); + +// pkg/dist-src/version.js +var VERSION = "13.1.8"; + +// pkg/dist-src/webhooks.js +var import_core = require("@octokit/core"); +var import_auth_app = require("@octokit/auth-app"); +var import_auth_unauthenticated = require("@octokit/auth-unauthenticated"); +var import_webhooks = require("@octokit/webhooks"); +function webhooks(appOctokit, options) { + return new import_webhooks.Webhooks({ + secret: options.secret, + transform: async (event) => { + if (!("installation" in event.payload) || typeof event.payload.installation !== "object") { + const octokit2 = new appOctokit.constructor({ + authStrategy: import_auth_unauthenticated.createUnauthenticatedAuth, + auth: { + reason: `"installation" key missing in webhook event payload` + } + }); + return { + ...event, + octokit: octokit2 + }; + } + const installationId = event.payload.installation.id; + const octokit = await appOctokit.auth({ + type: "installation", + installationId, + factory(auth) { + return new auth.octokit.constructor({ + ...auth.octokitOptions, + authStrategy: import_auth_app.createAppAuth, + ...{ + auth: { + ...auth, + installationId + } + } + }); + } + }); + octokit.hook.before("request", (options2) => { + options2.headers["x-github-delivery"] = event.id; + }); + return { + ...event, + octokit + }; + } + }); +} + +// pkg/dist-src/each-installation.js +var import_plugin_paginate_rest = require("@octokit/plugin-paginate-rest"); +var import_core3 = require("@octokit/core"); + +// pkg/dist-src/get-installation-octokit.js +var import_auth_app2 = require("@octokit/auth-app"); +var import_core2 = require("@octokit/core"); +async function getInstallationOctokit(app, installationId) { + return app.octokit.auth({ + type: "installation", + installationId, + factory(auth) { + const options = { + ...auth.octokitOptions, + authStrategy: import_auth_app2.createAppAuth, + ...{ auth: { ...auth, installationId } } + }; + return new auth.octokit.constructor(options); + } + }); +} + +// 
pkg/dist-src/each-installation.js +function eachInstallationFactory(app) { + return Object.assign(eachInstallation.bind(null, app), { + iterator: eachInstallationIterator.bind(null, app) + }); +} +async function eachInstallation(app, callback) { + const i = eachInstallationIterator(app)[Symbol.asyncIterator](); + let result = await i.next(); + while (!result.done) { + await callback(result.value); + result = await i.next(); + } +} +function eachInstallationIterator(app) { + return { + async *[Symbol.asyncIterator]() { + const iterator = import_plugin_paginate_rest.composePaginateRest.iterator( + app.octokit, + "GET /app/installations" + ); + for await (const { data: installations } of iterator) { + for (const installation of installations) { + const installationOctokit = await getInstallationOctokit( + app, + installation.id + ); + yield { octokit: installationOctokit, installation }; + } + } + } + }; +} + +// pkg/dist-src/each-repository.js +var import_plugin_paginate_rest2 = require("@octokit/plugin-paginate-rest"); +var import_core4 = require("@octokit/core"); +function eachRepositoryFactory(app) { + return Object.assign(eachRepository.bind(null, app), { + iterator: eachRepositoryIterator.bind(null, app) + }); +} +async function eachRepository(app, queryOrCallback, callback) { + const i = eachRepositoryIterator( + app, + callback ? queryOrCallback : void 0 + )[Symbol.asyncIterator](); + let result = await i.next(); + while (!result.done) { + if (callback) { + await callback(result.value); + } else { + await queryOrCallback(result.value); + } + result = await i.next(); + } +} +function singleInstallationIterator(app, installationId) { + return { + async *[Symbol.asyncIterator]() { + yield { + octokit: await app.getInstallationOctokit(installationId) + }; + } + }; +} +function eachRepositoryIterator(app, query) { + return { + async *[Symbol.asyncIterator]() { + const iterator = query ? 
singleInstallationIterator(app, query.installationId) : app.eachInstallation.iterator(); + for await (const { octokit } of iterator) { + const repositoriesIterator = import_plugin_paginate_rest2.composePaginateRest.iterator( + octokit, + "GET /installation/repositories" + ); + for await (const { data: repositories } of repositoriesIterator) { + for (const repository of repositories) { + yield { octokit, repository }; + } + } + } + } + }; +} + +// pkg/dist-src/middleware/node/index.js +var import_oauth_app = require("@octokit/oauth-app"); +var import_webhooks2 = require("@octokit/webhooks"); + +// pkg/dist-src/middleware/node/on-unhandled-request-default.js +function onUnhandledRequestDefault(request, response) { + response.writeHead(404, { + "content-type": "application/json" + }); + response.end( + JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}` + }) + ); +} + +// pkg/dist-src/middleware/node/index.js +function noop() { +} +function createNodeMiddleware(app, options = {}) { + const log = Object.assign( + { + debug: noop, + info: noop, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, + options.log + ); + const optionsWithDefaults = { + onUnhandledRequest: onUnhandledRequestDefault, + pathPrefix: "/api/github", + ...options, + log + }; + const webhooksMiddleware = (0, import_webhooks2.createNodeMiddleware)(app.webhooks, { + path: optionsWithDefaults.pathPrefix + "/webhooks", + log, + onUnhandledRequest: optionsWithDefaults.onUnhandledRequest + }); + const oauthMiddleware = (0, import_oauth_app.createNodeMiddleware)(app.oauth, { + pathPrefix: optionsWithDefaults.pathPrefix + "/oauth", + onUnhandledRequest: optionsWithDefaults.onUnhandledRequest + }); + return middleware.bind(null, optionsWithDefaults, { + webhooksMiddleware, + oauthMiddleware + }); +} +async function middleware(options, { webhooksMiddleware, oauthMiddleware }, request, response, next) { + const { pathname } = new URL(request.url, "http://localhost"); + if (pathname === `${options.pathPrefix}/webhooks`) { + return webhooksMiddleware(request, response, next); + } + if (pathname.startsWith(`${options.pathPrefix}/oauth/`)) { + return oauthMiddleware(request, response, next); + } + const isExpressMiddleware = typeof next === "function"; + if (isExpressMiddleware) { + return next(); + } + return options.onUnhandledRequest(request, response); +} + +// pkg/dist-src/index.js +var App = class { + static defaults(defaults) { + const AppWithDefaults = class extends this { + constructor(...args) { + super({ + ...defaults, + ...args[0] + }); + } + }; + return AppWithDefaults; + } + constructor(options) { + const Octokit5 = options.Octokit || import_core5.Octokit; + const authOptions = Object.assign( + { + appId: options.appId, + privateKey: options.privateKey + }, + options.oauth ? 
{ + clientId: options.oauth.clientId, + clientSecret: options.oauth.clientSecret + } : {} + ); + this.octokit = new Octokit5({ + authStrategy: import_auth_app3.createAppAuth, + auth: authOptions, + log: options.log + }); + this.log = Object.assign( + { + debug: () => { + }, + info: () => { + }, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, + options.log + ); + if (options.webhooks) { + this.webhooks = webhooks(this.octokit, options.webhooks); + } else { + Object.defineProperty(this, "webhooks", { + get() { + throw new Error("[@octokit/app] webhooks option not set"); + } + }); + } + if (options.oauth) { + this.oauth = new import_oauth_app2.OAuthApp({ + ...options.oauth, + clientType: "github-app", + Octokit: Octokit5 + }); + } else { + Object.defineProperty(this, "oauth", { + get() { + throw new Error( + "[@octokit/app] oauth.clientId / oauth.clientSecret options are not set" + ); + } + }); + } + this.getInstallationOctokit = getInstallationOctokit.bind( + null, + this + ); + this.eachInstallation = eachInstallationFactory( + this + ); + this.eachRepository = eachRepositoryFactory( + this + ); + } +}; +App.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + App, + createNodeMiddleware +}); diff --git a/dist/node_modules/@octokit/app/dist-node/index.js.map b/dist/node_modules/@octokit/app/dist-node/index.js.map new file mode 100644 index 0000000..074f524 --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js", "../dist-src/webhooks.js", "../dist-src/each-installation.js", "../dist-src/get-installation-octokit.js", "../dist-src/each-repository.js", "../dist-src/middleware/node/index.js", "../dist-src/middleware/node/on-unhandled-request-default.js"], + "sourcesContent": ["import { Octokit as OctokitCore } from \"@octokit/core\";\nimport { createAppAuth } from \"@octokit/auth-app\";\nimport { OAuthApp } from \"@octokit/oauth-app\";\nimport { Webhooks } from \"@octokit/webhooks\";\nimport { VERSION } from \"./version\";\nimport { webhooks } from \"./webhooks\";\nimport { eachInstallationFactory } from \"./each-installation\";\nimport { eachRepositoryFactory } from \"./each-repository\";\nimport { getInstallationOctokit } from \"./get-installation-octokit\";\nclass App {\n static defaults(defaults) {\n const AppWithDefaults = class extends this {\n constructor(...args) {\n super({\n ...defaults,\n ...args[0]\n });\n }\n };\n return AppWithDefaults;\n }\n constructor(options) {\n const Octokit = options.Octokit || OctokitCore;\n const authOptions = Object.assign(\n {\n appId: options.appId,\n privateKey: options.privateKey\n },\n options.oauth ? 
{\n clientId: options.oauth.clientId,\n clientSecret: options.oauth.clientSecret\n } : {}\n );\n this.octokit = new Octokit({\n authStrategy: createAppAuth,\n auth: authOptions,\n log: options.log\n });\n this.log = Object.assign(\n {\n debug: () => {\n },\n info: () => {\n },\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n },\n options.log\n );\n if (options.webhooks) {\n this.webhooks = webhooks(this.octokit, options.webhooks);\n } else {\n Object.defineProperty(this, \"webhooks\", {\n get() {\n throw new Error(\"[@octokit/app] webhooks option not set\");\n }\n });\n }\n if (options.oauth) {\n this.oauth = new OAuthApp({\n ...options.oauth,\n clientType: \"github-app\",\n Octokit\n });\n } else {\n Object.defineProperty(this, \"oauth\", {\n get() {\n throw new Error(\n \"[@octokit/app] oauth.clientId / oauth.clientSecret options are not set\"\n );\n }\n });\n }\n this.getInstallationOctokit = getInstallationOctokit.bind(\n null,\n this\n );\n this.eachInstallation = eachInstallationFactory(\n this\n );\n this.eachRepository = eachRepositoryFactory(\n this\n );\n }\n}\nApp.VERSION = VERSION;\nimport { createNodeMiddleware } from \"./middleware/node/index\";\nexport {\n App,\n createNodeMiddleware\n};\n", "const VERSION = \"13.1.8\";\nexport {\n VERSION\n};\n", "import { Octokit } from \"@octokit/core\";\nimport { createAppAuth } from \"@octokit/auth-app\";\nimport { createUnauthenticatedAuth } from \"@octokit/auth-unauthenticated\";\nimport { Webhooks } from \"@octokit/webhooks\";\nfunction webhooks(appOctokit, options) {\n return new Webhooks({\n secret: options.secret,\n transform: async (event) => {\n if (!(\"installation\" in event.payload) || typeof event.payload.installation !== \"object\") {\n const octokit2 = new appOctokit.constructor({\n authStrategy: createUnauthenticatedAuth,\n auth: {\n reason: `\"installation\" key missing in webhook event payload`\n }\n });\n return {\n ...event,\n octokit: octokit2\n };\n }\n const installationId = event.payload.installation.id;\n const octokit = await appOctokit.auth({\n type: \"installation\",\n installationId,\n factory(auth) {\n return new auth.octokit.constructor({\n ...auth.octokitOptions,\n authStrategy: createAppAuth,\n ...{\n auth: {\n ...auth,\n installationId\n }\n }\n });\n }\n });\n octokit.hook.before(\"request\", (options2) => {\n options2.headers[\"x-github-delivery\"] = event.id;\n });\n return {\n ...event,\n octokit\n };\n }\n });\n}\nexport {\n webhooks\n};\n", "import { composePaginateRest } from \"@octokit/plugin-paginate-rest\";\nimport { Octokit } from \"@octokit/core\";\nimport { App } from \"./index\";\nimport { getInstallationOctokit } from \"./get-installation-octokit\";\nfunction eachInstallationFactory(app) {\n return Object.assign(eachInstallation.bind(null, app), {\n iterator: eachInstallationIterator.bind(null, app)\n });\n}\nasync function eachInstallation(app, callback) {\n const i = eachInstallationIterator(app)[Symbol.asyncIterator]();\n let result = await i.next();\n while (!result.done) {\n await callback(result.value);\n result = await i.next();\n }\n}\nfunction eachInstallationIterator(app) {\n return {\n async *[Symbol.asyncIterator]() {\n const iterator = composePaginateRest.iterator(\n app.octokit,\n \"GET /app/installations\"\n );\n for await (const { data: installations } of iterator) {\n for (const installation of installations) {\n const installationOctokit = await getInstallationOctokit(\n app,\n installation.id\n );\n yield { octokit: installationOctokit, 
installation };\n }\n }\n }\n };\n}\nexport {\n eachInstallation,\n eachInstallationFactory,\n eachInstallationIterator\n};\n", "import { createAppAuth } from \"@octokit/auth-app\";\nimport { Octokit } from \"@octokit/core\";\nimport { App } from \"./index\";\nasync function getInstallationOctokit(app, installationId) {\n return app.octokit.auth({\n type: \"installation\",\n installationId,\n factory(auth) {\n const options = {\n ...auth.octokitOptions,\n authStrategy: createAppAuth,\n ...{ auth: { ...auth, installationId } }\n };\n return new auth.octokit.constructor(options);\n }\n });\n}\nexport {\n getInstallationOctokit\n};\n", "import { composePaginateRest } from \"@octokit/plugin-paginate-rest\";\nimport { Octokit } from \"@octokit/core\";\nimport { App } from \"./index\";\nfunction eachRepositoryFactory(app) {\n return Object.assign(eachRepository.bind(null, app), {\n iterator: eachRepositoryIterator.bind(null, app)\n });\n}\nasync function eachRepository(app, queryOrCallback, callback) {\n const i = eachRepositoryIterator(\n app,\n callback ? queryOrCallback : void 0\n )[Symbol.asyncIterator]();\n let result = await i.next();\n while (!result.done) {\n if (callback) {\n await callback(result.value);\n } else {\n await queryOrCallback(result.value);\n }\n result = await i.next();\n }\n}\nfunction singleInstallationIterator(app, installationId) {\n return {\n async *[Symbol.asyncIterator]() {\n yield {\n octokit: await app.getInstallationOctokit(installationId)\n };\n }\n };\n}\nfunction eachRepositoryIterator(app, query) {\n return {\n async *[Symbol.asyncIterator]() {\n const iterator = query ? singleInstallationIterator(app, query.installationId) : app.eachInstallation.iterator();\n for await (const { octokit } of iterator) {\n const repositoriesIterator = composePaginateRest.iterator(\n octokit,\n \"GET /installation/repositories\"\n );\n for await (const { data: repositories } of repositoriesIterator) {\n for (const repository of repositories) {\n yield { octokit, repository };\n }\n }\n }\n }\n };\n}\nexport {\n eachRepository,\n eachRepositoryFactory,\n eachRepositoryIterator\n};\n", "import { createNodeMiddleware as oauthNodeMiddleware } from \"@octokit/oauth-app\";\nimport { createNodeMiddleware as webhooksNodeMiddleware } from \"@octokit/webhooks\";\nimport { App } from \"../../index\";\nimport { onUnhandledRequestDefault } from \"./on-unhandled-request-default\";\nfunction noop() {\n}\nfunction createNodeMiddleware(app, options = {}) {\n const log = Object.assign(\n {\n debug: noop,\n info: noop,\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n },\n options.log\n );\n const optionsWithDefaults = {\n onUnhandledRequest: onUnhandledRequestDefault,\n pathPrefix: \"/api/github\",\n ...options,\n log\n };\n const webhooksMiddleware = webhooksNodeMiddleware(app.webhooks, {\n path: optionsWithDefaults.pathPrefix + \"/webhooks\",\n log,\n onUnhandledRequest: optionsWithDefaults.onUnhandledRequest\n });\n const oauthMiddleware = oauthNodeMiddleware(app.oauth, {\n pathPrefix: optionsWithDefaults.pathPrefix + \"/oauth\",\n onUnhandledRequest: optionsWithDefaults.onUnhandledRequest\n });\n return middleware.bind(null, optionsWithDefaults, {\n webhooksMiddleware,\n oauthMiddleware\n });\n}\nasync function middleware(options, { webhooksMiddleware, oauthMiddleware }, request, response, next) {\n const { pathname } = new URL(request.url, \"http://localhost\");\n if (pathname === `${options.pathPrefix}/webhooks`) {\n return webhooksMiddleware(request, response, 
next);\n }\n if (pathname.startsWith(`${options.pathPrefix}/oauth/`)) {\n return oauthMiddleware(request, response, next);\n }\n const isExpressMiddleware = typeof next === \"function\";\n if (isExpressMiddleware) {\n return next();\n }\n return options.onUnhandledRequest(request, response);\n}\nexport {\n createNodeMiddleware,\n middleware\n};\n", "function onUnhandledRequestDefault(request, response) {\n response.writeHead(404, {\n \"content-type\": \"application/json\"\n });\n response.end(\n JSON.stringify({\n error: `Unknown route: ${request.method} ${request.url}`\n })\n );\n}\nexport {\n onUnhandledRequestDefault\n};\n"], + "mappings": ";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAAAA,eAAuC;AACvC,IAAAC,mBAA8B;AAC9B,IAAAC,oBAAyB;AACzB,IAAAC,mBAAyB;;;ACHzB,IAAM,UAAU;;;ACAhB,kBAAwB;AACxB,sBAA8B;AAC9B,kCAA0C;AAC1C,sBAAyB;AACzB,SAAS,SAAS,YAAY,SAAS;AACrC,SAAO,IAAI,yBAAS;AAAA,IAClB,QAAQ,QAAQ;AAAA,IAChB,WAAW,OAAO,UAAU;AAC1B,UAAI,EAAE,kBAAkB,MAAM,YAAY,OAAO,MAAM,QAAQ,iBAAiB,UAAU;AACxF,cAAM,WAAW,IAAI,WAAW,YAAY;AAAA,UAC1C,cAAc;AAAA,UACd,MAAM;AAAA,YACJ,QAAQ;AAAA,UACV;AAAA,QACF,CAAC;AACD,eAAO;AAAA,UACL,GAAG;AAAA,UACH,SAAS;AAAA,QACX;AAAA,MACF;AACA,YAAM,iBAAiB,MAAM,QAAQ,aAAa;AAClD,YAAM,UAAU,MAAM,WAAW,KAAK;AAAA,QACpC,MAAM;AAAA,QACN;AAAA,QACA,QAAQ,MAAM;AACZ,iBAAO,IAAI,KAAK,QAAQ,YAAY;AAAA,YAClC,GAAG,KAAK;AAAA,YACR,cAAc;AAAA,YACd,GAAG;AAAA,cACD,MAAM;AAAA,gBACJ,GAAG;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AACD,cAAQ,KAAK,OAAO,WAAW,CAAC,aAAa;AAC3C,iBAAS,QAAQ,mBAAmB,IAAI,MAAM;AAAA,MAChD,CAAC;AACD,aAAO;AAAA,QACL,GAAG;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AC9CA,kCAAoC;AACpC,IAAAC,eAAwB;;;ACDxB,IAAAC,mBAA8B;AAC9B,IAAAC,eAAwB;AAExB,eAAe,uBAAuB,KAAK,gBAAgB;AACzD,SAAO,IAAI,QAAQ,KAAK;AAAA,IACtB,MAAM;AAAA,IACN;AAAA,IACA,QAAQ,MAAM;AACZ,YAAM,UAAU;AAAA,QACd,GAAG,KAAK;AAAA,QACR,cAAc;AAAA,QACd,GAAG,EAAE,MAAM,EAAE,GAAG,MAAM,eAAe,EAAE;AAAA,MACzC;AACA,aAAO,IAAI,KAAK,QAAQ,YAAY,OAAO;AAAA,IAC7C;AAAA,EACF,CAAC;AACH;;;ADZA,SAAS,wBAAwB,KAAK;AACpC,SAAO,OAAO,OAAO,iBAAiB,KAAK,MAAM,GAAG,GAAG;AAAA,IACrD,UAAU,yBAAyB,KAAK,MAAM,GAAG;AAAA,EACnD,CAAC;AACH;AACA,eAAe,iBAAiB,KAAK,UAAU;AAC7C,QAAM,IAAI,yBAAyB,GAAG,EAAE,OAAO,aAAa,EAAE;AAC9D,MAAI,SAAS,MAAM,EAAE,KAAK;AAC1B,SAAO,CAAC,OAAO,MAAM;AACnB,UAAM,SAAS,OAAO,KAAK;AAC3B,aAAS,MAAM,EAAE,KAAK;AAAA,EACxB;AACF;AACA,SAAS,yBAAyB,KAAK;AACrC,SAAO;AAAA,IACL,QAAQ,OAAO,aAAa,IAAI;AAC9B,YAAM,WAAW,gDAAoB;AAAA,QACnC,IAAI;AAAA,QACJ;AAAA,MACF;AACA,uBAAiB,EAAE,MAAM,cAAc,KAAK,UAAU;AACpD,mBAAW,gBAAgB,eAAe;AACxC,gBAAM,sBAAsB,MAAM;AAAA,YAChC;AAAA,YACA,aAAa;AAAA,UACf;AACA,gBAAM,EAAE,SAAS,qBAAqB,aAAa;AAAA,QACrD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;AEnCA,IAAAC,+BAAoC;AACpC,IAAAC,eAAwB;AAExB,SAAS,sBAAsB,KAAK;AAClC,SAAO,OAAO,OAAO,eAAe,KAAK,MAAM,GAAG,GAAG;AAAA,IACnD,UAAU,uBAAuB,KAAK,MAAM,GAAG;AAAA,EACjD,CAAC;AACH;AACA,eAAe,eAAe,KAAK,iBAAiB,UAAU;AAC5D,QAAM,IAAI;AAAA,IACR;AAAA,IACA,WAAW,kBAAkB;AAAA,EAC/B,EAAE,OAAO,aAAa,EAAE;AACxB,MAAI,SAAS,MAAM,EAAE,KAAK;AAC1B,SAAO,CAAC,OAAO,MAAM;AACnB,QAAI,UAAU;AACZ,YAAM,SAAS,OAAO,KAAK;AAAA,IAC7B,OAAO;AACL,YAAM,gBAAgB,OAAO,KAAK;AAAA,IACpC;AACA,aAAS,MAAM,EAAE,KAAK;AAAA,EACxB;AACF;AACA,SAAS,2BAA2B,KAAK,gBAAgB;AACvD,SAAO;AAAA,IACL,QAAQ,OAAO,aAAa,IAAI;AAC9B,YAAM;AAAA,QACJ,SAAS,MAAM,IAAI,uBAAuB,cAAc;AAAA,MAC1D;AAAA,IACF;AAAA,EACF;AACF;AACA,SAAS,uBAAuB,KAAK,OAAO;AAC1C,SAAO;AAAA,IACL,QAAQ,OAAO,aAAa,IAAI;AAC9B,YAAM,WAAW,QAAQ,2BAA2B,KAAK,MAAM,cAAc,IAAI,IAAI,iBAAiB,SAAS;AAC/G,uBAAiB,EAAE,QAAQ,KAAK,UAAU;AACxC,cAAM,uBAAuB,iDAAoB;AAAA,UAC/C;AAAA,UACA;AAAA,QACF;AACA,yBAAiB,EAAE,MAAM,aAAa,KAAK,sBAAsB;AAC/D,qBAAW,cAAc,cAAc;AACrC,kBAAM,EAAE,SAAS,WAA
W;AAAA,UAC9B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;ACjDA,uBAA4D;AAC5D,IAAAC,mBAA+D;;;ACD/D,SAAS,0BAA0B,SAAS,UAAU;AACpD,WAAS,UAAU,KAAK;AAAA,IACtB,gBAAgB;AAAA,EAClB,CAAC;AACD,WAAS;AAAA,IACP,KAAK,UAAU;AAAA,MACb,OAAO,kBAAkB,QAAQ,UAAU,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AACF;;;ADLA,SAAS,OAAO;AAChB;AACA,SAAS,qBAAqB,KAAK,UAAU,CAAC,GAAG;AAC/C,QAAM,MAAM,OAAO;AAAA,IACjB;AAAA,MACE,OAAO;AAAA,MACP,MAAM;AAAA,MACN,MAAM,QAAQ,KAAK,KAAK,OAAO;AAAA,MAC/B,OAAO,QAAQ,MAAM,KAAK,OAAO;AAAA,IACnC;AAAA,IACA,QAAQ;AAAA,EACV;AACA,QAAM,sBAAsB;AAAA,IAC1B,oBAAoB;AAAA,IACpB,YAAY;AAAA,IACZ,GAAG;AAAA,IACH;AAAA,EACF;AACA,QAAM,yBAAqB,iBAAAC,sBAAuB,IAAI,UAAU;AAAA,IAC9D,MAAM,oBAAoB,aAAa;AAAA,IACvC;AAAA,IACA,oBAAoB,oBAAoB;AAAA,EAC1C,CAAC;AACD,QAAM,sBAAkB,iBAAAC,sBAAoB,IAAI,OAAO;AAAA,IACrD,YAAY,oBAAoB,aAAa;AAAA,IAC7C,oBAAoB,oBAAoB;AAAA,EAC1C,CAAC;AACD,SAAO,WAAW,KAAK,MAAM,qBAAqB;AAAA,IAChD;AAAA,IACA;AAAA,EACF,CAAC;AACH;AACA,eAAe,WAAW,SAAS,EAAE,oBAAoB,gBAAgB,GAAG,SAAS,UAAU,MAAM;AACnG,QAAM,EAAE,SAAS,IAAI,IAAI,IAAI,QAAQ,KAAK,kBAAkB;AAC5D,MAAI,aAAa,GAAG,QAAQ,uBAAuB;AACjD,WAAO,mBAAmB,SAAS,UAAU,IAAI;AAAA,EACnD;AACA,MAAI,SAAS,WAAW,GAAG,QAAQ,mBAAmB,GAAG;AACvD,WAAO,gBAAgB,SAAS,UAAU,IAAI;AAAA,EAChD;AACA,QAAM,sBAAsB,OAAO,SAAS;AAC5C,MAAI,qBAAqB;AACvB,WAAO,KAAK;AAAA,EACd;AACA,SAAO,QAAQ,mBAAmB,SAAS,QAAQ;AACrD;;;ANxCA,IAAM,MAAN,MAAU;AAAA,EACR,OAAO,SAAS,UAAU;AACxB,UAAM,kBAAkB,cAAc,KAAK;AAAA,MACzC,eAAe,MAAM;AACnB,cAAM;AAAA,UACJ,GAAG;AAAA,UACH,GAAG,KAAK,CAAC;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EACA,YAAY,SAAS;AACnB,UAAMC,WAAU,QAAQ,WAAW,aAAAC;AACnC,UAAM,cAAc,OAAO;AAAA,MACzB;AAAA,QACE,OAAO,QAAQ;AAAA,QACf,YAAY,QAAQ;AAAA,MACtB;AAAA,MACA,QAAQ,QAAQ;AAAA,QACd,UAAU,QAAQ,MAAM;AAAA,QACxB,cAAc,QAAQ,MAAM;AAAA,MAC9B,IAAI,CAAC;AAAA,IACP;AACA,SAAK,UAAU,IAAID,SAAQ;AAAA,MACzB,cAAc;AAAA,MACd,MAAM;AAAA,MACN,KAAK,QAAQ;AAAA,IACf,CAAC;AACD,SAAK,MAAM,OAAO;AAAA,MAChB;AAAA,QACE,OAAO,MAAM;AAAA,QACb;AAAA,QACA,MAAM,MAAM;AAAA,QACZ;AAAA,QACA,MAAM,QAAQ,KAAK,KAAK,OAAO;AAAA,QAC/B,OAAO,QAAQ,MAAM,KAAK,OAAO;AAAA,MACnC;AAAA,MACA,QAAQ;AAAA,IACV;AACA,QAAI,QAAQ,UAAU;AACpB,WAAK,WAAW,SAAS,KAAK,SAAS,QAAQ,QAAQ;AAAA,IACzD,OAAO;AACL,aAAO,eAAe,MAAM,YAAY;AAAA,QACtC,MAAM;AACJ,gBAAM,IAAI,MAAM,wCAAwC;AAAA,QAC1D;AAAA,MACF,CAAC;AAAA,IACH;AACA,QAAI,QAAQ,OAAO;AACjB,WAAK,QAAQ,IAAI,2BAAS;AAAA,QACxB,GAAG,QAAQ;AAAA,QACX,YAAY;AAAA,QACZ,SAAAA;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,aAAO,eAAe,MAAM,SAAS;AAAA,QACnC,MAAM;AACJ,gBAAM,IAAI;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AACA,SAAK,yBAAyB,uBAAuB;AAAA,MACnD;AAAA,MACA;AAAA,IACF;AACA,SAAK,mBAAmB;AAAA,MACtB;AAAA,IACF;AACA,SAAK,iBAAiB;AAAA,MACpB;AAAA,IACF;AAAA,EACF;AACF;AACA,IAAI,UAAU;", + "names": ["import_core", "import_auth_app", "import_oauth_app", "import_webhooks", "import_core", "import_auth_app", "import_core", "import_plugin_paginate_rest", "import_core", "import_webhooks", "webhooksNodeMiddleware", "oauthNodeMiddleware", "Octokit", "OctokitCore"] +} diff --git a/dist/node_modules/@octokit/app/dist-src/each-installation.js b/dist/node_modules/@octokit/app/dist-src/each-installation.js new file mode 100644 index 0000000..c4c6b6c --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-src/each-installation.js @@ -0,0 +1,41 @@ +import { composePaginateRest } from "@octokit/plugin-paginate-rest"; +import { Octokit } from "@octokit/core"; +import { App } from "./index"; +import { getInstallationOctokit } from "./get-installation-octokit"; +function eachInstallationFactory(app) { + return Object.assign(eachInstallation.bind(null, app), { + iterator: eachInstallationIterator.bind(null, app) + }); +} +async function 
eachInstallation(app, callback) { + const i = eachInstallationIterator(app)[Symbol.asyncIterator](); + let result = await i.next(); + while (!result.done) { + await callback(result.value); + result = await i.next(); + } +} +function eachInstallationIterator(app) { + return { + async *[Symbol.asyncIterator]() { + const iterator = composePaginateRest.iterator( + app.octokit, + "GET /app/installations" + ); + for await (const { data: installations } of iterator) { + for (const installation of installations) { + const installationOctokit = await getInstallationOctokit( + app, + installation.id + ); + yield { octokit: installationOctokit, installation }; + } + } + } + }; +} +export { + eachInstallation, + eachInstallationFactory, + eachInstallationIterator +}; diff --git a/dist/node_modules/@octokit/app/dist-src/each-repository.js b/dist/node_modules/@octokit/app/dist-src/each-repository.js new file mode 100644 index 0000000..13180d1 --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-src/each-repository.js @@ -0,0 +1,55 @@ +import { composePaginateRest } from "@octokit/plugin-paginate-rest"; +import { Octokit } from "@octokit/core"; +import { App } from "./index"; +function eachRepositoryFactory(app) { + return Object.assign(eachRepository.bind(null, app), { + iterator: eachRepositoryIterator.bind(null, app) + }); +} +async function eachRepository(app, queryOrCallback, callback) { + const i = eachRepositoryIterator( + app, + callback ? queryOrCallback : void 0 + )[Symbol.asyncIterator](); + let result = await i.next(); + while (!result.done) { + if (callback) { + await callback(result.value); + } else { + await queryOrCallback(result.value); + } + result = await i.next(); + } +} +function singleInstallationIterator(app, installationId) { + return { + async *[Symbol.asyncIterator]() { + yield { + octokit: await app.getInstallationOctokit(installationId) + }; + } + }; +} +function eachRepositoryIterator(app, query) { + return { + async *[Symbol.asyncIterator]() { + const iterator = query ? 
singleInstallationIterator(app, query.installationId) : app.eachInstallation.iterator(); + for await (const { octokit } of iterator) { + const repositoriesIterator = composePaginateRest.iterator( + octokit, + "GET /installation/repositories" + ); + for await (const { data: repositories } of repositoriesIterator) { + for (const repository of repositories) { + yield { octokit, repository }; + } + } + } + } + }; +} +export { + eachRepository, + eachRepositoryFactory, + eachRepositoryIterator +}; diff --git a/dist/node_modules/@octokit/app/dist-src/get-installation-octokit.js b/dist/node_modules/@octokit/app/dist-src/get-installation-octokit.js new file mode 100644 index 0000000..540a43a --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-src/get-installation-octokit.js @@ -0,0 +1,20 @@ +import { createAppAuth } from "@octokit/auth-app"; +import { Octokit } from "@octokit/core"; +import { App } from "./index"; +async function getInstallationOctokit(app, installationId) { + return app.octokit.auth({ + type: "installation", + installationId, + factory(auth) { + const options = { + ...auth.octokitOptions, + authStrategy: createAppAuth, + ...{ auth: { ...auth, installationId } } + }; + return new auth.octokit.constructor(options); + } + }); +} +export { + getInstallationOctokit +}; diff --git a/dist/node_modules/@octokit/app/dist-src/index.js b/dist/node_modules/@octokit/app/dist-src/index.js new file mode 100644 index 0000000..c6d3676 --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-src/index.js @@ -0,0 +1,91 @@ +import { Octokit as OctokitCore } from "@octokit/core"; +import { createAppAuth } from "@octokit/auth-app"; +import { OAuthApp } from "@octokit/oauth-app"; +import { Webhooks } from "@octokit/webhooks"; +import { VERSION } from "./version"; +import { webhooks } from "./webhooks"; +import { eachInstallationFactory } from "./each-installation"; +import { eachRepositoryFactory } from "./each-repository"; +import { getInstallationOctokit } from "./get-installation-octokit"; +class App { + static defaults(defaults) { + const AppWithDefaults = class extends this { + constructor(...args) { + super({ + ...defaults, + ...args[0] + }); + } + }; + return AppWithDefaults; + } + constructor(options) { + const Octokit = options.Octokit || OctokitCore; + const authOptions = Object.assign( + { + appId: options.appId, + privateKey: options.privateKey + }, + options.oauth ? 
{ + clientId: options.oauth.clientId, + clientSecret: options.oauth.clientSecret + } : {} + ); + this.octokit = new Octokit({ + authStrategy: createAppAuth, + auth: authOptions, + log: options.log + }); + this.log = Object.assign( + { + debug: () => { + }, + info: () => { + }, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, + options.log + ); + if (options.webhooks) { + this.webhooks = webhooks(this.octokit, options.webhooks); + } else { + Object.defineProperty(this, "webhooks", { + get() { + throw new Error("[@octokit/app] webhooks option not set"); + } + }); + } + if (options.oauth) { + this.oauth = new OAuthApp({ + ...options.oauth, + clientType: "github-app", + Octokit + }); + } else { + Object.defineProperty(this, "oauth", { + get() { + throw new Error( + "[@octokit/app] oauth.clientId / oauth.clientSecret options are not set" + ); + } + }); + } + this.getInstallationOctokit = getInstallationOctokit.bind( + null, + this + ); + this.eachInstallation = eachInstallationFactory( + this + ); + this.eachRepository = eachRepositoryFactory( + this + ); + } +} +App.VERSION = VERSION; +import { createNodeMiddleware } from "./middleware/node/index"; +export { + App, + createNodeMiddleware +}; diff --git a/dist/node_modules/@octokit/app/dist-src/middleware/node/index.js b/dist/node_modules/@octokit/app/dist-src/middleware/node/index.js new file mode 100644 index 0000000..09a96f2 --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-src/middleware/node/index.js @@ -0,0 +1,54 @@ +import { createNodeMiddleware as oauthNodeMiddleware } from "@octokit/oauth-app"; +import { createNodeMiddleware as webhooksNodeMiddleware } from "@octokit/webhooks"; +import { App } from "../../index"; +import { onUnhandledRequestDefault } from "./on-unhandled-request-default"; +function noop() { +} +function createNodeMiddleware(app, options = {}) { + const log = Object.assign( + { + debug: noop, + info: noop, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, + options.log + ); + const optionsWithDefaults = { + onUnhandledRequest: onUnhandledRequestDefault, + pathPrefix: "/api/github", + ...options, + log + }; + const webhooksMiddleware = webhooksNodeMiddleware(app.webhooks, { + path: optionsWithDefaults.pathPrefix + "/webhooks", + log, + onUnhandledRequest: optionsWithDefaults.onUnhandledRequest + }); + const oauthMiddleware = oauthNodeMiddleware(app.oauth, { + pathPrefix: optionsWithDefaults.pathPrefix + "/oauth", + onUnhandledRequest: optionsWithDefaults.onUnhandledRequest + }); + return middleware.bind(null, optionsWithDefaults, { + webhooksMiddleware, + oauthMiddleware + }); +} +async function middleware(options, { webhooksMiddleware, oauthMiddleware }, request, response, next) { + const { pathname } = new URL(request.url, "http://localhost"); + if (pathname === `${options.pathPrefix}/webhooks`) { + return webhooksMiddleware(request, response, next); + } + if (pathname.startsWith(`${options.pathPrefix}/oauth/`)) { + return oauthMiddleware(request, response, next); + } + const isExpressMiddleware = typeof next === "function"; + if (isExpressMiddleware) { + return next(); + } + return options.onUnhandledRequest(request, response); +} +export { + createNodeMiddleware, + middleware +}; diff --git a/dist/node_modules/@octokit/app/dist-src/middleware/node/on-unhandled-request-default.js b/dist/node_modules/@octokit/app/dist-src/middleware/node/on-unhandled-request-default.js new file mode 100644 index 0000000..73e4c84 --- /dev/null +++ 
b/dist/node_modules/@octokit/app/dist-src/middleware/node/on-unhandled-request-default.js @@ -0,0 +1,13 @@ +function onUnhandledRequestDefault(request, response) { + response.writeHead(404, { + "content-type": "application/json" + }); + response.end( + JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}` + }) + ); +} +export { + onUnhandledRequestDefault +}; diff --git a/dist/node_modules/@octokit/app/dist-src/version.js b/dist/node_modules/@octokit/app/dist-src/version.js new file mode 100644 index 0000000..794584b --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "13.1.8"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/app/dist-src/webhooks.js b/dist/node_modules/@octokit/app/dist-src/webhooks.js new file mode 100644 index 0000000..51865cb --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-src/webhooks.js @@ -0,0 +1,50 @@ +import { Octokit } from "@octokit/core"; +import { createAppAuth } from "@octokit/auth-app"; +import { createUnauthenticatedAuth } from "@octokit/auth-unauthenticated"; +import { Webhooks } from "@octokit/webhooks"; +function webhooks(appOctokit, options) { + return new Webhooks({ + secret: options.secret, + transform: async (event) => { + if (!("installation" in event.payload) || typeof event.payload.installation !== "object") { + const octokit2 = new appOctokit.constructor({ + authStrategy: createUnauthenticatedAuth, + auth: { + reason: `"installation" key missing in webhook event payload` + } + }); + return { + ...event, + octokit: octokit2 + }; + } + const installationId = event.payload.installation.id; + const octokit = await appOctokit.auth({ + type: "installation", + installationId, + factory(auth) { + return new auth.octokit.constructor({ + ...auth.octokitOptions, + authStrategy: createAppAuth, + ...{ + auth: { + ...auth, + installationId + } + } + }); + } + }); + octokit.hook.before("request", (options2) => { + options2.headers["x-github-delivery"] = event.id; + }); + return { + ...event, + octokit + }; + } + }); +} +export { + webhooks +}; diff --git a/dist/node_modules/@octokit/app/dist-types/each-installation.d.ts b/dist/node_modules/@octokit/app/dist-types/each-installation.d.ts new file mode 100644 index 0000000..53f9696 --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-types/each-installation.d.ts @@ -0,0 +1,122 @@ +import { Octokit } from "@octokit/core"; +import { App } from "./index"; +import type { EachInstallationFunction, EachInstallationInterface } from "./types"; +export declare function eachInstallationFactory(app: App): EachInstallationInterface; +export declare function eachInstallation(app: App, callback: EachInstallationFunction): Promise; +export declare function eachInstallationIterator(app: App): { + [Symbol.asyncIterator](): AsyncGenerator<{ + octokit: Octokit; + installation: { + id: number; + account: { + name?: string | null | undefined; + email?: string | null | undefined; + login: string; + id: number; + node_id: string; + avatar_url: string; + gravatar_id: string | null; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + starred_at?: string | undefined; + } | { + description?: string | null | undefined; + html_url: string; + website_url?: string | null | undefined; + id: number; + 
node_id: string; + name: string; + slug: string; + created_at: string | null; + updated_at: string | null; + avatar_url: string; + } | null; + repository_selection: "all" | "selected"; + access_tokens_url: string; + repositories_url: string; + html_url: string; + app_id: number; + target_id: number; + target_type: string; + permissions: { + actions?: "write" | "read" | undefined; + administration?: "write" | "read" | undefined; + checks?: "write" | "read" | undefined; + contents?: "write" | "read" | undefined; + deployments?: "write" | "read" | undefined; + environments?: "write" | "read" | undefined; + issues?: "write" | "read" | undefined; + metadata?: "write" | "read" | undefined; + packages?: "write" | "read" | undefined; + pages?: "write" | "read" | undefined; + pull_requests?: "write" | "read" | undefined; + repository_hooks?: "write" | "read" | undefined; + repository_projects?: "write" | "read" | "admin" | undefined; + secret_scanning_alerts?: "write" | "read" | undefined; + secrets?: "write" | "read" | undefined; + security_events?: "write" | "read" | undefined; + single_file?: "write" | "read" | undefined; + statuses?: "write" | "read" | undefined; + vulnerability_alerts?: "write" | "read" | undefined; + workflows?: "write" | undefined; + members?: "write" | "read" | undefined; + organization_administration?: "write" | "read" | undefined; + organization_custom_roles?: "write" | "read" | undefined; + organization_announcement_banners?: "write" | "read" | undefined; + organization_hooks?: "write" | "read" | undefined; + organization_personal_access_tokens?: "write" | "read" | undefined; + organization_personal_access_token_requests?: "write" | "read" | undefined; + organization_plan?: "read" | undefined; + organization_projects?: "write" | "read" | "admin" | undefined; + organization_packages?: "write" | "read" | undefined; + organization_secrets?: "write" | "read" | undefined; + organization_self_hosted_runners?: "write" | "read" | undefined; + organization_user_blocking?: "write" | "read" | undefined; + team_discussions?: "write" | "read" | undefined; + }; + events: string[]; + created_at: string; + updated_at: string; + single_file_name: string | null; + has_multiple_single_files?: boolean | undefined; + single_file_paths?: string[] | undefined; + app_slug: string; + suspended_by: { + name?: string | null | undefined; + email?: string | null | undefined; + login: string; + id: number; + node_id: string; + avatar_url: string; + gravatar_id: string | null; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + starred_at?: string | undefined; + } | null; + suspended_at: string | null; + contact_email?: string | null | undefined; + }; + }, void, unknown>; +}; diff --git a/dist/node_modules/@octokit/app/dist-types/each-repository.d.ts b/dist/node_modules/@octokit/app/dist-types/each-repository.d.ts new file mode 100644 index 0000000..3598ae1 --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-types/each-repository.d.ts @@ -0,0 +1,281 @@ +import { Octokit } from "@octokit/core"; +import { App } from "./index"; +import type { EachRepositoryFunction, EachRepositoryInterface, EachRepositoryQuery } from "./types"; +export declare function eachRepositoryFactory(app: App): EachRepositoryInterface; +export declare function eachRepository(app: 
App, queryOrCallback: EachRepositoryQuery | EachRepositoryFunction, callback?: EachRepositoryFunction): Promise; +export declare function eachRepositoryIterator(app: App, query?: EachRepositoryQuery): { + [Symbol.asyncIterator](): AsyncGenerator<{ + octokit: Octokit; + repository: { + id: number; + node_id: string; + name: string; + full_name: string; + license: { + key: string; + name: string; + url: string | null; + spdx_id: string | null; + node_id: string; + html_url?: string | undefined; + } | null; + organization?: { + name?: string | null | undefined; + email?: string | null | undefined; + login: string; + id: number; + node_id: string; + avatar_url: string; + gravatar_id: string | null; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + starred_at?: string | undefined; + } | null | undefined; + forks: number; + permissions?: { + admin: boolean; + pull: boolean; + triage?: boolean | undefined; + push: boolean; + maintain?: boolean | undefined; + } | undefined; + owner: { + name?: string | null | undefined; + email?: string | null | undefined; + login: string; + id: number; + node_id: string; + avatar_url: string; + gravatar_id: string | null; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + starred_at?: string | undefined; + }; + private: boolean; + html_url: string; + description: string | null; + fork: boolean; + url: string; + archive_url: string; + assignees_url: string; + blobs_url: string; + branches_url: string; + collaborators_url: string; + comments_url: string; + commits_url: string; + compare_url: string; + contents_url: string; + contributors_url: string; + deployments_url: string; + downloads_url: string; + events_url: string; + forks_url: string; + git_commits_url: string; + git_refs_url: string; + git_tags_url: string; + git_url: string; + issue_comment_url: string; + issue_events_url: string; + issues_url: string; + keys_url: string; + labels_url: string; + languages_url: string; + merges_url: string; + milestones_url: string; + notifications_url: string; + pulls_url: string; + releases_url: string; + ssh_url: string; + stargazers_url: string; + statuses_url: string; + subscribers_url: string; + subscription_url: string; + tags_url: string; + teams_url: string; + trees_url: string; + clone_url: string; + mirror_url: string | null; + hooks_url: string; + svn_url: string; + homepage: string | null; + language: string | null; + forks_count: number; + stargazers_count: number; + watchers_count: number; + size: number; + default_branch: string; + open_issues_count: number; + is_template?: boolean | undefined; + topics?: string[] | undefined; + has_issues: boolean; + has_projects: boolean; + has_wiki: boolean; + has_pages: boolean; + has_downloads: boolean; + has_discussions?: boolean | undefined; + archived: boolean; + disabled: boolean; + visibility?: string | undefined; + pushed_at: string | null; + created_at: string | null; + updated_at: string | null; + allow_rebase_merge?: boolean | undefined; + template_repository?: { + id?: number | undefined; + node_id?: string | 
undefined; + name?: string | undefined; + full_name?: string | undefined; + owner?: { + login?: string | undefined; + id?: number | undefined; + node_id?: string | undefined; + avatar_url?: string | undefined; + gravatar_id?: string | undefined; + url?: string | undefined; + html_url?: string | undefined; + followers_url?: string | undefined; + following_url?: string | undefined; + gists_url?: string | undefined; + starred_url?: string | undefined; + subscriptions_url?: string | undefined; + organizations_url?: string | undefined; + repos_url?: string | undefined; + events_url?: string | undefined; + received_events_url?: string | undefined; + type?: string | undefined; + site_admin?: boolean | undefined; + } | undefined; + private?: boolean | undefined; + html_url?: string | undefined; + description?: string | undefined; + fork?: boolean | undefined; + url?: string | undefined; + archive_url?: string | undefined; + assignees_url?: string | undefined; + blobs_url?: string | undefined; + branches_url?: string | undefined; + collaborators_url?: string | undefined; + comments_url?: string | undefined; + commits_url?: string | undefined; + compare_url?: string | undefined; + contents_url?: string | undefined; + contributors_url?: string | undefined; + deployments_url?: string | undefined; + downloads_url?: string | undefined; + events_url?: string | undefined; + forks_url?: string | undefined; + git_commits_url?: string | undefined; + git_refs_url?: string | undefined; + git_tags_url?: string | undefined; + git_url?: string | undefined; + issue_comment_url?: string | undefined; + issue_events_url?: string | undefined; + issues_url?: string | undefined; + keys_url?: string | undefined; + labels_url?: string | undefined; + languages_url?: string | undefined; + merges_url?: string | undefined; + milestones_url?: string | undefined; + notifications_url?: string | undefined; + pulls_url?: string | undefined; + releases_url?: string | undefined; + ssh_url?: string | undefined; + stargazers_url?: string | undefined; + statuses_url?: string | undefined; + subscribers_url?: string | undefined; + subscription_url?: string | undefined; + tags_url?: string | undefined; + teams_url?: string | undefined; + trees_url?: string | undefined; + clone_url?: string | undefined; + mirror_url?: string | undefined; + hooks_url?: string | undefined; + svn_url?: string | undefined; + homepage?: string | undefined; + language?: string | undefined; + forks_count?: number | undefined; + stargazers_count?: number | undefined; + watchers_count?: number | undefined; + size?: number | undefined; + default_branch?: string | undefined; + open_issues_count?: number | undefined; + is_template?: boolean | undefined; + topics?: string[] | undefined; + has_issues?: boolean | undefined; + has_projects?: boolean | undefined; + has_wiki?: boolean | undefined; + has_pages?: boolean | undefined; + has_downloads?: boolean | undefined; + archived?: boolean | undefined; + disabled?: boolean | undefined; + visibility?: string | undefined; + pushed_at?: string | undefined; + created_at?: string | undefined; + updated_at?: string | undefined; + permissions?: { + admin?: boolean | undefined; + maintain?: boolean | undefined; + push?: boolean | undefined; + triage?: boolean | undefined; + pull?: boolean | undefined; + } | undefined; + allow_rebase_merge?: boolean | undefined; + temp_clone_token?: string | undefined; + allow_squash_merge?: boolean | undefined; + allow_auto_merge?: boolean | undefined; + delete_branch_on_merge?: boolean | 
undefined; + allow_update_branch?: boolean | undefined; + use_squash_pr_title_as_default?: boolean | undefined; + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE" | undefined; + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK" | undefined; + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE" | undefined; + merge_commit_message?: "PR_TITLE" | "PR_BODY" | "BLANK" | undefined; + allow_merge_commit?: boolean | undefined; + subscribers_count?: number | undefined; + network_count?: number | undefined; + } | null | undefined; + temp_clone_token?: string | undefined; + allow_squash_merge?: boolean | undefined; + allow_auto_merge?: boolean | undefined; + delete_branch_on_merge?: boolean | undefined; + allow_update_branch?: boolean | undefined; + use_squash_pr_title_as_default?: boolean | undefined; + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE" | undefined; + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK" | undefined; + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE" | undefined; + merge_commit_message?: "PR_TITLE" | "PR_BODY" | "BLANK" | undefined; + allow_merge_commit?: boolean | undefined; + allow_forking?: boolean | undefined; + web_commit_signoff_required?: boolean | undefined; + subscribers_count?: number | undefined; + network_count?: number | undefined; + open_issues: number; + watchers: number; + master_branch?: string | undefined; + starred_at?: string | undefined; + anonymous_access_enabled?: boolean | undefined; + }; + }, void, unknown>; +}; diff --git a/dist/node_modules/@octokit/app/dist-types/get-installation-octokit.d.ts b/dist/node_modules/@octokit/app/dist-types/get-installation-octokit.d.ts new file mode 100644 index 0000000..a52fceb --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-types/get-installation-octokit.d.ts @@ -0,0 +1,3 @@ +import { Octokit } from "@octokit/core"; +import { App } from "./index"; +export declare function getInstallationOctokit(app: App, installationId: number): Promise; diff --git a/dist/node_modules/@octokit/app/dist-types/index.d.ts b/dist/node_modules/@octokit/app/dist-types/index.d.ts new file mode 100644 index 0000000..e37676f --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-types/index.d.ts @@ -0,0 +1,52 @@ +import { Octokit as OctokitCore } from "@octokit/core"; +import { OAuthApp } from "@octokit/oauth-app"; +import { Webhooks } from "@octokit/webhooks"; +import type { Options, ConstructorOptions, EachInstallationInterface, EachRepositoryInterface, GetInstallationOctokitInterface } from "./types"; +type Constructor = new (...args: any[]) => T; +type OctokitType = TOptions["Octokit"] extends typeof OctokitCore ? InstanceType : OctokitCore; +type OctokitClassType = TOptions["Octokit"] extends typeof OctokitCore ? 
TOptions["Octokit"] : typeof OctokitCore; +export declare class App { + static VERSION: string; + static defaults>>(this: S, defaults: Partial): { + new (...args: any[]): { + octokit: OctokitType; + webhooks: Webhooks<{ + octokit: OctokitType; + }>; + oauth: OAuthApp<{ + clientType: "github-app"; + Octokit: OctokitClassType; + }>; + getInstallationOctokit: GetInstallationOctokitInterface>; + eachInstallation: EachInstallationInterface>; + eachRepository: EachRepositoryInterface>; + log: { + [key: string]: unknown; + debug: (message: string, additionalInfo?: object | undefined) => void; + info: (message: string, additionalInfo?: object | undefined) => void; + warn: (message: string, additionalInfo?: object | undefined) => void; + error: (message: string, additionalInfo?: object | undefined) => void; + }; + }; + } & S; + octokit: OctokitType; + webhooks: Webhooks<{ + octokit: OctokitType; + }>; + oauth: OAuthApp<{ + clientType: "github-app"; + Octokit: OctokitClassType; + }>; + getInstallationOctokit: GetInstallationOctokitInterface>; + eachInstallation: EachInstallationInterface>; + eachRepository: EachRepositoryInterface>; + log: { + debug: (message: string, additionalInfo?: object) => void; + info: (message: string, additionalInfo?: object) => void; + warn: (message: string, additionalInfo?: object) => void; + error: (message: string, additionalInfo?: object) => void; + [key: string]: unknown; + }; + constructor(options: ConstructorOptions); +} +export { createNodeMiddleware } from "./middleware/node/index"; diff --git a/dist/node_modules/@octokit/app/dist-types/middleware/node/index.d.ts b/dist/node_modules/@octokit/app/dist-types/middleware/node/index.d.ts new file mode 100644 index 0000000..c97585c --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-types/middleware/node/index.d.ts @@ -0,0 +1,12 @@ +type IncomingMessage = any; +type ServerResponse = any; +import { App } from "../../index"; +import type { Options } from "../../types"; +export type MiddlewareOptions = { + pathPrefix?: string; + log?: Options["log"]; + onUnhandledRequest?: (request: IncomingMessage, response: ServerResponse) => void; +}; +export declare function createNodeMiddleware(app: App, options?: MiddlewareOptions): (args_0: any, args_1: any, args_2?: Function | undefined) => Promise; +export declare function middleware(options: Required, { webhooksMiddleware, oauthMiddleware }: any, request: IncomingMessage, response: ServerResponse, next?: Function): Promise; +export {}; diff --git a/dist/node_modules/@octokit/app/dist-types/middleware/node/on-unhandled-request-default.d.ts b/dist/node_modules/@octokit/app/dist-types/middleware/node/on-unhandled-request-default.d.ts new file mode 100644 index 0000000..3895815 --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-types/middleware/node/on-unhandled-request-default.d.ts @@ -0,0 +1,4 @@ +type IncomingMessage = any; +type ServerResponse = any; +export declare function onUnhandledRequestDefault(request: IncomingMessage, response: ServerResponse): void; +export {}; diff --git a/dist/node_modules/@octokit/app/dist-types/types.d.ts b/dist/node_modules/@octokit/app/dist-types/types.d.ts new file mode 100644 index 0000000..9ecccd1 --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-types/types.d.ts @@ -0,0 +1,51 @@ +import { Octokit } from "@octokit/core"; +import type { Endpoints } from "@octokit/types"; +export type Options = { + appId?: number | string; + privateKey?: string; + webhooks?: { + secret: string; + }; + oauth?: { + clientId: string; + clientSecret: 
string; + allowSignup?: boolean; + }; + Octokit?: typeof Octokit; + log?: { + debug: (...data: any[]) => void; + info: (...data: any[]) => void; + warn: (...data: any[]) => void; + error: (...data: any[]) => void; + }; +}; +export type ConstructorOptions = TOptions & { + appId: number | string; + privateKey: string; +}; +export type InstallationFunctionOptions = { + octokit: O; + installation: Endpoints["GET /app/installations"]["response"]["data"][0]; +}; +export type EachInstallationFunction = (options: InstallationFunctionOptions) => unknown | Promise; +export interface EachInstallationInterface { + (callback: EachInstallationFunction): Promise; + iterator: () => AsyncIterable>; +} +type EachRepositoryFunctionOptions = { + octokit: O; + repository: Endpoints["GET /installation/repositories"]["response"]["data"]["repositories"][0]; +}; +export type EachRepositoryFunction = (options: EachRepositoryFunctionOptions) => unknown | Promise; +export type EachRepositoryQuery = { + installationId: number; +}; +export interface EachRepositoryInterface { + (callback: EachRepositoryFunction): Promise; + (query: EachRepositoryQuery, callback: EachRepositoryFunction): Promise; + iterator: (query?: EachRepositoryQuery) => AsyncIterable>; +} +export interface GetInstallationOctokitInterface { + (installationId: number): Promise; +} +export {}; diff --git a/dist/node_modules/@octokit/app/dist-types/version.d.ts b/dist/node_modules/@octokit/app/dist-types/version.d.ts new file mode 100644 index 0000000..03588a6 --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "13.1.8"; diff --git a/dist/node_modules/@octokit/app/dist-types/webhooks.d.ts b/dist/node_modules/@octokit/app/dist-types/webhooks.d.ts new file mode 100644 index 0000000..a96303b --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-types/webhooks.d.ts @@ -0,0 +1,6 @@ +import { Octokit } from "@octokit/core"; +import { Webhooks, type EmitterWebhookEvent } from "@octokit/webhooks"; +import type { Options } from "./types"; +export declare function webhooks(appOctokit: Octokit, options: Required["webhooks"]): Webhooks; diff --git a/dist/node_modules/@octokit/app/dist-web/index.js b/dist/node_modules/@octokit/app/dist-web/index.js new file mode 100644 index 0000000..74ec0f5 --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-web/index.js @@ -0,0 +1,311 @@ +// pkg/dist-src/index.js +import { Octokit as OctokitCore } from "@octokit/core"; +import { createAppAuth as createAppAuth3 } from "@octokit/auth-app"; +import { OAuthApp } from "@octokit/oauth-app"; +import { Webhooks as Webhooks2 } from "@octokit/webhooks"; + +// pkg/dist-src/version.js +var VERSION = "13.1.8"; + +// pkg/dist-src/webhooks.js +import { Octokit } from "@octokit/core"; +import { createAppAuth } from "@octokit/auth-app"; +import { createUnauthenticatedAuth } from "@octokit/auth-unauthenticated"; +import { Webhooks } from "@octokit/webhooks"; +function webhooks(appOctokit, options) { + return new Webhooks({ + secret: options.secret, + transform: async (event) => { + if (!("installation" in event.payload) || typeof event.payload.installation !== "object") { + const octokit2 = new appOctokit.constructor({ + authStrategy: createUnauthenticatedAuth, + auth: { + reason: `"installation" key missing in webhook event payload` + } + }); + return { + ...event, + octokit: octokit2 + }; + } + const installationId = event.payload.installation.id; + const octokit = await appOctokit.auth({ + type: "installation", + 
installationId, + factory(auth) { + return new auth.octokit.constructor({ + ...auth.octokitOptions, + authStrategy: createAppAuth, + ...{ + auth: { + ...auth, + installationId + } + } + }); + } + }); + octokit.hook.before("request", (options2) => { + options2.headers["x-github-delivery"] = event.id; + }); + return { + ...event, + octokit + }; + } + }); +} + +// pkg/dist-src/each-installation.js +import { composePaginateRest } from "@octokit/plugin-paginate-rest"; +import { Octokit as Octokit3 } from "@octokit/core"; + +// pkg/dist-src/get-installation-octokit.js +import { createAppAuth as createAppAuth2 } from "@octokit/auth-app"; +import { Octokit as Octokit2 } from "@octokit/core"; +async function getInstallationOctokit(app, installationId) { + return app.octokit.auth({ + type: "installation", + installationId, + factory(auth) { + const options = { + ...auth.octokitOptions, + authStrategy: createAppAuth2, + ...{ auth: { ...auth, installationId } } + }; + return new auth.octokit.constructor(options); + } + }); +} + +// pkg/dist-src/each-installation.js +function eachInstallationFactory(app) { + return Object.assign(eachInstallation.bind(null, app), { + iterator: eachInstallationIterator.bind(null, app) + }); +} +async function eachInstallation(app, callback) { + const i = eachInstallationIterator(app)[Symbol.asyncIterator](); + let result = await i.next(); + while (!result.done) { + await callback(result.value); + result = await i.next(); + } +} +function eachInstallationIterator(app) { + return { + async *[Symbol.asyncIterator]() { + const iterator = composePaginateRest.iterator( + app.octokit, + "GET /app/installations" + ); + for await (const { data: installations } of iterator) { + for (const installation of installations) { + const installationOctokit = await getInstallationOctokit( + app, + installation.id + ); + yield { octokit: installationOctokit, installation }; + } + } + } + }; +} + +// pkg/dist-src/each-repository.js +import { composePaginateRest as composePaginateRest2 } from "@octokit/plugin-paginate-rest"; +import { Octokit as Octokit4 } from "@octokit/core"; +function eachRepositoryFactory(app) { + return Object.assign(eachRepository.bind(null, app), { + iterator: eachRepositoryIterator.bind(null, app) + }); +} +async function eachRepository(app, queryOrCallback, callback) { + const i = eachRepositoryIterator( + app, + callback ? queryOrCallback : void 0 + )[Symbol.asyncIterator](); + let result = await i.next(); + while (!result.done) { + if (callback) { + await callback(result.value); + } else { + await queryOrCallback(result.value); + } + result = await i.next(); + } +} +function singleInstallationIterator(app, installationId) { + return { + async *[Symbol.asyncIterator]() { + yield { + octokit: await app.getInstallationOctokit(installationId) + }; + } + }; +} +function eachRepositoryIterator(app, query) { + return { + async *[Symbol.asyncIterator]() { + const iterator = query ? 
singleInstallationIterator(app, query.installationId) : app.eachInstallation.iterator(); + for await (const { octokit } of iterator) { + const repositoriesIterator = composePaginateRest2.iterator( + octokit, + "GET /installation/repositories" + ); + for await (const { data: repositories } of repositoriesIterator) { + for (const repository of repositories) { + yield { octokit, repository }; + } + } + } + } + }; +} + +// pkg/dist-src/middleware/node/index.js +import { createNodeMiddleware as oauthNodeMiddleware } from "@octokit/oauth-app"; +import { createNodeMiddleware as webhooksNodeMiddleware } from "@octokit/webhooks"; + +// pkg/dist-src/middleware/node/on-unhandled-request-default.js +function onUnhandledRequestDefault(request, response) { + response.writeHead(404, { + "content-type": "application/json" + }); + response.end( + JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}` + }) + ); +} + +// pkg/dist-src/middleware/node/index.js +function noop() { +} +function createNodeMiddleware(app, options = {}) { + const log = Object.assign( + { + debug: noop, + info: noop, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, + options.log + ); + const optionsWithDefaults = { + onUnhandledRequest: onUnhandledRequestDefault, + pathPrefix: "/api/github", + ...options, + log + }; + const webhooksMiddleware = webhooksNodeMiddleware(app.webhooks, { + path: optionsWithDefaults.pathPrefix + "/webhooks", + log, + onUnhandledRequest: optionsWithDefaults.onUnhandledRequest + }); + const oauthMiddleware = oauthNodeMiddleware(app.oauth, { + pathPrefix: optionsWithDefaults.pathPrefix + "/oauth", + onUnhandledRequest: optionsWithDefaults.onUnhandledRequest + }); + return middleware.bind(null, optionsWithDefaults, { + webhooksMiddleware, + oauthMiddleware + }); +} +async function middleware(options, { webhooksMiddleware, oauthMiddleware }, request, response, next) { + const { pathname } = new URL(request.url, "http://localhost"); + if (pathname === `${options.pathPrefix}/webhooks`) { + return webhooksMiddleware(request, response, next); + } + if (pathname.startsWith(`${options.pathPrefix}/oauth/`)) { + return oauthMiddleware(request, response, next); + } + const isExpressMiddleware = typeof next === "function"; + if (isExpressMiddleware) { + return next(); + } + return options.onUnhandledRequest(request, response); +} + +// pkg/dist-src/index.js +var App = class { + static defaults(defaults) { + const AppWithDefaults = class extends this { + constructor(...args) { + super({ + ...defaults, + ...args[0] + }); + } + }; + return AppWithDefaults; + } + constructor(options) { + const Octokit5 = options.Octokit || OctokitCore; + const authOptions = Object.assign( + { + appId: options.appId, + privateKey: options.privateKey + }, + options.oauth ? 
{ + clientId: options.oauth.clientId, + clientSecret: options.oauth.clientSecret + } : {} + ); + this.octokit = new Octokit5({ + authStrategy: createAppAuth3, + auth: authOptions, + log: options.log + }); + this.log = Object.assign( + { + debug: () => { + }, + info: () => { + }, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, + options.log + ); + if (options.webhooks) { + this.webhooks = webhooks(this.octokit, options.webhooks); + } else { + Object.defineProperty(this, "webhooks", { + get() { + throw new Error("[@octokit/app] webhooks option not set"); + } + }); + } + if (options.oauth) { + this.oauth = new OAuthApp({ + ...options.oauth, + clientType: "github-app", + Octokit: Octokit5 + }); + } else { + Object.defineProperty(this, "oauth", { + get() { + throw new Error( + "[@octokit/app] oauth.clientId / oauth.clientSecret options are not set" + ); + } + }); + } + this.getInstallationOctokit = getInstallationOctokit.bind( + null, + this + ); + this.eachInstallation = eachInstallationFactory( + this + ); + this.eachRepository = eachRepositoryFactory( + this + ); + } +}; +App.VERSION = VERSION; +export { + App, + createNodeMiddleware +}; diff --git a/dist/node_modules/@octokit/app/dist-web/index.js.map b/dist/node_modules/@octokit/app/dist-web/index.js.map new file mode 100644 index 0000000..dd4aa27 --- /dev/null +++ b/dist/node_modules/@octokit/app/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js", "../dist-src/webhooks.js", "../dist-src/each-installation.js", "../dist-src/get-installation-octokit.js", "../dist-src/each-repository.js", "../dist-src/middleware/node/index.js", "../dist-src/middleware/node/on-unhandled-request-default.js"], + "sourcesContent": ["import { Octokit as OctokitCore } from \"@octokit/core\";\nimport { createAppAuth } from \"@octokit/auth-app\";\nimport { OAuthApp } from \"@octokit/oauth-app\";\nimport { Webhooks } from \"@octokit/webhooks\";\nimport { VERSION } from \"./version\";\nimport { webhooks } from \"./webhooks\";\nimport { eachInstallationFactory } from \"./each-installation\";\nimport { eachRepositoryFactory } from \"./each-repository\";\nimport { getInstallationOctokit } from \"./get-installation-octokit\";\nclass App {\n static defaults(defaults) {\n const AppWithDefaults = class extends this {\n constructor(...args) {\n super({\n ...defaults,\n ...args[0]\n });\n }\n };\n return AppWithDefaults;\n }\n constructor(options) {\n const Octokit = options.Octokit || OctokitCore;\n const authOptions = Object.assign(\n {\n appId: options.appId,\n privateKey: options.privateKey\n },\n options.oauth ? 
{\n clientId: options.oauth.clientId,\n clientSecret: options.oauth.clientSecret\n } : {}\n );\n this.octokit = new Octokit({\n authStrategy: createAppAuth,\n auth: authOptions,\n log: options.log\n });\n this.log = Object.assign(\n {\n debug: () => {\n },\n info: () => {\n },\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n },\n options.log\n );\n if (options.webhooks) {\n this.webhooks = webhooks(this.octokit, options.webhooks);\n } else {\n Object.defineProperty(this, \"webhooks\", {\n get() {\n throw new Error(\"[@octokit/app] webhooks option not set\");\n }\n });\n }\n if (options.oauth) {\n this.oauth = new OAuthApp({\n ...options.oauth,\n clientType: \"github-app\",\n Octokit\n });\n } else {\n Object.defineProperty(this, \"oauth\", {\n get() {\n throw new Error(\n \"[@octokit/app] oauth.clientId / oauth.clientSecret options are not set\"\n );\n }\n });\n }\n this.getInstallationOctokit = getInstallationOctokit.bind(\n null,\n this\n );\n this.eachInstallation = eachInstallationFactory(\n this\n );\n this.eachRepository = eachRepositoryFactory(\n this\n );\n }\n}\nApp.VERSION = VERSION;\nimport { createNodeMiddleware } from \"./middleware/node/index\";\nexport {\n App,\n createNodeMiddleware\n};\n", "const VERSION = \"13.1.8\";\nexport {\n VERSION\n};\n", "import { Octokit } from \"@octokit/core\";\nimport { createAppAuth } from \"@octokit/auth-app\";\nimport { createUnauthenticatedAuth } from \"@octokit/auth-unauthenticated\";\nimport { Webhooks } from \"@octokit/webhooks\";\nfunction webhooks(appOctokit, options) {\n return new Webhooks({\n secret: options.secret,\n transform: async (event) => {\n if (!(\"installation\" in event.payload) || typeof event.payload.installation !== \"object\") {\n const octokit2 = new appOctokit.constructor({\n authStrategy: createUnauthenticatedAuth,\n auth: {\n reason: `\"installation\" key missing in webhook event payload`\n }\n });\n return {\n ...event,\n octokit: octokit2\n };\n }\n const installationId = event.payload.installation.id;\n const octokit = await appOctokit.auth({\n type: \"installation\",\n installationId,\n factory(auth) {\n return new auth.octokit.constructor({\n ...auth.octokitOptions,\n authStrategy: createAppAuth,\n ...{\n auth: {\n ...auth,\n installationId\n }\n }\n });\n }\n });\n octokit.hook.before(\"request\", (options2) => {\n options2.headers[\"x-github-delivery\"] = event.id;\n });\n return {\n ...event,\n octokit\n };\n }\n });\n}\nexport {\n webhooks\n};\n", "import { composePaginateRest } from \"@octokit/plugin-paginate-rest\";\nimport { Octokit } from \"@octokit/core\";\nimport { App } from \"./index\";\nimport { getInstallationOctokit } from \"./get-installation-octokit\";\nfunction eachInstallationFactory(app) {\n return Object.assign(eachInstallation.bind(null, app), {\n iterator: eachInstallationIterator.bind(null, app)\n });\n}\nasync function eachInstallation(app, callback) {\n const i = eachInstallationIterator(app)[Symbol.asyncIterator]();\n let result = await i.next();\n while (!result.done) {\n await callback(result.value);\n result = await i.next();\n }\n}\nfunction eachInstallationIterator(app) {\n return {\n async *[Symbol.asyncIterator]() {\n const iterator = composePaginateRest.iterator(\n app.octokit,\n \"GET /app/installations\"\n );\n for await (const { data: installations } of iterator) {\n for (const installation of installations) {\n const installationOctokit = await getInstallationOctokit(\n app,\n installation.id\n );\n yield { octokit: installationOctokit, 
installation };\n }\n }\n }\n };\n}\nexport {\n eachInstallation,\n eachInstallationFactory,\n eachInstallationIterator\n};\n", "import { createAppAuth } from \"@octokit/auth-app\";\nimport { Octokit } from \"@octokit/core\";\nimport { App } from \"./index\";\nasync function getInstallationOctokit(app, installationId) {\n return app.octokit.auth({\n type: \"installation\",\n installationId,\n factory(auth) {\n const options = {\n ...auth.octokitOptions,\n authStrategy: createAppAuth,\n ...{ auth: { ...auth, installationId } }\n };\n return new auth.octokit.constructor(options);\n }\n });\n}\nexport {\n getInstallationOctokit\n};\n", "import { composePaginateRest } from \"@octokit/plugin-paginate-rest\";\nimport { Octokit } from \"@octokit/core\";\nimport { App } from \"./index\";\nfunction eachRepositoryFactory(app) {\n return Object.assign(eachRepository.bind(null, app), {\n iterator: eachRepositoryIterator.bind(null, app)\n });\n}\nasync function eachRepository(app, queryOrCallback, callback) {\n const i = eachRepositoryIterator(\n app,\n callback ? queryOrCallback : void 0\n )[Symbol.asyncIterator]();\n let result = await i.next();\n while (!result.done) {\n if (callback) {\n await callback(result.value);\n } else {\n await queryOrCallback(result.value);\n }\n result = await i.next();\n }\n}\nfunction singleInstallationIterator(app, installationId) {\n return {\n async *[Symbol.asyncIterator]() {\n yield {\n octokit: await app.getInstallationOctokit(installationId)\n };\n }\n };\n}\nfunction eachRepositoryIterator(app, query) {\n return {\n async *[Symbol.asyncIterator]() {\n const iterator = query ? singleInstallationIterator(app, query.installationId) : app.eachInstallation.iterator();\n for await (const { octokit } of iterator) {\n const repositoriesIterator = composePaginateRest.iterator(\n octokit,\n \"GET /installation/repositories\"\n );\n for await (const { data: repositories } of repositoriesIterator) {\n for (const repository of repositories) {\n yield { octokit, repository };\n }\n }\n }\n }\n };\n}\nexport {\n eachRepository,\n eachRepositoryFactory,\n eachRepositoryIterator\n};\n", "import { createNodeMiddleware as oauthNodeMiddleware } from \"@octokit/oauth-app\";\nimport { createNodeMiddleware as webhooksNodeMiddleware } from \"@octokit/webhooks\";\nimport { App } from \"../../index\";\nimport { onUnhandledRequestDefault } from \"./on-unhandled-request-default\";\nfunction noop() {\n}\nfunction createNodeMiddleware(app, options = {}) {\n const log = Object.assign(\n {\n debug: noop,\n info: noop,\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n },\n options.log\n );\n const optionsWithDefaults = {\n onUnhandledRequest: onUnhandledRequestDefault,\n pathPrefix: \"/api/github\",\n ...options,\n log\n };\n const webhooksMiddleware = webhooksNodeMiddleware(app.webhooks, {\n path: optionsWithDefaults.pathPrefix + \"/webhooks\",\n log,\n onUnhandledRequest: optionsWithDefaults.onUnhandledRequest\n });\n const oauthMiddleware = oauthNodeMiddleware(app.oauth, {\n pathPrefix: optionsWithDefaults.pathPrefix + \"/oauth\",\n onUnhandledRequest: optionsWithDefaults.onUnhandledRequest\n });\n return middleware.bind(null, optionsWithDefaults, {\n webhooksMiddleware,\n oauthMiddleware\n });\n}\nasync function middleware(options, { webhooksMiddleware, oauthMiddleware }, request, response, next) {\n const { pathname } = new URL(request.url, \"http://localhost\");\n if (pathname === `${options.pathPrefix}/webhooks`) {\n return webhooksMiddleware(request, response, 
next);\n }\n if (pathname.startsWith(`${options.pathPrefix}/oauth/`)) {\n return oauthMiddleware(request, response, next);\n }\n const isExpressMiddleware = typeof next === \"function\";\n if (isExpressMiddleware) {\n return next();\n }\n return options.onUnhandledRequest(request, response);\n}\nexport {\n createNodeMiddleware,\n middleware\n};\n", "function onUnhandledRequestDefault(request, response) {\n response.writeHead(404, {\n \"content-type\": \"application/json\"\n });\n response.end(\n JSON.stringify({\n error: `Unknown route: ${request.method} ${request.url}`\n })\n );\n}\nexport {\n onUnhandledRequestDefault\n};\n"], + "mappings": ";AAAA,SAAS,WAAW,mBAAmB;AACvC,SAAS,iBAAAA,sBAAqB;AAC9B,SAAS,gBAAgB;AACzB,SAAS,YAAAC,iBAAgB;;;ACHzB,IAAM,UAAU;;;ACAhB,SAAS,eAAe;AACxB,SAAS,qBAAqB;AAC9B,SAAS,iCAAiC;AAC1C,SAAS,gBAAgB;AACzB,SAAS,SAAS,YAAY,SAAS;AACrC,SAAO,IAAI,SAAS;AAAA,IAClB,QAAQ,QAAQ;AAAA,IAChB,WAAW,OAAO,UAAU;AAC1B,UAAI,EAAE,kBAAkB,MAAM,YAAY,OAAO,MAAM,QAAQ,iBAAiB,UAAU;AACxF,cAAM,WAAW,IAAI,WAAW,YAAY;AAAA,UAC1C,cAAc;AAAA,UACd,MAAM;AAAA,YACJ,QAAQ;AAAA,UACV;AAAA,QACF,CAAC;AACD,eAAO;AAAA,UACL,GAAG;AAAA,UACH,SAAS;AAAA,QACX;AAAA,MACF;AACA,YAAM,iBAAiB,MAAM,QAAQ,aAAa;AAClD,YAAM,UAAU,MAAM,WAAW,KAAK;AAAA,QACpC,MAAM;AAAA,QACN;AAAA,QACA,QAAQ,MAAM;AACZ,iBAAO,IAAI,KAAK,QAAQ,YAAY;AAAA,YAClC,GAAG,KAAK;AAAA,YACR,cAAc;AAAA,YACd,GAAG;AAAA,cACD,MAAM;AAAA,gBACJ,GAAG;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AACD,cAAQ,KAAK,OAAO,WAAW,CAAC,aAAa;AAC3C,iBAAS,QAAQ,mBAAmB,IAAI,MAAM;AAAA,MAChD,CAAC;AACD,aAAO;AAAA,QACL,GAAG;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF,CAAC;AACH;;;AC9CA,SAAS,2BAA2B;AACpC,SAAS,WAAAC,gBAAe;;;ACDxB,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,WAAAC,gBAAe;AAExB,eAAe,uBAAuB,KAAK,gBAAgB;AACzD,SAAO,IAAI,QAAQ,KAAK;AAAA,IACtB,MAAM;AAAA,IACN;AAAA,IACA,QAAQ,MAAM;AACZ,YAAM,UAAU;AAAA,QACd,GAAG,KAAK;AAAA,QACR,cAAcC;AAAA,QACd,GAAG,EAAE,MAAM,EAAE,GAAG,MAAM,eAAe,EAAE;AAAA,MACzC;AACA,aAAO,IAAI,KAAK,QAAQ,YAAY,OAAO;AAAA,IAC7C;AAAA,EACF,CAAC;AACH;;;ADZA,SAAS,wBAAwB,KAAK;AACpC,SAAO,OAAO,OAAO,iBAAiB,KAAK,MAAM,GAAG,GAAG;AAAA,IACrD,UAAU,yBAAyB,KAAK,MAAM,GAAG;AAAA,EACnD,CAAC;AACH;AACA,eAAe,iBAAiB,KAAK,UAAU;AAC7C,QAAM,IAAI,yBAAyB,GAAG,EAAE,OAAO,aAAa,EAAE;AAC9D,MAAI,SAAS,MAAM,EAAE,KAAK;AAC1B,SAAO,CAAC,OAAO,MAAM;AACnB,UAAM,SAAS,OAAO,KAAK;AAC3B,aAAS,MAAM,EAAE,KAAK;AAAA,EACxB;AACF;AACA,SAAS,yBAAyB,KAAK;AACrC,SAAO;AAAA,IACL,QAAQ,OAAO,aAAa,IAAI;AAC9B,YAAM,WAAW,oBAAoB;AAAA,QACnC,IAAI;AAAA,QACJ;AAAA,MACF;AACA,uBAAiB,EAAE,MAAM,cAAc,KAAK,UAAU;AACpD,mBAAW,gBAAgB,eAAe;AACxC,gBAAM,sBAAsB,MAAM;AAAA,YAChC;AAAA,YACA,aAAa;AAAA,UACf;AACA,gBAAM,EAAE,SAAS,qBAAqB,aAAa;AAAA,QACrD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;AEnCA,SAAS,uBAAAC,4BAA2B;AACpC,SAAS,WAAAC,gBAAe;AAExB,SAAS,sBAAsB,KAAK;AAClC,SAAO,OAAO,OAAO,eAAe,KAAK,MAAM,GAAG,GAAG;AAAA,IACnD,UAAU,uBAAuB,KAAK,MAAM,GAAG;AAAA,EACjD,CAAC;AACH;AACA,eAAe,eAAe,KAAK,iBAAiB,UAAU;AAC5D,QAAM,IAAI;AAAA,IACR;AAAA,IACA,WAAW,kBAAkB;AAAA,EAC/B,EAAE,OAAO,aAAa,EAAE;AACxB,MAAI,SAAS,MAAM,EAAE,KAAK;AAC1B,SAAO,CAAC,OAAO,MAAM;AACnB,QAAI,UAAU;AACZ,YAAM,SAAS,OAAO,KAAK;AAAA,IAC7B,OAAO;AACL,YAAM,gBAAgB,OAAO,KAAK;AAAA,IACpC;AACA,aAAS,MAAM,EAAE,KAAK;AAAA,EACxB;AACF;AACA,SAAS,2BAA2B,KAAK,gBAAgB;AACvD,SAAO;AAAA,IACL,QAAQ,OAAO,aAAa,IAAI;AAC9B,YAAM;AAAA,QACJ,SAAS,MAAM,IAAI,uBAAuB,cAAc;AAAA,MAC1D;AAAA,IACF;AAAA,EACF;AACF;AACA,SAAS,uBAAuB,KAAK,OAAO;AAC1C,SAAO;AAAA,IACL,QAAQ,OAAO,aAAa,IAAI;AAC9B,YAAM,WAAW,QAAQ,2BAA2B,KAAK,MAAM,cAAc,IAAI,IAAI,iBAAiB,SAAS;AAC/G,uBAAiB,EAAE,QAAQ,KAAK,UAAU;AACxC,cAAM,uBAAuBC,qBAAoB;AAAA,UAC/C;AAAA,UACA;AAAA,QACF;AACA,yBAAiB,EAAE,MAAM,aAAa,KAAK,sBAAsB;AAC/D,qBAAW,cAAc,cAAc;AACrC,kB
AAM,EAAE,SAAS,WAAW;AAAA,UAC9B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;ACjDA,SAAS,wBAAwB,2BAA2B;AAC5D,SAAS,wBAAwB,8BAA8B;;;ACD/D,SAAS,0BAA0B,SAAS,UAAU;AACpD,WAAS,UAAU,KAAK;AAAA,IACtB,gBAAgB;AAAA,EAClB,CAAC;AACD,WAAS;AAAA,IACP,KAAK,UAAU;AAAA,MACb,OAAO,kBAAkB,QAAQ,UAAU,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AACF;;;ADLA,SAAS,OAAO;AAChB;AACA,SAAS,qBAAqB,KAAK,UAAU,CAAC,GAAG;AAC/C,QAAM,MAAM,OAAO;AAAA,IACjB;AAAA,MACE,OAAO;AAAA,MACP,MAAM;AAAA,MACN,MAAM,QAAQ,KAAK,KAAK,OAAO;AAAA,MAC/B,OAAO,QAAQ,MAAM,KAAK,OAAO;AAAA,IACnC;AAAA,IACA,QAAQ;AAAA,EACV;AACA,QAAM,sBAAsB;AAAA,IAC1B,oBAAoB;AAAA,IACpB,YAAY;AAAA,IACZ,GAAG;AAAA,IACH;AAAA,EACF;AACA,QAAM,qBAAqB,uBAAuB,IAAI,UAAU;AAAA,IAC9D,MAAM,oBAAoB,aAAa;AAAA,IACvC;AAAA,IACA,oBAAoB,oBAAoB;AAAA,EAC1C,CAAC;AACD,QAAM,kBAAkB,oBAAoB,IAAI,OAAO;AAAA,IACrD,YAAY,oBAAoB,aAAa;AAAA,IAC7C,oBAAoB,oBAAoB;AAAA,EAC1C,CAAC;AACD,SAAO,WAAW,KAAK,MAAM,qBAAqB;AAAA,IAChD;AAAA,IACA;AAAA,EACF,CAAC;AACH;AACA,eAAe,WAAW,SAAS,EAAE,oBAAoB,gBAAgB,GAAG,SAAS,UAAU,MAAM;AACnG,QAAM,EAAE,SAAS,IAAI,IAAI,IAAI,QAAQ,KAAK,kBAAkB;AAC5D,MAAI,aAAa,GAAG,QAAQ,uBAAuB;AACjD,WAAO,mBAAmB,SAAS,UAAU,IAAI;AAAA,EACnD;AACA,MAAI,SAAS,WAAW,GAAG,QAAQ,mBAAmB,GAAG;AACvD,WAAO,gBAAgB,SAAS,UAAU,IAAI;AAAA,EAChD;AACA,QAAM,sBAAsB,OAAO,SAAS;AAC5C,MAAI,qBAAqB;AACvB,WAAO,KAAK;AAAA,EACd;AACA,SAAO,QAAQ,mBAAmB,SAAS,QAAQ;AACrD;;;ANxCA,IAAM,MAAN,MAAU;AAAA,EACR,OAAO,SAAS,UAAU;AACxB,UAAM,kBAAkB,cAAc,KAAK;AAAA,MACzC,eAAe,MAAM;AACnB,cAAM;AAAA,UACJ,GAAG;AAAA,UACH,GAAG,KAAK,CAAC;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EACA,YAAY,SAAS;AACnB,UAAMC,WAAU,QAAQ,WAAW;AACnC,UAAM,cAAc,OAAO;AAAA,MACzB;AAAA,QACE,OAAO,QAAQ;AAAA,QACf,YAAY,QAAQ;AAAA,MACtB;AAAA,MACA,QAAQ,QAAQ;AAAA,QACd,UAAU,QAAQ,MAAM;AAAA,QACxB,cAAc,QAAQ,MAAM;AAAA,MAC9B,IAAI,CAAC;AAAA,IACP;AACA,SAAK,UAAU,IAAIA,SAAQ;AAAA,MACzB,cAAcC;AAAA,MACd,MAAM;AAAA,MACN,KAAK,QAAQ;AAAA,IACf,CAAC;AACD,SAAK,MAAM,OAAO;AAAA,MAChB;AAAA,QACE,OAAO,MAAM;AAAA,QACb;AAAA,QACA,MAAM,MAAM;AAAA,QACZ;AAAA,QACA,MAAM,QAAQ,KAAK,KAAK,OAAO;AAAA,QAC/B,OAAO,QAAQ,MAAM,KAAK,OAAO;AAAA,MACnC;AAAA,MACA,QAAQ;AAAA,IACV;AACA,QAAI,QAAQ,UAAU;AACpB,WAAK,WAAW,SAAS,KAAK,SAAS,QAAQ,QAAQ;AAAA,IACzD,OAAO;AACL,aAAO,eAAe,MAAM,YAAY;AAAA,QACtC,MAAM;AACJ,gBAAM,IAAI,MAAM,wCAAwC;AAAA,QAC1D;AAAA,MACF,CAAC;AAAA,IACH;AACA,QAAI,QAAQ,OAAO;AACjB,WAAK,QAAQ,IAAI,SAAS;AAAA,QACxB,GAAG,QAAQ;AAAA,QACX,YAAY;AAAA,QACZ,SAAAD;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,aAAO,eAAe,MAAM,SAAS;AAAA,QACnC,MAAM;AACJ,gBAAM,IAAI;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AACA,SAAK,yBAAyB,uBAAuB;AAAA,MACnD;AAAA,MACA;AAAA,IACF;AACA,SAAK,mBAAmB;AAAA,MACtB;AAAA,IACF;AACA,SAAK,iBAAiB;AAAA,MACpB;AAAA,IACF;AAAA,EACF;AACF;AACA,IAAI,UAAU;", + "names": ["createAppAuth", "Webhooks", "Octokit", "createAppAuth", "Octokit", "createAppAuth", "composePaginateRest", "Octokit", "composePaginateRest", "Octokit", "createAppAuth"] +} diff --git a/dist/node_modules/@octokit/app/package.json b/dist/node_modules/@octokit/app/package.json new file mode 100644 index 0000000..5db1c3c --- /dev/null +++ b/dist/node_modules/@octokit/app/package.json @@ -0,0 +1,49 @@ +{ + "name": "@octokit/app", + "publishConfig": { + "access": "public" + }, + "version": "13.1.8", + "description": "GitHub Apps toolset for Node.js", + "main": "dist-node/index.js", + "repository": "github:octokit/app.js", + "author": "Gregor Martynus (https://github.com/gr2m)", + "license": "MIT", + "dependencies": { + "@octokit/auth-app": "^4.0.13", + "@octokit/auth-unauthenticated": "^3.0.0", + "@octokit/core": "^4.0.0", + "@octokit/oauth-app": "^4.0.7", + "@octokit/plugin-paginate-rest": "^6.0.0", + "@octokit/types": 
"^9.0.0", + "@octokit/webhooks": "^10.0.0" + }, + "devDependencies": { + "@octokit/tsconfig": "^2.0.0", + "@types/jest": "^29.0.0", + "@types/node": "^18.0.0", + "@types/node-fetch": "^2.5.8", + "esbuild": "^0.18.0", + "express": "^4.17.1", + "fetch-mock": "^9.10.7", + "glob": "^10.0.0", + "jest": "^29.0.0", + "mockdate": "^3.0.2", + "node-fetch": "^2.6.7", + "prettier": "2.8.8", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "module": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "source": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/auth-app/LICENSE b/dist/node_modules/@octokit/auth-app/LICENSE new file mode 100644 index 0000000..ef2c18e --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/dist/node_modules/@octokit/auth-app/README.md b/dist/node_modules/@octokit/auth-app/README.md new file mode 100644 index 0000000..819b3fa --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/README.md @@ -0,0 +1,1396 @@ +# auth-app.js + +> GitHub App authentication for JavaScript + +[![@latest](https://img.shields.io/npm/v/@octokit/auth-app.svg)](https://www.npmjs.com/package/@octokit/auth-app) +[![Build Status](https://github.com/octokit/auth-app.js/workflows/Test/badge.svg)](https://github.com/octokit/auth-app.js/actions?query=workflow%3ATest) + +`@octokit/auth-app` implements authentication for GitHub Apps using [JSON Web Token](https://jwt.io/), installation access tokens, and OAuth user-to-server access tokens. 
+ + + +- [Standalone usage](#standalone-usage) + - [Authenticate as GitHub App (JSON Web Token)](#authenticate-as-github-app-json-web-token) + - [Authenticate as OAuth App (client ID/client secret)](#authenticate-as-oauth-app-client-idclient-secret) + - [Authenticate as installation](#authenticate-as-installation) + - [Authenticate as user](#authenticate-as-user) +- [Usage with Octokit](#usage-with-octokit) +- [`createAppAuth(options)` or `new Octokit({ auth })`](#createappauthoptions-or-new-octokit-auth-) +- [`auth(options)` or `octokit.auth(options)`](#authoptions-or-octokitauthoptions) + - [JSON Web Token (JWT) Authentication](#json-web-token-jwt-authentication) + - [OAuth App authentication](#oauth-app-authentication) + - [Installation authentication](#installation-authentication) + - [User authentication (web flow)](#user-authentication-web-flow) + - [User authentication (device flow)](#user-authentication-device-flow) +- [Authentication object](#authentication-object) + - [JSON Web Token (JWT) authentication](#json-web-token-jwt-authentication) + - [OAuth App authentication](#oauth-app-authentication-1) + - [Installation access token authentication](#installation-access-token-authentication) + - [GitHub APP user authentication token with expiring disabled](#github-app-user-authentication-token-with-expiring-disabled) + - [GitHub APP user authentication token with expiring enabled](#github-app-user-authentication-token-with-expiring-enabled) +- [`auth.hook(request, route, parameters)` or `auth.hook(request, options)`](#authhookrequest-route-parameters-or-authhookrequest-options) +- [Types](#types) +- [Implementation details](#implementation-details) +- [License](#license) + + + +## Standalone usage + + + + + + +
+Browsers + + +⚠️ `@octokit/auth-app` is not meant for usage in the browser. A private key and client secret must not be exposed to users. + +The private keys provided by GitHub are in `PKCS#1` format, but the WebCrypto API only supports `PKCS#8`. You need to convert it first: + +```shell +openssl pkcs8 -topk8 -inform PEM -outform PEM -nocrypt -in private-key.pem -out private-key-pkcs8.key +``` + +The OAuth APIs to create user-to-server tokens cannot be used because they do not have CORS enabled. + +If you know what you are doing, load `@octokit/auth-app` directly from [cdn.skypack.dev](https://cdn.skypack.dev): + +```html +<script type="module"> +import { createAppAuth } from "https://cdn.skypack.dev/@octokit/auth-app"; +</script> +``` + +
+Node + + +Install with `npm install @octokit/auth-app` + +```js +const { createAppAuth } = require("@octokit/auth-app"); +// or: import { createAppAuth } from "@octokit/auth-app"; +``` + +
+ +### Authenticate as GitHub App (JSON Web Token) + +```js +const auth = createAppAuth({ + appId: 1, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + clientId: "lv1.1234567890abcdef", + clientSecret: "1234567890abcdef12341234567890abcdef1234", +}); + +// Retrieve JSON Web Token (JWT) to authenticate as app +const appAuthentication = await auth({ type: "app" }); +``` + +resolves with + +```json +{ + "type": "app", + "token": "jsonwebtoken123", + "appId": 123, + "expiresAt": "2018-07-07T00:09:30.000Z" +} +``` + +### Authenticate as OAuth App (client ID/client secret) + +The [OAuth Application APIs](https://docs.github.com/en/rest/reference/apps#oauth-applications-api) require the app to authenticate using clientID/client as Basic Authentication + +```js +const auth = createAppAuth({ + appId: 1, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + clientId: "lv1.1234567890abcdef", + clientSecret: "1234567890abcdef12341234567890abcdef1234", +}); + +const appAuthentication = await auth({ + type: "oauth-app", +}); +``` + +resolves with + +```json +{ + "type": "oauth-app", + "clientId": "lv1.1234567890abcdef", + "clientSecret": "1234567890abcdef1234567890abcdef12345678", + "headers": { + "authorization": "basic bHYxLjEyMzQ1Njc4OTBhYmNkZWY6MTIzNDU2Nzg5MGFiY2RlZjEyMzQ1Njc4OTBhYmNkZWYxMjM0NTY3OA==" + } +} +``` + +### Authenticate as installation + +```js +const auth = createAppAuth({ + appId: 1, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + clientId: "lv1.1234567890abcdef", + clientSecret: "1234567890abcdef12341234567890abcdef1234", +}); + +// Retrieve installation access token +const installationAuthentication = await auth({ + type: "installation", + installationId: 123, +}); +``` + +resolves with + +```json +{ + "type": "token", + "tokenType": "installation", + "token": "token123", + "installationId": 123, + "createdAt": "2018-07-07T00:00:00.000Z", + "expiresAt": "2018-07-07T00:59:00.000Z" +} +``` + +### Authenticate as user + +```js +const auth = createAppAuth({ + appId: 1, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + clientId: "lv1.1234567890abcdef", + clientSecret: "1234567890abcdef12341234567890abcdef1234", +}); + +// Retrieve an oauth-access token +const userAuthentication = await auth({ type: "oauth-user", code: "123456" }); +``` + +Resolves with + +```json +{ + "type": "token", + "tokenType": "oauth", + "token": "token123" +} +``` + +## Usage with Octokit + + + + + + +
+ +Browsers + + + +⚠️ `@octokit/auth-app` is not meant for usage in the browser. A private key and client secret must not be exposed to users. + +The private keys provided by GitHub are in `PKCS#1` format, but the WebCrypto API only supports `PKCS#8`. You need to convert it first: + +```shell +openssl pkcs8 -topk8 -inform PEM -outform PEM -nocrypt -in private-key.pem -out private-key-pkcs8.key +``` + +The OAuth APIs to create user-to-server tokens cannot be used because they do not have CORS enabled. + +If you know what you are doing, load `@octokit/auth-app` and `@octokit/core` (or a compatible module) directly from [cdn.skypack.dev](https://cdn.skypack.dev): + +```html +<script type="module"> +import { Octokit } from "https://cdn.skypack.dev/@octokit/core"; +import { createAppAuth } from "https://cdn.skypack.dev/@octokit/auth-app"; +</script> +``` + +
+ +Node + + + +Install with `npm install @octokit/core @octokit/auth-app`. Optionally replace `@octokit/core` with a compatible module. + +```js +const { Octokit } = require("@octokit/core"); +const { createAppAuth, createOAuthUserAuth } = require("@octokit/auth-app"); +``` + +
+ +```js +const appOctokit = new Octokit({ + authStrategy: createAppAuth, + auth: { + appId: 1, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", + }, +}); + +// Send requests as GitHub App +const { data: { slug } } = await appOctokit.request("GET /app"); +console.log("authenticated as %s", slug); + +// Send requests as OAuth App +await appOctokit.request("POST /applications/{client_id}/token", { + client_id: "1234567890abcdef1234", + access_token: "existingtoken123", +}); +console.log("token is valid"); + +// create a new octokit instance that is authenticated as the user +const userOctokit = await appOctokit.auth({ + type: "oauth-user", + code: "code123", + factory: (options) => { + return new Octokit({ + authStrategy: createOAuthUserAuth, + auth: options, + }); + }, +}); + +// Exchanges the code for the user access token authentication on first request +// and caches the authentication for successive requests +const { + data: { login }, +} = await userOctokit.request("GET /user"); +console.log("Hello, %s!", login); +``` + +In order to create an `octokit` instance that is authenticated as an installation, with automated installation token refresh, set `installationId` as `auth` option: + +```js +const installationOctokit = new Octokit({ + authStrategy: createAppAuth, + auth: { + appId: 1, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + installationId: 123, + }, +}); + +// transparently creates an installation access token the first time it is needed +// and refreshes it when it expires +await installationOctokit.request("POST /repos/{owner}/{repo}/issues", { + owner: "octocat", + repo: "hello-world", + title: "title", +}); +``` + +## `createAppAuth(options)` or `new Octokit({ auth })` + +
+ name + + type + + description +
+ appId + + number + + Required. Find App ID on the app’s about page in settings. +
+ privateKey + + string + + Required. Content of the *.pem file you downloaded from the app’s about page. You can generate a new private key if needed. +
+ installationId + + number + + Default installationId to be used when calling auth({ type: "installation" }). +
+ clientId + + string + + The client ID of the GitHub App. +
+ clientSecret + + string + + A client secret for the GitHub App. +
+ request + + function + + +Automatically set to `octokit.request` when using with an `Octokit` constructor. + +For standalone usage, you can pass in your own [`@octokit/request`](https://github.com/octokit/request.js) instance. For usage with enterprise, set `baseUrl` to the hostname + `/api/v3`. Example: + +```js +const { request } = require("@octokit/request"); +createAppAuth({ + appId: 1, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + request: request.defaults({ + baseUrl: "https://ghe.my-company.com/api/v3", + }), +}); +``` + +
+ cache + + object + + Installation tokens expire after an hour. By default, @octokit/auth-app is caching up to 15000 tokens simultaneously using lru-cache. You can pass your own cache implementation by passing options.cache.{get,set} to the constructor. Example: + +```js +const CACHE = {}; +createAppAuth({ + appId: 1, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + cache: { + async get(key) { + return CACHE[key]; + }, + async set(key, value) { + CACHE[key] = value; + }, + }, +}); +``` + +
+ log + + object + + You can pass in your preferred logging tool by passing option.log to the constructor. If you would like to make the log level configurable using an environment variable or external option, we recommend the console-log-level package. For example: + +```js +createAppAuth({ + appId: 1, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + log: require("console-log-level")({ level: "info" }), +}); +``` + +
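+A combined example of the strategy options above (a minimal sketch; all values are placeholders, and `installationId` only sets the default installation for `auth({ type: "installation" })`):
+
+```js
+const auth = createAppAuth({
+  appId: 1,
+  privateKey: "-----BEGIN PRIVATE KEY-----\n...",
+  clientId: "lv1.1234567890abcdef",
+  clientSecret: "1234567890abcdef1234567890abcdef12345678",
+  // optional: becomes the default for auth({ type: "installation" })
+  installationId: 123,
+});
+```
+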
+ +## `auth(options)` or `octokit.auth(options)` + +The async `auth()` method accepts different options depending on your use case + +### JSON Web Token (JWT) Authentication + +Authenticate as the GitHub app to list installations, repositories, and create installation access tokens. + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + Required. Must be "app". +
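+For example, the resulting JWT can be used to list the app's installations (a minimal sketch, assuming a standalone `request` instance from `@octokit/request`):
+
+```js
+const { token } = await auth({ type: "app" });
+
+// list installations for the authenticated GitHub App
+const { data: installations } = await request("GET /app/installations", {
+  headers: { authorization: `bearer ${token}` },
+});
+```
+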
+ +### OAuth App authentication + +Create, reset, refresh, delete OAuth user-to-server tokens + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + Required. Must be "oauth-app". +
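+For example, the resulting Basic Authentication headers can be used with GitHub's "Check a token" endpoint (a minimal sketch, assuming a standalone `request` instance from `@octokit/request`; the client ID and token values are placeholders):
+
+```js
+const { headers } = await auth({ type: "oauth-app" });
+
+// check an existing user-to-server token
+await request("POST /applications/{client_id}/token", {
+  headers,
+  client_id: "lv1.1234567890abcdef",
+  access_token: "existingtoken123",
+});
+```
+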
+ +### Installation authentication + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + Required. Must be "installation". +
+ installationId + + number + + Required unless a default installationId was passed to createAppAuth(). ID of installation to retrieve authentication for. +
+ repositoryIds + + array of numbers + + The ids of the repositories that the installation token can access, also known as databaseId when querying the repository object in GitHub's GraphQL API. This option is **recommended** over repositoryNames when limiting the scope of the access token, because repository names can change while ids do not. You should only include either repositoryIds or repositoryNames, but not both. +
+ repositoryNames + + array of strings + + The names of the repositories that the installation token can access. As mentioned in the repositoryIds description, you should only include either repositoryIds or repositoryNames, but not both. +
+ permissions + + object + + The permissions granted to the access token. The permissions object includes the permission names and their access type. For a complete list of permissions and allowable values, see GitHub App permissions. +
+ factory + + function + + +The `auth({type: "installation", installationId, factory })` call will resolve with whatever the factory function returns. The `factory` function will be called with all the strategy options that `auth` was created with, plus the additional options passed to `auth`, except `type` and `factory`. + +For example, you can create a new `auth` instance for an installation which shares the internal state (especially the access token cache) with the calling `auth` instance: + +```js +const appAuth = createAppAuth({ + appId: 1, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", +}); + +const installationAuth123 = await appAuth({ + type: "installation", + installationId: 123, + factory: createAppAuth, +}); +``` + +
+ refresh + + boolean + + +Installation tokens expire after one hour. By default, tokens are cached and returned from cache until expired. To bypass and update a cached token for the given `installationId`, set `refresh` to `true`. + +Defaults to `false`. + +
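+Putting these options together, a scoped installation token can be requested like this (a minimal sketch; the IDs and permissions are placeholders):
+
+```js
+const installationAuthentication = await auth({
+  type: "installation",
+  installationId: 123,
+  repositoryIds: [1296269],
+  permissions: { issues: "write" },
+  refresh: true, // bypass the cached token for this installationId
+});
+```
+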
+ +### User authentication (web flow) + +Exchange code received from the web flow redirect described in [step 2 of GitHub's OAuth web flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#web-application-flow) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + Required. Must be "oauth-user". +
+ factory + + function + + +The `auth({type: "oauth-user", factory })` call will resolve with whatever the factory function returns. The `factory` function will be called with all the strategy options that `auth` was created with, plus the additional options passed to `auth`, except `type` and `factory`. + +For example, you can create a new `auth` instance which shares the internal state (especially the access token cache) with the calling `auth` instance: + +```js +const { + createAppAuth, + createOAuthUserAuth, +} = require("@octokit/auth-app"); + +const appAuth = createAppAuth({ + appId: 1, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + clientId: "lv1.1234567890abcdef", + clientSecret: "1234567890abcdef1234567890abcdef12345678", +}); + +const userAuth = await appAuth({ + type: "oauth-user", + code, + factory: createOAuthUserAuth, +}); + +// will create token upon first call, then cache authentication for successive calls, +// until token needs to be refreshed (if enabled for the GitHub App) +const authentication = await userAuth(); +``` + +
+ code + + string + + The authorization code which was passed as query parameter to the callback URL from the OAuth web application flow. +
+ redirectUrl + + string + + The URL in your application where users are sent after authorization. See redirect urls. +
+ state + + string + + The unguessable random string you provided in Step 1 of the OAuth web application flow. +
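+A complete web flow call combining these options looks like this (a minimal sketch; `code`, `redirectUrl`, and `state` must match what was used in step 1 of the web flow):
+
+```js
+const userAuthentication = await auth({
+  type: "oauth-user",
+  code: "123456",
+  redirectUrl: "https://example.com/callback",
+  state: "mystate123",
+});
+```
+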
+ +### User authentication (device flow) + +Create a token using [GitHub's device flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#device-flow). + +The device flow itself does not require a client secret, but `@octokit/auth-app` still requires the `clientSecret` strategy option. If you want to implement the device flow without a client secret, use [`@octokit/auth-oauth-device`](https://github.com/octokit/auth-oauth-device.js#readme) instead. + +
+ name + + type + + description +
+ type + + string + + Required. Must be "oauth-user". +
+ onVerification + + function + + +**Required**. A function that is called once the device and user codes have been retrieved. + +The `onVerification()` callback can be used to pause until the user completes step 2, which might result in a better user experience. + +```js +const userAuthentication = await auth({ + type: "oauth-user", + async onVerification(verification) { + console.log("Open %s", verification.verification_uri); + console.log("Enter code: %s", verification.user_code); + await prompt("press enter when you are ready to continue"); + }, +}); +``` + +
+ factory + + function + + +The `auth({type: "oauth-user", factory })` call will resolve with whatever the factory function returns. The `factory` function will be called with all the strategy options that `auth` was created with, plus the additional options passed to `auth`, except `type` and `factory`. + +For example, you can create a new `auth` instance which shares the internal state (especially the access token cache) with the calling `auth` instance: + +```js +const { + createAppAuth, + createOAuthUserAuth, +} = require("@octokit/auth-app"); + +const appAuth = createAppAuth({ + appId: 1, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + clientId: "lv1.1234567890abcdef", + clientSecret: "1234567890abcdef1234567890abcdef12345678", +}); + +const userAuth = await appAuth({ + type: "oauth-user", + code, + factory: createOAuthUserAuth, +}); + +// will create token upon first call, then cache authentication for successive calls, +// until token needs to be refreshed (if enabled for the GitHub App) +const authentication = await userAuth(); +``` + +
+ +## Authentication object + +Depending on the `auth()` call, the resulting authentication object can be one of: + +1. JSON Web Token (JWT) authentication +1. OAuth App authentication +1. Installation access token authentication +1. GitHub APP user authentication token with expiring disabled +1. GitHub APP user authentication token with expiring enabled + +### JSON Web Token (JWT) authentication + +
+ name + + type + + description +
+ type + + string + + "app" +
+ token + + string + + The JSON Web Token (JWT) to authenticate as the app. +
+ appId + + number + + GitHub App database ID. +
+ expiresAt + + string + + Timestamp in UTC format, e.g. "2018-07-07T00:09:30.000Z". A Date object can be created using new Date(authentication.expiresAt). +
+ +### OAuth App authentication + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "oauth-app" +
+ clientType + + string + + "github-app" +
+ clientId + + string + + The client ID as passed to the constructor. +
+ clientSecret + + string + + The client secret as passed to the constructor. +
+ headers + + object + + { authorization }. +
+ +### Installation access token authentication + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "token" +
+ token + + string + + The installation access token. +
+ tokenType + + string + + "installation" +
+ installationId + + number + + Installation database ID. +
+ createdAt + + string + + Timestamp in UTC format, e.g. "2018-07-07T00:00:00.000Z". A Date object can be created using new Date(authentication.createdAt). +
+ expiresAt + + string + + Timestamp in UTC format, e.g. "2018-07-07T00:59:00.000Z". A Date object can be created using new Date(authentication.expiresAt). +
+ repositoryIds + + array of numbers + + Only present if repositoryIds option passed to auth(options). +
+ repositoryNames + + array of strings + + Only present if repositoryNames option passed to auth(options). +
+ permissions + + object + + An object where keys are the permission name and the value is either "read" or "write". See the list of all GitHub App Permissions. +
+ singleFileName + + string + + If the single file permission is enabled, the singleFileName property is set to the path of the accessible file. +
+ +### GitHub APP user authentication token with expiring disabled + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "token" +
+ tokenType + + string + + "oauth" +
+ clientType + + string + + "github-app" +
+ clientId + + string + + The app's Client ID +
+ clientSecret + + string + + One of the app's client secrets +
+ token + + string + + The user access token +
+ +### GitHub APP user authentication token with expiring enabled + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "token" +
+ tokenType + + string + + "oauth" +
+ clientType + + string + + "github-app" +
+ clientId + + string + + The app's Client ID +
+ clientSecret + + string + + One of the app's client secrets +
+ token + + string + + The user access token +
+ refreshToken + + string + + The refresh token +
+ expiresAt + + string + + Date timestamp in ISO 8601 format. Example: 2022-01-01T08:00:00.000Z +
+ refreshTokenExpiresAt + + string + + Date timestamp in ISO 8601 format. Example: 2021-07-01T00:00:00.000Z +
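+The documented timestamps can be used to decide when to re-authenticate (a minimal sketch; the `code` value is a placeholder):
+
+```js
+const authentication = await auth({ type: "oauth-user", code: "123456" });
+
+if (new Date(authentication.expiresAt) < new Date()) {
+  // the user access token has expired; it can be renewed with the
+  // refresh token until authentication.refreshTokenExpiresAt
+  console.log("token expired, refresh before %s", authentication.refreshTokenExpiresAt);
+}
+```
+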
+ +## `auth.hook(request, route, parameters)` or `auth.hook(request, options)` + +`auth.hook()` hooks directly into the request life cycle. It amends the request to authenticate either as app or as installation, based on the request URL. Although the `"machine-man"` preview [has graduated to the official API](https://developer.github.com/changes/2020-08-20-graduate-machine-man-and-sailor-v-previews/), it is still required in GitHub Enterprise versions up to 2.21, so `auth.hook()` automatically sets the `"machine-man"` preview for all endpoints that require JWT authentication. + +The `request` option is an instance of [`@octokit/request`](https://github.com/octokit/request.js#readme). The arguments are the same as for the [`request()` method](https://github.com/octokit/request.js#request). + +`auth.hook()` can be called directly to send an authenticated request: + +```js +const { data: installations } = await auth.hook( + request, + "GET /app/installations" +); +``` + +Or it can be passed as an option to [`request()`](https://github.com/octokit/request.js#request): + +```js +const requestWithAuth = request.defaults({ + request: { + hook: auth.hook, + }, +}); + +const { data: installations } = await requestWithAuth("GET /app/installations"); +``` + +Note that `auth.hook()` does not create and set an OAuth authentication token; you can use [`@octokit/auth-oauth-app`](https://github.com/octokit/auth-oauth-app.js#readme) for that functionality. And if you don't plan on sending requests to routes that require authentication with `client_id` and `client_secret`, you can simply retrieve the token and then create a new instance of [`request()`](https://github.com/octokit/request.js#request) with the authorization header set: + +```js +const { token } = await auth({ + type: "oauth-user", + code: "123456", +}); +const requestWithAuth = request.defaults({ + headers: { + authorization: `token ${token}`, + }, +}); +``` + +## Types + +```ts +import { + // strategy options + StrategyOptions, + // auth options + AuthOptions, + AppAuthOptions, + OAuthAppAuthOptions, + InstallationAuthOptions, + OAuthWebFlowAuthOptions, + OAuthDeviceFlowAuthOptions, + // authentication objects + Authentication, + AppAuthentication, + OAuthAppAuthentication, + InstallationAccessTokenAuthentication, + GitHubAppUserAuthentication, + GitHubAppUserAuthenticationWithExpiration, +} from "@octokit/auth-app"; +``` + +## Implementation details + +When creating a JSON Web Token, the library sets the "issued at time" (iat) claim to 30 seconds in the past, because we have seen situations where the GitHub API claimed the iat was in the future; it turned out that the clocks on the different machines were not in sync. + +Installation access tokens are valid for 60 minutes. This library invalidates them after 59 minutes to account for request delays. + +All OAuth features are implemented internally using [@octokit/auth-oauth-app](https://github.com/octokit/auth-oauth-app.js/#readme). 
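+To illustrate the `iat` backdating described above, the generated JWT payload roughly has the shape sketched below (an illustration only; the actual token is created and signed by the `universal-github-app-jwt` dependency, and the 10-minute lifetime reflects GitHub's documented maximum for App JWTs):
+
+```js
+const appId = 1; // placeholder
+const now = Math.floor(Date.now() / 1000);
+const payload = {
+  iat: now - 30,      // issued 30 seconds in the past to tolerate clock drift
+  exp: now + 10 * 60, // GitHub rejects JWTs that are valid for longer than 10 minutes
+  iss: appId,         // the GitHub App's ID
+};
+```
+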
+ +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/auth-app/dist-node/index.js b/dist/node_modules/@octokit/auth-app/dist-node/index.js new file mode 100644 index 0000000..07aa563 --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-node/index.js @@ -0,0 +1,477 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createAppAuth: () => createAppAuth, + createOAuthUserAuth: () => import_auth_oauth_user2.createOAuthUserAuth +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = require("universal-user-agent"); +var import_request = require("@octokit/request"); +var import_auth_oauth_app = require("@octokit/auth-oauth-app"); + +// pkg/dist-src/auth.js +var import_deprecation = require("deprecation"); + +// pkg/dist-src/get-app-authentication.js +var import_universal_github_app_jwt = require("universal-github-app-jwt"); +async function getAppAuthentication({ + appId, + privateKey, + timeDifference +}) { + try { + const appAuthentication = await (0, import_universal_github_app_jwt.githubAppJwt)({ + id: +appId, + privateKey, + now: timeDifference && Math.floor(Date.now() / 1e3) + timeDifference + }); + return { + type: "app", + token: appAuthentication.token, + appId: appAuthentication.appId, + expiresAt: new Date(appAuthentication.expiration * 1e3).toISOString() + }; + } catch (error) { + if (privateKey === "-----BEGIN RSA PRIVATE KEY-----") { + throw new Error( + "The 'privateKey` option contains only the first line '-----BEGIN RSA PRIVATE KEY-----'. If you are setting it using a `.env` file, make sure it is set on a single line with newlines replaced by '\n'" + ); + } else { + throw error; + } + } +} + +// pkg/dist-src/cache.js +var import_lru_cache = require("lru-cache"); +function getCache() { + return new import_lru_cache.LRUCache({ + // cache max. 
15000 tokens, that will use less than 10mb memory + max: 15e3, + // Cache for 1 minute less than GitHub expiry + ttl: 1e3 * 60 * 59 + }); +} +async function get(cache, options) { + const cacheKey = optionsToCacheKey(options); + const result = await cache.get(cacheKey); + if (!result) { + return; + } + const [ + token, + createdAt, + expiresAt, + repositorySelection, + permissionsString, + singleFileName + ] = result.split("|"); + const permissions = options.permissions || permissionsString.split(/,/).reduce((permissions2, string) => { + if (/!$/.test(string)) { + permissions2[string.slice(0, -1)] = "write"; + } else { + permissions2[string] = "read"; + } + return permissions2; + }, {}); + return { + token, + createdAt, + expiresAt, + permissions, + repositoryIds: options.repositoryIds, + repositoryNames: options.repositoryNames, + singleFileName, + repositorySelection + }; +} +async function set(cache, options, data) { + const key = optionsToCacheKey(options); + const permissionsString = options.permissions ? "" : Object.keys(data.permissions).map( + (name) => `${name}${data.permissions[name] === "write" ? "!" : ""}` + ).join(","); + const value = [ + data.token, + data.createdAt, + data.expiresAt, + data.repositorySelection, + permissionsString, + data.singleFileName + ].join("|"); + await cache.set(key, value); +} +function optionsToCacheKey({ + installationId, + permissions = {}, + repositoryIds = [], + repositoryNames = [] +}) { + const permissionsString = Object.keys(permissions).sort().map((name) => permissions[name] === "read" ? name : `${name}!`).join(","); + const repositoryIdsString = repositoryIds.sort().join(","); + const repositoryNamesString = repositoryNames.join(","); + return [ + installationId, + repositoryIdsString, + repositoryNamesString, + permissionsString + ].filter(Boolean).join("|"); +} + +// pkg/dist-src/to-token-authentication.js +function toTokenAuthentication({ + installationId, + token, + createdAt, + expiresAt, + repositorySelection, + permissions, + repositoryIds, + repositoryNames, + singleFileName +}) { + return Object.assign( + { + type: "token", + tokenType: "installation", + token, + installationId, + permissions, + createdAt, + expiresAt, + repositorySelection + }, + repositoryIds ? { repositoryIds } : null, + repositoryNames ? { repositoryNames } : null, + singleFileName ? { singleFileName } : null + ); +} + +// pkg/dist-src/get-installation-authentication.js +async function getInstallationAuthentication(state, options, customRequest) { + const installationId = Number(options.installationId || state.installationId); + if (!installationId) { + throw new Error( + "[@octokit/auth-app] installationId option is required for installation authentication." 
+ ); + } + if (options.factory) { + const { type, factory, oauthApp, ...factoryAuthOptions } = { + ...state, + ...options + }; + return factory(factoryAuthOptions); + } + const optionsWithInstallationTokenFromState = Object.assign( + { installationId }, + options + ); + if (!options.refresh) { + const result = await get( + state.cache, + optionsWithInstallationTokenFromState + ); + if (result) { + const { + token: token2, + createdAt: createdAt2, + expiresAt: expiresAt2, + permissions: permissions2, + repositoryIds: repositoryIds2, + repositoryNames: repositoryNames2, + singleFileName: singleFileName2, + repositorySelection: repositorySelection2 + } = result; + return toTokenAuthentication({ + installationId, + token: token2, + createdAt: createdAt2, + expiresAt: expiresAt2, + permissions: permissions2, + repositorySelection: repositorySelection2, + repositoryIds: repositoryIds2, + repositoryNames: repositoryNames2, + singleFileName: singleFileName2 + }); + } + } + const appAuthentication = await getAppAuthentication(state); + const request = customRequest || state.request; + const { + data: { + token, + expires_at: expiresAt, + repositories, + permissions: permissionsOptional, + repository_selection: repositorySelectionOptional, + single_file: singleFileName + } + } = await request("POST /app/installations/{installation_id}/access_tokens", { + installation_id: installationId, + repository_ids: options.repositoryIds, + repositories: options.repositoryNames, + permissions: options.permissions, + mediaType: { + previews: ["machine-man"] + }, + headers: { + authorization: `bearer ${appAuthentication.token}` + } + }); + const permissions = permissionsOptional || {}; + const repositorySelection = repositorySelectionOptional || "all"; + const repositoryIds = repositories ? repositories.map((r) => r.id) : void 0; + const repositoryNames = repositories ? repositories.map((repo) => repo.name) : void 0; + const createdAt = (/* @__PURE__ */ new Date()).toISOString(); + await set(state.cache, optionsWithInstallationTokenFromState, { + token, + createdAt, + expiresAt, + repositorySelection, + permissions, + repositoryIds, + repositoryNames, + singleFileName + }); + return toTokenAuthentication({ + installationId, + token, + createdAt, + expiresAt, + repositorySelection, + permissions, + repositoryIds, + repositoryNames, + singleFileName + }); +} + +// pkg/dist-src/auth.js +async function auth(state, authOptions) { + switch (authOptions.type) { + case "app": + return getAppAuthentication(state); + case "oauth": + state.log.warn( + // @ts-expect-error `log.warn()` expects string + new import_deprecation.Deprecation( + `[@octokit/auth-app] {type: "oauth"} is deprecated. 
Use {type: "oauth-app"} instead` + ) + ); + case "oauth-app": + return state.oauthApp({ type: "oauth-app" }); + case "installation": + authOptions; + return getInstallationAuthentication(state, { + ...authOptions, + type: "installation" + }); + case "oauth-user": + return state.oauthApp(authOptions); + default: + throw new Error(`Invalid auth type: ${authOptions.type}`); + } +} + +// pkg/dist-src/hook.js +var import_auth_oauth_user = require("@octokit/auth-oauth-user"); + +// pkg/dist-src/requires-app-auth.js +var PATHS = [ + "/app", + "/app/hook/config", + "/app/hook/deliveries", + "/app/hook/deliveries/{delivery_id}", + "/app/hook/deliveries/{delivery_id}/attempts", + "/app/installations", + "/app/installations/{installation_id}", + "/app/installations/{installation_id}/access_tokens", + "/app/installations/{installation_id}/suspended", + "/marketplace_listing/accounts/{account_id}", + "/marketplace_listing/plan", + "/marketplace_listing/plans", + "/marketplace_listing/plans/{plan_id}/accounts", + "/marketplace_listing/stubbed/accounts/{account_id}", + "/marketplace_listing/stubbed/plan", + "/marketplace_listing/stubbed/plans", + "/marketplace_listing/stubbed/plans/{plan_id}/accounts", + "/orgs/{org}/installation", + "/repos/{owner}/{repo}/installation", + "/users/{username}/installation" +]; +function routeMatcher(paths) { + const regexes = paths.map( + (p) => p.split("/").map((c) => c.startsWith("{") ? "(?:.+?)" : c).join("/") + ); + const regex = `^(?:${regexes.map((r) => `(?:${r})`).join("|")})$`; + return new RegExp(regex, "i"); +} +var REGEX = routeMatcher(PATHS); +function requiresAppAuth(url) { + return !!url && REGEX.test(url.split("?")[0]); +} + +// pkg/dist-src/hook.js +var FIVE_SECONDS_IN_MS = 5 * 1e3; +function isNotTimeSkewError(error) { + return !(error.message.match( + /'Expiration time' claim \('exp'\) must be a numeric value representing the future time at which the assertion expires/ + ) || error.message.match( + /'Issued at' claim \('iat'\) must be an Integer representing the time that the assertion was issued/ + )); +} +async function hook(state, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + const url = endpoint.url; + if (/\/login\/oauth\/access_token$/.test(url)) { + return request(endpoint); + } + if (requiresAppAuth(url.replace(request.endpoint.DEFAULTS.baseUrl, ""))) { + const { token: token2 } = await getAppAuthentication(state); + endpoint.headers.authorization = `bearer ${token2}`; + let response; + try { + response = await request(endpoint); + } catch (error) { + if (isNotTimeSkewError(error)) { + throw error; + } + if (typeof error.response.headers.date === "undefined") { + throw error; + } + const diff = Math.floor( + (Date.parse(error.response.headers.date) - Date.parse((/* @__PURE__ */ new Date()).toString())) / 1e3 + ); + state.log.warn(error.message); + state.log.warn( + `[@octokit/auth-app] GitHub API time and system time are different by ${diff} seconds. 
Retrying request with the difference accounted for.` + ); + const { token: token3 } = await getAppAuthentication({ + ...state, + timeDifference: diff + }); + endpoint.headers.authorization = `bearer ${token3}`; + return request(endpoint); + } + return response; + } + if ((0, import_auth_oauth_user.requiresBasicAuth)(url)) { + const authentication = await state.oauthApp({ type: "oauth-app" }); + endpoint.headers.authorization = authentication.headers.authorization; + return request(endpoint); + } + const { token, createdAt } = await getInstallationAuthentication( + state, + // @ts-expect-error TBD + {}, + request + ); + endpoint.headers.authorization = `token ${token}`; + return sendRequestWithRetries( + state, + request, + endpoint, + createdAt + ); +} +async function sendRequestWithRetries(state, request, options, createdAt, retries = 0) { + const timeSinceTokenCreationInMs = +/* @__PURE__ */ new Date() - +new Date(createdAt); + try { + return await request(options); + } catch (error) { + if (error.status !== 401) { + throw error; + } + if (timeSinceTokenCreationInMs >= FIVE_SECONDS_IN_MS) { + if (retries > 0) { + error.message = `After ${retries} retries within ${timeSinceTokenCreationInMs / 1e3}s of creating the installation access token, the response remains 401. At this point, the cause may be an authentication problem or a system outage. Please check https://www.githubstatus.com for status information`; + } + throw error; + } + ++retries; + const awaitTime = retries * 1e3; + state.log.warn( + `[@octokit/auth-app] Retrying after 401 response to account for token replication delay (retry: ${retries}, wait: ${awaitTime / 1e3}s)` + ); + await new Promise((resolve) => setTimeout(resolve, awaitTime)); + return sendRequestWithRetries(state, request, options, createdAt, retries); + } +} + +// pkg/dist-src/version.js +var VERSION = "4.0.13"; + +// pkg/dist-src/index.js +var import_auth_oauth_user2 = require("@octokit/auth-oauth-user"); +function createAppAuth(options) { + if (!options.appId) { + throw new Error("[@octokit/auth-app] appId option is required"); + } + if (!Number.isFinite(+options.appId)) { + throw new Error( + "[@octokit/auth-app] appId option must be a number or numeric string" + ); + } + if (!options.privateKey) { + throw new Error("[@octokit/auth-app] privateKey option is required"); + } + if ("installationId" in options && !options.installationId) { + throw new Error( + "[@octokit/auth-app] installationId is set to a falsy value" + ); + } + const log = Object.assign( + { + warn: console.warn.bind(console) + }, + options.log + ); + const request = options.request || import_request.request.defaults({ + headers: { + "user-agent": `octokit-auth-app.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + } + }); + const state = Object.assign( + { + request, + cache: getCache() + }, + options, + options.installationId ? 
{ installationId: Number(options.installationId) } : {}, + { + log, + oauthApp: (0, import_auth_oauth_app.createOAuthAppAuth)({ + clientType: "github-app", + clientId: options.clientId || "", + clientSecret: options.clientSecret || "", + request + }) + } + ); + return Object.assign(auth.bind(null, state), { + hook: hook.bind(null, state) + }); +} +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + createAppAuth, + createOAuthUserAuth +}); diff --git a/dist/node_modules/@octokit/auth-app/dist-node/index.js.map b/dist/node_modules/@octokit/auth-app/dist-node/index.js.map new file mode 100644 index 0000000..745a1b8 --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/auth.js", "../dist-src/get-app-authentication.js", "../dist-src/cache.js", "../dist-src/to-token-authentication.js", "../dist-src/get-installation-authentication.js", "../dist-src/hook.js", "../dist-src/requires-app-auth.js", "../dist-src/version.js"], + "sourcesContent": ["import { getUserAgent } from \"universal-user-agent\";\nimport { request as defaultRequest } from \"@octokit/request\";\nimport { createOAuthAppAuth } from \"@octokit/auth-oauth-app\";\nimport { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nimport { getCache } from \"./cache\";\nimport { VERSION } from \"./version\";\nimport { createOAuthUserAuth } from \"@octokit/auth-oauth-user\";\nfunction createAppAuth(options) {\n if (!options.appId) {\n throw new Error(\"[@octokit/auth-app] appId option is required\");\n }\n if (!Number.isFinite(+options.appId)) {\n throw new Error(\n \"[@octokit/auth-app] appId option must be a number or numeric string\"\n );\n }\n if (!options.privateKey) {\n throw new Error(\"[@octokit/auth-app] privateKey option is required\");\n }\n if (\"installationId\" in options && !options.installationId) {\n throw new Error(\n \"[@octokit/auth-app] installationId is set to a falsy value\"\n );\n }\n const log = Object.assign(\n {\n warn: console.warn.bind(console)\n },\n options.log\n );\n const request = options.request || defaultRequest.defaults({\n headers: {\n \"user-agent\": `octokit-auth-app.js/${VERSION} ${getUserAgent()}`\n }\n });\n const state = Object.assign(\n {\n request,\n cache: getCache()\n },\n options,\n options.installationId ? { installationId: Number(options.installationId) } : {},\n {\n log,\n oauthApp: createOAuthAppAuth({\n clientType: \"github-app\",\n clientId: options.clientId || \"\",\n clientSecret: options.clientSecret || \"\",\n request\n })\n }\n );\n return Object.assign(auth.bind(null, state), {\n hook: hook.bind(null, state)\n });\n}\nexport {\n createAppAuth,\n createOAuthUserAuth\n};\n", "import { Deprecation } from \"deprecation\";\nimport { getAppAuthentication } from \"./get-app-authentication\";\nimport { getInstallationAuthentication } from \"./get-installation-authentication\";\nasync function auth(state, authOptions) {\n switch (authOptions.type) {\n case \"app\":\n return getAppAuthentication(state);\n case \"oauth\":\n state.log.warn(\n // @ts-expect-error `log.warn()` expects string\n new Deprecation(\n `[@octokit/auth-app] {type: \"oauth\"} is deprecated. 
Use {type: \"oauth-app\"} instead`\n )\n );\n case \"oauth-app\":\n return state.oauthApp({ type: \"oauth-app\" });\n case \"installation\":\n authOptions;\n return getInstallationAuthentication(state, {\n ...authOptions,\n type: \"installation\"\n });\n case \"oauth-user\":\n return state.oauthApp(authOptions);\n default:\n throw new Error(`Invalid auth type: ${authOptions.type}`);\n }\n}\nexport {\n auth\n};\n", "import { githubAppJwt } from \"universal-github-app-jwt\";\nasync function getAppAuthentication({\n appId,\n privateKey,\n timeDifference\n}) {\n try {\n const appAuthentication = await githubAppJwt({\n id: +appId,\n privateKey,\n now: timeDifference && Math.floor(Date.now() / 1e3) + timeDifference\n });\n return {\n type: \"app\",\n token: appAuthentication.token,\n appId: appAuthentication.appId,\n expiresAt: new Date(appAuthentication.expiration * 1e3).toISOString()\n };\n } catch (error) {\n if (privateKey === \"-----BEGIN RSA PRIVATE KEY-----\") {\n throw new Error(\n \"The 'privateKey` option contains only the first line '-----BEGIN RSA PRIVATE KEY-----'. If you are setting it using a `.env` file, make sure it is set on a single line with newlines replaced by '\\n'\"\n );\n } else {\n throw error;\n }\n }\n}\nexport {\n getAppAuthentication\n};\n", "import { LRUCache } from \"lru-cache\";\nfunction getCache() {\n return new LRUCache({\n // cache max. 15000 tokens, that will use less than 10mb memory\n max: 15e3,\n // Cache for 1 minute less than GitHub expiry\n ttl: 1e3 * 60 * 59\n });\n}\nasync function get(cache, options) {\n const cacheKey = optionsToCacheKey(options);\n const result = await cache.get(cacheKey);\n if (!result) {\n return;\n }\n const [\n token,\n createdAt,\n expiresAt,\n repositorySelection,\n permissionsString,\n singleFileName\n ] = result.split(\"|\");\n const permissions = options.permissions || permissionsString.split(/,/).reduce((permissions2, string) => {\n if (/!$/.test(string)) {\n permissions2[string.slice(0, -1)] = \"write\";\n } else {\n permissions2[string] = \"read\";\n }\n return permissions2;\n }, {});\n return {\n token,\n createdAt,\n expiresAt,\n permissions,\n repositoryIds: options.repositoryIds,\n repositoryNames: options.repositoryNames,\n singleFileName,\n repositorySelection\n };\n}\nasync function set(cache, options, data) {\n const key = optionsToCacheKey(options);\n const permissionsString = options.permissions ? \"\" : Object.keys(data.permissions).map(\n (name) => `${name}${data.permissions[name] === \"write\" ? \"!\" : \"\"}`\n ).join(\",\");\n const value = [\n data.token,\n data.createdAt,\n data.expiresAt,\n data.repositorySelection,\n permissionsString,\n data.singleFileName\n ].join(\"|\");\n await cache.set(key, value);\n}\nfunction optionsToCacheKey({\n installationId,\n permissions = {},\n repositoryIds = [],\n repositoryNames = []\n}) {\n const permissionsString = Object.keys(permissions).sort().map((name) => permissions[name] === \"read\" ? 
name : `${name}!`).join(\",\");\n const repositoryIdsString = repositoryIds.sort().join(\",\");\n const repositoryNamesString = repositoryNames.join(\",\");\n return [\n installationId,\n repositoryIdsString,\n repositoryNamesString,\n permissionsString\n ].filter(Boolean).join(\"|\");\n}\nexport {\n get,\n getCache,\n set\n};\n", "function toTokenAuthentication({\n installationId,\n token,\n createdAt,\n expiresAt,\n repositorySelection,\n permissions,\n repositoryIds,\n repositoryNames,\n singleFileName\n}) {\n return Object.assign(\n {\n type: \"token\",\n tokenType: \"installation\",\n token,\n installationId,\n permissions,\n createdAt,\n expiresAt,\n repositorySelection\n },\n repositoryIds ? { repositoryIds } : null,\n repositoryNames ? { repositoryNames } : null,\n singleFileName ? { singleFileName } : null\n );\n}\nexport {\n toTokenAuthentication\n};\n", "import { get, set } from \"./cache\";\nimport { getAppAuthentication } from \"./get-app-authentication\";\nimport { toTokenAuthentication } from \"./to-token-authentication\";\nasync function getInstallationAuthentication(state, options, customRequest) {\n const installationId = Number(options.installationId || state.installationId);\n if (!installationId) {\n throw new Error(\n \"[@octokit/auth-app] installationId option is required for installation authentication.\"\n );\n }\n if (options.factory) {\n const { type, factory, oauthApp, ...factoryAuthOptions } = {\n ...state,\n ...options\n };\n return factory(factoryAuthOptions);\n }\n const optionsWithInstallationTokenFromState = Object.assign(\n { installationId },\n options\n );\n if (!options.refresh) {\n const result = await get(\n state.cache,\n optionsWithInstallationTokenFromState\n );\n if (result) {\n const {\n token: token2,\n createdAt: createdAt2,\n expiresAt: expiresAt2,\n permissions: permissions2,\n repositoryIds: repositoryIds2,\n repositoryNames: repositoryNames2,\n singleFileName: singleFileName2,\n repositorySelection: repositorySelection2\n } = result;\n return toTokenAuthentication({\n installationId,\n token: token2,\n createdAt: createdAt2,\n expiresAt: expiresAt2,\n permissions: permissions2,\n repositorySelection: repositorySelection2,\n repositoryIds: repositoryIds2,\n repositoryNames: repositoryNames2,\n singleFileName: singleFileName2\n });\n }\n }\n const appAuthentication = await getAppAuthentication(state);\n const request = customRequest || state.request;\n const {\n data: {\n token,\n expires_at: expiresAt,\n repositories,\n permissions: permissionsOptional,\n repository_selection: repositorySelectionOptional,\n single_file: singleFileName\n }\n } = await request(\"POST /app/installations/{installation_id}/access_tokens\", {\n installation_id: installationId,\n repository_ids: options.repositoryIds,\n repositories: options.repositoryNames,\n permissions: options.permissions,\n mediaType: {\n previews: [\"machine-man\"]\n },\n headers: {\n authorization: `bearer ${appAuthentication.token}`\n }\n });\n const permissions = permissionsOptional || {};\n const repositorySelection = repositorySelectionOptional || \"all\";\n const repositoryIds = repositories ? repositories.map((r) => r.id) : void 0;\n const repositoryNames = repositories ? 
repositories.map((repo) => repo.name) : void 0;\n const createdAt = (/* @__PURE__ */ new Date()).toISOString();\n await set(state.cache, optionsWithInstallationTokenFromState, {\n token,\n createdAt,\n expiresAt,\n repositorySelection,\n permissions,\n repositoryIds,\n repositoryNames,\n singleFileName\n });\n return toTokenAuthentication({\n installationId,\n token,\n createdAt,\n expiresAt,\n repositorySelection,\n permissions,\n repositoryIds,\n repositoryNames,\n singleFileName\n });\n}\nexport {\n getInstallationAuthentication\n};\n", "import { requiresBasicAuth } from \"@octokit/auth-oauth-user\";\nimport { getAppAuthentication } from \"./get-app-authentication\";\nimport { getInstallationAuthentication } from \"./get-installation-authentication\";\nimport { requiresAppAuth } from \"./requires-app-auth\";\nconst FIVE_SECONDS_IN_MS = 5 * 1e3;\nfunction isNotTimeSkewError(error) {\n return !(error.message.match(\n /'Expiration time' claim \\('exp'\\) must be a numeric value representing the future time at which the assertion expires/\n ) || error.message.match(\n /'Issued at' claim \\('iat'\\) must be an Integer representing the time that the assertion was issued/\n ));\n}\nasync function hook(state, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n const url = endpoint.url;\n if (/\\/login\\/oauth\\/access_token$/.test(url)) {\n return request(endpoint);\n }\n if (requiresAppAuth(url.replace(request.endpoint.DEFAULTS.baseUrl, \"\"))) {\n const { token: token2 } = await getAppAuthentication(state);\n endpoint.headers.authorization = `bearer ${token2}`;\n let response;\n try {\n response = await request(endpoint);\n } catch (error) {\n if (isNotTimeSkewError(error)) {\n throw error;\n }\n if (typeof error.response.headers.date === \"undefined\") {\n throw error;\n }\n const diff = Math.floor(\n (Date.parse(error.response.headers.date) - Date.parse((/* @__PURE__ */ new Date()).toString())) / 1e3\n );\n state.log.warn(error.message);\n state.log.warn(\n `[@octokit/auth-app] GitHub API time and system time are different by ${diff} seconds. Retrying request with the difference accounted for.`\n );\n const { token: token3 } = await getAppAuthentication({\n ...state,\n timeDifference: diff\n });\n endpoint.headers.authorization = `bearer ${token3}`;\n return request(endpoint);\n }\n return response;\n }\n if (requiresBasicAuth(url)) {\n const authentication = await state.oauthApp({ type: \"oauth-app\" });\n endpoint.headers.authorization = authentication.headers.authorization;\n return request(endpoint);\n }\n const { token, createdAt } = await getInstallationAuthentication(\n state,\n // @ts-expect-error TBD\n {},\n request\n );\n endpoint.headers.authorization = `token ${token}`;\n return sendRequestWithRetries(\n state,\n request,\n endpoint,\n createdAt\n );\n}\nasync function sendRequestWithRetries(state, request, options, createdAt, retries = 0) {\n const timeSinceTokenCreationInMs = +/* @__PURE__ */ new Date() - +new Date(createdAt);\n try {\n return await request(options);\n } catch (error) {\n if (error.status !== 401) {\n throw error;\n }\n if (timeSinceTokenCreationInMs >= FIVE_SECONDS_IN_MS) {\n if (retries > 0) {\n error.message = `After ${retries} retries within ${timeSinceTokenCreationInMs / 1e3}s of creating the installation access token, the response remains 401. At this point, the cause may be an authentication problem or a system outage. 
Please check https://www.githubstatus.com for status information`;\n }\n throw error;\n }\n ++retries;\n const awaitTime = retries * 1e3;\n state.log.warn(\n `[@octokit/auth-app] Retrying after 401 response to account for token replication delay (retry: ${retries}, wait: ${awaitTime / 1e3}s)`\n );\n await new Promise((resolve) => setTimeout(resolve, awaitTime));\n return sendRequestWithRetries(state, request, options, createdAt, retries);\n }\n}\nexport {\n hook\n};\n", "const PATHS = [\n \"/app\",\n \"/app/hook/config\",\n \"/app/hook/deliveries\",\n \"/app/hook/deliveries/{delivery_id}\",\n \"/app/hook/deliveries/{delivery_id}/attempts\",\n \"/app/installations\",\n \"/app/installations/{installation_id}\",\n \"/app/installations/{installation_id}/access_tokens\",\n \"/app/installations/{installation_id}/suspended\",\n \"/marketplace_listing/accounts/{account_id}\",\n \"/marketplace_listing/plan\",\n \"/marketplace_listing/plans\",\n \"/marketplace_listing/plans/{plan_id}/accounts\",\n \"/marketplace_listing/stubbed/accounts/{account_id}\",\n \"/marketplace_listing/stubbed/plan\",\n \"/marketplace_listing/stubbed/plans\",\n \"/marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"/orgs/{org}/installation\",\n \"/repos/{owner}/{repo}/installation\",\n \"/users/{username}/installation\"\n];\nfunction routeMatcher(paths) {\n const regexes = paths.map(\n (p) => p.split(\"/\").map((c) => c.startsWith(\"{\") ? \"(?:.+?)\" : c).join(\"/\")\n );\n const regex = `^(?:${regexes.map((r) => `(?:${r})`).join(\"|\")})$`;\n return new RegExp(regex, \"i\");\n}\nconst REGEX = routeMatcher(PATHS);\nfunction requiresAppAuth(url) {\n return !!url && REGEX.test(url.split(\"?\")[0]);\n}\nexport {\n requiresAppAuth\n};\n", "const VERSION = \"4.0.13\";\nexport {\n VERSION\n};\n"], + "mappings": 
";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAA6B;AAC7B,qBAA0C;AAC1C,4BAAmC;;;ACFnC,yBAA4B;;;ACA5B,sCAA6B;AAC7B,eAAe,qBAAqB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAAG;AACD,MAAI;AACF,UAAM,oBAAoB,UAAM,8CAAa;AAAA,MAC3C,IAAI,CAAC;AAAA,MACL;AAAA,MACA,KAAK,kBAAkB,KAAK,MAAM,KAAK,IAAI,IAAI,GAAG,IAAI;AAAA,IACxD,CAAC;AACD,WAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO,kBAAkB;AAAA,MACzB,OAAO,kBAAkB;AAAA,MACzB,WAAW,IAAI,KAAK,kBAAkB,aAAa,GAAG,EAAE,YAAY;AAAA,IACtE;AAAA,EACF,SAAS,OAAP;AACA,QAAI,eAAe,mCAAmC;AACpD,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;;;AC3BA,uBAAyB;AACzB,SAAS,WAAW;AAClB,SAAO,IAAI,0BAAS;AAAA;AAAA,IAElB,KAAK;AAAA;AAAA,IAEL,KAAK,MAAM,KAAK;AAAA,EAClB,CAAC;AACH;AACA,eAAe,IAAI,OAAO,SAAS;AACjC,QAAM,WAAW,kBAAkB,OAAO;AAC1C,QAAM,SAAS,MAAM,MAAM,IAAI,QAAQ;AACvC,MAAI,CAAC,QAAQ;AACX;AAAA,EACF;AACA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,OAAO,MAAM,GAAG;AACpB,QAAM,cAAc,QAAQ,eAAe,kBAAkB,MAAM,GAAG,EAAE,OAAO,CAAC,cAAc,WAAW;AACvG,QAAI,KAAK,KAAK,MAAM,GAAG;AACrB,mBAAa,OAAO,MAAM,GAAG,EAAE,CAAC,IAAI;AAAA,IACtC,OAAO;AACL,mBAAa,MAAM,IAAI;AAAA,IACzB;AACA,WAAO;AAAA,EACT,GAAG,CAAC,CAAC;AACL,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe,QAAQ;AAAA,IACvB,iBAAiB,QAAQ;AAAA,IACzB;AAAA,IACA;AAAA,EACF;AACF;AACA,eAAe,IAAI,OAAO,SAAS,MAAM;AACvC,QAAM,MAAM,kBAAkB,OAAO;AACrC,QAAM,oBAAoB,QAAQ,cAAc,KAAK,OAAO,KAAK,KAAK,WAAW,EAAE;AAAA,IACjF,CAAC,SAAS,GAAG,OAAO,KAAK,YAAY,IAAI,MAAM,UAAU,MAAM;AAAA,EACjE,EAAE,KAAK,GAAG;AACV,QAAM,QAAQ;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL;AAAA,IACA,KAAK;AAAA,EACP,EAAE,KAAK,GAAG;AACV,QAAM,MAAM,IAAI,KAAK,KAAK;AAC5B;AACA,SAAS,kBAAkB;AAAA,EACzB;AAAA,EACA,cAAc,CAAC;AAAA,EACf,gBAAgB,CAAC;AAAA,EACjB,kBAAkB,CAAC;AACrB,GAAG;AACD,QAAM,oBAAoB,OAAO,KAAK,WAAW,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,YAAY,IAAI,MAAM,SAAS,OAAO,GAAG,OAAO,EAAE,KAAK,GAAG;AAClI,QAAM,sBAAsB,cAAc,KAAK,EAAE,KAAK,GAAG;AACzD,QAAM,wBAAwB,gBAAgB,KAAK,GAAG;AACtD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,EAAE,OAAO,OAAO,EAAE,KAAK,GAAG;AAC5B;;;ACxEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAAG;AACD,SAAO,OAAO;AAAA,IACZ;AAAA,MACE,MAAM;AAAA,MACN,WAAW;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,gBAAgB,EAAE,cAAc,IAAI;AAAA,IACpC,kBAAkB,EAAE,gBAAgB,IAAI;AAAA,IACxC,iBAAiB,EAAE,eAAe,IAAI;AAAA,EACxC;AACF;;;ACvBA,eAAe,8BAA8B,OAAO,SAAS,eAAe;AAC1E,QAAM,iBAAiB,OAAO,QAAQ,kBAAkB,MAAM,cAAc;AAC5E,MAAI,CAAC,gBAAgB;AACnB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,QAAQ,SAAS;AACnB,UAAM,EAAE,MAAM,SAAS,UAAU,GAAG,mBAAmB,IAAI;AAAA,MACzD,GAAG;AAAA,MACH,GAAG;AAAA,IACL;AACA,WAAO,QAAQ,kBAAkB;AAAA,EACnC;AACA,QAAM,wCAAwC,OAAO;AAAA,IACnD,EAAE,eAAe;AAAA,IACjB;AAAA,EACF;AACA,MAAI,CAAC,QAAQ,SAAS;AACpB,UAAM,SAAS,MAAM;AAAA,MACnB,MAAM;AAAA,MACN;AAAA,IACF;AACA,QAAI,QAAQ;AACV,YAAM;AAAA,QACJ,OAAO;AAAA,QACP,WAAW;AAAA,QACX,WAAW;AAAA,QACX,aAAa;AAAA,QACb,eAAe;AAAA,QACf,iBAAiB;AAAA,QACjB,gBAAgB;AAAA,QAChB,qBAAqB;AAAA,MACvB,IAAI;AACJ,aAAO,sBAAsB;AAAA,QAC3B;AAAA,QACA,OAAO;AAAA,QACP,WAAW;AAAA,QACX,WAAW;AAAA,QACX,aAAa;AAAA,QACb,qBAAqB;AAAA,QACrB,eAAe;AAAA,QACf,iBAAiB;AAAA,QACjB,gBAAgB;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,EACF;AACA,QAAM,oBAAoB,MAAM,qBAAqB,KAAK;AAC1D,QAAM,UAAU,iBAAiB,MAAM;AACvC,QAAM;AAAA,IACJ,MAAM;AAAA,MACJ;AAAA,MACA,YAAY;AAAA,MACZ;AAAA,MACA,aAAa;AAAA,MACb,sBAAsB;AAAA,MACtB,aAAa;AAAA,IACf;AAAA,EACF,IAAI,MAAM,QAAQ,2DAA2D;AAAA,IAC3E,iBAAiB;AAAA,IACjB,gBAAgB,QAAQ;AAAA,IACxB,cAAc,QAAQ;AAAA,IACtB,aAAa,QAAQ;AAAA,IACrB,WAAW;AAAA,MACT,UAAU,CAAC,aAAa;AAAA,IAC1B;AAAA,IA
CA,SAAS;AAAA,MACP,eAAe,UAAU,kBAAkB;AAAA,IAC7C;AAAA,EACF,CAAC;AACD,QAAM,cAAc,uBAAuB,CAAC;AAC5C,QAAM,sBAAsB,+BAA+B;AAC3D,QAAM,gBAAgB,eAAe,aAAa,IAAI,CAAC,MAAM,EAAE,EAAE,IAAI;AACrE,QAAM,kBAAkB,eAAe,aAAa,IAAI,CAAC,SAAS,KAAK,IAAI,IAAI;AAC/E,QAAM,aAA6B,oBAAI,KAAK,GAAG,YAAY;AAC3D,QAAM,IAAI,MAAM,OAAO,uCAAuC;AAAA,IAC5D;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACD,SAAO,sBAAsB;AAAA,IAC3B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;;;AJhGA,eAAe,KAAK,OAAO,aAAa;AACtC,UAAQ,YAAY,MAAM;AAAA,IACxB,KAAK;AACH,aAAO,qBAAqB,KAAK;AAAA,IACnC,KAAK;AACH,YAAM,IAAI;AAAA;AAAA,QAER,IAAI;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO,MAAM,SAAS,EAAE,MAAM,YAAY,CAAC;AAAA,IAC7C,KAAK;AACH;AACA,aAAO,8BAA8B,OAAO;AAAA,QAC1C,GAAG;AAAA,QACH,MAAM;AAAA,MACR,CAAC;AAAA,IACH,KAAK;AACH,aAAO,MAAM,SAAS,WAAW;AAAA,IACnC;AACE,YAAM,IAAI,MAAM,sBAAsB,YAAY,MAAM;AAAA,EAC5D;AACF;;;AK3BA,6BAAkC;;;ACAlC,IAAM,QAAQ;AAAA,EACZ;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AACA,SAAS,aAAa,OAAO;AAC3B,QAAM,UAAU,MAAM;AAAA,IACpB,CAAC,MAAM,EAAE,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,EAAE,WAAW,GAAG,IAAI,YAAY,CAAC,EAAE,KAAK,GAAG;AAAA,EAC5E;AACA,QAAM,QAAQ,OAAO,QAAQ,IAAI,CAAC,MAAM,MAAM,IAAI,EAAE,KAAK,GAAG;AAC5D,SAAO,IAAI,OAAO,OAAO,GAAG;AAC9B;AACA,IAAM,QAAQ,aAAa,KAAK;AAChC,SAAS,gBAAgB,KAAK;AAC5B,SAAO,CAAC,CAAC,OAAO,MAAM,KAAK,IAAI,MAAM,GAAG,EAAE,CAAC,CAAC;AAC9C;;;AD5BA,IAAM,qBAAqB,IAAI;AAC/B,SAAS,mBAAmB,OAAO;AACjC,SAAO,EAAE,MAAM,QAAQ;AAAA,IACrB;AAAA,EACF,KAAK,MAAM,QAAQ;AAAA,IACjB;AAAA,EACF;AACF;AACA,eAAe,KAAK,OAAO,SAAS,OAAO,YAAY;AACrD,QAAM,WAAW,QAAQ,SAAS,MAAM,OAAO,UAAU;AACzD,QAAM,MAAM,SAAS;AACrB,MAAI,gCAAgC,KAAK,GAAG,GAAG;AAC7C,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACA,MAAI,gBAAgB,IAAI,QAAQ,QAAQ,SAAS,SAAS,SAAS,EAAE,CAAC,GAAG;AACvE,UAAM,EAAE,OAAO,OAAO,IAAI,MAAM,qBAAqB,KAAK;AAC1D,aAAS,QAAQ,gBAAgB,UAAU;AAC3C,QAAI;AACJ,QAAI;AACF,iBAAW,MAAM,QAAQ,QAAQ;AAAA,IACnC,SAAS,OAAP;AACA,UAAI,mBAAmB,KAAK,GAAG;AAC7B,cAAM;AAAA,MACR;AACA,UAAI,OAAO,MAAM,SAAS,QAAQ,SAAS,aAAa;AACtD,cAAM;AAAA,MACR;AACA,YAAM,OAAO,KAAK;AAAA,SACf,KAAK,MAAM,MAAM,SAAS,QAAQ,IAAI,IAAI,KAAK,OAAuB,oBAAI,KAAK,GAAG,SAAS,CAAC,KAAK;AAAA,MACpG;AACA,YAAM,IAAI,KAAK,MAAM,OAAO;AAC5B,YAAM,IAAI;AAAA,QACR,wEAAwE;AAAA,MAC1E;AACA,YAAM,EAAE,OAAO,OAAO,IAAI,MAAM,qBAAqB;AAAA,QACnD,GAAG;AAAA,QACH,gBAAgB;AAAA,MAClB,CAAC;AACD,eAAS,QAAQ,gBAAgB,UAAU;AAC3C,aAAO,QAAQ,QAAQ;AAAA,IACzB;AACA,WAAO;AAAA,EACT;AACA,UAAI,0CAAkB,GAAG,GAAG;AAC1B,UAAM,iBAAiB,MAAM,MAAM,SAAS,EAAE,MAAM,YAAY,CAAC;AACjE,aAAS,QAAQ,gBAAgB,eAAe,QAAQ;AACxD,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACA,QAAM,EAAE,OAAO,UAAU,IAAI,MAAM;AAAA,IACjC;AAAA;AAAA,IAEA,CAAC;AAAA,IACD;AAAA,EACF;AACA,WAAS,QAAQ,gBAAgB,SAAS;AAC1C,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AACA,eAAe,uBAAuB,OAAO,SAAS,SAAS,WAAW,UAAU,GAAG;AACrF,QAAM,6BAA6B,CAAiB,oBAAI,KAAK,IAAI,CAAC,IAAI,KAAK,SAAS;AACpF,MAAI;AACF,WAAO,MAAM,QAAQ,OAAO;AAAA,EAC9B,SAAS,OAAP;AACA,QAAI,MAAM,WAAW,KAAK;AACxB,YAAM;AAAA,IACR;AACA,QAAI,8BAA8B,oBAAoB;AACpD,UAAI,UAAU,GAAG;AACf,cAAM,UAAU,SAAS,0BAA0B,6BAA6B;AAAA,MAClF;AACA,YAAM;AAAA,IACR;AACA,MAAE;AACF,UAAM,YAAY,UAAU;AAC5B,UAAM,IAAI;AAAA,MACR,kGAAkG,kBAAkB,YAAY;AAAA,IAClI;AACA,UAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,SAAS,CAAC;AAC7D,WAAO,uBAAuB,OAAO,SAAS,SAAS,WAAW,OAAO;AAAA,EAC3E;AACF;;;AExFA,IAAM,UAAU;;;AROhB,IAAAA,0BAAoC;AACpC,SAAS,cAAc,SAAS;AAC9B,MAAI,CAAC,QAAQ,OAAO;AAClB,UAAM,IAAI,MAAM,8CAA8C;AAAA,EAChE;AACA,MAAI,CAAC,OAAO,SAAS,CAAC,QAAQ,KAAK,GAAG;AACpC,UAAM,IAAI;AA
AA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,QAAQ,YAAY;AACvB,UAAM,IAAI,MAAM,mDAAmD;AAAA,EACrE;AACA,MAAI,oBAAoB,WAAW,CAAC,QAAQ,gBAAgB;AAC1D,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,QAAM,MAAM,OAAO;AAAA,IACjB;AAAA,MACE,MAAM,QAAQ,KAAK,KAAK,OAAO;AAAA,IACjC;AAAA,IACA,QAAQ;AAAA,EACV;AACA,QAAM,UAAU,QAAQ,WAAW,eAAAC,QAAe,SAAS;AAAA,IACzD,SAAS;AAAA,MACP,cAAc,uBAAuB,eAAW,0CAAa;AAAA,IAC/D;AAAA,EACF,CAAC;AACD,QAAM,QAAQ,OAAO;AAAA,IACnB;AAAA,MACE;AAAA,MACA,OAAO,SAAS;AAAA,IAClB;AAAA,IACA;AAAA,IACA,QAAQ,iBAAiB,EAAE,gBAAgB,OAAO,QAAQ,cAAc,EAAE,IAAI,CAAC;AAAA,IAC/E;AAAA,MACE;AAAA,MACA,cAAU,0CAAmB;AAAA,QAC3B,YAAY;AAAA,QACZ,UAAU,QAAQ,YAAY;AAAA,QAC9B,cAAc,QAAQ,gBAAgB;AAAA,QACtC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACA,SAAO,OAAO,OAAO,KAAK,KAAK,MAAM,KAAK,GAAG;AAAA,IAC3C,MAAM,KAAK,KAAK,MAAM,KAAK;AAAA,EAC7B,CAAC;AACH;", + "names": ["import_auth_oauth_user", "defaultRequest"] +} diff --git a/dist/node_modules/@octokit/auth-app/dist-src/auth.js b/dist/node_modules/@octokit/auth-app/dist-src/auth.js new file mode 100644 index 0000000..c8f1fa8 --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-src/auth.js @@ -0,0 +1,31 @@ +import { Deprecation } from "deprecation"; +import { getAppAuthentication } from "./get-app-authentication"; +import { getInstallationAuthentication } from "./get-installation-authentication"; +async function auth(state, authOptions) { + switch (authOptions.type) { + case "app": + return getAppAuthentication(state); + case "oauth": + state.log.warn( + // @ts-expect-error `log.warn()` expects string + new Deprecation( + `[@octokit/auth-app] {type: "oauth"} is deprecated. Use {type: "oauth-app"} instead` + ) + ); + case "oauth-app": + return state.oauthApp({ type: "oauth-app" }); + case "installation": + authOptions; + return getInstallationAuthentication(state, { + ...authOptions, + type: "installation" + }); + case "oauth-user": + return state.oauthApp(authOptions); + default: + throw new Error(`Invalid auth type: ${authOptions.type}`); + } +} +export { + auth +}; diff --git a/dist/node_modules/@octokit/auth-app/dist-src/cache.js b/dist/node_modules/@octokit/auth-app/dist-src/cache.js new file mode 100644 index 0000000..a3e373d --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-src/cache.js @@ -0,0 +1,78 @@ +import { LRUCache } from "lru-cache"; +function getCache() { + return new LRUCache({ + // cache max. 15000 tokens, that will use less than 10mb memory + max: 15e3, + // Cache for 1 minute less than GitHub expiry + ttl: 1e3 * 60 * 59 + }); +} +async function get(cache, options) { + const cacheKey = optionsToCacheKey(options); + const result = await cache.get(cacheKey); + if (!result) { + return; + } + const [ + token, + createdAt, + expiresAt, + repositorySelection, + permissionsString, + singleFileName + ] = result.split("|"); + const permissions = options.permissions || permissionsString.split(/,/).reduce((permissions2, string) => { + if (/!$/.test(string)) { + permissions2[string.slice(0, -1)] = "write"; + } else { + permissions2[string] = "read"; + } + return permissions2; + }, {}); + return { + token, + createdAt, + expiresAt, + permissions, + repositoryIds: options.repositoryIds, + repositoryNames: options.repositoryNames, + singleFileName, + repositorySelection + }; +} +async function set(cache, options, data) { + const key = optionsToCacheKey(options); + const permissionsString = options.permissions ? "" : Object.keys(data.permissions).map( + (name) => `${name}${data.permissions[name] === "write" ? "!" 
: ""}` + ).join(","); + const value = [ + data.token, + data.createdAt, + data.expiresAt, + data.repositorySelection, + permissionsString, + data.singleFileName + ].join("|"); + await cache.set(key, value); +} +function optionsToCacheKey({ + installationId, + permissions = {}, + repositoryIds = [], + repositoryNames = [] +}) { + const permissionsString = Object.keys(permissions).sort().map((name) => permissions[name] === "read" ? name : `${name}!`).join(","); + const repositoryIdsString = repositoryIds.sort().join(","); + const repositoryNamesString = repositoryNames.join(","); + return [ + installationId, + repositoryIdsString, + repositoryNamesString, + permissionsString + ].filter(Boolean).join("|"); +} +export { + get, + getCache, + set +}; diff --git a/dist/node_modules/@octokit/auth-app/dist-src/get-app-authentication.js b/dist/node_modules/@octokit/auth-app/dist-src/get-app-authentication.js new file mode 100644 index 0000000..8ebe620 --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-src/get-app-authentication.js @@ -0,0 +1,31 @@ +import { githubAppJwt } from "universal-github-app-jwt"; +async function getAppAuthentication({ + appId, + privateKey, + timeDifference +}) { + try { + const appAuthentication = await githubAppJwt({ + id: +appId, + privateKey, + now: timeDifference && Math.floor(Date.now() / 1e3) + timeDifference + }); + return { + type: "app", + token: appAuthentication.token, + appId: appAuthentication.appId, + expiresAt: new Date(appAuthentication.expiration * 1e3).toISOString() + }; + } catch (error) { + if (privateKey === "-----BEGIN RSA PRIVATE KEY-----") { + throw new Error( + "The 'privateKey` option contains only the first line '-----BEGIN RSA PRIVATE KEY-----'. If you are setting it using a `.env` file, make sure it is set on a single line with newlines replaced by '\n'" + ); + } else { + throw error; + } + } +} +export { + getAppAuthentication +}; diff --git a/dist/node_modules/@octokit/auth-app/dist-src/get-installation-authentication.js b/dist/node_modules/@octokit/auth-app/dist-src/get-installation-authentication.js new file mode 100644 index 0000000..4be735a --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-src/get-installation-authentication.js @@ -0,0 +1,103 @@ +import { get, set } from "./cache"; +import { getAppAuthentication } from "./get-app-authentication"; +import { toTokenAuthentication } from "./to-token-authentication"; +async function getInstallationAuthentication(state, options, customRequest) { + const installationId = Number(options.installationId || state.installationId); + if (!installationId) { + throw new Error( + "[@octokit/auth-app] installationId option is required for installation authentication." 
+ ); + } + if (options.factory) { + const { type, factory, oauthApp, ...factoryAuthOptions } = { + ...state, + ...options + }; + return factory(factoryAuthOptions); + } + const optionsWithInstallationTokenFromState = Object.assign( + { installationId }, + options + ); + if (!options.refresh) { + const result = await get( + state.cache, + optionsWithInstallationTokenFromState + ); + if (result) { + const { + token: token2, + createdAt: createdAt2, + expiresAt: expiresAt2, + permissions: permissions2, + repositoryIds: repositoryIds2, + repositoryNames: repositoryNames2, + singleFileName: singleFileName2, + repositorySelection: repositorySelection2 + } = result; + return toTokenAuthentication({ + installationId, + token: token2, + createdAt: createdAt2, + expiresAt: expiresAt2, + permissions: permissions2, + repositorySelection: repositorySelection2, + repositoryIds: repositoryIds2, + repositoryNames: repositoryNames2, + singleFileName: singleFileName2 + }); + } + } + const appAuthentication = await getAppAuthentication(state); + const request = customRequest || state.request; + const { + data: { + token, + expires_at: expiresAt, + repositories, + permissions: permissionsOptional, + repository_selection: repositorySelectionOptional, + single_file: singleFileName + } + } = await request("POST /app/installations/{installation_id}/access_tokens", { + installation_id: installationId, + repository_ids: options.repositoryIds, + repositories: options.repositoryNames, + permissions: options.permissions, + mediaType: { + previews: ["machine-man"] + }, + headers: { + authorization: `bearer ${appAuthentication.token}` + } + }); + const permissions = permissionsOptional || {}; + const repositorySelection = repositorySelectionOptional || "all"; + const repositoryIds = repositories ? repositories.map((r) => r.id) : void 0; + const repositoryNames = repositories ? 
repositories.map((repo) => repo.name) : void 0; + const createdAt = (/* @__PURE__ */ new Date()).toISOString(); + await set(state.cache, optionsWithInstallationTokenFromState, { + token, + createdAt, + expiresAt, + repositorySelection, + permissions, + repositoryIds, + repositoryNames, + singleFileName + }); + return toTokenAuthentication({ + installationId, + token, + createdAt, + expiresAt, + repositorySelection, + permissions, + repositoryIds, + repositoryNames, + singleFileName + }); +} +export { + getInstallationAuthentication +}; diff --git a/dist/node_modules/@octokit/auth-app/dist-src/hook.js b/dist/node_modules/@octokit/auth-app/dist-src/hook.js new file mode 100644 index 0000000..891a6f7 --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-src/hook.js @@ -0,0 +1,92 @@ +import { requiresBasicAuth } from "@octokit/auth-oauth-user"; +import { getAppAuthentication } from "./get-app-authentication"; +import { getInstallationAuthentication } from "./get-installation-authentication"; +import { requiresAppAuth } from "./requires-app-auth"; +const FIVE_SECONDS_IN_MS = 5 * 1e3; +function isNotTimeSkewError(error) { + return !(error.message.match( + /'Expiration time' claim \('exp'\) must be a numeric value representing the future time at which the assertion expires/ + ) || error.message.match( + /'Issued at' claim \('iat'\) must be an Integer representing the time that the assertion was issued/ + )); +} +async function hook(state, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + const url = endpoint.url; + if (/\/login\/oauth\/access_token$/.test(url)) { + return request(endpoint); + } + if (requiresAppAuth(url.replace(request.endpoint.DEFAULTS.baseUrl, ""))) { + const { token: token2 } = await getAppAuthentication(state); + endpoint.headers.authorization = `bearer ${token2}`; + let response; + try { + response = await request(endpoint); + } catch (error) { + if (isNotTimeSkewError(error)) { + throw error; + } + if (typeof error.response.headers.date === "undefined") { + throw error; + } + const diff = Math.floor( + (Date.parse(error.response.headers.date) - Date.parse((/* @__PURE__ */ new Date()).toString())) / 1e3 + ); + state.log.warn(error.message); + state.log.warn( + `[@octokit/auth-app] GitHub API time and system time are different by ${diff} seconds. 
Retrying request with the difference accounted for.` + ); + const { token: token3 } = await getAppAuthentication({ + ...state, + timeDifference: diff + }); + endpoint.headers.authorization = `bearer ${token3}`; + return request(endpoint); + } + return response; + } + if (requiresBasicAuth(url)) { + const authentication = await state.oauthApp({ type: "oauth-app" }); + endpoint.headers.authorization = authentication.headers.authorization; + return request(endpoint); + } + const { token, createdAt } = await getInstallationAuthentication( + state, + // @ts-expect-error TBD + {}, + request + ); + endpoint.headers.authorization = `token ${token}`; + return sendRequestWithRetries( + state, + request, + endpoint, + createdAt + ); +} +async function sendRequestWithRetries(state, request, options, createdAt, retries = 0) { + const timeSinceTokenCreationInMs = +/* @__PURE__ */ new Date() - +new Date(createdAt); + try { + return await request(options); + } catch (error) { + if (error.status !== 401) { + throw error; + } + if (timeSinceTokenCreationInMs >= FIVE_SECONDS_IN_MS) { + if (retries > 0) { + error.message = `After ${retries} retries within ${timeSinceTokenCreationInMs / 1e3}s of creating the installation access token, the response remains 401. At this point, the cause may be an authentication problem or a system outage. Please check https://www.githubstatus.com for status information`; + } + throw error; + } + ++retries; + const awaitTime = retries * 1e3; + state.log.warn( + `[@octokit/auth-app] Retrying after 401 response to account for token replication delay (retry: ${retries}, wait: ${awaitTime / 1e3}s)` + ); + await new Promise((resolve) => setTimeout(resolve, awaitTime)); + return sendRequestWithRetries(state, request, options, createdAt, retries); + } +} +export { + hook +}; diff --git a/dist/node_modules/@octokit/auth-app/dist-src/index.js b/dist/node_modules/@octokit/auth-app/dist-src/index.js new file mode 100644 index 0000000..33c1b32 --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-src/index.js @@ -0,0 +1,61 @@ +import { getUserAgent } from "universal-user-agent"; +import { request as defaultRequest } from "@octokit/request"; +import { createOAuthAppAuth } from "@octokit/auth-oauth-app"; +import { auth } from "./auth"; +import { hook } from "./hook"; +import { getCache } from "./cache"; +import { VERSION } from "./version"; +import { createOAuthUserAuth } from "@octokit/auth-oauth-user"; +function createAppAuth(options) { + if (!options.appId) { + throw new Error("[@octokit/auth-app] appId option is required"); + } + if (!Number.isFinite(+options.appId)) { + throw new Error( + "[@octokit/auth-app] appId option must be a number or numeric string" + ); + } + if (!options.privateKey) { + throw new Error("[@octokit/auth-app] privateKey option is required"); + } + if ("installationId" in options && !options.installationId) { + throw new Error( + "[@octokit/auth-app] installationId is set to a falsy value" + ); + } + const log = Object.assign( + { + warn: console.warn.bind(console) + }, + options.log + ); + const request = options.request || defaultRequest.defaults({ + headers: { + "user-agent": `octokit-auth-app.js/${VERSION} ${getUserAgent()}` + } + }); + const state = Object.assign( + { + request, + cache: getCache() + }, + options, + options.installationId ? 
{ installationId: Number(options.installationId) } : {}, + { + log, + oauthApp: createOAuthAppAuth({ + clientType: "github-app", + clientId: options.clientId || "", + clientSecret: options.clientSecret || "", + request + }) + } + ); + return Object.assign(auth.bind(null, state), { + hook: hook.bind(null, state) + }); +} +export { + createAppAuth, + createOAuthUserAuth +}; diff --git a/dist/node_modules/@octokit/auth-app/dist-src/requires-app-auth.js b/dist/node_modules/@octokit/auth-app/dist-src/requires-app-auth.js new file mode 100644 index 0000000..1c44298 --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-src/requires-app-auth.js @@ -0,0 +1,36 @@ +const PATHS = [ + "/app", + "/app/hook/config", + "/app/hook/deliveries", + "/app/hook/deliveries/{delivery_id}", + "/app/hook/deliveries/{delivery_id}/attempts", + "/app/installations", + "/app/installations/{installation_id}", + "/app/installations/{installation_id}/access_tokens", + "/app/installations/{installation_id}/suspended", + "/marketplace_listing/accounts/{account_id}", + "/marketplace_listing/plan", + "/marketplace_listing/plans", + "/marketplace_listing/plans/{plan_id}/accounts", + "/marketplace_listing/stubbed/accounts/{account_id}", + "/marketplace_listing/stubbed/plan", + "/marketplace_listing/stubbed/plans", + "/marketplace_listing/stubbed/plans/{plan_id}/accounts", + "/orgs/{org}/installation", + "/repos/{owner}/{repo}/installation", + "/users/{username}/installation" +]; +function routeMatcher(paths) { + const regexes = paths.map( + (p) => p.split("/").map((c) => c.startsWith("{") ? "(?:.+?)" : c).join("/") + ); + const regex = `^(?:${regexes.map((r) => `(?:${r})`).join("|")})$`; + return new RegExp(regex, "i"); +} +const REGEX = routeMatcher(PATHS); +function requiresAppAuth(url) { + return !!url && REGEX.test(url.split("?")[0]); +} +export { + requiresAppAuth +}; diff --git a/dist/node_modules/@octokit/auth-app/dist-src/to-token-authentication.js b/dist/node_modules/@octokit/auth-app/dist-src/to-token-authentication.js new file mode 100644 index 0000000..9ba6e03 --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-src/to-token-authentication.js @@ -0,0 +1,30 @@ +function toTokenAuthentication({ + installationId, + token, + createdAt, + expiresAt, + repositorySelection, + permissions, + repositoryIds, + repositoryNames, + singleFileName +}) { + return Object.assign( + { + type: "token", + tokenType: "installation", + token, + installationId, + permissions, + createdAt, + expiresAt, + repositorySelection + }, + repositoryIds ? { repositoryIds } : null, + repositoryNames ? { repositoryNames } : null, + singleFileName ? 
{ singleFileName } : null + ); +} +export { + toTokenAuthentication +}; diff --git a/dist/node_modules/@octokit/auth-app/dist-src/version.js b/dist/node_modules/@octokit/auth-app/dist-src/version.js new file mode 100644 index 0000000..eaa0c0d --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "4.0.13"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/auth-app/dist-types/auth.d.ts b/dist/node_modules/@octokit/auth-app/dist-types/auth.d.ts new file mode 100644 index 0000000..b035b9c --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-types/auth.d.ts @@ -0,0 +1,20 @@ +import * as OAuthAppAuth from "@octokit/auth-oauth-app"; +import type { State, AppAuthOptions, AppAuthentication, OAuthAppAuthentication, OAuthAppAuthOptions, InstallationAuthOptions, InstallationAccessTokenAuthentication, GitHubAppUserAuthentication, GitHubAppUserAuthenticationWithExpiration, OAuthWebFlowAuthOptions, OAuthDeviceFlowAuthOptions } from "./types"; +/** GitHub App authentication */ +export declare function auth(state: State, authOptions: AppAuthOptions): Promise; +/** OAuth App authentication */ +export declare function auth(state: State, authOptions: OAuthAppAuthOptions): Promise; +/** Installation authentication */ +export declare function auth(state: State, authOptions: InstallationAuthOptions): Promise; +/** User Authentication via OAuth web flow */ +export declare function auth(state: State, authOptions: OAuthWebFlowAuthOptions): Promise; +/** GitHub App Web flow with `factory` option */ +export declare function auth(state: State, authOptions: OAuthWebFlowAuthOptions & { + factory: OAuthAppAuth.FactoryGitHubWebFlow; +}): Promise; +/** User Authentication via OAuth Device flow */ +export declare function auth(state: State, authOptions: OAuthDeviceFlowAuthOptions): Promise; +/** GitHub App Device flow with `factory` option */ +export declare function auth(state: State, authOptions: OAuthDeviceFlowAuthOptions & { + factory: OAuthAppAuth.FactoryGitHubDeviceFlow; +}): Promise; diff --git a/dist/node_modules/@octokit/auth-app/dist-types/cache.d.ts b/dist/node_modules/@octokit/auth-app/dist-types/cache.d.ts new file mode 100644 index 0000000..631fbcb --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-types/cache.d.ts @@ -0,0 +1,5 @@ +import { LRUCache } from "lru-cache"; +import type { InstallationAuthOptions, Cache, CacheData, InstallationAccessTokenData } from "./types"; +export declare function getCache(): LRUCache; +export declare function get(cache: Cache, options: InstallationAuthOptions): Promise; +export declare function set(cache: Cache, options: InstallationAuthOptions, data: CacheData): Promise; diff --git a/dist/node_modules/@octokit/auth-app/dist-types/get-app-authentication.d.ts b/dist/node_modules/@octokit/auth-app/dist-types/get-app-authentication.d.ts new file mode 100644 index 0000000..9678e1d --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-types/get-app-authentication.d.ts @@ -0,0 +1,4 @@ +import type { AppAuthentication, State } from "./types"; +export declare function getAppAuthentication({ appId, privateKey, timeDifference, }: State & { + timeDifference?: number; +}): Promise; diff --git a/dist/node_modules/@octokit/auth-app/dist-types/get-installation-authentication.d.ts b/dist/node_modules/@octokit/auth-app/dist-types/get-installation-authentication.d.ts new file mode 100644 index 0000000..529f093 --- /dev/null +++ 
b/dist/node_modules/@octokit/auth-app/dist-types/get-installation-authentication.d.ts @@ -0,0 +1,2 @@ +import type { InstallationAuthOptions, InstallationAccessTokenAuthentication, RequestInterface, State } from "./types"; +export declare function getInstallationAuthentication(state: State, options: InstallationAuthOptions, customRequest?: RequestInterface): Promise; diff --git a/dist/node_modules/@octokit/auth-app/dist-types/hook.d.ts b/dist/node_modules/@octokit/auth-app/dist-types/hook.d.ts new file mode 100644 index 0000000..2748ff4 --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-types/hook.d.ts @@ -0,0 +1,2 @@ +import type { AnyResponse, EndpointOptions, RequestParameters, RequestInterface, Route, State } from "./types"; +export declare function hook(state: State, request: RequestInterface, route: Route | EndpointOptions, parameters?: RequestParameters): Promise; diff --git a/dist/node_modules/@octokit/auth-app/dist-types/index.d.ts b/dist/node_modules/@octokit/auth-app/dist-types/index.d.ts new file mode 100644 index 0000000..148c061 --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-types/index.d.ts @@ -0,0 +1,4 @@ +import type { AuthInterface, StrategyOptions } from "./types"; +export { createOAuthUserAuth } from "@octokit/auth-oauth-user"; +export type { StrategyOptions, AppAuthOptions, OAuthAppAuthOptions, InstallationAuthOptions, OAuthWebFlowAuthOptions, OAuthDeviceFlowAuthOptions, Authentication, AppAuthentication, OAuthAppAuthentication, InstallationAccessTokenAuthentication, GitHubAppUserAuthentication, GitHubAppUserAuthenticationWithExpiration, } from "./types"; +export declare function createAppAuth(options: StrategyOptions): AuthInterface; diff --git a/dist/node_modules/@octokit/auth-app/dist-types/requires-app-auth.d.ts b/dist/node_modules/@octokit/auth-app/dist-types/requires-app-auth.d.ts new file mode 100644 index 0000000..2d86d3f --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-types/requires-app-auth.d.ts @@ -0,0 +1 @@ +export declare function requiresAppAuth(url: string | undefined): Boolean; diff --git a/dist/node_modules/@octokit/auth-app/dist-types/to-token-authentication.d.ts b/dist/node_modules/@octokit/auth-app/dist-types/to-token-authentication.d.ts new file mode 100644 index 0000000..dfa92ac --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-types/to-token-authentication.d.ts @@ -0,0 +1,2 @@ +import type { CacheData, InstallationAccessTokenAuthentication, WithInstallationId } from "./types"; +export declare function toTokenAuthentication({ installationId, token, createdAt, expiresAt, repositorySelection, permissions, repositoryIds, repositoryNames, singleFileName, }: CacheData & WithInstallationId): InstallationAccessTokenAuthentication; diff --git a/dist/node_modules/@octokit/auth-app/dist-types/types.d.ts b/dist/node_modules/@octokit/auth-app/dist-types/types.d.ts new file mode 100644 index 0000000..f9b69ab --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-types/types.d.ts @@ -0,0 +1,125 @@ +import * as OctokitTypes from "@octokit/types"; +import { LRUCache } from "lru-cache"; +import * as OAuthAppAuth from "@octokit/auth-oauth-app"; +type OAuthStrategyOptions = { + clientId?: string; + clientSecret?: string; +}; +type CommonStrategyOptions = { + appId: number | string; + privateKey: string; + installationId?: number | string; + request?: OctokitTypes.RequestInterface; + cache?: Cache; + log?: { + warn: (message: string, additionalInfo?: object) => any; + [key: string]: any; + }; +}; +export type 
StrategyOptions = OAuthStrategyOptions & CommonStrategyOptions & Record; +export type AppAuthOptions = { + type: "app"; +}; +/** +Users SHOULD only enter repositoryIds || repositoryNames. +However, this module still passes both to the backend API to +let the API decide how to handle the logic. We just throw the +response back to the client making the request. +**/ +export type InstallationAuthOptions = { + type: "installation"; + installationId?: number | string; + repositoryIds?: number[]; + repositoryNames?: string[]; + permissions?: Permissions; + refresh?: boolean; + factory?: never; + [key: string]: unknown; +}; +export type InstallationAuthOptionsWithFactory = { + type: "installation"; + installationId?: number | string; + repositoryIds?: number[]; + repositoryNames?: string[]; + permissions?: Permissions; + refresh?: boolean; + factory: FactoryInstallation; + [key: string]: unknown; +}; +export type OAuthAppAuthOptions = OAuthAppAuth.AppAuthOptions; +export type OAuthWebFlowAuthOptions = OAuthAppAuth.WebFlowAuthOptions; +export type OAuthDeviceFlowAuthOptions = OAuthAppAuth.GitHubAppDeviceFlowAuthOptions; +export type Authentication = AppAuthentication | OAuthAppAuthentication | InstallationAccessTokenAuthentication | GitHubAppUserAuthentication | GitHubAppUserAuthenticationWithExpiration; +export type FactoryInstallationOptions = StrategyOptions & Omit; +export interface FactoryInstallation { + (options: FactoryInstallationOptions): T; +} +export interface AuthInterface { + (options: AppAuthOptions): Promise; + (options: OAuthAppAuthOptions): Promise; + (options: InstallationAuthOptions): Promise; + (options: InstallationAuthOptionsWithFactory): Promise; + (options: OAuthWebFlowAuthOptions): Promise; + (options: OAuthDeviceFlowAuthOptions): Promise; + (options: OAuthWebFlowAuthOptions & { + factory: OAuthAppAuth.FactoryGitHubWebFlow; + }): Promise; + (options: OAuthDeviceFlowAuthOptions & { + factory: OAuthAppAuth.FactoryGitHubDeviceFlow; + }): Promise; + hook(request: RequestInterface, route: Route | EndpointOptions, parameters?: RequestParameters): Promise>; +} +export type AnyResponse = OctokitTypes.OctokitResponse; +export type EndpointDefaults = OctokitTypes.EndpointDefaults; +export type EndpointOptions = OctokitTypes.EndpointOptions; +export type RequestParameters = OctokitTypes.RequestParameters; +export type Route = OctokitTypes.Route; +export type RequestInterface = OctokitTypes.RequestInterface; +export type Cache = LRUCache | { + get: (key: string) => string; + set: (key: string, value: string) => any; +}; +export type APP_TYPE = "app"; +export type TOKEN_TYPE = "token"; +export type INSTALLATION_TOKEN_TYPE = "installation"; +export type OAUTH_TOKEN_TYPE = "oauth"; +export type REPOSITORY_SELECTION = "all" | "selected"; +export type JWT = string; +export type ACCESS_TOKEN = string; +export type UTC_TIMESTAMP = string; +export type AppAuthentication = { + type: APP_TYPE; + token: JWT; + appId: number; + expiresAt: string; +}; +export type InstallationAccessTokenData = { + token: ACCESS_TOKEN; + createdAt: UTC_TIMESTAMP; + expiresAt: UTC_TIMESTAMP; + permissions: Permissions; + repositorySelection: REPOSITORY_SELECTION; + repositoryIds?: number[]; + repositoryNames?: string[]; + singleFileName?: string; +}; +export type CacheData = InstallationAccessTokenData; +export type InstallationAccessTokenAuthentication = InstallationAccessTokenData & { + type: TOKEN_TYPE; + tokenType: INSTALLATION_TOKEN_TYPE; + installationId: number; +}; +export type OAuthAppAuthentication = 
OAuthAppAuth.AppAuthentication; +export type GitHubAppUserAuthentication = OAuthAppAuth.GitHubAppUserAuthentication; +export type GitHubAppUserAuthenticationWithExpiration = OAuthAppAuth.GitHubAppUserAuthenticationWithExpiration; +export type FactoryOptions = Required> & State; +export type Permissions = Record; +export type WithInstallationId = { + installationId: number; +}; +export type State = Required> & { + installationId?: number; +} & OAuthStrategyOptions & { + oauthApp: OAuthAppAuth.GitHubAuthInterface; +}; +export {}; diff --git a/dist/node_modules/@octokit/auth-app/dist-types/version.d.ts b/dist/node_modules/@octokit/auth-app/dist-types/version.d.ts new file mode 100644 index 0000000..ceaf7e4 --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "4.0.13"; diff --git a/dist/node_modules/@octokit/auth-app/dist-web/index.js b/dist/node_modules/@octokit/auth-app/dist-web/index.js new file mode 100644 index 0000000..e7ab87d --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-web/index.js @@ -0,0 +1,451 @@ +// pkg/dist-src/index.js +import { getUserAgent } from "universal-user-agent"; +import { request as defaultRequest } from "@octokit/request"; +import { createOAuthAppAuth } from "@octokit/auth-oauth-app"; + +// pkg/dist-src/auth.js +import { Deprecation } from "deprecation"; + +// pkg/dist-src/get-app-authentication.js +import { githubAppJwt } from "universal-github-app-jwt"; +async function getAppAuthentication({ + appId, + privateKey, + timeDifference +}) { + try { + const appAuthentication = await githubAppJwt({ + id: +appId, + privateKey, + now: timeDifference && Math.floor(Date.now() / 1e3) + timeDifference + }); + return { + type: "app", + token: appAuthentication.token, + appId: appAuthentication.appId, + expiresAt: new Date(appAuthentication.expiration * 1e3).toISOString() + }; + } catch (error) { + if (privateKey === "-----BEGIN RSA PRIVATE KEY-----") { + throw new Error( + "The 'privateKey` option contains only the first line '-----BEGIN RSA PRIVATE KEY-----'. If you are setting it using a `.env` file, make sure it is set on a single line with newlines replaced by '\n'" + ); + } else { + throw error; + } + } +} + +// pkg/dist-src/cache.js +import { LRUCache } from "lru-cache"; +function getCache() { + return new LRUCache({ + // cache max. 15000 tokens, that will use less than 10mb memory + max: 15e3, + // Cache for 1 minute less than GitHub expiry + ttl: 1e3 * 60 * 59 + }); +} +async function get(cache, options) { + const cacheKey = optionsToCacheKey(options); + const result = await cache.get(cacheKey); + if (!result) { + return; + } + const [ + token, + createdAt, + expiresAt, + repositorySelection, + permissionsString, + singleFileName + ] = result.split("|"); + const permissions = options.permissions || permissionsString.split(/,/).reduce((permissions2, string) => { + if (/!$/.test(string)) { + permissions2[string.slice(0, -1)] = "write"; + } else { + permissions2[string] = "read"; + } + return permissions2; + }, {}); + return { + token, + createdAt, + expiresAt, + permissions, + repositoryIds: options.repositoryIds, + repositoryNames: options.repositoryNames, + singleFileName, + repositorySelection + }; +} +async function set(cache, options, data) { + const key = optionsToCacheKey(options); + const permissionsString = options.permissions ? "" : Object.keys(data.permissions).map( + (name) => `${name}${data.permissions[name] === "write" ? "!" 
: ""}` + ).join(","); + const value = [ + data.token, + data.createdAt, + data.expiresAt, + data.repositorySelection, + permissionsString, + data.singleFileName + ].join("|"); + await cache.set(key, value); +} +function optionsToCacheKey({ + installationId, + permissions = {}, + repositoryIds = [], + repositoryNames = [] +}) { + const permissionsString = Object.keys(permissions).sort().map((name) => permissions[name] === "read" ? name : `${name}!`).join(","); + const repositoryIdsString = repositoryIds.sort().join(","); + const repositoryNamesString = repositoryNames.join(","); + return [ + installationId, + repositoryIdsString, + repositoryNamesString, + permissionsString + ].filter(Boolean).join("|"); +} + +// pkg/dist-src/to-token-authentication.js +function toTokenAuthentication({ + installationId, + token, + createdAt, + expiresAt, + repositorySelection, + permissions, + repositoryIds, + repositoryNames, + singleFileName +}) { + return Object.assign( + { + type: "token", + tokenType: "installation", + token, + installationId, + permissions, + createdAt, + expiresAt, + repositorySelection + }, + repositoryIds ? { repositoryIds } : null, + repositoryNames ? { repositoryNames } : null, + singleFileName ? { singleFileName } : null + ); +} + +// pkg/dist-src/get-installation-authentication.js +async function getInstallationAuthentication(state, options, customRequest) { + const installationId = Number(options.installationId || state.installationId); + if (!installationId) { + throw new Error( + "[@octokit/auth-app] installationId option is required for installation authentication." + ); + } + if (options.factory) { + const { type, factory, oauthApp, ...factoryAuthOptions } = { + ...state, + ...options + }; + return factory(factoryAuthOptions); + } + const optionsWithInstallationTokenFromState = Object.assign( + { installationId }, + options + ); + if (!options.refresh) { + const result = await get( + state.cache, + optionsWithInstallationTokenFromState + ); + if (result) { + const { + token: token2, + createdAt: createdAt2, + expiresAt: expiresAt2, + permissions: permissions2, + repositoryIds: repositoryIds2, + repositoryNames: repositoryNames2, + singleFileName: singleFileName2, + repositorySelection: repositorySelection2 + } = result; + return toTokenAuthentication({ + installationId, + token: token2, + createdAt: createdAt2, + expiresAt: expiresAt2, + permissions: permissions2, + repositorySelection: repositorySelection2, + repositoryIds: repositoryIds2, + repositoryNames: repositoryNames2, + singleFileName: singleFileName2 + }); + } + } + const appAuthentication = await getAppAuthentication(state); + const request = customRequest || state.request; + const { + data: { + token, + expires_at: expiresAt, + repositories, + permissions: permissionsOptional, + repository_selection: repositorySelectionOptional, + single_file: singleFileName + } + } = await request("POST /app/installations/{installation_id}/access_tokens", { + installation_id: installationId, + repository_ids: options.repositoryIds, + repositories: options.repositoryNames, + permissions: options.permissions, + mediaType: { + previews: ["machine-man"] + }, + headers: { + authorization: `bearer ${appAuthentication.token}` + } + }); + const permissions = permissionsOptional || {}; + const repositorySelection = repositorySelectionOptional || "all"; + const repositoryIds = repositories ? repositories.map((r) => r.id) : void 0; + const repositoryNames = repositories ? 
repositories.map((repo) => repo.name) : void 0; + const createdAt = (/* @__PURE__ */ new Date()).toISOString(); + await set(state.cache, optionsWithInstallationTokenFromState, { + token, + createdAt, + expiresAt, + repositorySelection, + permissions, + repositoryIds, + repositoryNames, + singleFileName + }); + return toTokenAuthentication({ + installationId, + token, + createdAt, + expiresAt, + repositorySelection, + permissions, + repositoryIds, + repositoryNames, + singleFileName + }); +} + +// pkg/dist-src/auth.js +async function auth(state, authOptions) { + switch (authOptions.type) { + case "app": + return getAppAuthentication(state); + case "oauth": + state.log.warn( + // @ts-expect-error `log.warn()` expects string + new Deprecation( + `[@octokit/auth-app] {type: "oauth"} is deprecated. Use {type: "oauth-app"} instead` + ) + ); + case "oauth-app": + return state.oauthApp({ type: "oauth-app" }); + case "installation": + authOptions; + return getInstallationAuthentication(state, { + ...authOptions, + type: "installation" + }); + case "oauth-user": + return state.oauthApp(authOptions); + default: + throw new Error(`Invalid auth type: ${authOptions.type}`); + } +} + +// pkg/dist-src/hook.js +import { requiresBasicAuth } from "@octokit/auth-oauth-user"; + +// pkg/dist-src/requires-app-auth.js +var PATHS = [ + "/app", + "/app/hook/config", + "/app/hook/deliveries", + "/app/hook/deliveries/{delivery_id}", + "/app/hook/deliveries/{delivery_id}/attempts", + "/app/installations", + "/app/installations/{installation_id}", + "/app/installations/{installation_id}/access_tokens", + "/app/installations/{installation_id}/suspended", + "/marketplace_listing/accounts/{account_id}", + "/marketplace_listing/plan", + "/marketplace_listing/plans", + "/marketplace_listing/plans/{plan_id}/accounts", + "/marketplace_listing/stubbed/accounts/{account_id}", + "/marketplace_listing/stubbed/plan", + "/marketplace_listing/stubbed/plans", + "/marketplace_listing/stubbed/plans/{plan_id}/accounts", + "/orgs/{org}/installation", + "/repos/{owner}/{repo}/installation", + "/users/{username}/installation" +]; +function routeMatcher(paths) { + const regexes = paths.map( + (p) => p.split("/").map((c) => c.startsWith("{") ? 
"(?:.+?)" : c).join("/") + ); + const regex = `^(?:${regexes.map((r) => `(?:${r})`).join("|")})$`; + return new RegExp(regex, "i"); +} +var REGEX = routeMatcher(PATHS); +function requiresAppAuth(url) { + return !!url && REGEX.test(url.split("?")[0]); +} + +// pkg/dist-src/hook.js +var FIVE_SECONDS_IN_MS = 5 * 1e3; +function isNotTimeSkewError(error) { + return !(error.message.match( + /'Expiration time' claim \('exp'\) must be a numeric value representing the future time at which the assertion expires/ + ) || error.message.match( + /'Issued at' claim \('iat'\) must be an Integer representing the time that the assertion was issued/ + )); +} +async function hook(state, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + const url = endpoint.url; + if (/\/login\/oauth\/access_token$/.test(url)) { + return request(endpoint); + } + if (requiresAppAuth(url.replace(request.endpoint.DEFAULTS.baseUrl, ""))) { + const { token: token2 } = await getAppAuthentication(state); + endpoint.headers.authorization = `bearer ${token2}`; + let response; + try { + response = await request(endpoint); + } catch (error) { + if (isNotTimeSkewError(error)) { + throw error; + } + if (typeof error.response.headers.date === "undefined") { + throw error; + } + const diff = Math.floor( + (Date.parse(error.response.headers.date) - Date.parse((/* @__PURE__ */ new Date()).toString())) / 1e3 + ); + state.log.warn(error.message); + state.log.warn( + `[@octokit/auth-app] GitHub API time and system time are different by ${diff} seconds. Retrying request with the difference accounted for.` + ); + const { token: token3 } = await getAppAuthentication({ + ...state, + timeDifference: diff + }); + endpoint.headers.authorization = `bearer ${token3}`; + return request(endpoint); + } + return response; + } + if (requiresBasicAuth(url)) { + const authentication = await state.oauthApp({ type: "oauth-app" }); + endpoint.headers.authorization = authentication.headers.authorization; + return request(endpoint); + } + const { token, createdAt } = await getInstallationAuthentication( + state, + // @ts-expect-error TBD + {}, + request + ); + endpoint.headers.authorization = `token ${token}`; + return sendRequestWithRetries( + state, + request, + endpoint, + createdAt + ); +} +async function sendRequestWithRetries(state, request, options, createdAt, retries = 0) { + const timeSinceTokenCreationInMs = +/* @__PURE__ */ new Date() - +new Date(createdAt); + try { + return await request(options); + } catch (error) { + if (error.status !== 401) { + throw error; + } + if (timeSinceTokenCreationInMs >= FIVE_SECONDS_IN_MS) { + if (retries > 0) { + error.message = `After ${retries} retries within ${timeSinceTokenCreationInMs / 1e3}s of creating the installation access token, the response remains 401. At this point, the cause may be an authentication problem or a system outage. 
Please check https://www.githubstatus.com for status information`; + } + throw error; + } + ++retries; + const awaitTime = retries * 1e3; + state.log.warn( + `[@octokit/auth-app] Retrying after 401 response to account for token replication delay (retry: ${retries}, wait: ${awaitTime / 1e3}s)` + ); + await new Promise((resolve) => setTimeout(resolve, awaitTime)); + return sendRequestWithRetries(state, request, options, createdAt, retries); + } +} + +// pkg/dist-src/version.js +var VERSION = "4.0.13"; + +// pkg/dist-src/index.js +import { createOAuthUserAuth } from "@octokit/auth-oauth-user"; +function createAppAuth(options) { + if (!options.appId) { + throw new Error("[@octokit/auth-app] appId option is required"); + } + if (!Number.isFinite(+options.appId)) { + throw new Error( + "[@octokit/auth-app] appId option must be a number or numeric string" + ); + } + if (!options.privateKey) { + throw new Error("[@octokit/auth-app] privateKey option is required"); + } + if ("installationId" in options && !options.installationId) { + throw new Error( + "[@octokit/auth-app] installationId is set to a falsy value" + ); + } + const log = Object.assign( + { + warn: console.warn.bind(console) + }, + options.log + ); + const request = options.request || defaultRequest.defaults({ + headers: { + "user-agent": `octokit-auth-app.js/${VERSION} ${getUserAgent()}` + } + }); + const state = Object.assign( + { + request, + cache: getCache() + }, + options, + options.installationId ? { installationId: Number(options.installationId) } : {}, + { + log, + oauthApp: createOAuthAppAuth({ + clientType: "github-app", + clientId: options.clientId || "", + clientSecret: options.clientSecret || "", + request + }) + } + ); + return Object.assign(auth.bind(null, state), { + hook: hook.bind(null, state) + }); +} +export { + createAppAuth, + createOAuthUserAuth +}; diff --git a/dist/node_modules/@octokit/auth-app/dist-web/index.js.map b/dist/node_modules/@octokit/auth-app/dist-web/index.js.map new file mode 100644 index 0000000..97f10d4 --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/auth.js", "../dist-src/get-app-authentication.js", "../dist-src/cache.js", "../dist-src/to-token-authentication.js", "../dist-src/get-installation-authentication.js", "../dist-src/hook.js", "../dist-src/requires-app-auth.js", "../dist-src/version.js"], + "sourcesContent": ["import { getUserAgent } from \"universal-user-agent\";\nimport { request as defaultRequest } from \"@octokit/request\";\nimport { createOAuthAppAuth } from \"@octokit/auth-oauth-app\";\nimport { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nimport { getCache } from \"./cache\";\nimport { VERSION } from \"./version\";\nimport { createOAuthUserAuth } from \"@octokit/auth-oauth-user\";\nfunction createAppAuth(options) {\n if (!options.appId) {\n throw new Error(\"[@octokit/auth-app] appId option is required\");\n }\n if (!Number.isFinite(+options.appId)) {\n throw new Error(\n \"[@octokit/auth-app] appId option must be a number or numeric string\"\n );\n }\n if (!options.privateKey) {\n throw new Error(\"[@octokit/auth-app] privateKey option is required\");\n }\n if (\"installationId\" in options && !options.installationId) {\n throw new Error(\n \"[@octokit/auth-app] installationId is set to a falsy value\"\n );\n }\n const log = Object.assign(\n {\n warn: console.warn.bind(console)\n },\n options.log\n );\n const request = options.request || 
defaultRequest.defaults({\n headers: {\n \"user-agent\": `octokit-auth-app.js/${VERSION} ${getUserAgent()}`\n }\n });\n const state = Object.assign(\n {\n request,\n cache: getCache()\n },\n options,\n options.installationId ? { installationId: Number(options.installationId) } : {},\n {\n log,\n oauthApp: createOAuthAppAuth({\n clientType: \"github-app\",\n clientId: options.clientId || \"\",\n clientSecret: options.clientSecret || \"\",\n request\n })\n }\n );\n return Object.assign(auth.bind(null, state), {\n hook: hook.bind(null, state)\n });\n}\nexport {\n createAppAuth,\n createOAuthUserAuth\n};\n", "import { Deprecation } from \"deprecation\";\nimport { getAppAuthentication } from \"./get-app-authentication\";\nimport { getInstallationAuthentication } from \"./get-installation-authentication\";\nasync function auth(state, authOptions) {\n switch (authOptions.type) {\n case \"app\":\n return getAppAuthentication(state);\n case \"oauth\":\n state.log.warn(\n // @ts-expect-error `log.warn()` expects string\n new Deprecation(\n `[@octokit/auth-app] {type: \"oauth\"} is deprecated. Use {type: \"oauth-app\"} instead`\n )\n );\n case \"oauth-app\":\n return state.oauthApp({ type: \"oauth-app\" });\n case \"installation\":\n authOptions;\n return getInstallationAuthentication(state, {\n ...authOptions,\n type: \"installation\"\n });\n case \"oauth-user\":\n return state.oauthApp(authOptions);\n default:\n throw new Error(`Invalid auth type: ${authOptions.type}`);\n }\n}\nexport {\n auth\n};\n", "import { githubAppJwt } from \"universal-github-app-jwt\";\nasync function getAppAuthentication({\n appId,\n privateKey,\n timeDifference\n}) {\n try {\n const appAuthentication = await githubAppJwt({\n id: +appId,\n privateKey,\n now: timeDifference && Math.floor(Date.now() / 1e3) + timeDifference\n });\n return {\n type: \"app\",\n token: appAuthentication.token,\n appId: appAuthentication.appId,\n expiresAt: new Date(appAuthentication.expiration * 1e3).toISOString()\n };\n } catch (error) {\n if (privateKey === \"-----BEGIN RSA PRIVATE KEY-----\") {\n throw new Error(\n \"The 'privateKey` option contains only the first line '-----BEGIN RSA PRIVATE KEY-----'. If you are setting it using a `.env` file, make sure it is set on a single line with newlines replaced by '\\n'\"\n );\n } else {\n throw error;\n }\n }\n}\nexport {\n getAppAuthentication\n};\n", "import { LRUCache } from \"lru-cache\";\nfunction getCache() {\n return new LRUCache({\n // cache max. 15000 tokens, that will use less than 10mb memory\n max: 15e3,\n // Cache for 1 minute less than GitHub expiry\n ttl: 1e3 * 60 * 59\n });\n}\nasync function get(cache, options) {\n const cacheKey = optionsToCacheKey(options);\n const result = await cache.get(cacheKey);\n if (!result) {\n return;\n }\n const [\n token,\n createdAt,\n expiresAt,\n repositorySelection,\n permissionsString,\n singleFileName\n ] = result.split(\"|\");\n const permissions = options.permissions || permissionsString.split(/,/).reduce((permissions2, string) => {\n if (/!$/.test(string)) {\n permissions2[string.slice(0, -1)] = \"write\";\n } else {\n permissions2[string] = \"read\";\n }\n return permissions2;\n }, {});\n return {\n token,\n createdAt,\n expiresAt,\n permissions,\n repositoryIds: options.repositoryIds,\n repositoryNames: options.repositoryNames,\n singleFileName,\n repositorySelection\n };\n}\nasync function set(cache, options, data) {\n const key = optionsToCacheKey(options);\n const permissionsString = options.permissions ? 
\"\" : Object.keys(data.permissions).map(\n (name) => `${name}${data.permissions[name] === \"write\" ? \"!\" : \"\"}`\n ).join(\",\");\n const value = [\n data.token,\n data.createdAt,\n data.expiresAt,\n data.repositorySelection,\n permissionsString,\n data.singleFileName\n ].join(\"|\");\n await cache.set(key, value);\n}\nfunction optionsToCacheKey({\n installationId,\n permissions = {},\n repositoryIds = [],\n repositoryNames = []\n}) {\n const permissionsString = Object.keys(permissions).sort().map((name) => permissions[name] === \"read\" ? name : `${name}!`).join(\",\");\n const repositoryIdsString = repositoryIds.sort().join(\",\");\n const repositoryNamesString = repositoryNames.join(\",\");\n return [\n installationId,\n repositoryIdsString,\n repositoryNamesString,\n permissionsString\n ].filter(Boolean).join(\"|\");\n}\nexport {\n get,\n getCache,\n set\n};\n", "function toTokenAuthentication({\n installationId,\n token,\n createdAt,\n expiresAt,\n repositorySelection,\n permissions,\n repositoryIds,\n repositoryNames,\n singleFileName\n}) {\n return Object.assign(\n {\n type: \"token\",\n tokenType: \"installation\",\n token,\n installationId,\n permissions,\n createdAt,\n expiresAt,\n repositorySelection\n },\n repositoryIds ? { repositoryIds } : null,\n repositoryNames ? { repositoryNames } : null,\n singleFileName ? { singleFileName } : null\n );\n}\nexport {\n toTokenAuthentication\n};\n", "import { get, set } from \"./cache\";\nimport { getAppAuthentication } from \"./get-app-authentication\";\nimport { toTokenAuthentication } from \"./to-token-authentication\";\nasync function getInstallationAuthentication(state, options, customRequest) {\n const installationId = Number(options.installationId || state.installationId);\n if (!installationId) {\n throw new Error(\n \"[@octokit/auth-app] installationId option is required for installation authentication.\"\n );\n }\n if (options.factory) {\n const { type, factory, oauthApp, ...factoryAuthOptions } = {\n ...state,\n ...options\n };\n return factory(factoryAuthOptions);\n }\n const optionsWithInstallationTokenFromState = Object.assign(\n { installationId },\n options\n );\n if (!options.refresh) {\n const result = await get(\n state.cache,\n optionsWithInstallationTokenFromState\n );\n if (result) {\n const {\n token: token2,\n createdAt: createdAt2,\n expiresAt: expiresAt2,\n permissions: permissions2,\n repositoryIds: repositoryIds2,\n repositoryNames: repositoryNames2,\n singleFileName: singleFileName2,\n repositorySelection: repositorySelection2\n } = result;\n return toTokenAuthentication({\n installationId,\n token: token2,\n createdAt: createdAt2,\n expiresAt: expiresAt2,\n permissions: permissions2,\n repositorySelection: repositorySelection2,\n repositoryIds: repositoryIds2,\n repositoryNames: repositoryNames2,\n singleFileName: singleFileName2\n });\n }\n }\n const appAuthentication = await getAppAuthentication(state);\n const request = customRequest || state.request;\n const {\n data: {\n token,\n expires_at: expiresAt,\n repositories,\n permissions: permissionsOptional,\n repository_selection: repositorySelectionOptional,\n single_file: singleFileName\n }\n } = await request(\"POST /app/installations/{installation_id}/access_tokens\", {\n installation_id: installationId,\n repository_ids: options.repositoryIds,\n repositories: options.repositoryNames,\n permissions: options.permissions,\n mediaType: {\n previews: [\"machine-man\"]\n },\n headers: {\n authorization: `bearer ${appAuthentication.token}`\n }\n });\n 
const permissions = permissionsOptional || {};\n const repositorySelection = repositorySelectionOptional || \"all\";\n const repositoryIds = repositories ? repositories.map((r) => r.id) : void 0;\n const repositoryNames = repositories ? repositories.map((repo) => repo.name) : void 0;\n const createdAt = (/* @__PURE__ */ new Date()).toISOString();\n await set(state.cache, optionsWithInstallationTokenFromState, {\n token,\n createdAt,\n expiresAt,\n repositorySelection,\n permissions,\n repositoryIds,\n repositoryNames,\n singleFileName\n });\n return toTokenAuthentication({\n installationId,\n token,\n createdAt,\n expiresAt,\n repositorySelection,\n permissions,\n repositoryIds,\n repositoryNames,\n singleFileName\n });\n}\nexport {\n getInstallationAuthentication\n};\n", "import { requiresBasicAuth } from \"@octokit/auth-oauth-user\";\nimport { getAppAuthentication } from \"./get-app-authentication\";\nimport { getInstallationAuthentication } from \"./get-installation-authentication\";\nimport { requiresAppAuth } from \"./requires-app-auth\";\nconst FIVE_SECONDS_IN_MS = 5 * 1e3;\nfunction isNotTimeSkewError(error) {\n return !(error.message.match(\n /'Expiration time' claim \\('exp'\\) must be a numeric value representing the future time at which the assertion expires/\n ) || error.message.match(\n /'Issued at' claim \\('iat'\\) must be an Integer representing the time that the assertion was issued/\n ));\n}\nasync function hook(state, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n const url = endpoint.url;\n if (/\\/login\\/oauth\\/access_token$/.test(url)) {\n return request(endpoint);\n }\n if (requiresAppAuth(url.replace(request.endpoint.DEFAULTS.baseUrl, \"\"))) {\n const { token: token2 } = await getAppAuthentication(state);\n endpoint.headers.authorization = `bearer ${token2}`;\n let response;\n try {\n response = await request(endpoint);\n } catch (error) {\n if (isNotTimeSkewError(error)) {\n throw error;\n }\n if (typeof error.response.headers.date === \"undefined\") {\n throw error;\n }\n const diff = Math.floor(\n (Date.parse(error.response.headers.date) - Date.parse((/* @__PURE__ */ new Date()).toString())) / 1e3\n );\n state.log.warn(error.message);\n state.log.warn(\n `[@octokit/auth-app] GitHub API time and system time are different by ${diff} seconds. 
Retrying request with the difference accounted for.`\n );\n const { token: token3 } = await getAppAuthentication({\n ...state,\n timeDifference: diff\n });\n endpoint.headers.authorization = `bearer ${token3}`;\n return request(endpoint);\n }\n return response;\n }\n if (requiresBasicAuth(url)) {\n const authentication = await state.oauthApp({ type: \"oauth-app\" });\n endpoint.headers.authorization = authentication.headers.authorization;\n return request(endpoint);\n }\n const { token, createdAt } = await getInstallationAuthentication(\n state,\n // @ts-expect-error TBD\n {},\n request\n );\n endpoint.headers.authorization = `token ${token}`;\n return sendRequestWithRetries(\n state,\n request,\n endpoint,\n createdAt\n );\n}\nasync function sendRequestWithRetries(state, request, options, createdAt, retries = 0) {\n const timeSinceTokenCreationInMs = +/* @__PURE__ */ new Date() - +new Date(createdAt);\n try {\n return await request(options);\n } catch (error) {\n if (error.status !== 401) {\n throw error;\n }\n if (timeSinceTokenCreationInMs >= FIVE_SECONDS_IN_MS) {\n if (retries > 0) {\n error.message = `After ${retries} retries within ${timeSinceTokenCreationInMs / 1e3}s of creating the installation access token, the response remains 401. At this point, the cause may be an authentication problem or a system outage. Please check https://www.githubstatus.com for status information`;\n }\n throw error;\n }\n ++retries;\n const awaitTime = retries * 1e3;\n state.log.warn(\n `[@octokit/auth-app] Retrying after 401 response to account for token replication delay (retry: ${retries}, wait: ${awaitTime / 1e3}s)`\n );\n await new Promise((resolve) => setTimeout(resolve, awaitTime));\n return sendRequestWithRetries(state, request, options, createdAt, retries);\n }\n}\nexport {\n hook\n};\n", "const PATHS = [\n \"/app\",\n \"/app/hook/config\",\n \"/app/hook/deliveries\",\n \"/app/hook/deliveries/{delivery_id}\",\n \"/app/hook/deliveries/{delivery_id}/attempts\",\n \"/app/installations\",\n \"/app/installations/{installation_id}\",\n \"/app/installations/{installation_id}/access_tokens\",\n \"/app/installations/{installation_id}/suspended\",\n \"/marketplace_listing/accounts/{account_id}\",\n \"/marketplace_listing/plan\",\n \"/marketplace_listing/plans\",\n \"/marketplace_listing/plans/{plan_id}/accounts\",\n \"/marketplace_listing/stubbed/accounts/{account_id}\",\n \"/marketplace_listing/stubbed/plan\",\n \"/marketplace_listing/stubbed/plans\",\n \"/marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"/orgs/{org}/installation\",\n \"/repos/{owner}/{repo}/installation\",\n \"/users/{username}/installation\"\n];\nfunction routeMatcher(paths) {\n const regexes = paths.map(\n (p) => p.split(\"/\").map((c) => c.startsWith(\"{\") ? 
\"(?:.+?)\" : c).join(\"/\")\n );\n const regex = `^(?:${regexes.map((r) => `(?:${r})`).join(\"|\")})$`;\n return new RegExp(regex, \"i\");\n}\nconst REGEX = routeMatcher(PATHS);\nfunction requiresAppAuth(url) {\n return !!url && REGEX.test(url.split(\"?\")[0]);\n}\nexport {\n requiresAppAuth\n};\n", "const VERSION = \"4.0.13\";\nexport {\n VERSION\n};\n"], + "mappings": ";AAAA,SAAS,oBAAoB;AAC7B,SAAS,WAAW,sBAAsB;AAC1C,SAAS,0BAA0B;;;ACFnC,SAAS,mBAAmB;;;ACA5B,SAAS,oBAAoB;AAC7B,eAAe,qBAAqB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAAG;AACD,MAAI;AACF,UAAM,oBAAoB,MAAM,aAAa;AAAA,MAC3C,IAAI,CAAC;AAAA,MACL;AAAA,MACA,KAAK,kBAAkB,KAAK,MAAM,KAAK,IAAI,IAAI,GAAG,IAAI;AAAA,IACxD,CAAC;AACD,WAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO,kBAAkB;AAAA,MACzB,OAAO,kBAAkB;AAAA,MACzB,WAAW,IAAI,KAAK,kBAAkB,aAAa,GAAG,EAAE,YAAY;AAAA,IACtE;AAAA,EACF,SAAS,OAAP;AACA,QAAI,eAAe,mCAAmC;AACpD,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;;;AC3BA,SAAS,gBAAgB;AACzB,SAAS,WAAW;AAClB,SAAO,IAAI,SAAS;AAAA;AAAA,IAElB,KAAK;AAAA;AAAA,IAEL,KAAK,MAAM,KAAK;AAAA,EAClB,CAAC;AACH;AACA,eAAe,IAAI,OAAO,SAAS;AACjC,QAAM,WAAW,kBAAkB,OAAO;AAC1C,QAAM,SAAS,MAAM,MAAM,IAAI,QAAQ;AACvC,MAAI,CAAC,QAAQ;AACX;AAAA,EACF;AACA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,OAAO,MAAM,GAAG;AACpB,QAAM,cAAc,QAAQ,eAAe,kBAAkB,MAAM,GAAG,EAAE,OAAO,CAAC,cAAc,WAAW;AACvG,QAAI,KAAK,KAAK,MAAM,GAAG;AACrB,mBAAa,OAAO,MAAM,GAAG,EAAE,CAAC,IAAI;AAAA,IACtC,OAAO;AACL,mBAAa,MAAM,IAAI;AAAA,IACzB;AACA,WAAO;AAAA,EACT,GAAG,CAAC,CAAC;AACL,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe,QAAQ;AAAA,IACvB,iBAAiB,QAAQ;AAAA,IACzB;AAAA,IACA;AAAA,EACF;AACF;AACA,eAAe,IAAI,OAAO,SAAS,MAAM;AACvC,QAAM,MAAM,kBAAkB,OAAO;AACrC,QAAM,oBAAoB,QAAQ,cAAc,KAAK,OAAO,KAAK,KAAK,WAAW,EAAE;AAAA,IACjF,CAAC,SAAS,GAAG,OAAO,KAAK,YAAY,IAAI,MAAM,UAAU,MAAM;AAAA,EACjE,EAAE,KAAK,GAAG;AACV,QAAM,QAAQ;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL;AAAA,IACA,KAAK;AAAA,EACP,EAAE,KAAK,GAAG;AACV,QAAM,MAAM,IAAI,KAAK,KAAK;AAC5B;AACA,SAAS,kBAAkB;AAAA,EACzB;AAAA,EACA,cAAc,CAAC;AAAA,EACf,gBAAgB,CAAC;AAAA,EACjB,kBAAkB,CAAC;AACrB,GAAG;AACD,QAAM,oBAAoB,OAAO,KAAK,WAAW,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,YAAY,IAAI,MAAM,SAAS,OAAO,GAAG,OAAO,EAAE,KAAK,GAAG;AAClI,QAAM,sBAAsB,cAAc,KAAK,EAAE,KAAK,GAAG;AACzD,QAAM,wBAAwB,gBAAgB,KAAK,GAAG;AACtD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,EAAE,OAAO,OAAO,EAAE,KAAK,GAAG;AAC5B;;;ACxEA,SAAS,sBAAsB;AAAA,EAC7B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAAG;AACD,SAAO,OAAO;AAAA,IACZ;AAAA,MACE,MAAM;AAAA,MACN,WAAW;AAAA,MACX;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,gBAAgB,EAAE,cAAc,IAAI;AAAA,IACpC,kBAAkB,EAAE,gBAAgB,IAAI;AAAA,IACxC,iBAAiB,EAAE,eAAe,IAAI;AAAA,EACxC;AACF;;;ACvBA,eAAe,8BAA8B,OAAO,SAAS,eAAe;AAC1E,QAAM,iBAAiB,OAAO,QAAQ,kBAAkB,MAAM,cAAc;AAC5E,MAAI,CAAC,gBAAgB;AACnB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,QAAQ,SAAS;AACnB,UAAM,EAAE,MAAM,SAAS,UAAU,GAAG,mBAAmB,IAAI;AAAA,MACzD,GAAG;AAAA,MACH,GAAG;AAAA,IACL;AACA,WAAO,QAAQ,kBAAkB;AAAA,EACnC;AACA,QAAM,wCAAwC,OAAO;AAAA,IACnD,EAAE,eAAe;AAAA,IACjB;AAAA,EACF;AACA,MAAI,CAAC,QAAQ,SAAS;AACpB,UAAM,SAAS,MAAM;AAAA,MACnB,MAAM;AAAA,MACN;AAAA,IACF;AACA,QAAI,QAAQ;AACV,YAAM;AAAA,QACJ,OAAO;AAAA,QACP,WAAW;AAAA,QACX,WAAW;AAAA,QACX,aAAa;AAAA,QACb,eAAe;AAAA,QACf,iBAAiB;AAAA,QACjB,gBAAgB;AAAA,QAChB,qBAAqB;AAAA,MACvB,IAAI;AACJ,aAAO,sBAAsB;AAAA,QAC3B;AAAA,QACA,OAAO;AAAA,QACP,WAAW;AAAA,QACX,WAAW;AAAA,QACX,aAAa;AAAA,QACb,qBAAqB;AAAA,QACrB,eAAe;AAAA,QACf,iBAAiB;AAAA,QACjB,gBAAgB;AAAA,MAClB,CAAC;AAAA,IACH;AAAA,EA
CF;AACA,QAAM,oBAAoB,MAAM,qBAAqB,KAAK;AAC1D,QAAM,UAAU,iBAAiB,MAAM;AACvC,QAAM;AAAA,IACJ,MAAM;AAAA,MACJ;AAAA,MACA,YAAY;AAAA,MACZ;AAAA,MACA,aAAa;AAAA,MACb,sBAAsB;AAAA,MACtB,aAAa;AAAA,IACf;AAAA,EACF,IAAI,MAAM,QAAQ,2DAA2D;AAAA,IAC3E,iBAAiB;AAAA,IACjB,gBAAgB,QAAQ;AAAA,IACxB,cAAc,QAAQ;AAAA,IACtB,aAAa,QAAQ;AAAA,IACrB,WAAW;AAAA,MACT,UAAU,CAAC,aAAa;AAAA,IAC1B;AAAA,IACA,SAAS;AAAA,MACP,eAAe,UAAU,kBAAkB;AAAA,IAC7C;AAAA,EACF,CAAC;AACD,QAAM,cAAc,uBAAuB,CAAC;AAC5C,QAAM,sBAAsB,+BAA+B;AAC3D,QAAM,gBAAgB,eAAe,aAAa,IAAI,CAAC,MAAM,EAAE,EAAE,IAAI;AACrE,QAAM,kBAAkB,eAAe,aAAa,IAAI,CAAC,SAAS,KAAK,IAAI,IAAI;AAC/E,QAAM,aAA6B,oBAAI,KAAK,GAAG,YAAY;AAC3D,QAAM,IAAI,MAAM,OAAO,uCAAuC;AAAA,IAC5D;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACD,SAAO,sBAAsB;AAAA,IAC3B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACH;;;AJhGA,eAAe,KAAK,OAAO,aAAa;AACtC,UAAQ,YAAY,MAAM;AAAA,IACxB,KAAK;AACH,aAAO,qBAAqB,KAAK;AAAA,IACnC,KAAK;AACH,YAAM,IAAI;AAAA;AAAA,QAER,IAAI;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,KAAK;AACH,aAAO,MAAM,SAAS,EAAE,MAAM,YAAY,CAAC;AAAA,IAC7C,KAAK;AACH;AACA,aAAO,8BAA8B,OAAO;AAAA,QAC1C,GAAG;AAAA,QACH,MAAM;AAAA,MACR,CAAC;AAAA,IACH,KAAK;AACH,aAAO,MAAM,SAAS,WAAW;AAAA,IACnC;AACE,YAAM,IAAI,MAAM,sBAAsB,YAAY,MAAM;AAAA,EAC5D;AACF;;;AK3BA,SAAS,yBAAyB;;;ACAlC,IAAM,QAAQ;AAAA,EACZ;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AACA,SAAS,aAAa,OAAO;AAC3B,QAAM,UAAU,MAAM;AAAA,IACpB,CAAC,MAAM,EAAE,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,EAAE,WAAW,GAAG,IAAI,YAAY,CAAC,EAAE,KAAK,GAAG;AAAA,EAC5E;AACA,QAAM,QAAQ,OAAO,QAAQ,IAAI,CAAC,MAAM,MAAM,IAAI,EAAE,KAAK,GAAG;AAC5D,SAAO,IAAI,OAAO,OAAO,GAAG;AAC9B;AACA,IAAM,QAAQ,aAAa,KAAK;AAChC,SAAS,gBAAgB,KAAK;AAC5B,SAAO,CAAC,CAAC,OAAO,MAAM,KAAK,IAAI,MAAM,GAAG,EAAE,CAAC,CAAC;AAC9C;;;AD5BA,IAAM,qBAAqB,IAAI;AAC/B,SAAS,mBAAmB,OAAO;AACjC,SAAO,EAAE,MAAM,QAAQ;AAAA,IACrB;AAAA,EACF,KAAK,MAAM,QAAQ;AAAA,IACjB;AAAA,EACF;AACF;AACA,eAAe,KAAK,OAAO,SAAS,OAAO,YAAY;AACrD,QAAM,WAAW,QAAQ,SAAS,MAAM,OAAO,UAAU;AACzD,QAAM,MAAM,SAAS;AACrB,MAAI,gCAAgC,KAAK,GAAG,GAAG;AAC7C,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACA,MAAI,gBAAgB,IAAI,QAAQ,QAAQ,SAAS,SAAS,SAAS,EAAE,CAAC,GAAG;AACvE,UAAM,EAAE,OAAO,OAAO,IAAI,MAAM,qBAAqB,KAAK;AAC1D,aAAS,QAAQ,gBAAgB,UAAU;AAC3C,QAAI;AACJ,QAAI;AACF,iBAAW,MAAM,QAAQ,QAAQ;AAAA,IACnC,SAAS,OAAP;AACA,UAAI,mBAAmB,KAAK,GAAG;AAC7B,cAAM;AAAA,MACR;AACA,UAAI,OAAO,MAAM,SAAS,QAAQ,SAAS,aAAa;AACtD,cAAM;AAAA,MACR;AACA,YAAM,OAAO,KAAK;AAAA,SACf,KAAK,MAAM,MAAM,SAAS,QAAQ,IAAI,IAAI,KAAK,OAAuB,oBAAI,KAAK,GAAG,SAAS,CAAC,KAAK;AAAA,MACpG;AACA,YAAM,IAAI,KAAK,MAAM,OAAO;AAC5B,YAAM,IAAI;AAAA,QACR,wEAAwE;AAAA,MAC1E;AACA,YAAM,EAAE,OAAO,OAAO,IAAI,MAAM,qBAAqB;AAAA,QACnD,GAAG;AAAA,QACH,gBAAgB;AAAA,MAClB,CAAC;AACD,eAAS,QAAQ,gBAAgB,UAAU;AAC3C,aAAO,QAAQ,QAAQ;AAAA,IACzB;AACA,WAAO;AAAA,EACT;AACA,MAAI,kBAAkB,GAAG,GAAG;AAC1B,UAAM,iBAAiB,MAAM,MAAM,SAAS,EAAE,MAAM,YAAY,CAAC;AACjE,aAAS,QAAQ,gBAAgB,eAAe,QAAQ;AACxD,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACA,QAAM,EAAE,OAAO,UAAU,IAAI,MAAM;AAAA,IACjC;AAAA;AAAA,IAEA,CAAC;AAAA,IACD;AAAA,EACF;AACA,WAAS,QAAQ,gBAAgB,SAAS;AAC1C,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AACA,eAAe,uBAAuB,OAAO,SAAS,SAAS,WAAW,UAAU,GAAG;AACrF,QAAM,6BAA6B,CAAiB,oBAAI,KAAK,IAAI,CAAC,IAAI,KAAK,SAAS;AACpF,MAAI;AACF,WAAO,MAAM,QAAQ,OAAO;AAAA,EAC9B,SAAS,OAAP;AACA,QAAI,MAAM,WAAW,KAAK;AACxB,YAAM;AAAA,IACR;AACA,QAAI,8BAA8B,oBAAoB;AACpD,UAAI,UAAU,GAAG;AACf,cAAM,UAAU,SAAS,0BAA0B,6BAA6B;AAAA,MAClF;AACA,YAAM;AAAA,IACR;AACA,MAAE;AACF,UAAM,
YAAY,UAAU;AAC5B,UAAM,IAAI;AAAA,MACR,kGAAkG,kBAAkB,YAAY;AAAA,IAClI;AACA,UAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,SAAS,CAAC;AAC7D,WAAO,uBAAuB,OAAO,SAAS,SAAS,WAAW,OAAO;AAAA,EAC3E;AACF;;;AExFA,IAAM,UAAU;;;AROhB,SAAS,2BAA2B;AACpC,SAAS,cAAc,SAAS;AAC9B,MAAI,CAAC,QAAQ,OAAO;AAClB,UAAM,IAAI,MAAM,8CAA8C;AAAA,EAChE;AACA,MAAI,CAAC,OAAO,SAAS,CAAC,QAAQ,KAAK,GAAG;AACpC,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,QAAQ,YAAY;AACvB,UAAM,IAAI,MAAM,mDAAmD;AAAA,EACrE;AACA,MAAI,oBAAoB,WAAW,CAAC,QAAQ,gBAAgB;AAC1D,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,QAAM,MAAM,OAAO;AAAA,IACjB;AAAA,MACE,MAAM,QAAQ,KAAK,KAAK,OAAO;AAAA,IACjC;AAAA,IACA,QAAQ;AAAA,EACV;AACA,QAAM,UAAU,QAAQ,WAAW,eAAe,SAAS;AAAA,IACzD,SAAS;AAAA,MACP,cAAc,uBAAuB,WAAW,aAAa;AAAA,IAC/D;AAAA,EACF,CAAC;AACD,QAAM,QAAQ,OAAO;AAAA,IACnB;AAAA,MACE;AAAA,MACA,OAAO,SAAS;AAAA,IAClB;AAAA,IACA;AAAA,IACA,QAAQ,iBAAiB,EAAE,gBAAgB,OAAO,QAAQ,cAAc,EAAE,IAAI,CAAC;AAAA,IAC/E;AAAA,MACE;AAAA,MACA,UAAU,mBAAmB;AAAA,QAC3B,YAAY;AAAA,QACZ,UAAU,QAAQ,YAAY;AAAA,QAC9B,cAAc,QAAQ,gBAAgB;AAAA,QACtC;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACA,SAAO,OAAO,OAAO,KAAK,KAAK,MAAM,KAAK,GAAG;AAAA,IAC3C,MAAM,KAAK,KAAK,MAAM,KAAK;AAAA,EAC7B,CAAC;AACH;", + "names": [] +} diff --git a/dist/node_modules/@octokit/auth-app/package.json b/dist/node_modules/@octokit/auth-app/package.json new file mode 100644 index 0000000..e1218ef --- /dev/null +++ b/dist/node_modules/@octokit/auth-app/package.json @@ -0,0 +1,56 @@ +{ + "name": "@octokit/auth-app", + "publishConfig": { + "access": "public" + }, + "version": "4.0.13", + "description": "GitHub App authentication for JavaScript", + "repository": "github:octokit/auth-app.js", + "keywords": [ + "github", + "octokit", + "authentication", + "api" + ], + "author": "Gregor Martynus (https://github.com/gr2m)", + "license": "MIT", + "dependencies": { + "@octokit/auth-oauth-app": "^5.0.0", + "@octokit/auth-oauth-user": "^2.0.0", + "@octokit/request": "^6.0.0", + "@octokit/request-error": "^3.0.0", + "@octokit/types": "^9.0.0", + "deprecation": "^2.3.1", + "lru-cache": "^9.0.0", + "universal-github-app-jwt": "^1.1.1", + "universal-user-agent": "^6.0.0" + }, + "devDependencies": { + "@octokit/tsconfig": "^1.0.2", + "@sinonjs/fake-timers": "^8.0.0", + "@types/fetch-mock": "^7.3.1", + "@types/jest": "^29.0.0", + "@types/node": "^18.11.18", + "@types/sinonjs__fake-timers": "^8.0.0", + "esbuild": "^0.17.19", + "fetch-mock": "^9.0.0", + "glob": "^10.2.5", + "jest": "^29.0.0", + "prettier": "2.8.8", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "source": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/auth-oauth-app/LICENSE b/dist/node_modules/@octokit/auth-oauth-app/LICENSE new file mode 100644 index 0000000..ef2c18e --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following 
conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/dist/node_modules/@octokit/auth-oauth-app/README.md b/dist/node_modules/@octokit/auth-oauth-app/README.md new file mode 100644 index 0000000..27ed15b --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/README.md @@ -0,0 +1,1054 @@ +# auth-oauth-app.js + +> GitHub OAuth App authentication for JavaScript + +[![@latest](https://img.shields.io/npm/v/@octokit/auth-oauth-app.svg)](https://www.npmjs.com/package/@octokit/auth-oauth-app) +[![Build Status](https://github.com/octokit/auth-oauth-app.js/workflows/Test/badge.svg)](https://github.com/octokit/auth-oauth-app.js/actions?query=workflow%3ATest) + +`@octokit/auth-oauth-app` is implementing one of [GitHub’s authentication strategies](https://github.com/octokit/auth.js). + +It implements authentication using an OAuth app’s client ID and secret as well as creating user access tokens GitHub's OAuth [web application flow](https://developer.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow) and [device flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#device-flow). + + + +- [Standalone Usage](#standalone-usage) + - [Authenticate as app](#authenticate-as-app) + - [Authenticate user using OAuth Web Flow](#authenticate-user-using-oauth-web-flow) + - [Authenticate user using OAuth Device flow](#authenticate-user-using-oauth-device-flow) +- [Usage with Octokit](#usage-with-octokit) +- [`createOAuthAppAuth(options)` or `new Octokit({ auth })`](#createoauthappauthoptions-or-new-octokit-auth-) +- [`auth(options)` or `octokit.auth(options)`](#authoptions-or-octokitauthoptions) + - [Client ID/Client Secret Basic authentication](#client-idclient-secret-basic-authentication) + - [OAuth web flow](#oauth-web-flow) + - [OAuth device flow](#oauth-device-flow) +- [Authentication object](#authentication-object) + - [OAuth App authentication](#oauth-app-authentication) + - [OAuth user access token authentication](#oauth-user-access-token-authentication) + - [GitHub APP user authentication token with expiring disabled](#github-app-user-authentication-token-with-expiring-disabled) + - [GitHub APP user authentication token with expiring enabled](#github-app-user-authentication-token-with-expiring-enabled) +- [`auth.hook(request, route, parameters)` or `auth.hook(request, options)`](#authhookrequest-route-parameters-or-authhookrequest-options) +- [Types](#types) +- [Implementation details](#implementation-details) +- [License](#license) + + + +## Standalone Usage + + + + + + +
+Browsers + + +⚠️ `@octokit/auth-oauth-app` is not meant for usage in the browser. The OAuth APIs to create tokens do not have CORS enabled, and a client secret must not be exposed to the client. + +If you know what you are doing, load `@octokit/auth-oauth-app` directly from [cdn.skypack.dev](https://cdn.skypack.dev) + +```html + +``` + +
+Node + + +Install with npm install @octokit/auth-oauth-app + +```js +const { createOAuthAppAuth } = require("@octokit/auth-oauth-app"); +// or: import { createOAuthAppAuth } from "@octokit/auth-oauth-app"; +``` + +
+ +### Authenticate as app + +```js +const auth = createOAuthAppAuth({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", +}); + +const appAuthentication = await auth({ + type: "oauth-app", +}); +``` + +resolves with + +```json +{ + "type": "oauth-app", + "clientId": "1234567890abcdef1234", + "clientSecret": "1234567890abcdef1234567890abcdef12345678", + "headers": { + "authorization": "basic MTIzNDU2Nzg5MGFiY2RlZjEyMzQ6MTIzNDU2Nzg5MGFiY2RlZjEyMzQ1Njc4OTBhYmNkZWYxMjM0NTY3OA==" + } +} +``` + +### Authenticate user using OAuth Web Flow + +Exchange code from GitHub's OAuth web flow, see https://docs.github.com/en/developers/apps/authorizing-oauth-apps#2-users-are-redirected-back-to-your-site-by-github + +```js +const auth = createOAuthAppAuth({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", +}); + +const userAuthenticationFromWebFlow = await auth({ + type: "oauth-user", + code: "random123", + state: "mystate123", +}); +``` + +resolves with + +```json +{ + "clientType": "oauth-app", + "clientId": "1234567890abcdef1234", + "clientSecret": "1234567890abcdef1234567890abcdef12345678", + "type": "token", + "tokenType": "oauth", + "token": "useraccesstoken123", + "scopes": [] +} +``` + +### Authenticate user using OAuth Device flow + +Pass an asynchronous `onVerification()` method which will be called with the response from step 1 of the device flow. In that function you have to prompt the user to enter the user code at the provided verification URL. + +`auth()` will not resolve until the user entered the code and granted access to the app. + +See https://docs.github.com/en/developers/apps/authorizing-oauth-apps#2-users-are-redirected-back-to-your-site-by-github + +```js +const auth = createOAuthAppAuth({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", +}); + +const userAuthenticationFromDeviceFlow = await auth({ + async onVerification(verification) { + // verification example + // { + // device_code: "3584d83530557fdd1f46af8289938c8ef79f9dc5", + // user_code: "WDJB-MJHT", + // verification_uri: "https://github.com/login/device", + // expires_in: 900, + // interval: 5, + // }; + + console.log("Open %s", verification.verification_uri); + console.log("Enter code: %s", verification.user_code); + }, +}); +``` + +resolves with + +```json +{ + "clientType": "oauth-app", + "clientId": "1234567890abcdef1234", + "clientSecret": "1234567890abcdef1234567890abcdef12345678", + "type": "token", + "tokenType": "oauth", + "token": "useraccesstoken123", + "scopes": [] +} +``` + +## Usage with Octokit + + + + + + +
+ +Browsers + + + +⚠️ `@octokit/auth-oauth-app` is not meant for usage in the browser. The OAuth APIs to create tokens do not have CORS enabled, and a client secret must not be exposed to the client. + +If you know what you are doing, load `@octokit/auth-oauth-app` and `@octokit/core` (or a compatible module) directly from [cdn.skypack.dev](https://cdn.skypack.dev) + +```html + +``` + +
+ +Node + + + +Install with `npm install @octokit/core @octokit/auth-oauth-app`. Optionally replace `@octokit/core` with a compatible module + +```js +const { Octokit } = require("@octokit/core"); +const { + createOAuthAppAuth, + createOAuthUserAuth, +} = require("@octokit/auth-oauth-app"); +``` + +
+ +```js +const appOctokit = new Octokit({ + authStrategy: createOAuthAppAuth, + auth: { + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", + }, +}); + +// Send requests as app +await appOctokit.request("POST /applications/{client_id}/token", { + client_id: "1234567890abcdef1234", + access_token: "existingtoken123", +}); +console.log("token is valid"); + +// create a new octokit instance that is authenticated as the user +const userOctokit = await appOctokit.auth({ + type: "oauth-user", + code: "code123", + factory: (options) => { + return new Octokit({ + authStrategy: createOAuthUserAuth, + auth: options, + }); + }, +}); + +// Exchanges the code for the user access token authentication on first request +// and caches the authentication for successive requests +const { + data: { login }, +} = await userOctokit.request("GET /user"); +console.log("Hello, %s!", login); +``` + +## `createOAuthAppAuth(options)` or `new Octokit({ auth })` + +The `createOAuthAppAuth` method accepts a single `options` object as argument. The same set of options can be passed as `auth` to the `Octokit` constructor when setting `authStrategy: createOAuthAppAuth`. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ clientId + + string + + Required. Find your OAuth app’s Client ID in your account’s developer settings. +
+ clientSecret + + string + + Required. Find your OAuth app’s Client Secret in your account’s developer settings. +
+ clientType + + string + + Must be set to either "oauth-app" or "github-app". Defaults to "oauth-app" +
+ request + + function + + You can pass in your own @octokit/request instance. For usage with enterprise, set baseUrl to the API root endpoint. Example: + +```js +const { request } = require("@octokit/request"); +createOAuthAppAuth({ + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", + request: request.defaults({ + baseUrl: "https://ghe.my-company.com/api/v3", + }), +}); +``` + +
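+
+As a minimal sketch combining the options above, the same strategy options also work for GitHub Apps by setting `clientType: "github-app"` (the client ID and secret below are placeholders in the style of the other examples in this README):
+
+```js
+const { createOAuthAppAuth } = require("@octokit/auth-oauth-app");
+
+const auth = createOAuthAppAuth({
+  // "github-app" switches the strategy to a GitHub App's OAuth credentials
+  clientType: "github-app",
+  clientId: "lv1.1234567890abcdef",
+  clientSecret: "1234567890abcdef1234567890abcdef12345678",
+});
+```
+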
+ +## `auth(options)` or `octokit.auth(options)` + +The async `auth()` method returned by `createOAuthAppAuth(options)` accepts different options depending on your use case + +### Client ID/Client Secret Basic authentication + +All REST API routes starting with `/applications/{client_id}` need to be authenticated using the OAuth/GitHub App's Client ID and a client secret. + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + Required. Must be set to "oauth-app" +
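+
+For example, here is a minimal sketch using the strategy options shown earlier; the returned `headers.authorization` value is the Basic authentication header that `auth.hook()` also applies to `/applications/{client_id}/*` requests:
+
+```js
+// `auth` is the function returned by createOAuthAppAuth(options) above
+const appAuthentication = await auth({ type: "oauth-app" });
+
+// contains the "basic ..." header derived from clientId and clientSecret
+console.log(appAuthentication.headers.authorization);
+```
+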
+ +### OAuth web flow + +Exchange `code` for a user access token. See [Web application flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#web-application-flow). + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + Required. Must be set to "oauth-user". +
+ code + + string + + Required. The authorization code which was passed as query parameter to the callback URL from the OAuth web application flow. +
+ redirectUrl + + string + + The URL in your application where users are sent after authorization. See redirect urls. +
+ state + + string + + The unguessable random string you provided in Step 1 of the OAuth web application flow. +
+ factory + + function + + +When the `factory` option is passed, the `auth({type: "oauth-user", code, factory })` call will resolve with whatever the `factory` function returns. The `factory` function will be called with all the strategy options that `auth` was created with, plus the additional options passed to `auth`, besides `type` and `factory`. + +For example, you can create a new `auth` instance for a user using [`createOAuthUserAuth`](https://github.com/octokit/auth-oauth-user.js/#readme) which implements auto-refreshing tokens, among other features. You can import `createOAuthUserAuth` directly from `@octokit/auth-oauth-app` which will ensure compatibility. + +```js +const { + createOAuthAppAuth, + createOAuthUserAuth, +} = require("@octokit/auth-oauth-app"); + +const appAuth = createOAuthAppAuth({ + clientType: "github-app", + clientId: "lv1.1234567890abcdef", + clientSecret: "1234567890abcdef1234567890abcdef12345678", +}); + +const userAuth = await appAuth({ + type: "oauth-user", + code, + factory: createOAuthUserAuth, +}); + +// will create token upon first call, then cache authentication for successive calls, +// until token needs to be refreshed (if enabled for the GitHub App) +const authentication = await userAuth(); +``` + +
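+
+Putting the optional parameters together, a web flow call might look like the following sketch; the `redirectUrl` value is a hypothetical placeholder and, like `state`, has to match what was used in step 1 of the web application flow:
+
+```js
+const userAuthentication = await auth({
+  type: "oauth-user",
+  code: "random123",
+  // hypothetical callback URL registered for the OAuth app
+  redirectUrl: "https://example.com/github/callback",
+  state: "mystate123",
+});
+```
+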
+ +### OAuth device flow + +Create a user access token without an HTTP redirect. See [Device flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#device-flow). + +The device flow itself does not require a client secret, but `@octokit/auth-oauth-app` requires the client secret as a strategy option, even for the device flow. If you want to implement the device flow without requiring a client secret, use [`@octokit/auth-oauth-device`](https://github.com/octokit/auth-oauth-device.js#readme). + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + Required. Must be set to "oauth-user". +
+ onVerification + + function + + +**Required**. A function that is called once the device and user codes have been retrieved. + +The `onVerification()` callback can be used to pause until the user completes step 2, which might result in a better user experience. + +```js +const authentication = await auth({ + type: "oauth-user", + async onVerification(verification) { + console.log("Open %s", verification.verification_uri); + console.log("Enter code: %s", verification.user_code); + + await prompt("press enter when you are ready to continue"); + }, +}); +``` + +
+ scopes + + array of strings + + Only relevant if the clientType strategy option is set to "oauth-app". Array of OAuth scope names that the user access token should be granted. Defaults to no scopes ([]). +
+ factory + + function + + +When the `factory` option is passed, the `auth({type: "oauth-user", onVerification, factory })` call will resolve with whatever the `factory` function returns. The `factory` function will be called with all the strategy options that `auth` was created with, plus the additional options passed to `auth`, besides `type` and `factory`. + +For example, you can create a new `auth` instance for a user using [`createOAuthUserAuth`](https://github.com/octokit/auth-oauth-user.js/#readme) which implements auto-refreshing tokens, among other features. You can import `createOAuthUserAuth` directly from `@octokit/auth-oauth-app` which will ensure compatibility. + +```js +const { + createOAuthAppAuth, + createOAuthUserAuth, +} = require("@octokit/auth-oauth-app"); + +const appAuth = createOAuthAppAuth({ + clientType: "github-app", + clientId: "lv1.1234567890abcdef", + clientSecret: "1234567890abcdef1234567890abcdef12345678", +}); + +const userAuth = await appAuth({ + type: "oauth-user", + onVerification, + factory: createOAuthUserAuth, +}); + +// will create token upon first call, then cache authentication for successive calls, +// until token needs to be refreshed (if enabled for the GitHub App) +const authentication = await userAuth(); +``` + +
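+
+As a further sketch, an OAuth App can request scopes during the device flow (the `scopes` option is only relevant for OAuth Apps, as noted in the table above):
+
+```js
+const userAuthentication = await auth({
+  type: "oauth-user",
+  // only relevant when the strategy's clientType is "oauth-app"
+  scopes: ["public_repo"],
+  onVerification(verification) {
+    console.log("Open %s", verification.verification_uri);
+    console.log("Enter code: %s", verification.user_code);
+  },
+});
+```
+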
+ +## Authentication object + +The async `auth(options)` method resolves with one of four possible authentication objects: + +1. **OAuth App authentication** for `auth({ type: "oauth-app" })` +2. **OAuth user access token authentication** for `auth({ type: "oauth-user" })` when the app is an OAuth App (OAuth user access token) +3. **GitHub APP user authentication token with expiring disabled** for `auth({ type: "oauth-user" })` when the app is a GitHub App (user-to-server token) +4. **GitHub APP user authentication token with expiring enabled** for `auth({ type: "oauth-user" })` when the app is a GitHub App (user-to-server token) + +### OAuth App authentication + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "oauth-app" +
+ clientType + + string + + "oauth-app" or "github-app" +
+ clientId + + string + + The client ID as passed to the constructor. +
+ clientSecret + + string + + The client secret as passed to the constructor. +
+ headers + + object + + { authorization }. +
+ +### OAuth user access token authentication + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "token" +
+ tokenType + + string + + "oauth" +
+ clientType + + string + + "oauth-app" +
+ clientId + + string + + The clientId from the strategy options +
+ clientSecret + + string + + The clientSecret from the strategy options +
+ token + + string + + The user access token +
+ scopes + + array of strings + + array of scope names enabled for the token +
+ +### GitHub APP user authentication token with expiring disabled + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "token" +
+ tokenType + + string + + "oauth" +
+ clientType + + string + + "github-app" +
+ clientId + + string + + The app's Client ID +
+ clientSecret + + string + + One of the app's client secrets +
+ token + + string + + The user access token +
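+
+For illustration, such an authentication object might look like the following sketch; all values are placeholders in the style of the examples above:
+
+```json
+{
+  "type": "token",
+  "tokenType": "oauth",
+  "clientType": "github-app",
+  "clientId": "lv1.1234567890abcdef",
+  "clientSecret": "1234567890abcdef1234567890abcdef12345678",
+  "token": "usertoken123"
+}
+```
+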
+ +### GitHub APP user authentication token with expiring enabled + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "token" +
+ tokenType + + string + + "oauth" +
+ clientType + + string + + "github-app" +
+ clientId + + string + + The app's Client ID +
+ clientSecret + + string + + One of the app's client secrets +
+ token + + string + + The user access token +
+ refreshToken + + string + + The refresh token +
+ expiresAt + + string + + Date timestamp in ISO 8601 standard. Example: 2022-01-01T08:00:00.000Z +
+ refreshTokenExpiresAt + + string + + Date timestamp in ISO 8601 standard. Example: 2021-07-01T00:00:00.000Z +
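+
+With expiring user tokens enabled, the object additionally carries the refresh and expiration fields; again a sketch with placeholder values:
+
+```json
+{
+  "type": "token",
+  "tokenType": "oauth",
+  "clientType": "github-app",
+  "clientId": "lv1.1234567890abcdef",
+  "clientSecret": "1234567890abcdef1234567890abcdef12345678",
+  "token": "usertoken123",
+  "refreshToken": "refreshtoken123",
+  "expiresAt": "2022-01-01T08:00:00.000Z",
+  "refreshTokenExpiresAt": "2021-07-01T00:00:00.000Z"
+}
+```
+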
+ +## `auth.hook(request, route, parameters)` or `auth.hook(request, options)` + +`auth.hook()` hooks directly into the request life cycle. It amends the request to authenticate correctly using `clientId` and `clientSecret` as basic auth for the API endpoints that support it. It throws an error in other cases. + +The `request` option is an instance of [`@octokit/request`](https://github.com/octokit/request.js#readme). The `route`/`options` parameters are the same as for the [`request()` method](https://github.com/octokit/request.js#request). + +`auth.hook()` can be called directly to send an authenticated request + +```js +const { data: user } = await auth.hook( + request, + "POST /applications/{client_id}/token", + { + client_id: "1234567890abcdef1234", + access_token: "token123", + } +); +``` + +Or it can be passed as option to [`request()`](https://github.com/octokit/request.js#request). + +```js +const requestWithAuth = request.defaults({ + request: { + hook: auth.hook, + }, +}); + +const { data: user } = await requestWithAuth( + "POST /applications/{client_id}/token", + { + client_id: "1234567890abcdef1234", + access_token: "token123", + } +); +``` + +## Types + +```ts +import { + // strategy options + OAuthAppStrategyOptions, + GitHubAppStrategyOptions, + // auth options + AppAuthOptions, + WebFlowAuthOptions, + OAuthAppDeviceFlowAuthOptions, + GitHubAppDeviceFlowAuthOptions, + // auth interfaces + OAuthAppAuthInterface, + GitHubAuthInterface, + // authentication object + AppAuthentication, + OAuthAppUserAuthentication, + GitHubAppUserAuthentication, + GitHubAppUserAuthenticationWithExpiration, +} from "@octokit/auth-oauth-app"; +``` + +## Implementation details + +Client ID and secret can be passed as Basic auth in the `Authorization` header in order to get a higher rate limit compared to unauthenticated requests. This is meant for the use on servers only: never expose an OAuth client secret on a client such as a web application! + +`auth.hook` will set the correct authentication header automatically based on the request URL. For all [OAuth Application endpoints](https://developer.github.com/v3/apps/oauth_applications/), the `Authorization` header is set to basic auth. For all other endpoints and token is retrieved and used in the `Authorization` header. The token is cached and used for succeeding requests. + +To reset the cached access token, you can do this + +```js +const { token } = await auth({ type: "oauth-user" }); +await auth.hook(request, "POST /applications/{client_id}/token", { + client_id: "1234567890abcdef1234", + access_token: token, +}); +``` + +The internally cached token will be replaced and used for succeeding requests. See also ["the REST API documentation"](https://developer.github.com/v3/oauth_authorizations/). + +See also: [octokit/oauth-authorization-url.js](https://github.com/octokit/oauth-authorization-url.js). 
+ +## License + +[MIT](LICENSE) + +``` + +``` diff --git a/dist/node_modules/@octokit/auth-oauth-app/dist-node/index.js b/dist/node_modules/@octokit/auth-oauth-app/dist-node/index.js new file mode 100644 index 0000000..e518fe5 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/dist-node/index.js @@ -0,0 +1,133 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createOAuthAppAuth: () => createOAuthAppAuth, + createOAuthUserAuth: () => import_auth_oauth_user3.createOAuthUserAuth +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = require("universal-user-agent"); +var import_request = require("@octokit/request"); + +// pkg/dist-src/auth.js +var import_btoa_lite = __toESM(require("btoa-lite")); +var import_auth_oauth_user = require("@octokit/auth-oauth-user"); +async function auth(state, authOptions) { + if (authOptions.type === "oauth-app") { + return { + type: "oauth-app", + clientId: state.clientId, + clientSecret: state.clientSecret, + clientType: state.clientType, + headers: { + authorization: `basic ${(0, import_btoa_lite.default)( + `${state.clientId}:${state.clientSecret}` + )}` + } + }; + } + if ("factory" in authOptions) { + const { type, ...options } = { + ...authOptions, + ...state + }; + return authOptions.factory(options); + } + const common = { + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.request, + ...authOptions + }; + const userAuth = state.clientType === "oauth-app" ? 
await (0, import_auth_oauth_user.createOAuthUserAuth)({ + ...common, + clientType: state.clientType + }) : await (0, import_auth_oauth_user.createOAuthUserAuth)({ + ...common, + clientType: state.clientType + }); + return userAuth(); +} + +// pkg/dist-src/hook.js +var import_btoa_lite2 = __toESM(require("btoa-lite")); +var import_auth_oauth_user2 = require("@octokit/auth-oauth-user"); +async function hook(state, request2, route, parameters) { + let endpoint = request2.endpoint.merge( + route, + parameters + ); + if (/\/login\/(oauth\/access_token|device\/code)$/.test(endpoint.url)) { + return request2(endpoint); + } + if (state.clientType === "github-app" && !(0, import_auth_oauth_user2.requiresBasicAuth)(endpoint.url)) { + throw new Error( + `[@octokit/auth-oauth-app] GitHub Apps cannot use their client ID/secret for basic authentication for endpoints other than "/applications/{client_id}/**". "${endpoint.method} ${endpoint.url}" is not supported.` + ); + } + const credentials = (0, import_btoa_lite2.default)(`${state.clientId}:${state.clientSecret}`); + endpoint.headers.authorization = `basic ${credentials}`; + try { + return await request2(endpoint); + } catch (error) { + if (error.status !== 401) + throw error; + error.message = `[@octokit/auth-oauth-app] "${endpoint.method} ${endpoint.url}" does not support clientId/clientSecret basic authentication.`; + throw error; + } +} + +// pkg/dist-src/version.js +var VERSION = "5.0.6"; + +// pkg/dist-src/index.js +var import_auth_oauth_user3 = require("@octokit/auth-oauth-user"); +function createOAuthAppAuth(options) { + const state = Object.assign( + { + request: import_request.request.defaults({ + headers: { + "user-agent": `octokit-auth-oauth-app.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + } + }), + clientType: "oauth-app" + }, + options + ); + return Object.assign(auth.bind(null, state), { + hook: hook.bind(null, state) + }); +} +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + createOAuthAppAuth, + createOAuthUserAuth +}); diff --git a/dist/node_modules/@octokit/auth-oauth-app/dist-node/index.js.map b/dist/node_modules/@octokit/auth-oauth-app/dist-node/index.js.map new file mode 100644 index 0000000..10f30aa --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/auth.js", "../dist-src/hook.js", "../dist-src/version.js"], + "sourcesContent": ["import { getUserAgent } from \"universal-user-agent\";\nimport { request } from \"@octokit/request\";\nimport { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nimport { VERSION } from \"./version\";\nimport { createOAuthUserAuth } from \"@octokit/auth-oauth-user\";\nfunction createOAuthAppAuth(options) {\n const state = Object.assign(\n {\n request: request.defaults({\n headers: {\n \"user-agent\": `octokit-auth-oauth-app.js/${VERSION} ${getUserAgent()}`\n }\n }),\n clientType: \"oauth-app\"\n },\n options\n );\n return Object.assign(auth.bind(null, state), {\n hook: hook.bind(null, state)\n });\n}\nexport {\n createOAuthAppAuth,\n createOAuthUserAuth\n};\n", "import btoa from \"btoa-lite\";\nimport { createOAuthUserAuth } from \"@octokit/auth-oauth-user\";\nasync function auth(state, authOptions) {\n if (authOptions.type === \"oauth-app\") {\n return {\n type: \"oauth-app\",\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n clientType: state.clientType,\n headers: {\n authorization: `basic 
${btoa(\n `${state.clientId}:${state.clientSecret}`\n )}`\n }\n };\n }\n if (\"factory\" in authOptions) {\n const { type, ...options } = {\n ...authOptions,\n ...state\n };\n return authOptions.factory(options);\n }\n const common = {\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n request: state.request,\n ...authOptions\n };\n const userAuth = state.clientType === \"oauth-app\" ? await createOAuthUserAuth({\n ...common,\n clientType: state.clientType\n }) : await createOAuthUserAuth({\n ...common,\n clientType: state.clientType\n });\n return userAuth();\n}\nexport {\n auth\n};\n", "import btoa from \"btoa-lite\";\nimport { requiresBasicAuth } from \"@octokit/auth-oauth-user\";\nasync function hook(state, request, route, parameters) {\n let endpoint = request.endpoint.merge(\n route,\n parameters\n );\n if (/\\/login\\/(oauth\\/access_token|device\\/code)$/.test(endpoint.url)) {\n return request(endpoint);\n }\n if (state.clientType === \"github-app\" && !requiresBasicAuth(endpoint.url)) {\n throw new Error(\n `[@octokit/auth-oauth-app] GitHub Apps cannot use their client ID/secret for basic authentication for endpoints other than \"/applications/{client_id}/**\". \"${endpoint.method} ${endpoint.url}\" is not supported.`\n );\n }\n const credentials = btoa(`${state.clientId}:${state.clientSecret}`);\n endpoint.headers.authorization = `basic ${credentials}`;\n try {\n return await request(endpoint);\n } catch (error) {\n if (error.status !== 401)\n throw error;\n error.message = `[@octokit/auth-oauth-app] \"${endpoint.method} ${endpoint.url}\" does not support clientId/clientSecret basic authentication.`;\n throw error;\n }\n}\nexport {\n hook\n};\n", "const VERSION = \"5.0.6\";\nexport {\n VERSION\n};\n"], + "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAA6B;AAC7B,qBAAwB;;;ACDxB,uBAAiB;AACjB,6BAAoC;AACpC,eAAe,KAAK,OAAO,aAAa;AACtC,MAAI,YAAY,SAAS,aAAa;AACpC,WAAO;AAAA,MACL,MAAM;AAAA,MACN,UAAU,MAAM;AAAA,MAChB,cAAc,MAAM;AAAA,MACpB,YAAY,MAAM;AAAA,MAClB,SAAS;AAAA,QACP,eAAe,aAAS,iBAAAA;AAAA,UACtB,GAAG,MAAM,YAAY,MAAM;AAAA,QAC7B;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACA,MAAI,aAAa,aAAa;AAC5B,UAAM,EAAE,MAAM,GAAG,QAAQ,IAAI;AAAA,MAC3B,GAAG;AAAA,MACH,GAAG;AAAA,IACL;AACA,WAAO,YAAY,QAAQ,OAAO;AAAA,EACpC;AACA,QAAM,SAAS;AAAA,IACb,UAAU,MAAM;AAAA,IAChB,cAAc,MAAM;AAAA,IACpB,SAAS,MAAM;AAAA,IACf,GAAG;AAAA,EACL;AACA,QAAM,WAAW,MAAM,eAAe,cAAc,UAAM,4CAAoB;AAAA,IAC5E,GAAG;AAAA,IACH,YAAY,MAAM;AAAA,EACpB,CAAC,IAAI,UAAM,4CAAoB;AAAA,IAC7B,GAAG;AAAA,IACH,YAAY,MAAM;AAAA,EACpB,CAAC;AACD,SAAO,SAAS;AAClB;;;ACrCA,IAAAC,oBAAiB;AACjB,IAAAC,0BAAkC;AAClC,eAAe,KAAK,OAAOC,UAAS,OAAO,YAAY;AACrD,MAAI,WAAWA,SAAQ,SAAS;AAAA,IAC9B;AAAA,IACA;AAAA,EACF;AACA,MAAI,+CAA+C,KAAK,SAAS,GAAG,GAAG;AACrE,WAAOA,SAAQ,QAAQ;AAAA,EACzB;AACA,MAAI,MAAM,eAAe,gBAAgB,KAAC,2CAAkB,SAAS,GAAG,GAAG;AACzE,UAAM,IAAI;AAAA,MACR,8JAA8J,SAAS,UAAU,SAAS;AAAA,IAC5L;AAAA,EACF;AACA,QAAM,kBAAc,kBAAAC,SAAK,GAAG,MAAM,YAAY,MAAM,cAAc;AAClE,WAAS,QAAQ,gBAAgB,SAAS;AAC1C,MAAI;AACF,WAAO,MAAMD,SAAQ,QAAQ;AAAA,EAC/B,SAAS,OAAP;AACA,QAAI,MAAM,WAAW;AACnB,YAAM;AACR,UAAM,UAAU,8BAA8B,SAAS,UAAU,SAAS;AAC1E,UAAM;AAAA,EACR;AACF;;;ACzBA,IAAM,UAAU;;;AHKhB,IAAAE,0BAAoC;AACpC,SAAS,mBAAmB,SAAS;AACnC,QAAM,QAAQ,OAAO;AAAA,IACnB;AAAA,MACE,SAAS,uBAAQ,SAAS;AAAA,QACxB,SAAS;AAAA,UACP,cAAc,6BAA6B,eAAW,0CAAa;AAAA,QACrE;AAAA,MACF,CAAC;AAAA,MACD,YAAY;AAAA,IACd;AAAA,IACA;AAAA,EACF;AACA,SAAO,OAAO,OAAO,KAAK,KAAK,MAAM,KAAK,GAAG;AAAA,IAC3C,MAAM,KAAK,KAAK,MAAM,KAAK;AAAA,EAC7B,CAAC;AACH;", + "names": ["btoa", "import_btoa_lite", "import_auth_oauth_user", "request", "btoa", 
"import_auth_oauth_user"] +} diff --git a/dist/node_modules/@octokit/auth-oauth-app/dist-src/auth.js b/dist/node_modules/@octokit/auth-oauth-app/dist-src/auth.js new file mode 100644 index 0000000..2d499d5 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/dist-src/auth.js @@ -0,0 +1,41 @@ +import btoa from "btoa-lite"; +import { createOAuthUserAuth } from "@octokit/auth-oauth-user"; +async function auth(state, authOptions) { + if (authOptions.type === "oauth-app") { + return { + type: "oauth-app", + clientId: state.clientId, + clientSecret: state.clientSecret, + clientType: state.clientType, + headers: { + authorization: `basic ${btoa( + `${state.clientId}:${state.clientSecret}` + )}` + } + }; + } + if ("factory" in authOptions) { + const { type, ...options } = { + ...authOptions, + ...state + }; + return authOptions.factory(options); + } + const common = { + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.request, + ...authOptions + }; + const userAuth = state.clientType === "oauth-app" ? await createOAuthUserAuth({ + ...common, + clientType: state.clientType + }) : await createOAuthUserAuth({ + ...common, + clientType: state.clientType + }); + return userAuth(); +} +export { + auth +}; diff --git a/dist/node_modules/@octokit/auth-oauth-app/dist-src/hook.js b/dist/node_modules/@octokit/auth-oauth-app/dist-src/hook.js new file mode 100644 index 0000000..b2876f7 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/dist-src/hook.js @@ -0,0 +1,29 @@ +import btoa from "btoa-lite"; +import { requiresBasicAuth } from "@octokit/auth-oauth-user"; +async function hook(state, request, route, parameters) { + let endpoint = request.endpoint.merge( + route, + parameters + ); + if (/\/login\/(oauth\/access_token|device\/code)$/.test(endpoint.url)) { + return request(endpoint); + } + if (state.clientType === "github-app" && !requiresBasicAuth(endpoint.url)) { + throw new Error( + `[@octokit/auth-oauth-app] GitHub Apps cannot use their client ID/secret for basic authentication for endpoints other than "/applications/{client_id}/**". 
"${endpoint.method} ${endpoint.url}" is not supported.` + ); + } + const credentials = btoa(`${state.clientId}:${state.clientSecret}`); + endpoint.headers.authorization = `basic ${credentials}`; + try { + return await request(endpoint); + } catch (error) { + if (error.status !== 401) + throw error; + error.message = `[@octokit/auth-oauth-app] "${endpoint.method} ${endpoint.url}" does not support clientId/clientSecret basic authentication.`; + throw error; + } +} +export { + hook +}; diff --git a/dist/node_modules/@octokit/auth-oauth-app/dist-src/index.js b/dist/node_modules/@octokit/auth-oauth-app/dist-src/index.js new file mode 100644 index 0000000..c30903f --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/dist-src/index.js @@ -0,0 +1,26 @@ +import { getUserAgent } from "universal-user-agent"; +import { request } from "@octokit/request"; +import { auth } from "./auth"; +import { hook } from "./hook"; +import { VERSION } from "./version"; +import { createOAuthUserAuth } from "@octokit/auth-oauth-user"; +function createOAuthAppAuth(options) { + const state = Object.assign( + { + request: request.defaults({ + headers: { + "user-agent": `octokit-auth-oauth-app.js/${VERSION} ${getUserAgent()}` + } + }), + clientType: "oauth-app" + }, + options + ); + return Object.assign(auth.bind(null, state), { + hook: hook.bind(null, state) + }); +} +export { + createOAuthAppAuth, + createOAuthUserAuth +}; diff --git a/dist/node_modules/@octokit/auth-oauth-app/dist-src/version.js b/dist/node_modules/@octokit/auth-oauth-app/dist-src/version.js new file mode 100644 index 0000000..e41c386 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "5.0.6"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/auth-oauth-app/dist-types/auth.d.ts b/dist/node_modules/@octokit/auth-oauth-app/dist-types/auth.d.ts new file mode 100644 index 0000000..bca2bc9 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/dist-types/auth.d.ts @@ -0,0 +1,18 @@ +import type { OAuthAppState, GitHubAppState, AppAuthOptions, WebFlowAuthOptions, OAuthAppDeviceFlowAuthOptions, GitHubAppDeviceFlowAuthOptions, FactoryOAuthAppWebFlow, FactoryOAuthAppDeviceFlow, FactoryGitHubWebFlow, FactoryGitHubDeviceFlow, AppAuthentication, OAuthAppUserAuthentication, GitHubAppUserAuthentication, GitHubAppUserAuthenticationWithExpiration } from "./types"; +export declare function auth(state: OAuthAppState | GitHubAppState, authOptions: AppAuthOptions): Promise; +export declare function auth(state: OAuthAppState, authOptions: WebFlowAuthOptions): Promise; +export declare function auth(state: OAuthAppState, authOptions: WebFlowAuthOptions & { + factory: FactoryOAuthAppWebFlow; +}): Promise; +export declare function auth(state: OAuthAppState, authOptions: OAuthAppDeviceFlowAuthOptions): Promise; +export declare function auth(state: OAuthAppState, authOptions: OAuthAppDeviceFlowAuthOptions & { + factory: FactoryOAuthAppDeviceFlow; +}): Promise; +export declare function auth(state: GitHubAppState, authOptions: WebFlowAuthOptions): Promise; +export declare function auth(state: GitHubAppState, authOptions: WebFlowAuthOptions & { + factory: FactoryGitHubWebFlow; +}): Promise; +export declare function auth(state: GitHubAppState, authOptions: GitHubAppDeviceFlowAuthOptions): Promise; +export declare function auth(state: GitHubAppState, authOptions: GitHubAppDeviceFlowAuthOptions & { + factory: FactoryGitHubDeviceFlow; +}): Promise; diff --git 
a/dist/node_modules/@octokit/auth-oauth-app/dist-types/hook.d.ts b/dist/node_modules/@octokit/auth-oauth-app/dist-types/hook.d.ts new file mode 100644 index 0000000..31f3376 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/dist-types/hook.d.ts @@ -0,0 +1,3 @@ +import type { EndpointOptions, RequestParameters, Route, RequestInterface, OctokitResponse } from "@octokit/types"; +import type { OAuthAppState, GitHubAppState } from "./types"; +export declare function hook(state: OAuthAppState | GitHubAppState, request: RequestInterface, route: Route | EndpointOptions, parameters?: RequestParameters): Promise>; diff --git a/dist/node_modules/@octokit/auth-oauth-app/dist-types/index.d.ts b/dist/node_modules/@octokit/auth-oauth-app/dist-types/index.d.ts new file mode 100644 index 0000000..d54e109 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import type { OAuthAppStrategyOptions, GitHubAppStrategyOptions, OAuthAppAuthInterface, GitHubAuthInterface } from "./types"; +export type { OAuthAppStrategyOptions, GitHubAppStrategyOptions, AppAuthOptions, WebFlowAuthOptions, OAuthAppDeviceFlowAuthOptions, GitHubAppDeviceFlowAuthOptions, OAuthAppAuthInterface, GitHubAuthInterface, AppAuthentication, OAuthAppUserAuthentication, GitHubAppUserAuthentication, GitHubAppUserAuthenticationWithExpiration, FactoryGitHubWebFlow, FactoryGitHubDeviceFlow, } from "./types"; +export { createOAuthUserAuth } from "@octokit/auth-oauth-user"; +export declare function createOAuthAppAuth(options: OAuthAppStrategyOptions): OAuthAppAuthInterface; +export declare function createOAuthAppAuth(options: GitHubAppStrategyOptions): GitHubAuthInterface; diff --git a/dist/node_modules/@octokit/auth-oauth-app/dist-types/types.d.ts b/dist/node_modules/@octokit/auth-oauth-app/dist-types/types.d.ts new file mode 100644 index 0000000..fab74de --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/dist-types/types.d.ts @@ -0,0 +1,98 @@ +import type { EndpointOptions, RequestParameters, Route, RequestInterface, OctokitResponse } from "@octokit/types"; +import * as AuthOAuthUser from "@octokit/auth-oauth-user"; +import * as DeviceTypes from "@octokit/auth-oauth-device"; +export type ClientType = "oauth-app" | "github-app"; +export type OAuthAppStrategyOptions = { + clientType?: "oauth-app"; + clientId: string; + clientSecret: string; + request?: RequestInterface; +}; +export type GitHubAppStrategyOptions = { + clientType: "github-app"; + clientId: string; + clientSecret: string; + request?: RequestInterface; +}; +export type AppAuthOptions = { + type: "oauth-app"; +}; +export type WebFlowAuthOptions = { + type: "oauth-user"; + code: string; + redirectUrl?: string; + state?: string; +}; +export type OAuthAppDeviceFlowAuthOptions = { + type: "oauth-user"; + onVerification: DeviceTypes.OAuthAppStrategyOptions["onVerification"]; + scopes?: string[]; +}; +export type GitHubAppDeviceFlowAuthOptions = { + type: "oauth-user"; + onVerification: DeviceTypes.OAuthAppStrategyOptions["onVerification"]; +}; +export type AppAuthentication = { + type: "oauth-app"; + clientId: string; + clientSecret: string; + clientType: ClientType; + headers: { + authorization: string; + }; +}; +export type OAuthAppUserAuthentication = AuthOAuthUser.OAuthAppAuthentication; +export type GitHubAppUserAuthentication = AuthOAuthUser.GitHubAppAuthentication; +export type GitHubAppUserAuthenticationWithExpiration = AuthOAuthUser.GitHubAppAuthenticationWithExpiration; +export type FactoryOAuthAppWebFlowOptions = 
OAuthAppStrategyOptions & Omit & { + clientType: "oauth-app"; +}; +export type FactoryOAuthAppDeviceFlowOptions = OAuthAppStrategyOptions & Omit & { + clientType: "oauth-app"; +}; +export type FactoryGitHubAppWebFlowOptions = GitHubAppStrategyOptions & Omit; +export type FactoryGitHubAppDeviceFlowOptions = GitHubAppStrategyOptions & Omit; +export interface FactoryOAuthAppWebFlow { + (options: FactoryOAuthAppWebFlowOptions): T; +} +export interface FactoryOAuthAppDeviceFlow { + (options: FactoryOAuthAppDeviceFlowOptions): T; +} +export interface FactoryGitHubWebFlow { + (options: FactoryGitHubAppWebFlowOptions): T; +} +export interface FactoryGitHubDeviceFlow { + (options: FactoryGitHubAppDeviceFlowOptions): T; +} +export interface OAuthAppAuthInterface { + (options: AppAuthOptions): Promise; + (options: WebFlowAuthOptions & { + factory: FactoryOAuthAppWebFlow; + }): Promise; + (options: OAuthAppDeviceFlowAuthOptions & { + factory: FactoryOAuthAppDeviceFlow; + }): Promise; + (options: WebFlowAuthOptions): Promise; + (options: OAuthAppDeviceFlowAuthOptions): Promise; + hook(request: RequestInterface, route: Route | EndpointOptions, parameters?: RequestParameters): Promise>; +} +export interface GitHubAuthInterface { + (options?: AppAuthOptions): Promise; + (options: WebFlowAuthOptions & { + factory: FactoryGitHubWebFlow; + }): Promise; + (options: GitHubAppDeviceFlowAuthOptions & { + factory: FactoryGitHubDeviceFlow; + }): Promise; + (options?: WebFlowAuthOptions): Promise; + (options?: GitHubAppDeviceFlowAuthOptions): Promise; + hook(request: RequestInterface, route: Route | EndpointOptions, parameters?: RequestParameters): Promise>; +} +export type OAuthAppState = OAuthAppStrategyOptions & { + clientType: "oauth-app"; + request: RequestInterface; +}; +export type GitHubAppState = GitHubAppStrategyOptions & { + clientType: "github-app"; + request: RequestInterface; +}; diff --git a/dist/node_modules/@octokit/auth-oauth-app/dist-types/version.d.ts b/dist/node_modules/@octokit/auth-oauth-app/dist-types/version.d.ts new file mode 100644 index 0000000..2fc0fc4 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "5.0.6"; diff --git a/dist/node_modules/@octokit/auth-oauth-app/dist-web/index.js b/dist/node_modules/@octokit/auth-oauth-app/dist-web/index.js new file mode 100644 index 0000000..021e38f --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/dist-web/index.js @@ -0,0 +1,97 @@ +// pkg/dist-src/index.js +import { getUserAgent } from "universal-user-agent"; +import { request } from "@octokit/request"; + +// pkg/dist-src/auth.js +import btoa from "btoa-lite"; +import { createOAuthUserAuth } from "@octokit/auth-oauth-user"; +async function auth(state, authOptions) { + if (authOptions.type === "oauth-app") { + return { + type: "oauth-app", + clientId: state.clientId, + clientSecret: state.clientSecret, + clientType: state.clientType, + headers: { + authorization: `basic ${btoa( + `${state.clientId}:${state.clientSecret}` + )}` + } + }; + } + if ("factory" in authOptions) { + const { type, ...options } = { + ...authOptions, + ...state + }; + return authOptions.factory(options); + } + const common = { + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.request, + ...authOptions + }; + const userAuth = state.clientType === "oauth-app" ? 
await createOAuthUserAuth({ + ...common, + clientType: state.clientType + }) : await createOAuthUserAuth({ + ...common, + clientType: state.clientType + }); + return userAuth(); +} + +// pkg/dist-src/hook.js +import btoa2 from "btoa-lite"; +import { requiresBasicAuth } from "@octokit/auth-oauth-user"; +async function hook(state, request2, route, parameters) { + let endpoint = request2.endpoint.merge( + route, + parameters + ); + if (/\/login\/(oauth\/access_token|device\/code)$/.test(endpoint.url)) { + return request2(endpoint); + } + if (state.clientType === "github-app" && !requiresBasicAuth(endpoint.url)) { + throw new Error( + `[@octokit/auth-oauth-app] GitHub Apps cannot use their client ID/secret for basic authentication for endpoints other than "/applications/{client_id}/**". "${endpoint.method} ${endpoint.url}" is not supported.` + ); + } + const credentials = btoa2(`${state.clientId}:${state.clientSecret}`); + endpoint.headers.authorization = `basic ${credentials}`; + try { + return await request2(endpoint); + } catch (error) { + if (error.status !== 401) + throw error; + error.message = `[@octokit/auth-oauth-app] "${endpoint.method} ${endpoint.url}" does not support clientId/clientSecret basic authentication.`; + throw error; + } +} + +// pkg/dist-src/version.js +var VERSION = "5.0.6"; + +// pkg/dist-src/index.js +import { createOAuthUserAuth as createOAuthUserAuth2 } from "@octokit/auth-oauth-user"; +function createOAuthAppAuth(options) { + const state = Object.assign( + { + request: request.defaults({ + headers: { + "user-agent": `octokit-auth-oauth-app.js/${VERSION} ${getUserAgent()}` + } + }), + clientType: "oauth-app" + }, + options + ); + return Object.assign(auth.bind(null, state), { + hook: hook.bind(null, state) + }); +} +export { + createOAuthAppAuth, + createOAuthUserAuth2 as createOAuthUserAuth +}; diff --git a/dist/node_modules/@octokit/auth-oauth-app/dist-web/index.js.map b/dist/node_modules/@octokit/auth-oauth-app/dist-web/index.js.map new file mode 100644 index 0000000..723aa8e --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/auth.js", "../dist-src/hook.js", "../dist-src/version.js"], + "sourcesContent": ["import { getUserAgent } from \"universal-user-agent\";\nimport { request } from \"@octokit/request\";\nimport { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nimport { VERSION } from \"./version\";\nimport { createOAuthUserAuth } from \"@octokit/auth-oauth-user\";\nfunction createOAuthAppAuth(options) {\n const state = Object.assign(\n {\n request: request.defaults({\n headers: {\n \"user-agent\": `octokit-auth-oauth-app.js/${VERSION} ${getUserAgent()}`\n }\n }),\n clientType: \"oauth-app\"\n },\n options\n );\n return Object.assign(auth.bind(null, state), {\n hook: hook.bind(null, state)\n });\n}\nexport {\n createOAuthAppAuth,\n createOAuthUserAuth\n};\n", "import btoa from \"btoa-lite\";\nimport { createOAuthUserAuth } from \"@octokit/auth-oauth-user\";\nasync function auth(state, authOptions) {\n if (authOptions.type === \"oauth-app\") {\n return {\n type: \"oauth-app\",\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n clientType: state.clientType,\n headers: {\n authorization: `basic ${btoa(\n `${state.clientId}:${state.clientSecret}`\n )}`\n }\n };\n }\n if (\"factory\" in authOptions) {\n const { type, ...options } = {\n ...authOptions,\n ...state\n };\n return authOptions.factory(options);\n }\n const 
common = {\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n request: state.request,\n ...authOptions\n };\n const userAuth = state.clientType === \"oauth-app\" ? await createOAuthUserAuth({\n ...common,\n clientType: state.clientType\n }) : await createOAuthUserAuth({\n ...common,\n clientType: state.clientType\n });\n return userAuth();\n}\nexport {\n auth\n};\n", "import btoa from \"btoa-lite\";\nimport { requiresBasicAuth } from \"@octokit/auth-oauth-user\";\nasync function hook(state, request, route, parameters) {\n let endpoint = request.endpoint.merge(\n route,\n parameters\n );\n if (/\\/login\\/(oauth\\/access_token|device\\/code)$/.test(endpoint.url)) {\n return request(endpoint);\n }\n if (state.clientType === \"github-app\" && !requiresBasicAuth(endpoint.url)) {\n throw new Error(\n `[@octokit/auth-oauth-app] GitHub Apps cannot use their client ID/secret for basic authentication for endpoints other than \"/applications/{client_id}/**\". \"${endpoint.method} ${endpoint.url}\" is not supported.`\n );\n }\n const credentials = btoa(`${state.clientId}:${state.clientSecret}`);\n endpoint.headers.authorization = `basic ${credentials}`;\n try {\n return await request(endpoint);\n } catch (error) {\n if (error.status !== 401)\n throw error;\n error.message = `[@octokit/auth-oauth-app] \"${endpoint.method} ${endpoint.url}\" does not support clientId/clientSecret basic authentication.`;\n throw error;\n }\n}\nexport {\n hook\n};\n", "const VERSION = \"5.0.6\";\nexport {\n VERSION\n};\n"], + "mappings": ";AAAA,SAAS,oBAAoB;AAC7B,SAAS,eAAe;;;ACDxB,OAAO,UAAU;AACjB,SAAS,2BAA2B;AACpC,eAAe,KAAK,OAAO,aAAa;AACtC,MAAI,YAAY,SAAS,aAAa;AACpC,WAAO;AAAA,MACL,MAAM;AAAA,MACN,UAAU,MAAM;AAAA,MAChB,cAAc,MAAM;AAAA,MACpB,YAAY,MAAM;AAAA,MAClB,SAAS;AAAA,QACP,eAAe,SAAS;AAAA,UACtB,GAAG,MAAM,YAAY,MAAM;AAAA,QAC7B;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACA,MAAI,aAAa,aAAa;AAC5B,UAAM,EAAE,MAAM,GAAG,QAAQ,IAAI;AAAA,MAC3B,GAAG;AAAA,MACH,GAAG;AAAA,IACL;AACA,WAAO,YAAY,QAAQ,OAAO;AAAA,EACpC;AACA,QAAM,SAAS;AAAA,IACb,UAAU,MAAM;AAAA,IAChB,cAAc,MAAM;AAAA,IACpB,SAAS,MAAM;AAAA,IACf,GAAG;AAAA,EACL;AACA,QAAM,WAAW,MAAM,eAAe,cAAc,MAAM,oBAAoB;AAAA,IAC5E,GAAG;AAAA,IACH,YAAY,MAAM;AAAA,EACpB,CAAC,IAAI,MAAM,oBAAoB;AAAA,IAC7B,GAAG;AAAA,IACH,YAAY,MAAM;AAAA,EACpB,CAAC;AACD,SAAO,SAAS;AAClB;;;ACrCA,OAAOA,WAAU;AACjB,SAAS,yBAAyB;AAClC,eAAe,KAAK,OAAOC,UAAS,OAAO,YAAY;AACrD,MAAI,WAAWA,SAAQ,SAAS;AAAA,IAC9B;AAAA,IACA;AAAA,EACF;AACA,MAAI,+CAA+C,KAAK,SAAS,GAAG,GAAG;AACrE,WAAOA,SAAQ,QAAQ;AAAA,EACzB;AACA,MAAI,MAAM,eAAe,gBAAgB,CAAC,kBAAkB,SAAS,GAAG,GAAG;AACzE,UAAM,IAAI;AAAA,MACR,8JAA8J,SAAS,UAAU,SAAS;AAAA,IAC5L;AAAA,EACF;AACA,QAAM,cAAcD,MAAK,GAAG,MAAM,YAAY,MAAM,cAAc;AAClE,WAAS,QAAQ,gBAAgB,SAAS;AAC1C,MAAI;AACF,WAAO,MAAMC,SAAQ,QAAQ;AAAA,EAC/B,SAAS,OAAP;AACA,QAAI,MAAM,WAAW;AACnB,YAAM;AACR,UAAM,UAAU,8BAA8B,SAAS,UAAU,SAAS;AAC1E,UAAM;AAAA,EACR;AACF;;;ACzBA,IAAM,UAAU;;;AHKhB,SAAS,uBAAAC,4BAA2B;AACpC,SAAS,mBAAmB,SAAS;AACnC,QAAM,QAAQ,OAAO;AAAA,IACnB;AAAA,MACE,SAAS,QAAQ,SAAS;AAAA,QACxB,SAAS;AAAA,UACP,cAAc,6BAA6B,WAAW,aAAa;AAAA,QACrE;AAAA,MACF,CAAC;AAAA,MACD,YAAY;AAAA,IACd;AAAA,IACA;AAAA,EACF;AACA,SAAO,OAAO,OAAO,KAAK,KAAK,MAAM,KAAK,GAAG;AAAA,IAC3C,MAAM,KAAK,KAAK,MAAM,KAAK;AAAA,EAC7B,CAAC;AACH;", + "names": ["btoa", "request", "createOAuthUserAuth"] +} diff --git a/dist/node_modules/@octokit/auth-oauth-app/package.json b/dist/node_modules/@octokit/auth-oauth-app/package.json new file mode 100644 index 0000000..5f495ba --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-app/package.json @@ -0,0 +1,53 @@ +{ + "name": "@octokit/auth-oauth-app", + 
"publishConfig": { + "access": "public" + }, + "version": "5.0.6", + "description": "GitHub OAuth App authentication for JavaScript", + "repository": "github:octokit/auth-oauth-app.js", + "keywords": [ + "github", + "octokit", + "authentication", + "oauth", + "api" + ], + "author": "Gregor Martynus (https://github.com/gr2m)", + "license": "MIT", + "dependencies": { + "@octokit/auth-oauth-device": "^4.0.0", + "@octokit/auth-oauth-user": "^2.0.0", + "@octokit/request": "^6.0.0", + "@octokit/types": "^9.0.0", + "@types/btoa-lite": "^1.0.0", + "btoa-lite": "^1.0.0", + "universal-user-agent": "^6.0.0" + }, + "devDependencies": { + "@octokit/core": "^4.0.0", + "@octokit/tsconfig": "^2.0.0", + "@types/fetch-mock": "^7.3.1", + "@types/jest": "^29.0.0", + "esbuild": "^0.17.19", + "fetch-mock": "^9.0.0", + "glob": "^10.2.7", + "jest": "^29.0.0", + "prettier": "2.8.8", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "browser": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "module": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/auth-oauth-device/LICENSE b/dist/node_modules/@octokit/auth-oauth-device/LICENSE new file mode 100644 index 0000000..57049d0 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/LICENSE @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2021 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/dist/node_modules/@octokit/auth-oauth-device/README.md b/dist/node_modules/@octokit/auth-oauth-device/README.md new file mode 100644 index 0000000..2fbe50d --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/README.md @@ -0,0 +1,656 @@ +# auth-oauth-device.js + +> GitHub OAuth Device authentication strategy for JavaScript + +[![@latest](https://img.shields.io/npm/v/@octokit/auth-oauth-device.svg)](https://www.npmjs.com/package/@octokit/auth-oauth-device) +[![Build Status](https://github.com/octokit/auth-oauth-device.js/workflows/Test/badge.svg)](https://github.com/octokit/auth-oauth-device.js/actions?query=workflow%3ATest+branch%3Amain) + +`@octokit/auth-oauth-device` is implementing one of [GitHub’s OAuth Device Flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#device-flow). 
+ + + +- [Usage](#usage) + - [For OAuth Apps](#for-oauth-apps) + - [For GitHub Apps](#for-github-apps) +- [`createOAuthDeviceAuth(options)`](#createoauthdeviceauthoptions) +- [`auth(options)`](#authoptions) +- [Authentication object](#authentication-object) + - [OAuth APP user authentication](#oauth-app-user-authentication) + - [GitHub APP user authentication with expiring tokens disabled](#github-app-user-authentication-with-expiring-tokens-disabled) + - [GitHub APP user authentication with expiring tokens enabled](#github-app-user-authentication-with-expiring-tokens-enabled) +- [`auth.hook(request, route, parameters)` or `auth.hook(request, options)`](#authhookrequest-route-parameters-or-authhookrequest-options) +- [Types](#types) +- [How it works](#how-it-works) +- [Contributing](#contributing) +- [License](#license) + + + +## Usage + + + + + + +
+ +Browsers + + + +Load `@octokit/auth-oauth-device` directly from [cdn.pika.dev](https://cdn.pika.dev) + +```html +<script type="module"> +import { createOAuthDeviceAuth } from "https://cdn.pika.dev/@octokit/auth-oauth-device"; +</script> +``` + +
+ +Node + + + +Install with `npm install @octokit/core @octokit/auth-oauth-device` + +```js +const { createOAuthDeviceAuth } = require("@octokit/auth-oauth-device"); +``` + +
+ +### For OAuth Apps + +```js +const auth = createOAuthDeviceAuth({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + scopes: ["public_repo"], + onVerification(verification) { + // verification example + // { + // device_code: "3584d83530557fdd1f46af8289938c8ef79f9dc5", + // user_code: "WDJB-MJHT", + // verification_uri: "https://github.com/login/device", + // expires_in: 900, + // interval: 5, + // }; + + console.log("Open %s", verification.verification_uri); + console.log("Enter code: %s", verification.user_code); + }, +}); + +const tokenAuthentication = await auth({ + type: "oauth", +}); +// resolves with +// { +// type: "token", +// tokenType: "oauth", +// clientType: "oauth-app", +// clientId: "1234567890abcdef1234", +// token: "...", /* the created oauth token */ +// scopes: [] /* depend on request scopes by OAuth app */ +// } +``` + +### For GitHub Apps + +GitHub Apps do not support `scopes`. Client IDs of GitHub Apps have a `lv1.` prefix. If the GitHub App has expiring user tokens enabled, the resulting `authentication` object has extra properties related to expiration and refreshing the token. + +```js +const auth = createOAuthDeviceAuth({ + clientType: "github-app", + clientId: "lv1.1234567890abcdef", + onVerification(verification) { + // verification example + // { + // device_code: "3584d83530557fdd1f46af8289938c8ef79f9dc5", + // user_code: "WDJB-MJHT", + // verification_uri: "https://github.com/login/device", + // expires_in: 900, + // interval: 5, + // }; + + console.log("Open %s", verification.verification_uri); + console.log("Enter code: %s", verification.user_code); + }, +}); + +const tokenAuthentication = await auth({ + type: "oauth", +}); +// resolves with +// { +// type: "token", +// tokenType: "oauth", +// clientType: "github-app", +// clientId: "lv1.1234567890abcdef", +// token: "...", /* the created oauth token */ +// } +// or if expiring user tokens are enabled +// { +// type: "token", +// tokenType: "oauth", +// clientType: "github-app", +// clientId: "lv1.1234567890abcdef", +// token: "...", /* the created oauth token */ +// refreshToken: "...", +// expiresAt: "2022-01-01T08:00:0.000Z", +// refreshTokenExpiresAt: "2021-07-01T00:00:0.000Z", +// } +``` + +## `createOAuthDeviceAuth(options)` + +The `createOAuthDeviceAuth` method accepts a single `options` parameter + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ clientId + + string + + Required. Find your OAuth app’s Client ID in your account’s developer settings. +
+ + onVerification + + function + + Required. A function that is called once the device and user codes were retrieved + +The `onVerification()` callback can be used to pause until the user completes step 2, which might result in a better user experience. + +```js +const auth = createOAuthDeviceAuth({ + clientId: "1234567890abcdef1234", + async onVerification(verification) { + console.log("Open %s", verification.verification_uri); + console.log("Enter code: %s", verification.user_code); + + await prompt("press enter when you are ready to continue"); + }, +}); +``` + +
+ clientType + + string + + +Must be either `oauth-app` or `github-app`. Defaults to `oauth-app`. + +
+ + request + + function + + You can pass in your own @octokit/request instance. For usage with enterprise, set baseUrl to the API root endpoint. Example: + +```js +const { request } = require("@octokit/request"); +createOAuthDeviceAuth({ + clientId: "1234567890abcdef1234", + request: request.defaults({ + baseUrl: "https://ghe.my-company.com/api/v3", + }), +}); +``` + +
+ + scopes + + array of strings + + +Only relevant if `clientType` is set to `"oauth-app"`. + +Array of scope names enabled for the token. Defaults to `[]`. See [available scopes](https://docs.github.com/en/developers/apps/scopes-for-oauth-apps#available-scopes). + +
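+The options above can be combined. Below is a minimal, illustrative sketch (the Client ID, scope, and GitHub Enterprise Server `baseUrl` are placeholders) that runs the device flow against a GitHub Enterprise Server instance with an explicit scope:
+
+```js
+const { request } = require("@octokit/request");
+
+// Illustrative sketch only: combines the optional `clientType`, `scopes`, and
+// `request` options with the required `clientId` and `onVerification` options.
+const auth = createOAuthDeviceAuth({
+  clientType: "oauth-app",
+  clientId: "1234567890abcdef1234",
+  scopes: ["public_repo"],
+  request: request.defaults({
+    baseUrl: "https://ghe.my-company.com/api/v3",
+  }),
+  onVerification(verification) {
+    console.log("Open %s", verification.verification_uri);
+    console.log("Enter code: %s", verification.user_code);
+  },
+});
+```
+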
+ +## `auth(options)` + +The async `auth()` method returned by `createOAuthDeviceAuth(options)` accepts the following options + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + Required. Must be set to "oauth" +
+ + scopes + + array of strings + + +Only relevant if the `clientType` strategy option was set to `"oauth-app"` + +Array of scope names enabled for the token. Defaults to what was set in the [strategy options](#createoauthdeviceauthoptions). See available scopes + +
+ refresh + + boolean + + +Defaults to `false`. When set to `false`, calling `auth(options)` will resolve with a token that was previously created for the same scopes if it exists. If set to `true` a new token will always be created. + +
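+For example, a short sketch (assuming the `auth` instance created by `createOAuthDeviceAuth()` above with `clientType: "oauth-app"`) that skips the cached token and requests a new one with an explicit scope:
+
+```js
+// Force a fresh device flow run instead of reusing a previously cached token.
+const authentication = await auth({
+  type: "oauth",
+  scopes: ["public_repo"],
+  refresh: true,
+});
+console.log(authentication.token);
+```
+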
+ +## Authentication object + +The async `auth(options)` method resolves to one of three possible objects + +1. OAuth APP user authentication +1. GitHub APP user authentication with expiring tokens disabled +1. GitHub APP user authentication with expiring tokens enabled + +The differences are + +1. `scopes` is only present for OAuth Apps +2. `refreshToken`, `expiresAt`, `refreshTokenExpiresAt` are only present for GitHub Apps, and only if token expiration is enabled + +### OAuth APP user authentication + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "token" +
+ tokenType + + string + + "oauth" +
+ + clientType + + string + + "oauth-app" +
+ clientId + + string + + The app's Client ID +
+ + token + + string + + The user access token +
+ scopes + + array of strings + + array of scope names enabled for the token +
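+As a rough illustration, the resolved authentication object has this shape (all values are placeholders):
+
+```js
+const authentication = {
+  type: "token",
+  tokenType: "oauth",
+  clientType: "oauth-app",
+  clientId: "1234567890abcdef1234",
+  token: "...", // the OAuth user access token
+  scopes: ["public_repo"],
+};
+```
+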
+ +### GitHub APP user authentication with expiring tokens disabled + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "token" +
+ tokenType + + string + + "oauth" +
+ clientType + + string + + "github-app" +
+ clientId + + string + + The app's Client ID +
+ + token + + string + + The user access token +
+ +### GitHub APP user authentication with expiring tokens enabled + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "token" +
+ tokenType + + string + + "oauth" +
+ clientType + + string + + "github-app" +
+ clientId + + string + + The app's Client ID +
+ token + + string + + The user access token +
+ refreshToken + + string + + The refresh token +
+ + expiresAt + + string + + Date timestamp in ISO 8601 standard. Example: 2022-01-01T08:00:00.000Z +
+ + refreshTokenExpiresAt + + string + + Date timestamp in ISO 8601 standard. Example: 2021-07-01T00:00:00.000Z +
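+For reference, the same fields written out as an object literal (placeholder values only):
+
+```js
+const authentication = {
+  type: "token",
+  tokenType: "oauth",
+  clientType: "github-app",
+  clientId: "lv1.1234567890abcdef",
+  token: "...", // the user access token
+  refreshToken: "...",
+  expiresAt: "2022-01-01T08:00:00.000Z",
+  refreshTokenExpiresAt: "2021-07-01T00:00:00.000Z",
+};
+```
+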
+ +## `auth.hook(request, route, parameters)` or `auth.hook(request, options)` + +`auth.hook()` hooks directly into the request life cycle. It amends the request to authenticate correctly based on the request URL. + +The `request` option is an instance of [`@octokit/request`](https://github.com/octokit/request.js#readme). The `route`/`options` parameters are the same as for the [`request()` method](https://github.com/octokit/request.js#request). + +`auth.hook()` can be called directly to send an authenticated request + +```js +const { data: user } = await auth.hook(request, "GET /user"); +``` + +Or it can be passed as an option to [`request()`](https://github.com/octokit/request.js#request). + +```js +const requestWithAuth = request.defaults({ + request: { + hook: auth.hook, + }, +}); + +const { data: user } = await requestWithAuth("GET /user"); +``` + +## Types + +```ts +import { + OAuthAppStrategyOptions, + OAuthAppAuthOptions, + OAuthAppAuthentication, + GitHubAppStrategyOptions, + GitHubAppAuthOptions, + GitHubAppAuthentication, + GitHubAppAuthenticationWithExpiration, +} from "@octokit/auth-oauth-device"; +``` + +## How it works + +GitHub's OAuth Device flow is different from the web flow in two ways + +1. It does not require a URL redirect, which makes it great for devices and CLI apps +2. It does not require the OAuth client secret, which means there is no user-owned server component required. + +The flow has 3 parts (see [GitHub documentation](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#device-flow)) + +1. `@octokit/auth-oauth-device` requests a device and user code +2. Then the user has to open https://github.com/login/device (or its GitHub Enterprise Server equivalent) and enter the user code +3. While the user enters the code, `@octokit/auth-oauth-device` sends requests in the background to retrieve the OAuth access token.
Once the user completed step 2, the request will succeed and the token will be returned + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-node/index.js b/dist/node_modules/@octokit/auth-oauth-device/dist-node/index.js new file mode 100644 index 0000000..b3923e3 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-node/index.js @@ -0,0 +1,167 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createOAuthDeviceAuth: () => createOAuthDeviceAuth +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = require("universal-user-agent"); +var import_request = require("@octokit/request"); + +// pkg/dist-src/get-oauth-access-token.js +var import_oauth_methods = require("@octokit/oauth-methods"); +async function getOAuthAccessToken(state, options) { + const cachedAuthentication = getCachedAuthentication(state, options.auth); + if (cachedAuthentication) + return cachedAuthentication; + const { data: verification } = await (0, import_oauth_methods.createDeviceCode)({ + clientType: state.clientType, + clientId: state.clientId, + request: options.request || state.request, + // @ts-expect-error the extra code to make TS happy is not worth it + scopes: options.auth.scopes || state.scopes + }); + await state.onVerification(verification); + const authentication = await waitForAccessToken( + options.request || state.request, + state.clientId, + state.clientType, + verification + ); + state.authentication = authentication; + return authentication; +} +function getCachedAuthentication(state, auth2) { + if (auth2.refresh === true) + return false; + if (!state.authentication) + return false; + if (state.clientType === "github-app") { + return state.authentication; + } + const authentication = state.authentication; + const newScope = ("scopes" in auth2 && auth2.scopes || state.scopes).join( + " " + ); + const currentScope = authentication.scopes.join(" "); + return newScope === currentScope ? authentication : false; +} +async function wait(seconds) { + await new Promise((resolve) => setTimeout(resolve, seconds * 1e3)); +} +async function waitForAccessToken(request, clientId, clientType, verification) { + try { + const options = { + clientId, + request, + code: verification.device_code + }; + const { authentication } = clientType === "oauth-app" ? 
await (0, import_oauth_methods.exchangeDeviceCode)({ + ...options, + clientType: "oauth-app" + }) : await (0, import_oauth_methods.exchangeDeviceCode)({ + ...options, + clientType: "github-app" + }); + return { + type: "token", + tokenType: "oauth", + ...authentication + }; + } catch (error) { + if (!error.response) + throw error; + const errorType = error.response.data.error; + if (errorType === "authorization_pending") { + await wait(verification.interval); + return waitForAccessToken(request, clientId, clientType, verification); + } + if (errorType === "slow_down") { + await wait(verification.interval + 5); + return waitForAccessToken(request, clientId, clientType, verification); + } + throw error; + } +} + +// pkg/dist-src/auth.js +async function auth(state, authOptions) { + return getOAuthAccessToken(state, { + auth: authOptions + }); +} + +// pkg/dist-src/hook.js +async function hook(state, request, route, parameters) { + let endpoint = request.endpoint.merge( + route, + parameters + ); + if (/\/login\/(oauth\/access_token|device\/code)$/.test(endpoint.url)) { + return request(endpoint); + } + const { token } = await getOAuthAccessToken(state, { + request, + auth: { type: "oauth" } + }); + endpoint.headers.authorization = `token ${token}`; + return request(endpoint); +} + +// pkg/dist-src/version.js +var VERSION = "4.0.5"; + +// pkg/dist-src/index.js +function createOAuthDeviceAuth(options) { + const requestWithDefaults = options.request || import_request.request.defaults({ + headers: { + "user-agent": `octokit-auth-oauth-device.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + } + }); + const { request = requestWithDefaults, ...otherOptions } = options; + const state = options.clientType === "github-app" ? { + ...otherOptions, + clientType: "github-app", + request + } : { + ...otherOptions, + clientType: "oauth-app", + request, + scopes: options.scopes || [] + }; + if (!options.clientId) { + throw new Error( + '[@octokit/auth-oauth-device] "clientId" option must be set (https://github.com/octokit/auth-oauth-device.js#usage)' + ); + } + if (!options.onVerification) { + throw new Error( + '[@octokit/auth-oauth-device] "onVerification" option must be a function (https://github.com/octokit/auth-oauth-device.js#usage)' + ); + } + return Object.assign(auth.bind(null, state), { + hook: hook.bind(null, state) + }); +} +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + createOAuthDeviceAuth +}); diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-node/index.js.map b/dist/node_modules/@octokit/auth-oauth-device/dist-node/index.js.map new file mode 100644 index 0000000..0f3cda8 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/get-oauth-access-token.js", "../dist-src/auth.js", "../dist-src/hook.js", "../dist-src/version.js"], + "sourcesContent": ["import { getUserAgent } from \"universal-user-agent\";\nimport { request as octokitRequest } from \"@octokit/request\";\nimport { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nimport { VERSION } from \"./version\";\nfunction createOAuthDeviceAuth(options) {\n const requestWithDefaults = options.request || octokitRequest.defaults({\n headers: {\n \"user-agent\": `octokit-auth-oauth-device.js/${VERSION} ${getUserAgent()}`\n }\n });\n const { request = requestWithDefaults, ...otherOptions } = options;\n const state = options.clientType === 
\"github-app\" ? {\n ...otherOptions,\n clientType: \"github-app\",\n request\n } : {\n ...otherOptions,\n clientType: \"oauth-app\",\n request,\n scopes: options.scopes || []\n };\n if (!options.clientId) {\n throw new Error(\n '[@octokit/auth-oauth-device] \"clientId\" option must be set (https://github.com/octokit/auth-oauth-device.js#usage)'\n );\n }\n if (!options.onVerification) {\n throw new Error(\n '[@octokit/auth-oauth-device] \"onVerification\" option must be a function (https://github.com/octokit/auth-oauth-device.js#usage)'\n );\n }\n return Object.assign(auth.bind(null, state), {\n hook: hook.bind(null, state)\n });\n}\nexport {\n createOAuthDeviceAuth\n};\n", "import { createDeviceCode, exchangeDeviceCode } from \"@octokit/oauth-methods\";\nasync function getOAuthAccessToken(state, options) {\n const cachedAuthentication = getCachedAuthentication(state, options.auth);\n if (cachedAuthentication)\n return cachedAuthentication;\n const { data: verification } = await createDeviceCode({\n clientType: state.clientType,\n clientId: state.clientId,\n request: options.request || state.request,\n // @ts-expect-error the extra code to make TS happy is not worth it\n scopes: options.auth.scopes || state.scopes\n });\n await state.onVerification(verification);\n const authentication = await waitForAccessToken(\n options.request || state.request,\n state.clientId,\n state.clientType,\n verification\n );\n state.authentication = authentication;\n return authentication;\n}\nfunction getCachedAuthentication(state, auth) {\n if (auth.refresh === true)\n return false;\n if (!state.authentication)\n return false;\n if (state.clientType === \"github-app\") {\n return state.authentication;\n }\n const authentication = state.authentication;\n const newScope = (\"scopes\" in auth && auth.scopes || state.scopes).join(\n \" \"\n );\n const currentScope = authentication.scopes.join(\" \");\n return newScope === currentScope ? authentication : false;\n}\nasync function wait(seconds) {\n await new Promise((resolve) => setTimeout(resolve, seconds * 1e3));\n}\nasync function waitForAccessToken(request, clientId, clientType, verification) {\n try {\n const options = {\n clientId,\n request,\n code: verification.device_code\n };\n const { authentication } = clientType === \"oauth-app\" ? 
await exchangeDeviceCode({\n ...options,\n clientType: \"oauth-app\"\n }) : await exchangeDeviceCode({\n ...options,\n clientType: \"github-app\"\n });\n return {\n type: \"token\",\n tokenType: \"oauth\",\n ...authentication\n };\n } catch (error) {\n if (!error.response)\n throw error;\n const errorType = error.response.data.error;\n if (errorType === \"authorization_pending\") {\n await wait(verification.interval);\n return waitForAccessToken(request, clientId, clientType, verification);\n }\n if (errorType === \"slow_down\") {\n await wait(verification.interval + 5);\n return waitForAccessToken(request, clientId, clientType, verification);\n }\n throw error;\n }\n}\nexport {\n getOAuthAccessToken\n};\n", "import { getOAuthAccessToken } from \"./get-oauth-access-token\";\nasync function auth(state, authOptions) {\n return getOAuthAccessToken(state, {\n auth: authOptions\n });\n}\nexport {\n auth\n};\n", "import { getOAuthAccessToken } from \"./get-oauth-access-token\";\nasync function hook(state, request, route, parameters) {\n let endpoint = request.endpoint.merge(\n route,\n parameters\n );\n if (/\\/login\\/(oauth\\/access_token|device\\/code)$/.test(endpoint.url)) {\n return request(endpoint);\n }\n const { token } = await getOAuthAccessToken(state, {\n request,\n auth: { type: \"oauth\" }\n });\n endpoint.headers.authorization = `token ${token}`;\n return request(endpoint);\n}\nexport {\n hook\n};\n", "const VERSION = \"4.0.5\";\nexport {\n VERSION\n};\n"], + "mappings": ";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAA6B;AAC7B,qBAA0C;;;ACD1C,2BAAqD;AACrD,eAAe,oBAAoB,OAAO,SAAS;AACjD,QAAM,uBAAuB,wBAAwB,OAAO,QAAQ,IAAI;AACxE,MAAI;AACF,WAAO;AACT,QAAM,EAAE,MAAM,aAAa,IAAI,UAAM,uCAAiB;AAAA,IACpD,YAAY,MAAM;AAAA,IAClB,UAAU,MAAM;AAAA,IAChB,SAAS,QAAQ,WAAW,MAAM;AAAA;AAAA,IAElC,QAAQ,QAAQ,KAAK,UAAU,MAAM;AAAA,EACvC,CAAC;AACD,QAAM,MAAM,eAAe,YAAY;AACvC,QAAM,iBAAiB,MAAM;AAAA,IAC3B,QAAQ,WAAW,MAAM;AAAA,IACzB,MAAM;AAAA,IACN,MAAM;AAAA,IACN;AAAA,EACF;AACA,QAAM,iBAAiB;AACvB,SAAO;AACT;AACA,SAAS,wBAAwB,OAAOA,OAAM;AAC5C,MAAIA,MAAK,YAAY;AACnB,WAAO;AACT,MAAI,CAAC,MAAM;AACT,WAAO;AACT,MAAI,MAAM,eAAe,cAAc;AACrC,WAAO,MAAM;AAAA,EACf;AACA,QAAM,iBAAiB,MAAM;AAC7B,QAAM,YAAY,YAAYA,SAAQA,MAAK,UAAU,MAAM,QAAQ;AAAA,IACjE;AAAA,EACF;AACA,QAAM,eAAe,eAAe,OAAO,KAAK,GAAG;AACnD,SAAO,aAAa,eAAe,iBAAiB;AACtD;AACA,eAAe,KAAK,SAAS;AAC3B,QAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,UAAU,GAAG,CAAC;AACnE;AACA,eAAe,mBAAmB,SAAS,UAAU,YAAY,cAAc;AAC7E,MAAI;AACF,UAAM,UAAU;AAAA,MACd;AAAA,MACA;AAAA,MACA,MAAM,aAAa;AAAA,IACrB;AACA,UAAM,EAAE,eAAe,IAAI,eAAe,cAAc,UAAM,yCAAmB;AAAA,MAC/E,GAAG;AAAA,MACH,YAAY;AAAA,IACd,CAAC,IAAI,UAAM,yCAAmB;AAAA,MAC5B,GAAG;AAAA,MACH,YAAY;AAAA,IACd,CAAC;AACD,WAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX,GAAG;AAAA,IACL;AAAA,EACF,SAAS,OAAP;AACA,QAAI,CAAC,MAAM;AACT,YAAM;AACR,UAAM,YAAY,MAAM,SAAS,KAAK;AACtC,QAAI,cAAc,yBAAyB;AACzC,YAAM,KAAK,aAAa,QAAQ;AAChC,aAAO,mBAAmB,SAAS,UAAU,YAAY,YAAY;AAAA,IACvE;AACA,QAAI,cAAc,aAAa;AAC7B,YAAM,KAAK,aAAa,WAAW,CAAC;AACpC,aAAO,mBAAmB,SAAS,UAAU,YAAY,YAAY;AAAA,IACvE;AACA,UAAM;AAAA,EACR;AACF;;;ACxEA,eAAe,KAAK,OAAO,aAAa;AACtC,SAAO,oBAAoB,OAAO;AAAA,IAChC,MAAM;AAAA,EACR,CAAC;AACH;;;ACJA,eAAe,KAAK,OAAO,SAAS,OAAO,YAAY;AACrD,MAAI,WAAW,QAAQ,SAAS;AAAA,IAC9B;AAAA,IACA;AAAA,EACF;AACA,MAAI,+CAA+C,KAAK,SAAS,GAAG,GAAG;AACrE,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACA,QAAM,EAAE,MAAM,IAAI,MAAM,oBAAoB,OAAO;AAAA,IACjD;AAAA,IACA,MAAM,EAAE,MAAM,QAAQ;AAAA,EACxB,CAAC;AACD,WAAS,QAAQ,gBAAgB,SAAS;AAC1C,SAAO,QAAQ,QAAQ;AACzB;;;ACfA,IAAM,UAAU;;;AJKhB,SAAS,sBAAsB,SAAS;AACtC,QAAM,sBAAsB,QAAQ,WAAW,eAAAC,QAAe,SAAS;AAAA,IACrE,SAAS;AAAA,MACP,cAAc,gCAA
gC,eAAW,0CAAa;AAAA,IACxE;AAAA,EACF,CAAC;AACD,QAAM,EAAE,UAAU,qBAAqB,GAAG,aAAa,IAAI;AAC3D,QAAM,QAAQ,QAAQ,eAAe,eAAe;AAAA,IAClD,GAAG;AAAA,IACH,YAAY;AAAA,IACZ;AAAA,EACF,IAAI;AAAA,IACF,GAAG;AAAA,IACH,YAAY;AAAA,IACZ;AAAA,IACA,QAAQ,QAAQ,UAAU,CAAC;AAAA,EAC7B;AACA,MAAI,CAAC,QAAQ,UAAU;AACrB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,QAAQ,gBAAgB;AAC3B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,SAAO,OAAO,OAAO,KAAK,KAAK,MAAM,KAAK,GAAG;AAAA,IAC3C,MAAM,KAAK,KAAK,MAAM,KAAK;AAAA,EAC7B,CAAC;AACH;", + "names": ["auth", "octokitRequest"] +} diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-src/auth.js b/dist/node_modules/@octokit/auth-oauth-device/dist-src/auth.js new file mode 100644 index 0000000..6d1b69f --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-src/auth.js @@ -0,0 +1,9 @@ +import { getOAuthAccessToken } from "./get-oauth-access-token"; +async function auth(state, authOptions) { + return getOAuthAccessToken(state, { + auth: authOptions + }); +} +export { + auth +}; diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-src/get-oauth-access-token.js b/dist/node_modules/@octokit/auth-oauth-device/dist-src/get-oauth-access-token.js new file mode 100644 index 0000000..2002734 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-src/get-oauth-access-token.js @@ -0,0 +1,77 @@ +import { createDeviceCode, exchangeDeviceCode } from "@octokit/oauth-methods"; +async function getOAuthAccessToken(state, options) { + const cachedAuthentication = getCachedAuthentication(state, options.auth); + if (cachedAuthentication) + return cachedAuthentication; + const { data: verification } = await createDeviceCode({ + clientType: state.clientType, + clientId: state.clientId, + request: options.request || state.request, + // @ts-expect-error the extra code to make TS happy is not worth it + scopes: options.auth.scopes || state.scopes + }); + await state.onVerification(verification); + const authentication = await waitForAccessToken( + options.request || state.request, + state.clientId, + state.clientType, + verification + ); + state.authentication = authentication; + return authentication; +} +function getCachedAuthentication(state, auth) { + if (auth.refresh === true) + return false; + if (!state.authentication) + return false; + if (state.clientType === "github-app") { + return state.authentication; + } + const authentication = state.authentication; + const newScope = ("scopes" in auth && auth.scopes || state.scopes).join( + " " + ); + const currentScope = authentication.scopes.join(" "); + return newScope === currentScope ? authentication : false; +} +async function wait(seconds) { + await new Promise((resolve) => setTimeout(resolve, seconds * 1e3)); +} +async function waitForAccessToken(request, clientId, clientType, verification) { + try { + const options = { + clientId, + request, + code: verification.device_code + }; + const { authentication } = clientType === "oauth-app" ? 
await exchangeDeviceCode({ + ...options, + clientType: "oauth-app" + }) : await exchangeDeviceCode({ + ...options, + clientType: "github-app" + }); + return { + type: "token", + tokenType: "oauth", + ...authentication + }; + } catch (error) { + if (!error.response) + throw error; + const errorType = error.response.data.error; + if (errorType === "authorization_pending") { + await wait(verification.interval); + return waitForAccessToken(request, clientId, clientType, verification); + } + if (errorType === "slow_down") { + await wait(verification.interval + 5); + return waitForAccessToken(request, clientId, clientType, verification); + } + throw error; + } +} +export { + getOAuthAccessToken +}; diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-src/hook.js b/dist/node_modules/@octokit/auth-oauth-device/dist-src/hook.js new file mode 100644 index 0000000..1ff0ce9 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-src/hook.js @@ -0,0 +1,19 @@ +import { getOAuthAccessToken } from "./get-oauth-access-token"; +async function hook(state, request, route, parameters) { + let endpoint = request.endpoint.merge( + route, + parameters + ); + if (/\/login\/(oauth\/access_token|device\/code)$/.test(endpoint.url)) { + return request(endpoint); + } + const { token } = await getOAuthAccessToken(state, { + request, + auth: { type: "oauth" } + }); + endpoint.headers.authorization = `token ${token}`; + return request(endpoint); +} +export { + hook +}; diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-src/index.js b/dist/node_modules/@octokit/auth-oauth-device/dist-src/index.js new file mode 100644 index 0000000..7f0d89e --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-src/index.js @@ -0,0 +1,39 @@ +import { getUserAgent } from "universal-user-agent"; +import { request as octokitRequest } from "@octokit/request"; +import { auth } from "./auth"; +import { hook } from "./hook"; +import { VERSION } from "./version"; +function createOAuthDeviceAuth(options) { + const requestWithDefaults = options.request || octokitRequest.defaults({ + headers: { + "user-agent": `octokit-auth-oauth-device.js/${VERSION} ${getUserAgent()}` + } + }); + const { request = requestWithDefaults, ...otherOptions } = options; + const state = options.clientType === "github-app" ? 
{ + ...otherOptions, + clientType: "github-app", + request + } : { + ...otherOptions, + clientType: "oauth-app", + request, + scopes: options.scopes || [] + }; + if (!options.clientId) { + throw new Error( + '[@octokit/auth-oauth-device] "clientId" option must be set (https://github.com/octokit/auth-oauth-device.js#usage)' + ); + } + if (!options.onVerification) { + throw new Error( + '[@octokit/auth-oauth-device] "onVerification" option must be a function (https://github.com/octokit/auth-oauth-device.js#usage)' + ); + } + return Object.assign(auth.bind(null, state), { + hook: hook.bind(null, state) + }); +} +export { + createOAuthDeviceAuth +}; diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-src/version.js b/dist/node_modules/@octokit/auth-oauth-device/dist-src/version.js new file mode 100644 index 0000000..0b9391f --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "4.0.5"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-types/auth.d.ts b/dist/node_modules/@octokit/auth-oauth-device/dist-types/auth.d.ts new file mode 100644 index 0000000..87a8098 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-types/auth.d.ts @@ -0,0 +1,2 @@ +import type { OAuthAppAuthOptions, GitHubAppAuthOptions, OAuthAppAuthentication, GitHubAppAuthentication, OAuthAppState, GitHubAppState } from "./types"; +export declare function auth(state: OAuthAppState | GitHubAppState, authOptions: OAuthAppAuthOptions | GitHubAppAuthOptions): Promise; diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-types/get-oauth-access-token.d.ts b/dist/node_modules/@octokit/auth-oauth-device/dist-types/get-oauth-access-token.d.ts new file mode 100644 index 0000000..7dfc404 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-types/get-oauth-access-token.d.ts @@ -0,0 +1,6 @@ +import type { RequestInterface } from "@octokit/types"; +import type { OAuthAppState, GitHubAppState, OAuthAppAuthOptions, GitHubAppAuthOptions, OAuthAppAuthentication, GitHubAppAuthentication } from "./types"; +export declare function getOAuthAccessToken(state: OAuthAppState | GitHubAppState, options: { + request?: RequestInterface; + auth: OAuthAppAuthOptions | GitHubAppAuthOptions; +}): Promise; diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-types/hook.d.ts b/dist/node_modules/@octokit/auth-oauth-device/dist-types/hook.d.ts new file mode 100644 index 0000000..7b91a69 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-types/hook.d.ts @@ -0,0 +1,3 @@ +import type { RequestInterface, OctokitResponse, EndpointOptions, RequestParameters, Route } from "@octokit/types"; +import type { OAuthAppState, GitHubAppState } from "./types"; +export declare function hook(state: OAuthAppState | GitHubAppState, request: RequestInterface, route: Route | EndpointOptions, parameters?: RequestParameters): Promise>; diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-types/index.d.ts b/dist/node_modules/@octokit/auth-oauth-device/dist-types/index.d.ts new file mode 100644 index 0000000..45df840 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-types/index.d.ts @@ -0,0 +1,4 @@ +import type { GitHubAppAuthInterface, GitHubAppStrategyOptions, OAuthAppAuthInterface, OAuthAppStrategyOptions } from "./types"; +export type { OAuthAppStrategyOptions, OAuthAppAuthOptions, OAuthAppAuthentication, GitHubAppStrategyOptions, GitHubAppAuthOptions, 
GitHubAppAuthentication, GitHubAppAuthenticationWithExpiration, } from "./types"; +export declare function createOAuthDeviceAuth(options: OAuthAppStrategyOptions): OAuthAppAuthInterface; +export declare function createOAuthDeviceAuth(options: GitHubAppStrategyOptions): GitHubAppAuthInterface; diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-types/types.d.ts b/dist/node_modules/@octokit/auth-oauth-device/dist-types/types.d.ts new file mode 100644 index 0000000..464d1e3 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-types/types.d.ts @@ -0,0 +1,68 @@ +import type { RequestInterface, Route, EndpointOptions, RequestParameters, OctokitResponse } from "@octokit/types"; +import * as OAuthMethodsTypes from "@octokit/oauth-methods"; +export type ClientType = "oauth-app" | "github-app"; +export type OAuthAppStrategyOptions = { + clientId: string; + clientType?: "oauth-app"; + onVerification: OnVerificationCallback; + scopes?: string[]; + request?: RequestInterface; +}; +export type GitHubAppStrategyOptions = { + clientId: string; + clientType: "github-app"; + onVerification: OnVerificationCallback; + request?: RequestInterface; +}; +export interface OAuthAppAuthInterface { + (options: OAuthAppAuthOptions): Promise; + hook(request: RequestInterface, route: Route | EndpointOptions, parameters?: RequestParameters): Promise>; +} +export interface GitHubAppAuthInterface { + (options: GitHubAppAuthOptions): Promise; + hook(request: RequestInterface, route: Route | EndpointOptions, parameters?: RequestParameters): Promise>; +} +export type OAuthAppAuthOptions = { + type: "oauth"; + scopes?: string[]; + refresh?: boolean; +}; +export type GitHubAppAuthOptions = { + type: "oauth"; + refresh?: boolean; +}; +export type OAuthAppAuthentication = { + type: "token"; + tokenType: "oauth"; +} & Omit; +export type GitHubAppAuthentication = { + type: "token"; + tokenType: "oauth"; +} & Omit; +export type GitHubAppAuthenticationWithExpiration = { + type: "token"; + tokenType: "oauth"; +} & Omit; +export type Verification = { + device_code: string; + user_code: string; + verification_uri: string; + expires_in: number; + interval: number; +}; +export type OnVerificationCallback = (verification: Verification) => any | Promise; +export type OAuthAppState = { + clientId: string; + clientType: "oauth-app"; + onVerification: OnVerificationCallback; + scopes: string[]; + request: RequestInterface; + authentication?: OAuthAppAuthentication; +}; +export type GitHubAppState = { + clientId: string; + clientType: "github-app"; + onVerification: OnVerificationCallback; + request: RequestInterface; + authentication?: GitHubAppAuthentication | GitHubAppAuthenticationWithExpiration; +}; diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-types/version.d.ts b/dist/node_modules/@octokit/auth-oauth-device/dist-types/version.d.ts new file mode 100644 index 0000000..7d1ba1b --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "4.0.5"; diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-web/index.js b/dist/node_modules/@octokit/auth-oauth-device/dist-web/index.js new file mode 100644 index 0000000..d5ddfcb --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-web/index.js @@ -0,0 +1,142 @@ +// pkg/dist-src/index.js +import { getUserAgent } from "universal-user-agent"; +import { request as octokitRequest } from "@octokit/request"; + +// pkg/dist-src/get-oauth-access-token.js +import { 
createDeviceCode, exchangeDeviceCode } from "@octokit/oauth-methods"; +async function getOAuthAccessToken(state, options) { + const cachedAuthentication = getCachedAuthentication(state, options.auth); + if (cachedAuthentication) + return cachedAuthentication; + const { data: verification } = await createDeviceCode({ + clientType: state.clientType, + clientId: state.clientId, + request: options.request || state.request, + // @ts-expect-error the extra code to make TS happy is not worth it + scopes: options.auth.scopes || state.scopes + }); + await state.onVerification(verification); + const authentication = await waitForAccessToken( + options.request || state.request, + state.clientId, + state.clientType, + verification + ); + state.authentication = authentication; + return authentication; +} +function getCachedAuthentication(state, auth2) { + if (auth2.refresh === true) + return false; + if (!state.authentication) + return false; + if (state.clientType === "github-app") { + return state.authentication; + } + const authentication = state.authentication; + const newScope = ("scopes" in auth2 && auth2.scopes || state.scopes).join( + " " + ); + const currentScope = authentication.scopes.join(" "); + return newScope === currentScope ? authentication : false; +} +async function wait(seconds) { + await new Promise((resolve) => setTimeout(resolve, seconds * 1e3)); +} +async function waitForAccessToken(request, clientId, clientType, verification) { + try { + const options = { + clientId, + request, + code: verification.device_code + }; + const { authentication } = clientType === "oauth-app" ? await exchangeDeviceCode({ + ...options, + clientType: "oauth-app" + }) : await exchangeDeviceCode({ + ...options, + clientType: "github-app" + }); + return { + type: "token", + tokenType: "oauth", + ...authentication + }; + } catch (error) { + if (!error.response) + throw error; + const errorType = error.response.data.error; + if (errorType === "authorization_pending") { + await wait(verification.interval); + return waitForAccessToken(request, clientId, clientType, verification); + } + if (errorType === "slow_down") { + await wait(verification.interval + 5); + return waitForAccessToken(request, clientId, clientType, verification); + } + throw error; + } +} + +// pkg/dist-src/auth.js +async function auth(state, authOptions) { + return getOAuthAccessToken(state, { + auth: authOptions + }); +} + +// pkg/dist-src/hook.js +async function hook(state, request, route, parameters) { + let endpoint = request.endpoint.merge( + route, + parameters + ); + if (/\/login\/(oauth\/access_token|device\/code)$/.test(endpoint.url)) { + return request(endpoint); + } + const { token } = await getOAuthAccessToken(state, { + request, + auth: { type: "oauth" } + }); + endpoint.headers.authorization = `token ${token}`; + return request(endpoint); +} + +// pkg/dist-src/version.js +var VERSION = "4.0.5"; + +// pkg/dist-src/index.js +function createOAuthDeviceAuth(options) { + const requestWithDefaults = options.request || octokitRequest.defaults({ + headers: { + "user-agent": `octokit-auth-oauth-device.js/${VERSION} ${getUserAgent()}` + } + }); + const { request = requestWithDefaults, ...otherOptions } = options; + const state = options.clientType === "github-app" ? 
{ + ...otherOptions, + clientType: "github-app", + request + } : { + ...otherOptions, + clientType: "oauth-app", + request, + scopes: options.scopes || [] + }; + if (!options.clientId) { + throw new Error( + '[@octokit/auth-oauth-device] "clientId" option must be set (https://github.com/octokit/auth-oauth-device.js#usage)' + ); + } + if (!options.onVerification) { + throw new Error( + '[@octokit/auth-oauth-device] "onVerification" option must be a function (https://github.com/octokit/auth-oauth-device.js#usage)' + ); + } + return Object.assign(auth.bind(null, state), { + hook: hook.bind(null, state) + }); +} +export { + createOAuthDeviceAuth +}; diff --git a/dist/node_modules/@octokit/auth-oauth-device/dist-web/index.js.map b/dist/node_modules/@octokit/auth-oauth-device/dist-web/index.js.map new file mode 100644 index 0000000..083181d --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/get-oauth-access-token.js", "../dist-src/auth.js", "../dist-src/hook.js", "../dist-src/version.js"], + "sourcesContent": ["import { getUserAgent } from \"universal-user-agent\";\nimport { request as octokitRequest } from \"@octokit/request\";\nimport { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nimport { VERSION } from \"./version\";\nfunction createOAuthDeviceAuth(options) {\n const requestWithDefaults = options.request || octokitRequest.defaults({\n headers: {\n \"user-agent\": `octokit-auth-oauth-device.js/${VERSION} ${getUserAgent()}`\n }\n });\n const { request = requestWithDefaults, ...otherOptions } = options;\n const state = options.clientType === \"github-app\" ? {\n ...otherOptions,\n clientType: \"github-app\",\n request\n } : {\n ...otherOptions,\n clientType: \"oauth-app\",\n request,\n scopes: options.scopes || []\n };\n if (!options.clientId) {\n throw new Error(\n '[@octokit/auth-oauth-device] \"clientId\" option must be set (https://github.com/octokit/auth-oauth-device.js#usage)'\n );\n }\n if (!options.onVerification) {\n throw new Error(\n '[@octokit/auth-oauth-device] \"onVerification\" option must be a function (https://github.com/octokit/auth-oauth-device.js#usage)'\n );\n }\n return Object.assign(auth.bind(null, state), {\n hook: hook.bind(null, state)\n });\n}\nexport {\n createOAuthDeviceAuth\n};\n", "import { createDeviceCode, exchangeDeviceCode } from \"@octokit/oauth-methods\";\nasync function getOAuthAccessToken(state, options) {\n const cachedAuthentication = getCachedAuthentication(state, options.auth);\n if (cachedAuthentication)\n return cachedAuthentication;\n const { data: verification } = await createDeviceCode({\n clientType: state.clientType,\n clientId: state.clientId,\n request: options.request || state.request,\n // @ts-expect-error the extra code to make TS happy is not worth it\n scopes: options.auth.scopes || state.scopes\n });\n await state.onVerification(verification);\n const authentication = await waitForAccessToken(\n options.request || state.request,\n state.clientId,\n state.clientType,\n verification\n );\n state.authentication = authentication;\n return authentication;\n}\nfunction getCachedAuthentication(state, auth) {\n if (auth.refresh === true)\n return false;\n if (!state.authentication)\n return false;\n if (state.clientType === \"github-app\") {\n return state.authentication;\n }\n const authentication = state.authentication;\n const newScope = (\"scopes\" in auth && auth.scopes || state.scopes).join(\n \" \"\n );\n 
const currentScope = authentication.scopes.join(\" \");\n return newScope === currentScope ? authentication : false;\n}\nasync function wait(seconds) {\n await new Promise((resolve) => setTimeout(resolve, seconds * 1e3));\n}\nasync function waitForAccessToken(request, clientId, clientType, verification) {\n try {\n const options = {\n clientId,\n request,\n code: verification.device_code\n };\n const { authentication } = clientType === \"oauth-app\" ? await exchangeDeviceCode({\n ...options,\n clientType: \"oauth-app\"\n }) : await exchangeDeviceCode({\n ...options,\n clientType: \"github-app\"\n });\n return {\n type: \"token\",\n tokenType: \"oauth\",\n ...authentication\n };\n } catch (error) {\n if (!error.response)\n throw error;\n const errorType = error.response.data.error;\n if (errorType === \"authorization_pending\") {\n await wait(verification.interval);\n return waitForAccessToken(request, clientId, clientType, verification);\n }\n if (errorType === \"slow_down\") {\n await wait(verification.interval + 5);\n return waitForAccessToken(request, clientId, clientType, verification);\n }\n throw error;\n }\n}\nexport {\n getOAuthAccessToken\n};\n", "import { getOAuthAccessToken } from \"./get-oauth-access-token\";\nasync function auth(state, authOptions) {\n return getOAuthAccessToken(state, {\n auth: authOptions\n });\n}\nexport {\n auth\n};\n", "import { getOAuthAccessToken } from \"./get-oauth-access-token\";\nasync function hook(state, request, route, parameters) {\n let endpoint = request.endpoint.merge(\n route,\n parameters\n );\n if (/\\/login\\/(oauth\\/access_token|device\\/code)$/.test(endpoint.url)) {\n return request(endpoint);\n }\n const { token } = await getOAuthAccessToken(state, {\n request,\n auth: { type: \"oauth\" }\n });\n endpoint.headers.authorization = `token ${token}`;\n return request(endpoint);\n}\nexport {\n hook\n};\n", "const VERSION = \"4.0.5\";\nexport {\n VERSION\n};\n"], + "mappings": 
";AAAA,SAAS,oBAAoB;AAC7B,SAAS,WAAW,sBAAsB;;;ACD1C,SAAS,kBAAkB,0BAA0B;AACrD,eAAe,oBAAoB,OAAO,SAAS;AACjD,QAAM,uBAAuB,wBAAwB,OAAO,QAAQ,IAAI;AACxE,MAAI;AACF,WAAO;AACT,QAAM,EAAE,MAAM,aAAa,IAAI,MAAM,iBAAiB;AAAA,IACpD,YAAY,MAAM;AAAA,IAClB,UAAU,MAAM;AAAA,IAChB,SAAS,QAAQ,WAAW,MAAM;AAAA;AAAA,IAElC,QAAQ,QAAQ,KAAK,UAAU,MAAM;AAAA,EACvC,CAAC;AACD,QAAM,MAAM,eAAe,YAAY;AACvC,QAAM,iBAAiB,MAAM;AAAA,IAC3B,QAAQ,WAAW,MAAM;AAAA,IACzB,MAAM;AAAA,IACN,MAAM;AAAA,IACN;AAAA,EACF;AACA,QAAM,iBAAiB;AACvB,SAAO;AACT;AACA,SAAS,wBAAwB,OAAOA,OAAM;AAC5C,MAAIA,MAAK,YAAY;AACnB,WAAO;AACT,MAAI,CAAC,MAAM;AACT,WAAO;AACT,MAAI,MAAM,eAAe,cAAc;AACrC,WAAO,MAAM;AAAA,EACf;AACA,QAAM,iBAAiB,MAAM;AAC7B,QAAM,YAAY,YAAYA,SAAQA,MAAK,UAAU,MAAM,QAAQ;AAAA,IACjE;AAAA,EACF;AACA,QAAM,eAAe,eAAe,OAAO,KAAK,GAAG;AACnD,SAAO,aAAa,eAAe,iBAAiB;AACtD;AACA,eAAe,KAAK,SAAS;AAC3B,QAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,UAAU,GAAG,CAAC;AACnE;AACA,eAAe,mBAAmB,SAAS,UAAU,YAAY,cAAc;AAC7E,MAAI;AACF,UAAM,UAAU;AAAA,MACd;AAAA,MACA;AAAA,MACA,MAAM,aAAa;AAAA,IACrB;AACA,UAAM,EAAE,eAAe,IAAI,eAAe,cAAc,MAAM,mBAAmB;AAAA,MAC/E,GAAG;AAAA,MACH,YAAY;AAAA,IACd,CAAC,IAAI,MAAM,mBAAmB;AAAA,MAC5B,GAAG;AAAA,MACH,YAAY;AAAA,IACd,CAAC;AACD,WAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX,GAAG;AAAA,IACL;AAAA,EACF,SAAS,OAAP;AACA,QAAI,CAAC,MAAM;AACT,YAAM;AACR,UAAM,YAAY,MAAM,SAAS,KAAK;AACtC,QAAI,cAAc,yBAAyB;AACzC,YAAM,KAAK,aAAa,QAAQ;AAChC,aAAO,mBAAmB,SAAS,UAAU,YAAY,YAAY;AAAA,IACvE;AACA,QAAI,cAAc,aAAa;AAC7B,YAAM,KAAK,aAAa,WAAW,CAAC;AACpC,aAAO,mBAAmB,SAAS,UAAU,YAAY,YAAY;AAAA,IACvE;AACA,UAAM;AAAA,EACR;AACF;;;ACxEA,eAAe,KAAK,OAAO,aAAa;AACtC,SAAO,oBAAoB,OAAO;AAAA,IAChC,MAAM;AAAA,EACR,CAAC;AACH;;;ACJA,eAAe,KAAK,OAAO,SAAS,OAAO,YAAY;AACrD,MAAI,WAAW,QAAQ,SAAS;AAAA,IAC9B;AAAA,IACA;AAAA,EACF;AACA,MAAI,+CAA+C,KAAK,SAAS,GAAG,GAAG;AACrE,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACA,QAAM,EAAE,MAAM,IAAI,MAAM,oBAAoB,OAAO;AAAA,IACjD;AAAA,IACA,MAAM,EAAE,MAAM,QAAQ;AAAA,EACxB,CAAC;AACD,WAAS,QAAQ,gBAAgB,SAAS;AAC1C,SAAO,QAAQ,QAAQ;AACzB;;;ACfA,IAAM,UAAU;;;AJKhB,SAAS,sBAAsB,SAAS;AACtC,QAAM,sBAAsB,QAAQ,WAAW,eAAe,SAAS;AAAA,IACrE,SAAS;AAAA,MACP,cAAc,gCAAgC,WAAW,aAAa;AAAA,IACxE;AAAA,EACF,CAAC;AACD,QAAM,EAAE,UAAU,qBAAqB,GAAG,aAAa,IAAI;AAC3D,QAAM,QAAQ,QAAQ,eAAe,eAAe;AAAA,IAClD,GAAG;AAAA,IACH,YAAY;AAAA,IACZ;AAAA,EACF,IAAI;AAAA,IACF,GAAG;AAAA,IACH,YAAY;AAAA,IACZ;AAAA,IACA,QAAQ,QAAQ,UAAU,CAAC;AAAA,EAC7B;AACA,MAAI,CAAC,QAAQ,UAAU;AACrB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,QAAQ,gBAAgB;AAC3B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,SAAO,OAAO,OAAO,KAAK,KAAK,MAAM,KAAK,GAAG;AAAA,IAC3C,MAAM,KAAK,KAAK,MAAM,KAAK;AAAA,EAC7B,CAAC;AACH;", + "names": ["auth"] +} diff --git a/dist/node_modules/@octokit/auth-oauth-device/package.json b/dist/node_modules/@octokit/auth-oauth-device/package.json new file mode 100644 index 0000000..2b1a5d7 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-device/package.json @@ -0,0 +1,49 @@ +{ + "name": "@octokit/auth-oauth-device", + "version": "4.0.5", + "description": "GitHub OAuth Device authentication strategy for JavaScript", + "repository": "github:octokit/auth-oauth-device.js", + "keywords": [ + "github", + "api", + "sdk", + "toolkit" + ], + "author": "Gregor Martynus (https://dev.to/gr2m)", + "license": "MIT", + "dependencies": { + "@octokit/oauth-methods": "^2.0.0", + "@octokit/request": "^6.0.0", + "@octokit/types": "^9.0.0", + "universal-user-agent": "^6.0.0" + }, + "devDependencies": { + "@octokit/tsconfig": "^2.0.0", + "@types/jest": "^29.0.0", + "@types/node": "^18.0.0", + "esbuild": "^0.17.19", + "fetch-mock": "^9.11.0", + "glob": "^10.2.7", + "jest": "^29.0.0", + "prettier": "2.8.8", + 
"semantic-release": "^21.0.0", + "semantic-release-plugin-update-version-in-files": "^1.1.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "publishConfig": { + "access": "public" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "browser": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "module": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/auth-oauth-user/LICENSE b/dist/node_modules/@octokit/auth-oauth-user/LICENSE new file mode 100644 index 0000000..57049d0 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/LICENSE @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2021 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/dist/node_modules/@octokit/auth-oauth-user/README.md b/dist/node_modules/@octokit/auth-oauth-user/README.md new file mode 100644 index 0000000..0f32390 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/README.md @@ -0,0 +1,1032 @@ +# auth-oauth-user.js + +> Octokit authentication strategy for OAuth user authentication + +[![@latest](https://img.shields.io/npm/v/@octokit/auth-oauth-user.svg)](https://www.npmjs.com/package/@octokit/auth-oauth-user) +[![Build Status](https://github.com/octokit/auth-oauth-user.js/workflows/Test/badge.svg)](https://github.com/octokit/auth-oauth-user.js/actions?query=workflow%3ATest+branch%3Amain) + +**Important:** `@octokit/auth-oauth-user` requires your app's `client_secret`, which must not be exposed. If you are looking for an OAuth user authentication strategy that can be used on a client (browser, IoT, CLI), check out [`@octokit/auth-oauth-user-client`](https://github.com/octokit/auth-oauth-user-client.js#readme). Note that `@octokit/auth-oauth-user-client` requires a backend. The only exception is [`@octokit/auth-oauth-device`](https://github.com/octokit/auth-oauth-device.js#readme) which does not require the `client_secret`, but does not work in browsers due to CORS constraints. + +
+Table of contents + + + +- [Features](#features) +- [Standalone usage](#standalone-usage) + - [Exchange code from OAuth web flow](#exchange-code-from-oauth-web-flow) + - [OAuth Device flow](#oauth-device-flow) + - [Use an existing authentication](#use-an-existing-authentication) +- [Usage with Octokit](#usage-with-octokit) +- [`createOAuthUserAuth(options)` or `new Octokit({ auth })`](#createoauthuserauthoptions-or-new-octokit-auth-) + - [When using GitHub's OAuth web flow](#when-using-githubs-oauth-web-flow) + - [When using GitHub's OAuth device flow](#when-using-githubs-oauth-device-flow) + - [When passing an existing authentication object](#when-passing-an-existing-authentication-object) +- [`auth(options)` or `octokit.auth(options)`](#authoptions-or-octokitauthoptions) +- [Authentication object](#authentication-object) + - [OAuth APP authentication token](#oauth-app-authentication-token) + - [GitHub APP user authentication token with expiring disabled](#github-app-user-authentication-token-with-expiring-disabled) + - [GitHub APP user authentication token with expiring enabled](#github-app-user-authentication-token-with-expiring-enabled) +- [`auth.hook(request, route, parameters)` or `auth.hook(request, options)`](#authhookrequest-route-parameters-or-authhookrequest-options) +- [Types](#types) +- [Contributing](#contributing) +- [License](#license) + + + +
+ +## Features + +- Code for token exchange from [GitHub's OAuth web flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#web-application-flow) +- [GitHub's OAuth device flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#device-flow) +- Caches token for successive calls +- Auto-refreshing for [expiring user access tokens](https://docs.github.com/en/developers/apps/refreshing-user-to-server-access-tokens) +- Applies the correct authentication strategy based on the request URL when used with `Octokit` +- Token verification +- Token reset +- Token invalidation +- Application grant revocation + +## Standalone usage + + + + + +
+ +Browsers + + + +Load `@octokit/auth-oauth-user` directly from [cdn.skypack.dev](https://cdn.skypack.dev) + +```html +<script type="module"> +import { createOAuthUserAuth } from "https://cdn.skypack.dev/@octokit/auth-oauth-user"; +</script> +``` + +
+ +Node + + + +Install with `npm install @octokit/auth-oauth-user` + +```js +const { createOAuthUserAuth } = require("@octokit/auth-oauth-user"); +``` + +
+ +### Exchange code from OAuth web flow + +```js +const auth = createOAuthUserAuth({ + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", + code: "code123", + // optional + state: "state123", + redirectUrl: "https://acme-inc.com/login", +}); + +// Exchanges the code for the user access token authentication on first call +// and caches the authentication for successive calls +const { token } = await auth(); +``` + +About [GitHub's OAuth web flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#web-application-flow) + +### OAuth Device flow + +```js +const auth = createOAuthUserAuth({ + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", + onVerification(verification) { + // verification example + // { + // device_code: "3584d83530557fdd1f46af8289938c8ef79f9dc5", + // user_code: "WDJB-MJHT", + // verification_uri: "https://github.com/login/device", + // expires_in: 900, + // interval: 5, + // }; + + console.log("Open %s", verification.verification_uri); + console.log("Enter code: %s", verification.user_code); + }, +}); + +// resolves once the user entered the `user_code` on `verification_uri` +const { token } = await auth(); +``` + +About [GitHub's OAuth device flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#device-flow) + +### Use an existing authentication + +```js +const auth = createOAuthUserAuth({ + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", + clientType: "oauth-app", + token: "token123", +}); + +// will return the passed authentication +const { token } = await auth(); +``` + +See [Authentication object](#authentication-object). + +## Usage with Octokit + + + + + + +
+ +Browsers + + + +`@octokit/auth-oauth-user` cannot be used in the browser. It requires `clientSecret` to be set which must not be exposed to clients, and some of the OAuth APIs it uses do not support CORS. + +
+ +Node + + + +Install with `npm install @octokit/core @octokit/auth-oauth-user`. Optionally replace `@octokit/core` with a compatible module + +```js +const { Octokit } = require("@octokit/core"); +const { createOAuthUserAuth } = require("@octokit/auth-oauth-user"); +``` + +
+ +```js +const octokit = new Octokit({ + authStrategy: createOAuthUserAuth, + auth: { + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", + code: "code123", + }, +}); + +// Exchanges the code for the user access token authentication on first request +// and caches the authentication for successive requests +const { + data: { login }, +} = await octokit.request("GET /user"); +console.log("Hello, %s!", login); +``` + +## `createOAuthUserAuth(options)` or `new Octokit({ auth })` + +The `createOAuthUserAuth` method accepts a single `options` object as argument. The same set of options can be passed as `auth` to the `Octokit` constructor when setting `authStrategy: createOAuthUserAuth` + +### When using GitHub's OAuth web flow + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ clientId + + string + + Required. Client ID of your GitHub/OAuth App. Find it on your app's settings page. +
+ clientSecret + + string + + Required. Client Secret for your GitHub/OAuth App. Create one on your app's settings page. +
+ clientType + + string + + Either "oauth-app" or "github-app". Defaults to "oauth-app". +
+ code + + string + + +**Required.** The authorization code which was passed as query parameter to the callback URL from [GitHub's OAuth web application flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#web-application-flow). + +
+ state + + string + + +The unguessable random string you provided in [Step 1 of GitHub's OAuth web application flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#1-request-a-users-github-identity). + +
+ redirectUrl + + string + + +The redirect_uri parameter you provided in [Step 1 of GitHub's OAuth web application flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#1-request-a-users-github-identity). + +
+ + request + + function + + You can pass in your own @octokit/request instance. For usage with enterprise, set baseUrl to the API root endpoint. Example: + +```js +const { request } = require("@octokit/request"); +createOAuthUserAuth({ + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", + request: request.defaults({ + baseUrl: "https://ghe.my-company.com/api/v3", + }), +}); +``` + +
+ +### When using GitHub's OAuth device flow + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ clientId + + string + + Required. Client ID of your GitHub/OAuth App. Find it on your app's settings page. +
+ clientSecret + + string + + Required. Client Secret for your GitHub/OAuth App. The clientSecret is not needed for the OAuth device flow itself, but it is required for resetting, refreshing, and invalidating a token. Find the Client Secret on your app's settings page. +
+ clientType + + string + + Either "oauth-app" or "github-app". Defaults to "oauth-app". +
+ onVerification + + function + +

**Required**. A function that is called once the device and user codes have been retrieved.

The `onVerification()` callback can be used to pause until the user completes step 2, which might result in a better user experience.

```js
const auth = createOAuthUserAuth({
  clientId: "1234567890abcdef1234",
  clientSecret: "1234567890abcdef1234567890abcdef12345678",
  async onVerification(verification) {
    console.log("Open %s", verification.verification_uri);
    console.log("Enter code: %s", verification.user_code);

    await prompt("press enter when you are ready to continue");
  },
});
```

+
+ request + + function + + You can pass in your own @octokit/request instance. For usage with enterprise, set baseUrl to the API root endpoint. Example:

```js
const { request } = require("@octokit/request");
createOAuthUserAuth({
  clientId: "1234567890abcdef1234",
  clientSecret: "1234567890abcdef1234567890abcdef12345678",
  async onVerification(verification) {
    console.log("Open %s", verification.verification_uri);
    console.log("Enter code: %s", verification.user_code);

    await prompt("press enter when you are ready to continue");
  },
  request: request.defaults({
    baseUrl: "https://ghe.my-company.com/api/v3",
  }),
});
```

+
+ +### When passing an existing authentication object + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ clientType + + string + + Required. Either "oauth-app" or "github-app". +
+ clientId + + string + + Required. Client ID of your GitHub/OAuth App. Find it on your app's settings page. +
+ clientSecret + + string + + Required. Client Secret for your GitHub/OAuth App. Create one on your app's settings page. +
+ token + + string + + Required. The user access token +
+ scopes + + array of strings + + Required if clientType is set to "oauth-app". Array of OAuth scope names that were granted to the token. +
+ refreshToken + + string + + Only relevant if clientType is set to "github-app" and token expiration is enabled. +
+ expiresAt + + string + + Only relevant if clientType is set to "github-app" and token expiration is enabled. Date timestamp in ISO 8601 format. Example: 2022-01-01T08:00:00.000Z +
+ refreshTokenExpiresAt + + string + + Only relevant if clientType is set to "github-app" and token expiration is enabled. Date timestamp in ISO 8601 format. Example: 2021-07-01T00:00:00.000Z +
+ request + + function + + You can pass in your own @octokit/request instance. For usage with enterprise, set baseUrl to the API root endpoint. Example:

```js
const { request } = require("@octokit/request");
createOAuthUserAuth({
  clientId: "1234567890abcdef1234",
  clientSecret: "1234567890abcdef1234567890abcdef12345678",
  request: request.defaults({
    baseUrl: "https://ghe.my-company.com/api/v3",
  }),
});
```

+
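
For completeness, here is a hedged sketch of passing an existing GitHub App user token together with its refresh data, as described by the `refreshToken`, `expiresAt`, and `refreshTokenExpiresAt` rows above (all values are placeholders, e.g. restored from your own storage):

```js
const auth = createOAuthUserAuth({
  clientType: "github-app",
  clientId: "1234567890abcdef1234",
  clientSecret: "1234567890abcdef1234567890abcdef12345678",
  // existing authentication (placeholder values)
  token: "token123",
  refreshToken: "refreshToken123",
  expiresAt: "2022-01-01T08:00:00.000Z",
  refreshTokenExpiresAt: "2022-07-01T08:00:00.000Z",
});

// returns the passed authentication; if `expiresAt` is in the past,
// the token is refreshed first using the refresh token
const { token } = await auth();
```
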
+

## `auth(options)` or `octokit.auth(options)`

The async `auth()` method is returned by `createOAuthUserAuth(options)`, or set on the `octokit` instance when the `Octokit` constructor was called with `authStrategy: createOAuthUserAuth`.

Once `auth()` has obtained a valid authentication object, it caches it in memory and reuses it for subsequent calls. It also remembers when a token has been found to be invalid and will not send any further requests with it. If the authentication uses a refresh token, a new token is requested as needed. Calling `auth({ type: "reset" })` replaces the internally cached authentication.

Resolves with an [authentication object](#authentication-object).

+ name + + type + + description +
+ type + + string + +

Without setting `type`, `auth()` returns the current authentication object, or exchanges the `code` from the strategy options on the first call. If the current token has expired, it is refreshed automatically (GitHub Apps with token expiration enabled). See the usage sketch after this table.

Possible values for `type` are

- `"get"`: returns the token from internal state, creating it first if none exists yet
- `"check"`: sends a request to verify that the current token is still valid
- `"reset"`: invalidates the current token and replaces it with a new one
- `"refresh"`: exchanges the refresh token for a new user access token. GitHub Apps only, and only if expiring user tokens are enabled.
- `"delete"`: invalidates the current token
- `"deleteAuthorization"`: revokes the OAuth authorization for the application. All tokens for the current user created by the same app are invalidated. The user will be prompted to grant access again during the next OAuth web flow.

+
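
As a usage sketch (assuming `auth` was created with any of the strategy options above), the different `type` values map to calls like the following:

```js
// default ("get"): returns the cached token, creating it on first call
const { token } = await auth();

// "check": verifies the current token against the API
await auth({ type: "check" });

// "reset": invalidates the current token and replaces it with a new one
const { token: newToken } = await auth({ type: "reset" });

// "refresh": GitHub Apps with expiring user tokens only
// await auth({ type: "refresh" });

// "delete": invalidates the current token
await auth({ type: "delete" });

// "deleteAuthorization": revokes the app's OAuth authorization for the user
// await auth({ type: "deleteAuthorization" });
```
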
+ +## Authentication object + +There are three possible results + +1. [OAuth APP authentication token](#oauth-app-authentication-token) +1. [GitHub APP user authentication token with expiring disabled](#github-app-user-authentication-token-with-expiring-disabled) +1. [GitHub APP user authentication token with expiring enabled](#github-app-user-authentication-token-with-expiring-enabled) + +The differences are + +1. `scopes` is only present for OAuth Apps +2. `refreshToken`, `expiresAt`, `refreshTokenExpiresAt` are only present for GitHub Apps, and only if token expiration is enabled + +### OAuth APP authentication token + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "token" +
+ tokenType + + string + + "oauth" +
+ clientType + + string + + "oauth-app" +
+ clientId + + string + + The clientId from the strategy options +
+ clientSecret + + string + + The clientSecret from the strategy options +
+ token + + string + + The user access token +
+ scopes + + array of strings + + array of scope names enabled for the token +
+ onTokenCreated + + function + + callback invoked when a token is "reset" or "refreshed" +
+ invalid + + boolean + +

Either `undefined` or `true`. Will be set to `true` if the token was invalidated explicitly or found to be invalid.

+
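
For illustration only, an OAuth App authentication object has roughly this shape (all values below are placeholders):

```js
const authentication = {
  type: "token",
  tokenType: "oauth",
  clientType: "oauth-app",
  clientId: "1234567890abcdef1234",
  clientSecret: "1234567890abcdef1234567890abcdef12345678",
  token: "token123",
  // placeholder scopes; whatever was granted during the OAuth flow
  scopes: ["repo", "gist"],
};
```
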
+ +### GitHub APP user authentication token with expiring disabled + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "token" +
+ tokenType + + string + + "oauth" +
+ clientType + + string + + "github-app" +
+ clientId + + string + + The clientId from the strategy options +
+ clientSecret + + string + + The clientSecret from the strategy options +
+ token + + string + + The user access token +
+ onTokenCreated + + function + + callback invoked when a token is "reset" or "refreshed" +
+ invalid + + boolean + +

Either `undefined` or `true`. Will be set to `true` if the token was invalidated explicitly or found to be invalid.

+
+ +### GitHub APP user authentication token with expiring enabled + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ type + + string + + "token" +
+ tokenType + + string + + "oauth" +
+ clientType + + string + + "github-app" +
+ clientId + + string + + The clientId from the strategy options +
+ clientSecret + + string + + The clientSecret from the strategy options +
+ token + + string + + The user access token +
+ refreshToken + + string + + The refresh token +
+ expiresAt + + string + + Date timestamp in ISO 8601 format. Example: 2022-01-01T08:00:00.000Z +
+ refreshTokenExpiresAt + + string + + Date timestamp in ISO 8601 format. Example: 2021-07-01T00:00:00.000Z +
+ invalid + + boolean + +

Either `undefined` or `true`. Will be set to `true` if the token was invalidated explicitly or found to be invalid.

+
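
And a GitHub App user authentication object with expiration enabled has roughly this shape (placeholder values; note the refresh fields and the absence of `scopes`):

```js
const authentication = {
  type: "token",
  tokenType: "oauth",
  clientType: "github-app",
  clientId: "1234567890abcdef1234",
  clientSecret: "1234567890abcdef1234567890abcdef12345678",
  token: "token123",
  refreshToken: "refreshToken123",
  expiresAt: "2022-01-01T08:00:00.000Z",
  refreshTokenExpiresAt: "2022-07-01T08:00:00.000Z",
};
```
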
+ +## `auth.hook(request, route, parameters)` or `auth.hook(request, options)` + +`auth.hook()` hooks directly into the request life cycle. It amends the request to authenticate correctly based on the request URL. + +The `request` option is an instance of [`@octokit/request`](https://github.com/octokit/request.js#readme). The `route`/`options` parameters are the same as for the [`request()` method](https://github.com/octokit/request.js#request). + +`auth.hook()` can be called directly to send an authenticated request + +```js +const { data: user } = await auth.hook(request, "GET /user"); +``` + +Or it can be passed as option to [`request()`](https://github.com/octokit/request.js#request). + +```js +const requestWithAuth = request.defaults({ + request: { + hook: auth.hook, + }, +}); + +const { data: user } = await requestWithAuth("GET /user"); +``` + +## Types + +```ts +import { + GitHubAppAuthentication, + GitHubAppAuthenticationWithExpiration, + GitHubAppAuthOptions, + GitHubAppStrategyOptions, + GitHubAppStrategyOptionsDeviceFlow, + GitHubAppStrategyOptionsExistingAuthentication, + GitHubAppStrategyOptionsExistingAuthenticationWithExpiration, + GitHubAppStrategyOptionsWebFlow, + OAuthAppAuthentication, + OAuthAppAuthOptions, + OAuthAppStrategyOptions, + OAuthAppStrategyOptionsDeviceFlow, + OAuthAppStrategyOptionsExistingAuthentication, + OAuthAppStrategyOptionsWebFlow, +} from "@octokit/auth-oauth-user"; +``` + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-node/index.js b/dist/node_modules/@octokit/auth-oauth-user/dist-node/index.js new file mode 100644 index 0000000..fa06b87 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-node/index.js @@ -0,0 +1,243 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createOAuthUserAuth: () => createOAuthUserAuth, + requiresBasicAuth: () => requiresBasicAuth +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = require("universal-user-agent"); +var import_request = require("@octokit/request"); + +// pkg/dist-src/version.js +var VERSION = "2.1.2"; + +// pkg/dist-src/get-authentication.js +var import_auth_oauth_device = require("@octokit/auth-oauth-device"); +var import_oauth_methods = require("@octokit/oauth-methods"); +async function getAuthentication(state) { + if ("code" in state.strategyOptions) { + const { authentication } = await (0, import_oauth_methods.exchangeWebFlowCode)({ + clientId: state.clientId, + clientSecret: state.clientSecret, + clientType: state.clientType, + onTokenCreated: state.onTokenCreated, + ...state.strategyOptions, + request: state.request + }); + return { + type: "token", + tokenType: "oauth", + ...authentication + }; + } + if ("onVerification" in state.strategyOptions) { + const deviceAuth = (0, import_auth_oauth_device.createOAuthDeviceAuth)({ + clientType: state.clientType, + clientId: state.clientId, + onTokenCreated: state.onTokenCreated, + ...state.strategyOptions, + request: state.request + }); + const authentication = await deviceAuth({ + type: "oauth" + }); + return { + clientSecret: state.clientSecret, + ...authentication + }; + } + if ("token" in state.strategyOptions) { + return { + type: "token", + tokenType: "oauth", + clientId: state.clientId, + clientSecret: state.clientSecret, + clientType: state.clientType, + onTokenCreated: state.onTokenCreated, + ...state.strategyOptions + }; + } + throw new Error("[@octokit/auth-oauth-user] Invalid strategy options"); +} + +// pkg/dist-src/auth.js +var import_oauth_methods2 = require("@octokit/oauth-methods"); +async function auth(state, options = {}) { + var _a, _b; + if (!state.authentication) { + state.authentication = state.clientType === "oauth-app" ? await getAuthentication(state) : await getAuthentication(state); + } + if (state.authentication.invalid) { + throw new Error("[@octokit/auth-oauth-user] Token is invalid"); + } + const currentAuthentication = state.authentication; + if ("expiresAt" in currentAuthentication) { + if (options.type === "refresh" || new Date(currentAuthentication.expiresAt) < /* @__PURE__ */ new Date()) { + const { authentication } = await (0, import_oauth_methods2.refreshToken)({ + clientType: "github-app", + clientId: state.clientId, + clientSecret: state.clientSecret, + refreshToken: currentAuthentication.refreshToken, + request: state.request + }); + state.authentication = { + tokenType: "oauth", + type: "token", + ...authentication + }; + } + } + if (options.type === "refresh") { + if (state.clientType === "oauth-app") { + throw new Error( + "[@octokit/auth-oauth-user] OAuth Apps do not support expiring tokens" + ); + } + if (!currentAuthentication.hasOwnProperty("expiresAt")) { + throw new Error("[@octokit/auth-oauth-user] Refresh token missing"); + } + await ((_a = state.onTokenCreated) == null ? void 0 : _a.call(state, state.authentication, { + type: options.type + })); + } + if (options.type === "check" || options.type === "reset") { + const method = options.type === "check" ? 
import_oauth_methods2.checkToken : import_oauth_methods2.resetToken; + try { + const { authentication } = await method({ + // @ts-expect-error making TS happy would require unnecessary code so no + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: state.authentication.token, + request: state.request + }); + state.authentication = { + tokenType: "oauth", + type: "token", + // @ts-expect-error TBD + ...authentication + }; + if (options.type === "reset") { + await ((_b = state.onTokenCreated) == null ? void 0 : _b.call(state, state.authentication, { + type: options.type + })); + } + return state.authentication; + } catch (error) { + if (error.status === 404) { + error.message = "[@octokit/auth-oauth-user] Token is invalid"; + state.authentication.invalid = true; + } + throw error; + } + } + if (options.type === "delete" || options.type === "deleteAuthorization") { + const method = options.type === "delete" ? import_oauth_methods2.deleteToken : import_oauth_methods2.deleteAuthorization; + try { + await method({ + // @ts-expect-error making TS happy would require unnecessary code so no + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: state.authentication.token, + request: state.request + }); + } catch (error) { + if (error.status !== 404) + throw error; + } + state.authentication.invalid = true; + return state.authentication; + } + return state.authentication; +} + +// pkg/dist-src/hook.js +var import_btoa_lite = __toESM(require("btoa-lite")); + +// pkg/dist-src/requires-basic-auth.js +var ROUTES_REQUIRING_BASIC_AUTH = /\/applications\/[^/]+\/(token|grant)s?/; +function requiresBasicAuth(url) { + return url && ROUTES_REQUIRING_BASIC_AUTH.test(url); +} + +// pkg/dist-src/hook.js +async function hook(state, request, route, parameters = {}) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + if (/\/login\/(oauth\/access_token|device\/code)$/.test(endpoint.url)) { + return request(endpoint); + } + if (requiresBasicAuth(endpoint.url)) { + const credentials = (0, import_btoa_lite.default)(`${state.clientId}:${state.clientSecret}`); + endpoint.headers.authorization = `basic ${credentials}`; + return request(endpoint); + } + const { token } = state.clientType === "oauth-app" ? 
await auth({ ...state, request }) : await auth({ ...state, request }); + endpoint.headers.authorization = "token " + token; + return request(endpoint); +} + +// pkg/dist-src/index.js +function createOAuthUserAuth({ + clientId, + clientSecret, + clientType = "oauth-app", + request = import_request.request.defaults({ + headers: { + "user-agent": `octokit-auth-oauth-app.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + } + }), + onTokenCreated, + ...strategyOptions +}) { + const state = Object.assign({ + clientType, + clientId, + clientSecret, + onTokenCreated, + strategyOptions, + request + }); + return Object.assign(auth.bind(null, state), { + // @ts-expect-error not worth the extra code needed to appease TS + hook: hook.bind(null, state) + }); +} +createOAuthUserAuth.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + createOAuthUserAuth, + requiresBasicAuth +}); diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-node/index.js.map b/dist/node_modules/@octokit/auth-oauth-user/dist-node/index.js.map new file mode 100644 index 0000000..a0d9379 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js", "../dist-src/get-authentication.js", "../dist-src/auth.js", "../dist-src/hook.js", "../dist-src/requires-basic-auth.js"], + "sourcesContent": ["import { getUserAgent } from \"universal-user-agent\";\nimport { request as octokitRequest } from \"@octokit/request\";\nimport { VERSION } from \"./version\";\nimport { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nimport { requiresBasicAuth } from \"./requires-basic-auth\";\nfunction createOAuthUserAuth({\n clientId,\n clientSecret,\n clientType = \"oauth-app\",\n request = octokitRequest.defaults({\n headers: {\n \"user-agent\": `octokit-auth-oauth-app.js/${VERSION} ${getUserAgent()}`\n }\n }),\n onTokenCreated,\n ...strategyOptions\n}) {\n const state = Object.assign({\n clientType,\n clientId,\n clientSecret,\n onTokenCreated,\n strategyOptions,\n request\n });\n return Object.assign(auth.bind(null, state), {\n // @ts-expect-error not worth the extra code needed to appease TS\n hook: hook.bind(null, state)\n });\n}\ncreateOAuthUserAuth.VERSION = VERSION;\nexport {\n createOAuthUserAuth,\n requiresBasicAuth\n};\n", "const VERSION = \"2.1.2\";\nexport {\n VERSION\n};\n", "import { createOAuthDeviceAuth } from \"@octokit/auth-oauth-device\";\nimport { exchangeWebFlowCode } from \"@octokit/oauth-methods\";\nasync function getAuthentication(state) {\n if (\"code\" in state.strategyOptions) {\n const { authentication } = await exchangeWebFlowCode({\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n clientType: state.clientType,\n onTokenCreated: state.onTokenCreated,\n ...state.strategyOptions,\n request: state.request\n });\n return {\n type: \"token\",\n tokenType: \"oauth\",\n ...authentication\n };\n }\n if (\"onVerification\" in state.strategyOptions) {\n const deviceAuth = createOAuthDeviceAuth({\n clientType: state.clientType,\n clientId: state.clientId,\n onTokenCreated: state.onTokenCreated,\n ...state.strategyOptions,\n request: state.request\n });\n const authentication = await deviceAuth({\n type: \"oauth\"\n });\n return {\n clientSecret: state.clientSecret,\n ...authentication\n };\n }\n if (\"token\" in state.strategyOptions) {\n return {\n type: \"token\",\n tokenType: \"oauth\",\n clientId: 
state.clientId,\n clientSecret: state.clientSecret,\n clientType: state.clientType,\n onTokenCreated: state.onTokenCreated,\n ...state.strategyOptions\n };\n }\n throw new Error(\"[@octokit/auth-oauth-user] Invalid strategy options\");\n}\nexport {\n getAuthentication\n};\n", "import { getAuthentication } from \"./get-authentication\";\nimport {\n checkToken,\n deleteAuthorization,\n deleteToken,\n refreshToken,\n resetToken\n} from \"@octokit/oauth-methods\";\nasync function auth(state, options = {}) {\n if (!state.authentication) {\n state.authentication = state.clientType === \"oauth-app\" ? await getAuthentication(state) : await getAuthentication(state);\n }\n if (state.authentication.invalid) {\n throw new Error(\"[@octokit/auth-oauth-user] Token is invalid\");\n }\n const currentAuthentication = state.authentication;\n if (\"expiresAt\" in currentAuthentication) {\n if (options.type === \"refresh\" || new Date(currentAuthentication.expiresAt) < /* @__PURE__ */ new Date()) {\n const { authentication } = await refreshToken({\n clientType: \"github-app\",\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n refreshToken: currentAuthentication.refreshToken,\n request: state.request\n });\n state.authentication = {\n tokenType: \"oauth\",\n type: \"token\",\n ...authentication\n };\n }\n }\n if (options.type === \"refresh\") {\n if (state.clientType === \"oauth-app\") {\n throw new Error(\n \"[@octokit/auth-oauth-user] OAuth Apps do not support expiring tokens\"\n );\n }\n if (!currentAuthentication.hasOwnProperty(\"expiresAt\")) {\n throw new Error(\"[@octokit/auth-oauth-user] Refresh token missing\");\n }\n await state.onTokenCreated?.(state.authentication, {\n type: options.type\n });\n }\n if (options.type === \"check\" || options.type === \"reset\") {\n const method = options.type === \"check\" ? checkToken : resetToken;\n try {\n const { authentication } = await method({\n // @ts-expect-error making TS happy would require unnecessary code so no\n clientType: state.clientType,\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n token: state.authentication.token,\n request: state.request\n });\n state.authentication = {\n tokenType: \"oauth\",\n type: \"token\",\n // @ts-expect-error TBD\n ...authentication\n };\n if (options.type === \"reset\") {\n await state.onTokenCreated?.(state.authentication, {\n type: options.type\n });\n }\n return state.authentication;\n } catch (error) {\n if (error.status === 404) {\n error.message = \"[@octokit/auth-oauth-user] Token is invalid\";\n state.authentication.invalid = true;\n }\n throw error;\n }\n }\n if (options.type === \"delete\" || options.type === \"deleteAuthorization\") {\n const method = options.type === \"delete\" ? 
deleteToken : deleteAuthorization;\n try {\n await method({\n // @ts-expect-error making TS happy would require unnecessary code so no\n clientType: state.clientType,\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n token: state.authentication.token,\n request: state.request\n });\n } catch (error) {\n if (error.status !== 404)\n throw error;\n }\n state.authentication.invalid = true;\n return state.authentication;\n }\n return state.authentication;\n}\nexport {\n auth\n};\n", "import btoa from \"btoa-lite\";\nimport { auth } from \"./auth\";\nimport { requiresBasicAuth } from \"./requires-basic-auth\";\nasync function hook(state, request, route, parameters = {}) {\n const endpoint = request.endpoint.merge(\n route,\n parameters\n );\n if (/\\/login\\/(oauth\\/access_token|device\\/code)$/.test(endpoint.url)) {\n return request(endpoint);\n }\n if (requiresBasicAuth(endpoint.url)) {\n const credentials = btoa(`${state.clientId}:${state.clientSecret}`);\n endpoint.headers.authorization = `basic ${credentials}`;\n return request(endpoint);\n }\n const { token } = state.clientType === \"oauth-app\" ? await auth({ ...state, request }) : await auth({ ...state, request });\n endpoint.headers.authorization = \"token \" + token;\n return request(endpoint);\n}\nexport {\n hook\n};\n", "const ROUTES_REQUIRING_BASIC_AUTH = /\\/applications\\/[^/]+\\/(token|grant)s?/;\nfunction requiresBasicAuth(url) {\n return url && ROUTES_REQUIRING_BASIC_AUTH.test(url);\n}\nexport {\n requiresBasicAuth\n};\n"], + "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAA6B;AAC7B,qBAA0C;;;ACD1C,IAAM,UAAU;;;ACAhB,+BAAsC;AACtC,2BAAoC;AACpC,eAAe,kBAAkB,OAAO;AACtC,MAAI,UAAU,MAAM,iBAAiB;AACnC,UAAM,EAAE,eAAe,IAAI,UAAM,0CAAoB;AAAA,MACnD,UAAU,MAAM;AAAA,MAChB,cAAc,MAAM;AAAA,MACpB,YAAY,MAAM;AAAA,MAClB,gBAAgB,MAAM;AAAA,MACtB,GAAG,MAAM;AAAA,MACT,SAAS,MAAM;AAAA,IACjB,CAAC;AACD,WAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX,GAAG;AAAA,IACL;AAAA,EACF;AACA,MAAI,oBAAoB,MAAM,iBAAiB;AAC7C,UAAM,iBAAa,gDAAsB;AAAA,MACvC,YAAY,MAAM;AAAA,MAClB,UAAU,MAAM;AAAA,MAChB,gBAAgB,MAAM;AAAA,MACtB,GAAG,MAAM;AAAA,MACT,SAAS,MAAM;AAAA,IACjB,CAAC;AACD,UAAM,iBAAiB,MAAM,WAAW;AAAA,MACtC,MAAM;AAAA,IACR,CAAC;AACD,WAAO;AAAA,MACL,cAAc,MAAM;AAAA,MACpB,GAAG;AAAA,IACL;AAAA,EACF;AACA,MAAI,WAAW,MAAM,iBAAiB;AACpC,WAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX,UAAU,MAAM;AAAA,MAChB,cAAc,MAAM;AAAA,MACpB,YAAY,MAAM;AAAA,MAClB,gBAAgB,MAAM;AAAA,MACtB,GAAG,MAAM;AAAA,IACX;AAAA,EACF;AACA,QAAM,IAAI,MAAM,qDAAqD;AACvE;;;AC7CA,IAAAA,wBAMO;AACP,eAAe,KAAK,OAAO,UAAU,CAAC,GAAG;AARzC;AASE,MAAI,CAAC,MAAM,gBAAgB;AACzB,UAAM,iBAAiB,MAAM,eAAe,cAAc,MAAM,kBAAkB,KAAK,IAAI,MAAM,kBAAkB,KAAK;AAAA,EAC1H;AACA,MAAI,MAAM,eAAe,SAAS;AAChC,UAAM,IAAI,MAAM,6CAA6C;AAAA,EAC/D;AACA,QAAM,wBAAwB,MAAM;AACpC,MAAI,eAAe,uBAAuB;AACxC,QAAI,QAAQ,SAAS,aAAa,IAAI,KAAK,sBAAsB,SAAS,IAAoB,oBAAI,KAAK,GAAG;AACxG,YAAM,EAAE,eAAe,IAAI,UAAM,oCAAa;AAAA,QAC5C,YAAY;AAAA,QACZ,UAAU,MAAM;AAAA,QAChB,cAAc,MAAM;AAAA,QACpB,cAAc,sBAAsB;AAAA,QACpC,SAAS,MAAM;AAAA,MACjB,CAAC;AACD,YAAM,iBAAiB;AAAA,QACrB,WAAW;AAAA,QACX,MAAM;AAAA,QACN,GAAG;AAAA,MACL;AAAA,IACF;AAAA,EACF;AACA,MAAI,QAAQ,SAAS,WAAW;AAC9B,QAAI,MAAM,eAAe,aAAa;AACpC,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AACA,QAAI,CAAC,sBAAsB,eAAe,WAAW,GAAG;AACtD,YAAM,IAAI,MAAM,kDAAkD;AAAA,IACpE;AACA,YAAM,WAAM,mBAAN,+BAAuB,MAAM,gBAAgB;AAAA,MACjD,MAAM,QAAQ;AAAA,IAChB;AAAA,EACF;AACA,MAAI,QAAQ,SAAS,WAAW,QAAQ,SAAS,SAAS;AACxD,UAAM,SAAS,QAAQ,SAAS,UAAU,mCAAa;AACvD,QAAI;AACF,YAAM,EAAE,eAAe,IAAI,MAAM,OAAO;AAAA;AAAA,QAEtC,YAAY,MAAM;AAAA,QAClB,UAAU,MAAM;AAAA,QAChB,cAAc,MAAM;AAAA,
QACpB,OAAO,MAAM,eAAe;AAAA,QAC5B,SAAS,MAAM;AAAA,MACjB,CAAC;AACD,YAAM,iBAAiB;AAAA,QACrB,WAAW;AAAA,QACX,MAAM;AAAA;AAAA,QAEN,GAAG;AAAA,MACL;AACA,UAAI,QAAQ,SAAS,SAAS;AAC5B,gBAAM,WAAM,mBAAN,+BAAuB,MAAM,gBAAgB;AAAA,UACjD,MAAM,QAAQ;AAAA,QAChB;AAAA,MACF;AACA,aAAO,MAAM;AAAA,IACf,SAAS,OAAP;AACA,UAAI,MAAM,WAAW,KAAK;AACxB,cAAM,UAAU;AAChB,cAAM,eAAe,UAAU;AAAA,MACjC;AACA,YAAM;AAAA,IACR;AAAA,EACF;AACA,MAAI,QAAQ,SAAS,YAAY,QAAQ,SAAS,uBAAuB;AACvE,UAAM,SAAS,QAAQ,SAAS,WAAW,oCAAc;AACzD,QAAI;AACF,YAAM,OAAO;AAAA;AAAA,QAEX,YAAY,MAAM;AAAA,QAClB,UAAU,MAAM;AAAA,QAChB,cAAc,MAAM;AAAA,QACpB,OAAO,MAAM,eAAe;AAAA,QAC5B,SAAS,MAAM;AAAA,MACjB,CAAC;AAAA,IACH,SAAS,OAAP;AACA,UAAI,MAAM,WAAW;AACnB,cAAM;AAAA,IACV;AACA,UAAM,eAAe,UAAU;AAC/B,WAAO,MAAM;AAAA,EACf;AACA,SAAO,MAAM;AACf;;;AC/FA,uBAAiB;;;ACAjB,IAAM,8BAA8B;AACpC,SAAS,kBAAkB,KAAK;AAC9B,SAAO,OAAO,4BAA4B,KAAK,GAAG;AACpD;;;ADAA,eAAe,KAAK,OAAO,SAAS,OAAO,aAAa,CAAC,GAAG;AAC1D,QAAM,WAAW,QAAQ,SAAS;AAAA,IAChC;AAAA,IACA;AAAA,EACF;AACA,MAAI,+CAA+C,KAAK,SAAS,GAAG,GAAG;AACrE,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACA,MAAI,kBAAkB,SAAS,GAAG,GAAG;AACnC,UAAM,kBAAc,iBAAAC,SAAK,GAAG,MAAM,YAAY,MAAM,cAAc;AAClE,aAAS,QAAQ,gBAAgB,SAAS;AAC1C,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACA,QAAM,EAAE,MAAM,IAAI,MAAM,eAAe,cAAc,MAAM,KAAK,EAAE,GAAG,OAAO,QAAQ,CAAC,IAAI,MAAM,KAAK,EAAE,GAAG,OAAO,QAAQ,CAAC;AACzH,WAAS,QAAQ,gBAAgB,WAAW;AAC5C,SAAO,QAAQ,QAAQ;AACzB;;;AJbA,SAAS,oBAAoB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA,aAAa;AAAA,EACb,UAAU,eAAAC,QAAe,SAAS;AAAA,IAChC,SAAS;AAAA,MACP,cAAc,6BAA6B,eAAW,0CAAa;AAAA,IACrE;AAAA,EACF,CAAC;AAAA,EACD;AAAA,EACA,GAAG;AACL,GAAG;AACD,QAAM,QAAQ,OAAO,OAAO;AAAA,IAC1B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACD,SAAO,OAAO,OAAO,KAAK,KAAK,MAAM,KAAK,GAAG;AAAA;AAAA,IAE3C,MAAM,KAAK,KAAK,MAAM,KAAK;AAAA,EAC7B,CAAC;AACH;AACA,oBAAoB,UAAU;", + "names": ["import_oauth_methods", "btoa", "octokitRequest"] +} diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-src/auth.js b/dist/node_modules/@octokit/auth-oauth-user/dist-src/auth.js new file mode 100644 index 0000000..d4862ca --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-src/auth.js @@ -0,0 +1,99 @@ +import { getAuthentication } from "./get-authentication"; +import { + checkToken, + deleteAuthorization, + deleteToken, + refreshToken, + resetToken +} from "@octokit/oauth-methods"; +async function auth(state, options = {}) { + if (!state.authentication) { + state.authentication = state.clientType === "oauth-app" ? 
await getAuthentication(state) : await getAuthentication(state); + } + if (state.authentication.invalid) { + throw new Error("[@octokit/auth-oauth-user] Token is invalid"); + } + const currentAuthentication = state.authentication; + if ("expiresAt" in currentAuthentication) { + if (options.type === "refresh" || new Date(currentAuthentication.expiresAt) < /* @__PURE__ */ new Date()) { + const { authentication } = await refreshToken({ + clientType: "github-app", + clientId: state.clientId, + clientSecret: state.clientSecret, + refreshToken: currentAuthentication.refreshToken, + request: state.request + }); + state.authentication = { + tokenType: "oauth", + type: "token", + ...authentication + }; + } + } + if (options.type === "refresh") { + if (state.clientType === "oauth-app") { + throw new Error( + "[@octokit/auth-oauth-user] OAuth Apps do not support expiring tokens" + ); + } + if (!currentAuthentication.hasOwnProperty("expiresAt")) { + throw new Error("[@octokit/auth-oauth-user] Refresh token missing"); + } + await state.onTokenCreated?.(state.authentication, { + type: options.type + }); + } + if (options.type === "check" || options.type === "reset") { + const method = options.type === "check" ? checkToken : resetToken; + try { + const { authentication } = await method({ + // @ts-expect-error making TS happy would require unnecessary code so no + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: state.authentication.token, + request: state.request + }); + state.authentication = { + tokenType: "oauth", + type: "token", + // @ts-expect-error TBD + ...authentication + }; + if (options.type === "reset") { + await state.onTokenCreated?.(state.authentication, { + type: options.type + }); + } + return state.authentication; + } catch (error) { + if (error.status === 404) { + error.message = "[@octokit/auth-oauth-user] Token is invalid"; + state.authentication.invalid = true; + } + throw error; + } + } + if (options.type === "delete" || options.type === "deleteAuthorization") { + const method = options.type === "delete" ? 
deleteToken : deleteAuthorization; + try { + await method({ + // @ts-expect-error making TS happy would require unnecessary code so no + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: state.authentication.token, + request: state.request + }); + } catch (error) { + if (error.status !== 404) + throw error; + } + state.authentication.invalid = true; + return state.authentication; + } + return state.authentication; +} +export { + auth +}; diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-src/get-authentication.js b/dist/node_modules/@octokit/auth-oauth-user/dist-src/get-authentication.js new file mode 100644 index 0000000..c7fc2e2 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-src/get-authentication.js @@ -0,0 +1,50 @@ +import { createOAuthDeviceAuth } from "@octokit/auth-oauth-device"; +import { exchangeWebFlowCode } from "@octokit/oauth-methods"; +async function getAuthentication(state) { + if ("code" in state.strategyOptions) { + const { authentication } = await exchangeWebFlowCode({ + clientId: state.clientId, + clientSecret: state.clientSecret, + clientType: state.clientType, + onTokenCreated: state.onTokenCreated, + ...state.strategyOptions, + request: state.request + }); + return { + type: "token", + tokenType: "oauth", + ...authentication + }; + } + if ("onVerification" in state.strategyOptions) { + const deviceAuth = createOAuthDeviceAuth({ + clientType: state.clientType, + clientId: state.clientId, + onTokenCreated: state.onTokenCreated, + ...state.strategyOptions, + request: state.request + }); + const authentication = await deviceAuth({ + type: "oauth" + }); + return { + clientSecret: state.clientSecret, + ...authentication + }; + } + if ("token" in state.strategyOptions) { + return { + type: "token", + tokenType: "oauth", + clientId: state.clientId, + clientSecret: state.clientSecret, + clientType: state.clientType, + onTokenCreated: state.onTokenCreated, + ...state.strategyOptions + }; + } + throw new Error("[@octokit/auth-oauth-user] Invalid strategy options"); +} +export { + getAuthentication +}; diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-src/hook.js b/dist/node_modules/@octokit/auth-oauth-user/dist-src/hook.js new file mode 100644 index 0000000..d0b9af6 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-src/hook.js @@ -0,0 +1,23 @@ +import btoa from "btoa-lite"; +import { auth } from "./auth"; +import { requiresBasicAuth } from "./requires-basic-auth"; +async function hook(state, request, route, parameters = {}) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + if (/\/login\/(oauth\/access_token|device\/code)$/.test(endpoint.url)) { + return request(endpoint); + } + if (requiresBasicAuth(endpoint.url)) { + const credentials = btoa(`${state.clientId}:${state.clientSecret}`); + endpoint.headers.authorization = `basic ${credentials}`; + return request(endpoint); + } + const { token } = state.clientType === "oauth-app" ? 
await auth({ ...state, request }) : await auth({ ...state, request }); + endpoint.headers.authorization = "token " + token; + return request(endpoint); +} +export { + hook +}; diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-src/index.js b/dist/node_modules/@octokit/auth-oauth-user/dist-src/index.js new file mode 100644 index 0000000..37342b5 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-src/index.js @@ -0,0 +1,36 @@ +import { getUserAgent } from "universal-user-agent"; +import { request as octokitRequest } from "@octokit/request"; +import { VERSION } from "./version"; +import { auth } from "./auth"; +import { hook } from "./hook"; +import { requiresBasicAuth } from "./requires-basic-auth"; +function createOAuthUserAuth({ + clientId, + clientSecret, + clientType = "oauth-app", + request = octokitRequest.defaults({ + headers: { + "user-agent": `octokit-auth-oauth-app.js/${VERSION} ${getUserAgent()}` + } + }), + onTokenCreated, + ...strategyOptions +}) { + const state = Object.assign({ + clientType, + clientId, + clientSecret, + onTokenCreated, + strategyOptions, + request + }); + return Object.assign(auth.bind(null, state), { + // @ts-expect-error not worth the extra code needed to appease TS + hook: hook.bind(null, state) + }); +} +createOAuthUserAuth.VERSION = VERSION; +export { + createOAuthUserAuth, + requiresBasicAuth +}; diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-src/requires-basic-auth.js b/dist/node_modules/@octokit/auth-oauth-user/dist-src/requires-basic-auth.js new file mode 100644 index 0000000..98b7871 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-src/requires-basic-auth.js @@ -0,0 +1,7 @@ +const ROUTES_REQUIRING_BASIC_AUTH = /\/applications\/[^/]+\/(token|grant)s?/; +function requiresBasicAuth(url) { + return url && ROUTES_REQUIRING_BASIC_AUTH.test(url); +} +export { + requiresBasicAuth +}; diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-src/version.js b/dist/node_modules/@octokit/auth-oauth-user/dist-src/version.js new file mode 100644 index 0000000..1b843a5 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "2.1.2"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-types/auth.d.ts b/dist/node_modules/@octokit/auth-oauth-user/dist-types/auth.d.ts new file mode 100644 index 0000000..9588419 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-types/auth.d.ts @@ -0,0 +1,3 @@ +import type { OAuthAppAuthOptions, GitHubAppAuthOptions, OAuthAppAuthentication, GitHubAppAuthentication, GitHubAppAuthenticationWithExpiration, OAuthAppState, GitHubAppState } from "./types"; +export declare function auth(state: OAuthAppState, options?: OAuthAppAuthOptions): Promise; +export declare function auth(state: GitHubAppState, options?: GitHubAppAuthOptions): Promise; diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-types/get-authentication.d.ts b/dist/node_modules/@octokit/auth-oauth-user/dist-types/get-authentication.d.ts new file mode 100644 index 0000000..359accb --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-types/get-authentication.d.ts @@ -0,0 +1,3 @@ +import { OAuthAppState, GitHubAppState, OAuthAppAuthentication, GitHubAppAuthentication, GitHubAppAuthenticationWithExpiration } from "./types"; +export declare function getAuthentication(state: OAuthAppState): Promise; +export declare function getAuthentication(state: GitHubAppState): Promise; diff --git 
a/dist/node_modules/@octokit/auth-oauth-user/dist-types/hook.d.ts b/dist/node_modules/@octokit/auth-oauth-user/dist-types/hook.d.ts new file mode 100644 index 0000000..97598aa --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-types/hook.d.ts @@ -0,0 +1,6 @@ +import type { EndpointOptions, OctokitResponse, RequestInterface, RequestParameters, Route } from "@octokit/types"; +import type { OAuthAppState, GitHubAppState } from "./types"; +type AnyResponse = OctokitResponse; +export declare function hook(state: OAuthAppState, request: RequestInterface, route: Route | EndpointOptions, parameters: RequestParameters): Promise; +export declare function hook(state: GitHubAppState, request: RequestInterface, route: Route | EndpointOptions, parameters: RequestParameters): Promise; +export {}; diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-types/index.d.ts b/dist/node_modules/@octokit/auth-oauth-user/dist-types/index.d.ts new file mode 100644 index 0000000..4aaa3be --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-types/index.d.ts @@ -0,0 +1,8 @@ +import type { OAuthAppStrategyOptions, GitHubAppStrategyOptions, OAuthAppAuthInterface, GitHubAppAuthInterface } from "./types"; +export type { OAuthAppStrategyOptionsWebFlow, GitHubAppStrategyOptionsWebFlow, OAuthAppStrategyOptionsDeviceFlow, GitHubAppStrategyOptionsDeviceFlow, OAuthAppStrategyOptionsExistingAuthentication, GitHubAppStrategyOptionsExistingAuthentication, GitHubAppStrategyOptionsExistingAuthenticationWithExpiration, OAuthAppStrategyOptions, GitHubAppStrategyOptions, OAuthAppAuthOptions, GitHubAppAuthOptions, OAuthAppAuthentication, GitHubAppAuthentication, GitHubAppAuthenticationWithExpiration, } from "./types"; +export { requiresBasicAuth } from "./requires-basic-auth"; +export declare function createOAuthUserAuth(options: OAuthAppStrategyOptions): OAuthAppAuthInterface; +export declare function createOAuthUserAuth(options: GitHubAppStrategyOptions): GitHubAppAuthInterface; +export declare namespace createOAuthUserAuth { + var VERSION: string; +} diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-types/requires-basic-auth.d.ts b/dist/node_modules/@octokit/auth-oauth-user/dist-types/requires-basic-auth.d.ts new file mode 100644 index 0000000..c7a90ea --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-types/requires-basic-auth.d.ts @@ -0,0 +1 @@ +export declare function requiresBasicAuth(url: string | undefined): boolean | "" | undefined; diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-types/types.d.ts b/dist/node_modules/@octokit/auth-oauth-user/dist-types/types.d.ts new file mode 100644 index 0000000..61be2ae --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-types/types.d.ts @@ -0,0 +1,114 @@ +import * as OctokitTypes from "@octokit/types"; +import * as DeviceTypes from "@octokit/auth-oauth-device"; +import * as OAuthMethodsTypes from "@octokit/oauth-methods"; +export type ClientType = "oauth-app" | "github-app"; +export type WebFlowOptions = { + code: string; + state?: string; + redirectUrl?: string; +}; +type CommonAppStrategyOptions = { + clientType?: ClientType; + clientId: string; + clientSecret: string; + request?: OctokitTypes.RequestInterface; + onTokenCreated?: OnToketCreatedCallback; +}; +type CommonOAuthAppStrategyOptions = { + clientType?: "oauth-app"; +} & CommonAppStrategyOptions; +type CommonGitHubAppStrategyOptions = { + clientType?: "github-app"; +} & CommonAppStrategyOptions; +type OAuthAppDeviceFlowOptions = { + 
onVerification: DeviceTypes.OAuthAppStrategyOptions["onVerification"]; + scopes?: string[]; +}; +type GitHubDeviceFlowOptions = { + onVerification: DeviceTypes.OAuthAppStrategyOptions["onVerification"]; +}; +type ExistingOAuthAppAuthenticationOptions = { + clientType: "oauth-app"; + token: string; + scopes: string[]; +}; +type ExistingGitHubAppAuthenticationOptions = { + token: string; +}; +type ExistingGitHubAppAuthenticationWithExpirationOptions = { + token: string; + refreshToken: string; + expiresAt: string; + refreshTokenExpiresAt: string; +}; +export type OAuthAppStrategyOptionsWebFlow = CommonOAuthAppStrategyOptions & WebFlowOptions; +export type GitHubAppStrategyOptionsWebFlow = CommonGitHubAppStrategyOptions & WebFlowOptions; +export type OAuthAppStrategyOptionsDeviceFlow = CommonOAuthAppStrategyOptions & OAuthAppDeviceFlowOptions; +export type GitHubAppStrategyOptionsDeviceFlow = CommonGitHubAppStrategyOptions & GitHubDeviceFlowOptions; +export type OAuthAppStrategyOptionsExistingAuthentication = CommonOAuthAppStrategyOptions & ExistingOAuthAppAuthenticationOptions; +export type GitHubAppStrategyOptionsExistingAuthentication = CommonGitHubAppStrategyOptions & ExistingGitHubAppAuthenticationOptions; +export type GitHubAppStrategyOptionsExistingAuthenticationWithExpiration = CommonGitHubAppStrategyOptions & ExistingGitHubAppAuthenticationWithExpirationOptions; +export type OAuthAppStrategyOptions = OAuthAppStrategyOptionsWebFlow | OAuthAppStrategyOptionsDeviceFlow | OAuthAppStrategyOptionsExistingAuthentication; +export type GitHubAppStrategyOptions = GitHubAppStrategyOptionsWebFlow | GitHubAppStrategyOptionsDeviceFlow | GitHubAppStrategyOptionsExistingAuthentication | GitHubAppStrategyOptionsExistingAuthenticationWithExpiration; +export type OAuthAppAuthentication = { + tokenType: "oauth"; + type: "token"; +} & OAuthMethodsTypes.OAuthAppAuthentication; +export type GitHubAppAuthentication = { + tokenType: "oauth"; + type: "token"; +} & OAuthMethodsTypes.GitHubAppAuthentication; +export type GitHubAppAuthenticationWithExpiration = { + tokenType: "oauth"; + type: "token"; +} & OAuthMethodsTypes.GitHubAppAuthenticationWithExpiration; +export interface OAuthAppAuthInterface { + (options?: OAuthAppAuthOptions): Promise; + hook(request: OctokitTypes.RequestInterface, route: OctokitTypes.Route | OctokitTypes.EndpointOptions, parameters?: OctokitTypes.RequestParameters): Promise>; +} +export interface GitHubAppAuthInterface { + (options?: GitHubAppAuthOptions): Promise; + hook(request: OctokitTypes.RequestInterface, route: OctokitTypes.Route | OctokitTypes.EndpointOptions, parameters?: OctokitTypes.RequestParameters): Promise>; +} +type OnToketCreatedCallback = (authentication: OAuthAppAuthentication | GitHubAppAuthentication | GitHubAppAuthenticationWithExpiration | undefined, options: OAuthAppAuthOptions | GitHubAppAuthOptions) => void | Promise; +export type OAuthAppState = { + clientId: string; + clientSecret: string; + clientType: "oauth-app"; + request: OctokitTypes.RequestInterface; + onTokenCreated?: CommonAppStrategyOptions["onTokenCreated"]; + strategyOptions: WebFlowOptions | OAuthAppDeviceFlowOptions | ExistingOAuthAppAuthenticationOptions; + authentication?: OAuthAppAuthentication & { + invalid?: true; + }; +}; +type GitHubAppStateAuthentication = GitHubAppAuthentication & { + invalid?: true; +}; +type GitHubAppStateAuthenticationWIthExpiration = GitHubAppAuthenticationWithExpiration & { + invalid?: true; +}; +export type GitHubAppState = { + clientId: string; + clientSecret: 
string; + clientType: "github-app"; + request: OctokitTypes.RequestInterface; + onTokenCreated?: CommonAppStrategyOptions["onTokenCreated"]; + strategyOptions: WebFlowOptions | GitHubDeviceFlowOptions | ExistingGitHubAppAuthenticationOptions | ExistingGitHubAppAuthenticationWithExpirationOptions; + authentication?: GitHubAppStateAuthentication | GitHubAppStateAuthenticationWIthExpiration; +}; +export type State = OAuthAppState | GitHubAppState; +export type WebFlowState = { + clientId: string; + clientSecret: string; + clientType: ClientType; + request: OctokitTypes.RequestInterface; + strategyOptions: WebFlowOptions; +}; +export type OAuthAppAuthOptions = { + type?: "get" | "check" | "reset" | "delete" | "deleteAuthorization"; +}; +export type GitHubAppAuthOptions = { + type?: "get" | "check" | "reset" | "refresh" | "delete" | "deleteAuthorization"; +}; +export {}; diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-types/version.d.ts b/dist/node_modules/@octokit/auth-oauth-user/dist-types/version.d.ts new file mode 100644 index 0000000..c22a7c7 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "2.1.2"; diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-web/index.js b/dist/node_modules/@octokit/auth-oauth-user/dist-web/index.js new file mode 100644 index 0000000..5df401c --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-web/index.js @@ -0,0 +1,212 @@ +// pkg/dist-src/index.js +import { getUserAgent } from "universal-user-agent"; +import { request as octokitRequest } from "@octokit/request"; + +// pkg/dist-src/version.js +var VERSION = "2.1.2"; + +// pkg/dist-src/get-authentication.js +import { createOAuthDeviceAuth } from "@octokit/auth-oauth-device"; +import { exchangeWebFlowCode } from "@octokit/oauth-methods"; +async function getAuthentication(state) { + if ("code" in state.strategyOptions) { + const { authentication } = await exchangeWebFlowCode({ + clientId: state.clientId, + clientSecret: state.clientSecret, + clientType: state.clientType, + onTokenCreated: state.onTokenCreated, + ...state.strategyOptions, + request: state.request + }); + return { + type: "token", + tokenType: "oauth", + ...authentication + }; + } + if ("onVerification" in state.strategyOptions) { + const deviceAuth = createOAuthDeviceAuth({ + clientType: state.clientType, + clientId: state.clientId, + onTokenCreated: state.onTokenCreated, + ...state.strategyOptions, + request: state.request + }); + const authentication = await deviceAuth({ + type: "oauth" + }); + return { + clientSecret: state.clientSecret, + ...authentication + }; + } + if ("token" in state.strategyOptions) { + return { + type: "token", + tokenType: "oauth", + clientId: state.clientId, + clientSecret: state.clientSecret, + clientType: state.clientType, + onTokenCreated: state.onTokenCreated, + ...state.strategyOptions + }; + } + throw new Error("[@octokit/auth-oauth-user] Invalid strategy options"); +} + +// pkg/dist-src/auth.js +import { + checkToken, + deleteAuthorization, + deleteToken, + refreshToken, + resetToken +} from "@octokit/oauth-methods"; +async function auth(state, options = {}) { + if (!state.authentication) { + state.authentication = state.clientType === "oauth-app" ? 
await getAuthentication(state) : await getAuthentication(state); + } + if (state.authentication.invalid) { + throw new Error("[@octokit/auth-oauth-user] Token is invalid"); + } + const currentAuthentication = state.authentication; + if ("expiresAt" in currentAuthentication) { + if (options.type === "refresh" || new Date(currentAuthentication.expiresAt) < /* @__PURE__ */ new Date()) { + const { authentication } = await refreshToken({ + clientType: "github-app", + clientId: state.clientId, + clientSecret: state.clientSecret, + refreshToken: currentAuthentication.refreshToken, + request: state.request + }); + state.authentication = { + tokenType: "oauth", + type: "token", + ...authentication + }; + } + } + if (options.type === "refresh") { + if (state.clientType === "oauth-app") { + throw new Error( + "[@octokit/auth-oauth-user] OAuth Apps do not support expiring tokens" + ); + } + if (!currentAuthentication.hasOwnProperty("expiresAt")) { + throw new Error("[@octokit/auth-oauth-user] Refresh token missing"); + } + await state.onTokenCreated?.(state.authentication, { + type: options.type + }); + } + if (options.type === "check" || options.type === "reset") { + const method = options.type === "check" ? checkToken : resetToken; + try { + const { authentication } = await method({ + // @ts-expect-error making TS happy would require unnecessary code so no + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: state.authentication.token, + request: state.request + }); + state.authentication = { + tokenType: "oauth", + type: "token", + // @ts-expect-error TBD + ...authentication + }; + if (options.type === "reset") { + await state.onTokenCreated?.(state.authentication, { + type: options.type + }); + } + return state.authentication; + } catch (error) { + if (error.status === 404) { + error.message = "[@octokit/auth-oauth-user] Token is invalid"; + state.authentication.invalid = true; + } + throw error; + } + } + if (options.type === "delete" || options.type === "deleteAuthorization") { + const method = options.type === "delete" ? deleteToken : deleteAuthorization; + try { + await method({ + // @ts-expect-error making TS happy would require unnecessary code so no + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: state.authentication.token, + request: state.request + }); + } catch (error) { + if (error.status !== 404) + throw error; + } + state.authentication.invalid = true; + return state.authentication; + } + return state.authentication; +} + +// pkg/dist-src/hook.js +import btoa from "btoa-lite"; + +// pkg/dist-src/requires-basic-auth.js +var ROUTES_REQUIRING_BASIC_AUTH = /\/applications\/[^/]+\/(token|grant)s?/; +function requiresBasicAuth(url) { + return url && ROUTES_REQUIRING_BASIC_AUTH.test(url); +} + +// pkg/dist-src/hook.js +async function hook(state, request, route, parameters = {}) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + if (/\/login\/(oauth\/access_token|device\/code)$/.test(endpoint.url)) { + return request(endpoint); + } + if (requiresBasicAuth(endpoint.url)) { + const credentials = btoa(`${state.clientId}:${state.clientSecret}`); + endpoint.headers.authorization = `basic ${credentials}`; + return request(endpoint); + } + const { token } = state.clientType === "oauth-app" ? 
await auth({ ...state, request }) : await auth({ ...state, request }); + endpoint.headers.authorization = "token " + token; + return request(endpoint); +} + +// pkg/dist-src/index.js +function createOAuthUserAuth({ + clientId, + clientSecret, + clientType = "oauth-app", + request = octokitRequest.defaults({ + headers: { + "user-agent": `octokit-auth-oauth-app.js/${VERSION} ${getUserAgent()}` + } + }), + onTokenCreated, + ...strategyOptions +}) { + const state = Object.assign({ + clientType, + clientId, + clientSecret, + onTokenCreated, + strategyOptions, + request + }); + return Object.assign(auth.bind(null, state), { + // @ts-expect-error not worth the extra code needed to appease TS + hook: hook.bind(null, state) + }); +} +createOAuthUserAuth.VERSION = VERSION; +export { + createOAuthUserAuth, + requiresBasicAuth +}; diff --git a/dist/node_modules/@octokit/auth-oauth-user/dist-web/index.js.map b/dist/node_modules/@octokit/auth-oauth-user/dist-web/index.js.map new file mode 100644 index 0000000..a81b9a1 --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js", "../dist-src/get-authentication.js", "../dist-src/auth.js", "../dist-src/hook.js", "../dist-src/requires-basic-auth.js"], + "sourcesContent": ["import { getUserAgent } from \"universal-user-agent\";\nimport { request as octokitRequest } from \"@octokit/request\";\nimport { VERSION } from \"./version\";\nimport { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nimport { requiresBasicAuth } from \"./requires-basic-auth\";\nfunction createOAuthUserAuth({\n clientId,\n clientSecret,\n clientType = \"oauth-app\",\n request = octokitRequest.defaults({\n headers: {\n \"user-agent\": `octokit-auth-oauth-app.js/${VERSION} ${getUserAgent()}`\n }\n }),\n onTokenCreated,\n ...strategyOptions\n}) {\n const state = Object.assign({\n clientType,\n clientId,\n clientSecret,\n onTokenCreated,\n strategyOptions,\n request\n });\n return Object.assign(auth.bind(null, state), {\n // @ts-expect-error not worth the extra code needed to appease TS\n hook: hook.bind(null, state)\n });\n}\ncreateOAuthUserAuth.VERSION = VERSION;\nexport {\n createOAuthUserAuth,\n requiresBasicAuth\n};\n", "const VERSION = \"2.1.2\";\nexport {\n VERSION\n};\n", "import { createOAuthDeviceAuth } from \"@octokit/auth-oauth-device\";\nimport { exchangeWebFlowCode } from \"@octokit/oauth-methods\";\nasync function getAuthentication(state) {\n if (\"code\" in state.strategyOptions) {\n const { authentication } = await exchangeWebFlowCode({\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n clientType: state.clientType,\n onTokenCreated: state.onTokenCreated,\n ...state.strategyOptions,\n request: state.request\n });\n return {\n type: \"token\",\n tokenType: \"oauth\",\n ...authentication\n };\n }\n if (\"onVerification\" in state.strategyOptions) {\n const deviceAuth = createOAuthDeviceAuth({\n clientType: state.clientType,\n clientId: state.clientId,\n onTokenCreated: state.onTokenCreated,\n ...state.strategyOptions,\n request: state.request\n });\n const authentication = await deviceAuth({\n type: \"oauth\"\n });\n return {\n clientSecret: state.clientSecret,\n ...authentication\n };\n }\n if (\"token\" in state.strategyOptions) {\n return {\n type: \"token\",\n tokenType: \"oauth\",\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n clientType: state.clientType,\n onTokenCreated: state.onTokenCreated,\n 
...state.strategyOptions\n };\n }\n throw new Error(\"[@octokit/auth-oauth-user] Invalid strategy options\");\n}\nexport {\n getAuthentication\n};\n", "import { getAuthentication } from \"./get-authentication\";\nimport {\n checkToken,\n deleteAuthorization,\n deleteToken,\n refreshToken,\n resetToken\n} from \"@octokit/oauth-methods\";\nasync function auth(state, options = {}) {\n if (!state.authentication) {\n state.authentication = state.clientType === \"oauth-app\" ? await getAuthentication(state) : await getAuthentication(state);\n }\n if (state.authentication.invalid) {\n throw new Error(\"[@octokit/auth-oauth-user] Token is invalid\");\n }\n const currentAuthentication = state.authentication;\n if (\"expiresAt\" in currentAuthentication) {\n if (options.type === \"refresh\" || new Date(currentAuthentication.expiresAt) < /* @__PURE__ */ new Date()) {\n const { authentication } = await refreshToken({\n clientType: \"github-app\",\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n refreshToken: currentAuthentication.refreshToken,\n request: state.request\n });\n state.authentication = {\n tokenType: \"oauth\",\n type: \"token\",\n ...authentication\n };\n }\n }\n if (options.type === \"refresh\") {\n if (state.clientType === \"oauth-app\") {\n throw new Error(\n \"[@octokit/auth-oauth-user] OAuth Apps do not support expiring tokens\"\n );\n }\n if (!currentAuthentication.hasOwnProperty(\"expiresAt\")) {\n throw new Error(\"[@octokit/auth-oauth-user] Refresh token missing\");\n }\n await state.onTokenCreated?.(state.authentication, {\n type: options.type\n });\n }\n if (options.type === \"check\" || options.type === \"reset\") {\n const method = options.type === \"check\" ? checkToken : resetToken;\n try {\n const { authentication } = await method({\n // @ts-expect-error making TS happy would require unnecessary code so no\n clientType: state.clientType,\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n token: state.authentication.token,\n request: state.request\n });\n state.authentication = {\n tokenType: \"oauth\",\n type: \"token\",\n // @ts-expect-error TBD\n ...authentication\n };\n if (options.type === \"reset\") {\n await state.onTokenCreated?.(state.authentication, {\n type: options.type\n });\n }\n return state.authentication;\n } catch (error) {\n if (error.status === 404) {\n error.message = \"[@octokit/auth-oauth-user] Token is invalid\";\n state.authentication.invalid = true;\n }\n throw error;\n }\n }\n if (options.type === \"delete\" || options.type === \"deleteAuthorization\") {\n const method = options.type === \"delete\" ? 
deleteToken : deleteAuthorization;\n try {\n await method({\n // @ts-expect-error making TS happy would require unnecessary code so no\n clientType: state.clientType,\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n token: state.authentication.token,\n request: state.request\n });\n } catch (error) {\n if (error.status !== 404)\n throw error;\n }\n state.authentication.invalid = true;\n return state.authentication;\n }\n return state.authentication;\n}\nexport {\n auth\n};\n", "import btoa from \"btoa-lite\";\nimport { auth } from \"./auth\";\nimport { requiresBasicAuth } from \"./requires-basic-auth\";\nasync function hook(state, request, route, parameters = {}) {\n const endpoint = request.endpoint.merge(\n route,\n parameters\n );\n if (/\\/login\\/(oauth\\/access_token|device\\/code)$/.test(endpoint.url)) {\n return request(endpoint);\n }\n if (requiresBasicAuth(endpoint.url)) {\n const credentials = btoa(`${state.clientId}:${state.clientSecret}`);\n endpoint.headers.authorization = `basic ${credentials}`;\n return request(endpoint);\n }\n const { token } = state.clientType === \"oauth-app\" ? await auth({ ...state, request }) : await auth({ ...state, request });\n endpoint.headers.authorization = \"token \" + token;\n return request(endpoint);\n}\nexport {\n hook\n};\n", "const ROUTES_REQUIRING_BASIC_AUTH = /\\/applications\\/[^/]+\\/(token|grant)s?/;\nfunction requiresBasicAuth(url) {\n return url && ROUTES_REQUIRING_BASIC_AUTH.test(url);\n}\nexport {\n requiresBasicAuth\n};\n"], + "mappings": ";AAAA,SAAS,oBAAoB;AAC7B,SAAS,WAAW,sBAAsB;;;ACD1C,IAAM,UAAU;;;ACAhB,SAAS,6BAA6B;AACtC,SAAS,2BAA2B;AACpC,eAAe,kBAAkB,OAAO;AACtC,MAAI,UAAU,MAAM,iBAAiB;AACnC,UAAM,EAAE,eAAe,IAAI,MAAM,oBAAoB;AAAA,MACnD,UAAU,MAAM;AAAA,MAChB,cAAc,MAAM;AAAA,MACpB,YAAY,MAAM;AAAA,MAClB,gBAAgB,MAAM;AAAA,MACtB,GAAG,MAAM;AAAA,MACT,SAAS,MAAM;AAAA,IACjB,CAAC;AACD,WAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX,GAAG;AAAA,IACL;AAAA,EACF;AACA,MAAI,oBAAoB,MAAM,iBAAiB;AAC7C,UAAM,aAAa,sBAAsB;AAAA,MACvC,YAAY,MAAM;AAAA,MAClB,UAAU,MAAM;AAAA,MAChB,gBAAgB,MAAM;AAAA,MACtB,GAAG,MAAM;AAAA,MACT,SAAS,MAAM;AAAA,IACjB,CAAC;AACD,UAAM,iBAAiB,MAAM,WAAW;AAAA,MACtC,MAAM;AAAA,IACR,CAAC;AACD,WAAO;AAAA,MACL,cAAc,MAAM;AAAA,MACpB,GAAG;AAAA,IACL;AAAA,EACF;AACA,MAAI,WAAW,MAAM,iBAAiB;AACpC,WAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX,UAAU,MAAM;AAAA,MAChB,cAAc,MAAM;AAAA,MACpB,YAAY,MAAM;AAAA,MAClB,gBAAgB,MAAM;AAAA,MACtB,GAAG,MAAM;AAAA,IACX;AAAA,EACF;AACA,QAAM,IAAI,MAAM,qDAAqD;AACvE;;;AC7CA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,eAAe,KAAK,OAAO,UAAU,CAAC,GAAG;AACvC,MAAI,CAAC,MAAM,gBAAgB;AACzB,UAAM,iBAAiB,MAAM,eAAe,cAAc,MAAM,kBAAkB,KAAK,IAAI,MAAM,kBAAkB,KAAK;AAAA,EAC1H;AACA,MAAI,MAAM,eAAe,SAAS;AAChC,UAAM,IAAI,MAAM,6CAA6C;AAAA,EAC/D;AACA,QAAM,wBAAwB,MAAM;AACpC,MAAI,eAAe,uBAAuB;AACxC,QAAI,QAAQ,SAAS,aAAa,IAAI,KAAK,sBAAsB,SAAS,IAAoB,oBAAI,KAAK,GAAG;AACxG,YAAM,EAAE,eAAe,IAAI,MAAM,aAAa;AAAA,QAC5C,YAAY;AAAA,QACZ,UAAU,MAAM;AAAA,QAChB,cAAc,MAAM;AAAA,QACpB,cAAc,sBAAsB;AAAA,QACpC,SAAS,MAAM;AAAA,MACjB,CAAC;AACD,YAAM,iBAAiB;AAAA,QACrB,WAAW;AAAA,QACX,MAAM;AAAA,QACN,GAAG;AAAA,MACL;AAAA,IACF;AAAA,EACF;AACA,MAAI,QAAQ,SAAS,WAAW;AAC9B,QAAI,MAAM,eAAe,aAAa;AACpC,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AACA,QAAI,CAAC,sBAAsB,eAAe,WAAW,GAAG;AACtD,YAAM,IAAI,MAAM,kDAAkD;AAAA,IACpE;AACA,UAAM,MAAM,iBAAiB,MAAM,gBAAgB;AAAA,MACjD,MAAM,QAAQ;AAAA,IAChB,CAAC;AAAA,EACH;AACA,MAAI,QAAQ,SAAS,WAAW,QAAQ,SAAS,SAAS;AACxD,UAAM,SAAS,QAAQ,SAAS,UAAU,aAAa;AACvD,QAAI;AACF,YAAM,EAAE,eAAe,IAAI,MAAM,OAAO;AAAA;AAAA,QAEtC,YAAY,MAAM;AAAA,QAClB,UAAU,MAAM;AAAA,QAChB,cAAc,MAAM;
AAAA,QACpB,OAAO,MAAM,eAAe;AAAA,QAC5B,SAAS,MAAM;AAAA,MACjB,CAAC;AACD,YAAM,iBAAiB;AAAA,QACrB,WAAW;AAAA,QACX,MAAM;AAAA;AAAA,QAEN,GAAG;AAAA,MACL;AACA,UAAI,QAAQ,SAAS,SAAS;AAC5B,cAAM,MAAM,iBAAiB,MAAM,gBAAgB;AAAA,UACjD,MAAM,QAAQ;AAAA,QAChB,CAAC;AAAA,MACH;AACA,aAAO,MAAM;AAAA,IACf,SAAS,OAAP;AACA,UAAI,MAAM,WAAW,KAAK;AACxB,cAAM,UAAU;AAChB,cAAM,eAAe,UAAU;AAAA,MACjC;AACA,YAAM;AAAA,IACR;AAAA,EACF;AACA,MAAI,QAAQ,SAAS,YAAY,QAAQ,SAAS,uBAAuB;AACvE,UAAM,SAAS,QAAQ,SAAS,WAAW,cAAc;AACzD,QAAI;AACF,YAAM,OAAO;AAAA;AAAA,QAEX,YAAY,MAAM;AAAA,QAClB,UAAU,MAAM;AAAA,QAChB,cAAc,MAAM;AAAA,QACpB,OAAO,MAAM,eAAe;AAAA,QAC5B,SAAS,MAAM;AAAA,MACjB,CAAC;AAAA,IACH,SAAS,OAAP;AACA,UAAI,MAAM,WAAW;AACnB,cAAM;AAAA,IACV;AACA,UAAM,eAAe,UAAU;AAC/B,WAAO,MAAM;AAAA,EACf;AACA,SAAO,MAAM;AACf;;;AC/FA,OAAO,UAAU;;;ACAjB,IAAM,8BAA8B;AACpC,SAAS,kBAAkB,KAAK;AAC9B,SAAO,OAAO,4BAA4B,KAAK,GAAG;AACpD;;;ADAA,eAAe,KAAK,OAAO,SAAS,OAAO,aAAa,CAAC,GAAG;AAC1D,QAAM,WAAW,QAAQ,SAAS;AAAA,IAChC;AAAA,IACA;AAAA,EACF;AACA,MAAI,+CAA+C,KAAK,SAAS,GAAG,GAAG;AACrE,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACA,MAAI,kBAAkB,SAAS,GAAG,GAAG;AACnC,UAAM,cAAc,KAAK,GAAG,MAAM,YAAY,MAAM,cAAc;AAClE,aAAS,QAAQ,gBAAgB,SAAS;AAC1C,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACA,QAAM,EAAE,MAAM,IAAI,MAAM,eAAe,cAAc,MAAM,KAAK,EAAE,GAAG,OAAO,QAAQ,CAAC,IAAI,MAAM,KAAK,EAAE,GAAG,OAAO,QAAQ,CAAC;AACzH,WAAS,QAAQ,gBAAgB,WAAW;AAC5C,SAAO,QAAQ,QAAQ;AACzB;;;AJbA,SAAS,oBAAoB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA,aAAa;AAAA,EACb,UAAU,eAAe,SAAS;AAAA,IAChC,SAAS;AAAA,MACP,cAAc,6BAA6B,WAAW,aAAa;AAAA,IACrE;AAAA,EACF,CAAC;AAAA,EACD;AAAA,EACA,GAAG;AACL,GAAG;AACD,QAAM,QAAQ,OAAO,OAAO;AAAA,IAC1B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACD,SAAO,OAAO,OAAO,KAAK,KAAK,MAAM,KAAK,GAAG;AAAA;AAAA,IAE3C,MAAM,KAAK,KAAK,MAAM,KAAK;AAAA,EAC7B,CAAC;AACH;AACA,oBAAoB,UAAU;", + "names": [] +} diff --git a/dist/node_modules/@octokit/auth-oauth-user/package.json b/dist/node_modules/@octokit/auth-oauth-user/package.json new file mode 100644 index 0000000..aae35df --- /dev/null +++ b/dist/node_modules/@octokit/auth-oauth-user/package.json @@ -0,0 +1,55 @@ +{ + "name": "@octokit/auth-oauth-user", + "publishConfig": { + "access": "public" + }, + "version": "2.1.2", + "description": "Octokit authentication strategy for OAuth clients", + "repository": "https://github.com/octokit/auth-oauth-user.js", + "keywords": [ + "github", + "api", + "sdk", + "toolkit" + ], + "author": "Gregor Martynus (https://dev.to/gr2m)", + "license": "MIT", + "dependencies": { + "@octokit/auth-oauth-device": "^4.0.0", + "@octokit/oauth-methods": "^2.0.0", + "@octokit/request": "^6.0.0", + "@octokit/types": "^9.0.0", + "btoa-lite": "^1.0.0", + "universal-user-agent": "^6.0.0" + }, + "devDependencies": { + "@octokit/core": "^4.0.0", + "@octokit/tsconfig": "^2.0.0", + "@types/btoa-lite": "^1.0.0", + "@types/jest": "^29.0.0", + "@types/node": "^18.0.0", + "esbuild": "^0.17.19", + "fetch-mock": "^9.11.0", + "glob": "^10.2.7", + "jest": "^29.0.0", + "mockdate": "^3.0.4", + "prettier": "2.8.8", + "semantic-release": "^21.0.0", + "semantic-release-plugin-update-version-in-files": "^1.1.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "peerDependencies": {}, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "browser": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "module": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/auth-token/LICENSE b/dist/node_modules/@octokit/auth-token/LICENSE new file mode 100644 index 0000000..ef2c18e --- /dev/null +++ 
b/dist/node_modules/@octokit/auth-token/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/dist/node_modules/@octokit/auth-token/README.md b/dist/node_modules/@octokit/auth-token/README.md new file mode 100644 index 0000000..2e48b73 --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/README.md @@ -0,0 +1,285 @@ +# auth-token.js + +> GitHub API token authentication for browsers and Node.js + +[![@latest](https://img.shields.io/npm/v/@octokit/auth-token.svg)](https://www.npmjs.com/package/@octokit/auth-token) +[![Build Status](https://github.com/octokit/auth-token.js/workflows/Test/badge.svg)](https://github.com/octokit/auth-token.js/actions?query=workflow%3ATest) + +`@octokit/auth-token` is the simplest of [GitHub’s authentication strategies](https://github.com/octokit/auth.js). + +It is useful if you want to support multiple authentication strategies, as it’s API is compatible with its sibling packages for [basic](https://github.com/octokit/auth-basic.js), [GitHub App](https://github.com/octokit/auth-app.js) and [OAuth app](https://github.com/octokit/auth.js) authentication. + + + +- [Usage](#usage) +- [`createTokenAuth(token) options`](#createtokenauthtoken-options) +- [`auth()`](#auth) +- [Authentication object](#authentication-object) +- [`auth.hook(request, route, options)` or `auth.hook(request, options)`](#authhookrequest-route-options-or-authhookrequest-options) +- [Find more information](#find-more-information) + - [Find out what scopes are enabled for oauth tokens](#find-out-what-scopes-are-enabled-for-oauth-tokens) + - [Find out if token is a personal access token or if it belongs to an OAuth app](#find-out-if-token-is-a-personal-access-token-or-if-it-belongs-to-an-oauth-app) + - [Find out what permissions are enabled for a repository](#find-out-what-permissions-are-enabled-for-a-repository) + - [Use token for git operations](#use-token-for-git-operations) +- [License](#license) + + + +## Usage + + + + + + +
+Browsers
+
+
+Load `@octokit/auth-token` directly from [cdn.skypack.dev](https://cdn.skypack.dev)
+
+```html
+<script type="module">
+import { createTokenAuth } from "https://cdn.skypack.dev/@octokit/auth-token";
+</script>
+```
+
+
+Node + + +Install with npm install @octokit/auth-token + +```js +const { createTokenAuth } = require("@octokit/auth-token"); +// or: import { createTokenAuth } from "@octokit/auth-token"; +``` + +
+ +```js +const auth = createTokenAuth("ghp_PersonalAccessToken01245678900000000"); +const authentication = await auth(); +// { +// type: 'token', +// token: 'ghp_PersonalAccessToken01245678900000000', +// tokenType: 'oauth' +// } +``` + +## `createTokenAuth(token) options` + +The `createTokenAuth` method accepts a single argument of type string, which is the token. The passed token can be one of the following: + +- [Personal access token](https://help.github.com/en/articles/creating-a-personal-access-token-for-the-command-line) +- [OAuth access token](https://developer.github.com/apps/building-oauth-apps/authorizing-oauth-apps/) +- [GITHUB_TOKEN provided to GitHub Actions](https://developer.github.com/actions/creating-github-actions/accessing-the-runtime-environment/#environment-variables) +- Installation access token ([server-to-server](https://developer.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation)) +- User authentication for installation ([user-to-server](https://docs.github.com/en/developers/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps)) + +Examples + +```js +// Personal access token or OAuth access token +createTokenAuth("ghp_PersonalAccessToken01245678900000000"); +// { +// type: 'token', +// token: 'ghp_PersonalAccessToken01245678900000000', +// tokenType: 'oauth' +// } + +// Installation access token or GitHub Action token +createTokenAuth("ghs_InstallallationOrActionToken00000000"); +// { +// type: 'token', +// token: 'ghs_InstallallationOrActionToken00000000', +// tokenType: 'installation' +// } + +// Installation access token or GitHub Action token +createTokenAuth("ghu_InstallationUserToServer000000000000"); +// { +// type: 'token', +// token: 'ghu_InstallationUserToServer000000000000', +// tokenType: 'user-to-server' +// } +``` + +## `auth()` + +The `auth()` method has no options. It returns a promise which resolves with the the authentication object. + +## Authentication object + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| ---- | ---- | ----------- |
+| `type` | `string` | `"token"` |
+| `token` | `string` | The provided token. |
+| `tokenType` | `string` | Can be either `"oauth"` for personal access tokens and OAuth tokens, `"installation"` for installation access tokens (includes `GITHUB_TOKEN` provided to GitHub Actions), `"app"` for a GitHub App JSON Web Token, or `"user-to-server"` for a user authentication token through an app installation. |
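+
+For illustration, a minimal sketch along these lines (assuming `@octokit/auth-token` is installed locally) prints the `tokenType` detected for each of the supported token formats:
+
+```js
+const { createTokenAuth } = require("@octokit/auth-token");
+
+(async () => {
+  // The token prefix determines the detected tokenType (see the table above)
+  for (const token of [
+    "ghp_PersonalAccessToken01245678900000000", // personal access / OAuth token
+    "ghs_InstallallationOrActionToken00000000", // installation access token
+    "ghu_InstallationUserToServer000000000000", // user-to-server token
+  ]) {
+    const auth = createTokenAuth(token);
+    const { tokenType } = await auth();
+    console.log(token.slice(0, 4), "->", tokenType); // ghp_ -> oauth, ghs_ -> installation, ghu_ -> user-to-server
+  }
+})();
+```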
+ +## `auth.hook(request, route, options)` or `auth.hook(request, options)` + +`auth.hook()` hooks directly into the request life cycle. It authenticates the request using the provided token. + +The `request` option is an instance of [`@octokit/request`](https://github.com/octokit/request.js#readme). The `route`/`options` parameters are the same as for the [`request()` method](https://github.com/octokit/request.js#request). + +`auth.hook()` can be called directly to send an authenticated request + +```js +const { data: authorizations } = await auth.hook( + request, + "GET /authorizations" +); +``` + +Or it can be passed as option to [`request()`](https://github.com/octokit/request.js#request). + +```js +const requestWithAuth = request.defaults({ + request: { + hook: auth.hook, + }, +}); + +const { data: authorizations } = await requestWithAuth("GET /authorizations"); +``` + +## Find more information + +`auth()` does not send any requests, it only transforms the provided token string into an authentication object. + +Here is a list of things you can do to retrieve further information + +### Find out what scopes are enabled for oauth tokens + +Note that this does not work for installations. There is no way to retrieve permissions based on an installation access tokens. + +```js +const TOKEN = "ghp_PersonalAccessToken01245678900000000"; + +const auth = createTokenAuth(TOKEN); +const authentication = await auth(); + +const response = await request("HEAD /"); +const scopes = response.headers["x-oauth-scopes"].split(/,\s+/); + +if (scopes.length) { + console.log( + `"${TOKEN}" has ${scopes.length} scopes enabled: ${scopes.join(", ")}` + ); +} else { + console.log(`"${TOKEN}" has no scopes enabled`); +} +``` + +### Find out if token is a personal access token or if it belongs to an OAuth app + +```js +const TOKEN = "ghp_PersonalAccessToken01245678900000000"; + +const auth = createTokenAuth(TOKEN); +const authentication = await auth(); + +const response = await request("HEAD /"); +const clientId = response.headers["x-oauth-client-id"]; + +if (clientId) { + console.log( + `"${token}" is an OAuth token, its app’s client_id is ${clientId}.` + ); +} else { + console.log(`"${token}" is a personal access token`); +} +``` + +### Find out what permissions are enabled for a repository + +Note that the `permissions` key is not set when authenticated using an installation access token. + +```js +const TOKEN = "ghp_PersonalAccessToken01245678900000000"; + +const auth = createTokenAuth(TOKEN); +const authentication = await auth(); + +const response = await request("GET /repos/{owner}/{repo}", { + owner: "octocat", + repo: "hello-world", +}); + +console.log(response.data.permissions); +// { +// admin: true, +// push: true, +// pull: true +// } +``` + +### Use token for git operations + +Both OAuth and installation access tokens can be used for git operations. However, when using with an installation, [the token must be prefixed with `x-access-token`](https://developer.github.com/apps/building-github-apps/authenticating-with-github-apps/#http-based-git-access-by-an-installation). + +This example is using the [`execa`](https://github.com/sindresorhus/execa) package to run a `git push` command. + +```js +const TOKEN = "ghp_PersonalAccessToken01245678900000000"; + +const auth = createTokenAuth(TOKEN); +const { token, tokenType } = await auth(); +const tokenWithPrefix = + tokenType === "installation" ? 
`x-access-token:${token}` : token; + +const repositoryUrl = `https://${tokenWithPrefix}@github.com/octocat/hello-world.git`; + +const { stdout } = await execa("git", ["push", repositoryUrl]); +console.log(stdout); +``` + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/auth-token/dist-node/index.js b/dist/node_modules/@octokit/auth-token/dist-node/index.js new file mode 100644 index 0000000..93ea7c8 --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/dist-node/index.js @@ -0,0 +1,79 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createTokenAuth: () => createTokenAuth +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; +} + +// pkg/dist-src/with-authorization-prefix.js +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; +} + +// pkg/dist-src/hook.js +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + createTokenAuth +}); diff --git a/dist/node_modules/@octokit/auth-token/dist-node/index.js.map b/dist/node_modules/@octokit/auth-token/dist-node/index.js.map new file mode 100644 index 0000000..deff6da --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/auth.js", "../dist-src/with-authorization-prefix.js", "../dist-src/hook.js"], + "sourcesContent": ["import { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nconst createTokenAuth = function createTokenAuth2(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n if (typeof token !== \"string\") {\n throw new Error(\n \"[@octokit/auth-token] Token passed to createTokenAuth is not a string\"\n );\n }\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\nexport {\n createTokenAuth\n};\n", "const REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nconst REGEX_IS_INSTALLATION = /^ghs_/;\nconst REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? 
\"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token,\n tokenType\n };\n}\nexport {\n auth\n};\n", "function withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n return `token ${token}`;\n}\nexport {\n withAuthorizationPrefix\n};\n", "import { withAuthorizationPrefix } from \"./with-authorization-prefix\";\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(\n route,\n parameters\n );\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\nexport {\n hook\n};\n"], + "mappings": ";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAM,+BAA+B;AACrC,IAAM,wBAAwB;AAC9B,IAAM,0BAA0B;AAChC,eAAe,KAAK,OAAO;AACzB,QAAM,QAAQ,MAAM,MAAM,IAAI,EAAE,WAAW;AAC3C,QAAM,iBAAiB,6BAA6B,KAAK,KAAK,KAAK,sBAAsB,KAAK,KAAK;AACnG,QAAM,iBAAiB,wBAAwB,KAAK,KAAK;AACzD,QAAM,YAAY,QAAQ,QAAQ,iBAAiB,iBAAiB,iBAAiB,mBAAmB;AACxG,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF;AACF;;;ACbA,SAAS,wBAAwB,OAAO;AACtC,MAAI,MAAM,MAAM,IAAI,EAAE,WAAW,GAAG;AAClC,WAAO,UAAU;AAAA,EACnB;AACA,SAAO,SAAS;AAClB;;;ACJA,eAAe,KAAK,OAAO,SAAS,OAAO,YAAY;AACrD,QAAM,WAAW,QAAQ,SAAS;AAAA,IAChC;AAAA,IACA;AAAA,EACF;AACA,WAAS,QAAQ,gBAAgB,wBAAwB,KAAK;AAC9D,SAAO,QAAQ,QAAQ;AACzB;;;AHNA,IAAM,kBAAkB,SAAS,iBAAiB,OAAO;AACvD,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,MAAM,0DAA0D;AAAA,EAC5E;AACA,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,UAAQ,MAAM,QAAQ,sBAAsB,EAAE;AAC9C,SAAO,OAAO,OAAO,KAAK,KAAK,MAAM,KAAK,GAAG;AAAA,IAC3C,MAAM,KAAK,KAAK,MAAM,KAAK;AAAA,EAC7B,CAAC;AACH;", + "names": [] +} diff --git a/dist/node_modules/@octokit/auth-token/dist-src/auth.js b/dist/node_modules/@octokit/auth-token/dist-src/auth.js new file mode 100644 index 0000000..d3efbf5 --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/dist-src/auth.js @@ -0,0 +1,17 @@ +const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +const REGEX_IS_INSTALLATION = /^ghs_/; +const REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; +} +export { + auth +}; diff --git a/dist/node_modules/@octokit/auth-token/dist-src/hook.js b/dist/node_modules/@octokit/auth-token/dist-src/hook.js new file mode 100644 index 0000000..371a041 --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/dist-src/hook.js @@ -0,0 +1,12 @@ +import { withAuthorizationPrefix } from "./with-authorization-prefix"; +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} +export { + hook +}; diff --git a/dist/node_modules/@octokit/auth-token/dist-src/index.js b/dist/node_modules/@octokit/auth-token/dist-src/index.js new file mode 100644 index 0000000..445cfaa --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/dist-src/index.js @@ -0,0 +1,19 @@ +import { auth } from "./auth"; +import { hook } from "./hook"; +const createTokenAuth = function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; +export { + createTokenAuth +}; diff --git a/dist/node_modules/@octokit/auth-token/dist-src/with-authorization-prefix.js b/dist/node_modules/@octokit/auth-token/dist-src/with-authorization-prefix.js new file mode 100644 index 0000000..02a4bb5 --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/dist-src/with-authorization-prefix.js @@ -0,0 +1,9 @@ +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; +} +export { + withAuthorizationPrefix +}; diff --git a/dist/node_modules/@octokit/auth-token/dist-types/auth.d.ts b/dist/node_modules/@octokit/auth-token/dist-types/auth.d.ts new file mode 100644 index 0000000..941d8c2 --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/dist-types/auth.d.ts @@ -0,0 +1,2 @@ +import type { Token, Authentication } from "./types"; +export declare function auth(token: Token): Promise; diff --git a/dist/node_modules/@octokit/auth-token/dist-types/hook.d.ts b/dist/node_modules/@octokit/auth-token/dist-types/hook.d.ts new file mode 100644 index 0000000..ad457dc --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/dist-types/hook.d.ts @@ -0,0 +1,2 @@ +import type { AnyResponse, EndpointOptions, RequestInterface, RequestParameters, Route, Token } from "./types"; +export declare function hook(token: Token, request: RequestInterface, route: Route | EndpointOptions, parameters?: RequestParameters): Promise; diff --git a/dist/node_modules/@octokit/auth-token/dist-types/index.d.ts b/dist/node_modules/@octokit/auth-token/dist-types/index.d.ts new file mode 100644 index 0000000..115bb7f --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/dist-types/index.d.ts @@ -0,0 +1,7 @@ +import type { StrategyInterface, Token, Authentication } from "./types"; +export type Types = { + StrategyOptions: Token; + AuthOptions: never; + Authentication: Authentication; +}; +export declare const createTokenAuth: StrategyInterface; diff --git a/dist/node_modules/@octokit/auth-token/dist-types/types.d.ts b/dist/node_modules/@octokit/auth-token/dist-types/types.d.ts 
new file mode 100644 index 0000000..4267fa1 --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/dist-types/types.d.ts @@ -0,0 +1,33 @@ +import * as OctokitTypes from "@octokit/types"; +export type AnyResponse = OctokitTypes.OctokitResponse; +export type StrategyInterface = OctokitTypes.StrategyInterface<[ + Token +], [ +], Authentication>; +export type EndpointDefaults = OctokitTypes.EndpointDefaults; +export type EndpointOptions = OctokitTypes.EndpointOptions; +export type RequestParameters = OctokitTypes.RequestParameters; +export type RequestInterface = OctokitTypes.RequestInterface; +export type Route = OctokitTypes.Route; +export type Token = string; +export type OAuthTokenAuthentication = { + type: "token"; + tokenType: "oauth"; + token: Token; +}; +export type InstallationTokenAuthentication = { + type: "token"; + tokenType: "installation"; + token: Token; +}; +export type AppAuthentication = { + type: "token"; + tokenType: "app"; + token: Token; +}; +export type UserToServerAuthentication = { + type: "token"; + tokenType: "user-to-server"; + token: Token; +}; +export type Authentication = OAuthTokenAuthentication | InstallationTokenAuthentication | AppAuthentication | UserToServerAuthentication; diff --git a/dist/node_modules/@octokit/auth-token/dist-types/with-authorization-prefix.d.ts b/dist/node_modules/@octokit/auth-token/dist-types/with-authorization-prefix.d.ts new file mode 100644 index 0000000..2e52c31 --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/dist-types/with-authorization-prefix.d.ts @@ -0,0 +1,6 @@ +/** + * Prefix token for usage in the Authorization header + * + * @param token OAuth token or JSON Web Token + */ +export declare function withAuthorizationPrefix(token: string): string; diff --git a/dist/node_modules/@octokit/auth-token/dist-web/index.js b/dist/node_modules/@octokit/auth-token/dist-web/index.js new file mode 100644 index 0000000..b6f7249 --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/dist-web/index.js @@ -0,0 +1,52 @@ +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; +} + +// pkg/dist-src/with-authorization-prefix.js +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; +} + +// pkg/dist-src/hook.js +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; +export { + createTokenAuth +}; diff --git a/dist/node_modules/@octokit/auth-token/dist-web/index.js.map b/dist/node_modules/@octokit/auth-token/dist-web/index.js.map new file mode 100644 index 0000000..b84a1ea --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/auth.js", "../dist-src/with-authorization-prefix.js", "../dist-src/hook.js", "../dist-src/index.js"], + "sourcesContent": ["const REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nconst REGEX_IS_INSTALLATION = /^ghs_/;\nconst REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? 
\"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token,\n tokenType\n };\n}\nexport {\n auth\n};\n", "function withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n return `token ${token}`;\n}\nexport {\n withAuthorizationPrefix\n};\n", "import { withAuthorizationPrefix } from \"./with-authorization-prefix\";\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(\n route,\n parameters\n );\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\nexport {\n hook\n};\n", "import { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nconst createTokenAuth = function createTokenAuth2(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n if (typeof token !== \"string\") {\n throw new Error(\n \"[@octokit/auth-token] Token passed to createTokenAuth is not a string\"\n );\n }\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\nexport {\n createTokenAuth\n};\n"], + "mappings": ";AAAA,IAAM,+BAA+B;AACrC,IAAM,wBAAwB;AAC9B,IAAM,0BAA0B;AAChC,eAAe,KAAK,OAAO;AACzB,QAAM,QAAQ,MAAM,MAAM,IAAI,EAAE,WAAW;AAC3C,QAAM,iBAAiB,6BAA6B,KAAK,KAAK,KAAK,sBAAsB,KAAK,KAAK;AACnG,QAAM,iBAAiB,wBAAwB,KAAK,KAAK;AACzD,QAAM,YAAY,QAAQ,QAAQ,iBAAiB,iBAAiB,iBAAiB,mBAAmB;AACxG,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF;AACF;;;ACbA,SAAS,wBAAwB,OAAO;AACtC,MAAI,MAAM,MAAM,IAAI,EAAE,WAAW,GAAG;AAClC,WAAO,UAAU;AAAA,EACnB;AACA,SAAO,SAAS;AAClB;;;ACJA,eAAe,KAAK,OAAO,SAAS,OAAO,YAAY;AACrD,QAAM,WAAW,QAAQ,SAAS;AAAA,IAChC;AAAA,IACA;AAAA,EACF;AACA,WAAS,QAAQ,gBAAgB,wBAAwB,KAAK;AAC9D,SAAO,QAAQ,QAAQ;AACzB;;;ACNA,IAAM,kBAAkB,SAAS,iBAAiB,OAAO;AACvD,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,MAAM,0DAA0D;AAAA,EAC5E;AACA,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,UAAQ,MAAM,QAAQ,sBAAsB,EAAE;AAC9C,SAAO,OAAO,OAAO,KAAK,KAAK,MAAM,KAAK,GAAG;AAAA,IAC3C,MAAM,KAAK,KAAK,MAAM,KAAK;AAAA,EAC7B,CAAC;AACH;", + "names": [] +} diff --git a/dist/node_modules/@octokit/auth-token/package.json b/dist/node_modules/@octokit/auth-token/package.json new file mode 100644 index 0000000..ef384c5 --- /dev/null +++ b/dist/node_modules/@octokit/auth-token/package.json @@ -0,0 +1,44 @@ +{ + "name": "@octokit/auth-token", + "publishConfig": { + "access": "public" + }, + "version": "3.0.4", + "description": "GitHub API token authentication for browsers and Node.js", + "repository": "github:octokit/auth-token.js", + "keywords": [ + "github", + "octokit", + "authentication", + "api" + ], + "author": "Gregor Martynus (https://github.com/gr2m)", + "license": "MIT", + "devDependencies": { + "@octokit/request": "^6.0.0", + "@octokit/tsconfig": "^2.0.0", + "@octokit/types": "^9.2.3", + "@types/fetch-mock": "^7.3.1", + "@types/jest": "^29.0.0", + "esbuild": "^0.17.19", + "fetch-mock": "^9.0.0", + "glob": "^10.2.6", + "jest": "^29.0.0", + "prettier": "2.8.8", + "semantic-release": "^21.0.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "browser": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "module": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/auth-unauthenticated/LICENSE b/dist/node_modules/@octokit/auth-unauthenticated/LICENSE new file mode 100644 index 0000000..07d2730 --- /dev/null +++ 
b/dist/node_modules/@octokit/auth-unauthenticated/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2020 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/dist/node_modules/@octokit/auth-unauthenticated/README.md b/dist/node_modules/@octokit/auth-unauthenticated/README.md new file mode 100644 index 0000000..e98c06b --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/README.md @@ -0,0 +1,143 @@ +# auth-unauthenticated.js + +> strategy for explicitly unauthenticated Octokit instances + +[![@latest](https://img.shields.io/npm/v/@octokit/auth-unauthenticated.svg)](https://www.npmjs.com/package/@octokit/auth-unauthenticated) +[![Build Status](https://github.com/octokit/auth-unauthenticated.js/workflows/Test/badge.svg)](https://github.com/octokit/auth-unauthenticated.js/actions?query=workflow%3ATest) + +`@octokit/auth-unauthenticated` is useful for cases when an Octokit constructor has a default authentication strategy, but you require an explicitly unauthenticated instance. + +One use cases is when building a GitHub App using [`@octokit/auth-app`](https://github.com/octokit/auth-app.js) and handling webhooks using [`@octokit/webhooks`](https://github.com/octokit/webhooks.js). While all webhook events provide an installation ID in its payload, in case of the `installation.deleted` event, the app can no longer create an installation access token, because the app's access has been revoked. + + + +- [Usage](#usage) +- [`createUnauthenticatedAuth() options`](#createunauthenticatedauth-options) +- [`auth()`](#auth) +- [Authentication object](#authentication-object) +- [`auth.hook(request, route, options)` or `auth.hook(request, options)`](#authhookrequest-route-options-or-authhookrequest-options) +- [License](#license) + + + +## Usage + + + + + + +
+Browsers
+
+
+Load `@octokit/auth-unauthenticated` directly from [cdn.pika.dev](https://cdn.pika.dev)
+
+```html
+<script type="module">
+import { createUnauthenticatedAuth } from "https://cdn.pika.dev/@octokit/auth-unauthenticated";
+</script>
+```
+
+
+Node + + +Install with npm install @octokit/auth-unauthenticated + +```js +const { createUnauthenticatedAuth } = require("@octokit/auth-unauthenticated"); +// or: import { createUnauthenticatedAuth } from "@octokit/auth-unauthenticated"; +``` + +
+ +```js +const auth = createUnauthenticatedAuth({ + reason: + "Handling an installation.deleted event (The app's access has been revoked)", +}); +const authentication = await auth(); +// { +// type: 'unauthenticated', +// reason: 'Handling an installation.deleted event (The app's access has been revoked)' +// } +``` + +## `createUnauthenticatedAuth() options` + +The `createUnauthenticatedAuth` method requires an `options.reason` argument which will be used when returning an error due to a lack of authentication or when logging a warning in case of a `404` error. + +Examples + +```js +createUnauthenticatedAuth({ + reason: + "Handling an installation.deleted event: The app's access has been revoked from @octokit (id: 12345)", +}); +``` + +## `auth()` + +The `auth()` method accepts any options, but it doesn't do anything with it. That makes it a great drop-in replacement for any other authentication strategy. + +## Authentication object + + + + + + + + + + + + + + + + +
+| name | type | description |
+| ---- | ---- | ----------- |
+| `type` | `string` | `"unauthenticated"` |
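+
+As a rough sketch (assuming `@octokit/core` is installed alongside this package), the strategy can also be handed to an Octokit constructor via its `authStrategy` option, so that every request is routed through the `auth.hook()` described below:
+
+```js
+const { Octokit } = require("@octokit/core");
+const { createUnauthenticatedAuth } = require("@octokit/auth-unauthenticated");
+
+const octokit = new Octokit({
+  authStrategy: createUnauthenticatedAuth,
+  // passed as options to createUnauthenticatedAuth()
+  auth: {
+    reason:
+      "Handling an installation.deleted event (The app's access has been revoked)",
+  },
+});
+
+// Requests are sent unauthenticated; 4xx errors are amended with the reason above
+octokit
+  .request("GET /")
+  .then(({ data }) => console.log(data))
+  .catch(console.error);
+```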
+ +## `auth.hook(request, route, options)` or `auth.hook(request, options)` + +`auth.hook()` hooks directly into the request life cycle. If a mutating request is attempted to be sent (`DELETE`, `PATCH`, `POST`, or `PUT`), the request is failed immediately and returning an error that contains the reason passed to `createUnauthenticatedAuth({ reason })`. + +If a request fails with a `404` or due to hitting a rate/abuse limit, the returned error is amended that it might be caused due to a lack of authentication and will include the reason passed to `createUnauthenticatedAuth({ reason })`. + +The `request` option is an instance of [`@octokit/request`](https://github.com/octokit/request.js#readme). The `route`/`options` parameters are the same as for the [`request()` method](https://github.com/octokit/request.js#request). + +`auth.hook()` can be called directly to send an authenticated request + +```js +const { data } = await auth.hook(request, "GET /"); +``` + +Or it can be passed as option to [`request()`](https://github.com/octokit/request.js#request). + +```js +const requestWithAuth = request.defaults({ + request: { + hook: auth.hook, + }, +}); + +const { data } = await requestWithAuth("GET /"); +``` + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-node/index.js b/dist/node_modules/@octokit/auth-unauthenticated/dist-node/index.js new file mode 100644 index 0000000..8b14823 --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-node/index.js @@ -0,0 +1,102 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createUnauthenticatedAuth: () => createUnauthenticatedAuth +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/auth.js +async function auth(reason) { + return { + type: "unauthenticated", + reason + }; +} + +// pkg/dist-src/is-rate-limit-error.js +function isRateLimitError(error) { + if (error.status !== 403) { + return false; + } + if (!error.response) { + return false; + } + return error.response.headers["x-ratelimit-remaining"] === "0"; +} + +// pkg/dist-src/is-abuse-limit-error.js +var REGEX_ABUSE_LIMIT_MESSAGE = /\babuse\b/i; +function isAbuseLimitError(error) { + if (error.status !== 403) { + return false; + } + return REGEX_ABUSE_LIMIT_MESSAGE.test(error.message); +} + +// pkg/dist-src/hook.js +async function hook(reason, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + return request(endpoint).catch((error) => { + if (error.status === 404) { + error.message = `Not found. May be due to lack of authentication. Reason: ${reason}`; + throw error; + } + if (isRateLimitError(error)) { + error.message = `API rate limit exceeded. 
This maybe caused by the lack of authentication. Reason: ${reason}`; + throw error; + } + if (isAbuseLimitError(error)) { + error.message = `You have triggered an abuse detection mechanism. This maybe caused by the lack of authentication. Reason: ${reason}`; + throw error; + } + if (error.status === 401) { + error.message = `Unauthorized. "${endpoint.method} ${endpoint.url}" failed most likely due to lack of authentication. Reason: ${reason}`; + throw error; + } + if (error.status >= 400 && error.status < 500) { + error.message = error.message.replace( + /\.?$/, + `. May be caused by lack of authentication (${reason}).` + ); + } + throw error; + }); +} + +// pkg/dist-src/index.js +var createUnauthenticatedAuth = function createUnauthenticatedAuth2(options) { + if (!options || !options.reason) { + throw new Error( + "[@octokit/auth-unauthenticated] No reason passed to createUnauthenticatedAuth" + ); + } + return Object.assign(auth.bind(null, options.reason), { + hook: hook.bind(null, options.reason) + }); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + createUnauthenticatedAuth +}); diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-node/index.js.map b/dist/node_modules/@octokit/auth-unauthenticated/dist-node/index.js.map new file mode 100644 index 0000000..bc67881 --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/auth.js", "../dist-src/is-rate-limit-error.js", "../dist-src/is-abuse-limit-error.js", "../dist-src/hook.js"], + "sourcesContent": ["import { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nconst createUnauthenticatedAuth = function createUnauthenticatedAuth2(options) {\n if (!options || !options.reason) {\n throw new Error(\n \"[@octokit/auth-unauthenticated] No reason passed to createUnauthenticatedAuth\"\n );\n }\n return Object.assign(auth.bind(null, options.reason), {\n hook: hook.bind(null, options.reason)\n });\n};\nexport {\n createUnauthenticatedAuth\n};\n", "async function auth(reason) {\n return {\n type: \"unauthenticated\",\n reason\n };\n}\nexport {\n auth\n};\n", "function isRateLimitError(error) {\n if (error.status !== 403) {\n return false;\n }\n if (!error.response) {\n return false;\n }\n return error.response.headers[\"x-ratelimit-remaining\"] === \"0\";\n}\nexport {\n isRateLimitError\n};\n", "const REGEX_ABUSE_LIMIT_MESSAGE = /\\babuse\\b/i;\nfunction isAbuseLimitError(error) {\n if (error.status !== 403) {\n return false;\n }\n return REGEX_ABUSE_LIMIT_MESSAGE.test(error.message);\n}\nexport {\n isAbuseLimitError\n};\n", "import { isRateLimitError } from \"./is-rate-limit-error\";\nimport { isAbuseLimitError } from \"./is-abuse-limit-error\";\nasync function hook(reason, request, route, parameters) {\n const endpoint = request.endpoint.merge(\n route,\n parameters\n );\n return request(endpoint).catch((error) => {\n if (error.status === 404) {\n error.message = `Not found. May be due to lack of authentication. Reason: ${reason}`;\n throw error;\n }\n if (isRateLimitError(error)) {\n error.message = `API rate limit exceeded. This maybe caused by the lack of authentication. Reason: ${reason}`;\n throw error;\n }\n if (isAbuseLimitError(error)) {\n error.message = `You have triggered an abuse detection mechanism. This maybe caused by the lack of authentication. Reason: ${reason}`;\n throw error;\n }\n if (error.status === 401) {\n error.message = `Unauthorized. 
\"${endpoint.method} ${endpoint.url}\" failed most likely due to lack of authentication. Reason: ${reason}`;\n throw error;\n }\n if (error.status >= 400 && error.status < 500) {\n error.message = error.message.replace(\n /\\.?$/,\n `. May be caused by lack of authentication (${reason}).`\n );\n }\n throw error;\n });\n}\nexport {\n hook\n};\n"], + "mappings": ";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,eAAe,KAAK,QAAQ;AAC1B,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,EACF;AACF;;;ACLA,SAAS,iBAAiB,OAAO;AAC/B,MAAI,MAAM,WAAW,KAAK;AACxB,WAAO;AAAA,EACT;AACA,MAAI,CAAC,MAAM,UAAU;AACnB,WAAO;AAAA,EACT;AACA,SAAO,MAAM,SAAS,QAAQ,uBAAuB,MAAM;AAC7D;;;ACRA,IAAM,4BAA4B;AAClC,SAAS,kBAAkB,OAAO;AAChC,MAAI,MAAM,WAAW,KAAK;AACxB,WAAO;AAAA,EACT;AACA,SAAO,0BAA0B,KAAK,MAAM,OAAO;AACrD;;;ACJA,eAAe,KAAK,QAAQ,SAAS,OAAO,YAAY;AACtD,QAAM,WAAW,QAAQ,SAAS;AAAA,IAChC;AAAA,IACA;AAAA,EACF;AACA,SAAO,QAAQ,QAAQ,EAAE,MAAM,CAAC,UAAU;AACxC,QAAI,MAAM,WAAW,KAAK;AACxB,YAAM,UAAU,4DAA4D;AAC5E,YAAM;AAAA,IACR;AACA,QAAI,iBAAiB,KAAK,GAAG;AAC3B,YAAM,UAAU,qFAAqF;AACrG,YAAM;AAAA,IACR;AACA,QAAI,kBAAkB,KAAK,GAAG;AAC5B,YAAM,UAAU,6GAA6G;AAC7H,YAAM;AAAA,IACR;AACA,QAAI,MAAM,WAAW,KAAK;AACxB,YAAM,UAAU,kBAAkB,SAAS,UAAU,SAAS,kEAAkE;AAChI,YAAM;AAAA,IACR;AACA,QAAI,MAAM,UAAU,OAAO,MAAM,SAAS,KAAK;AAC7C,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B;AAAA,QACA,8CAA8C;AAAA,MAChD;AAAA,IACF;AACA,UAAM;AAAA,EACR,CAAC;AACH;;;AJ9BA,IAAM,4BAA4B,SAAS,2BAA2B,SAAS;AAC7E,MAAI,CAAC,WAAW,CAAC,QAAQ,QAAQ;AAC/B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,SAAO,OAAO,OAAO,KAAK,KAAK,MAAM,QAAQ,MAAM,GAAG;AAAA,IACpD,MAAM,KAAK,KAAK,MAAM,QAAQ,MAAM;AAAA,EACtC,CAAC;AACH;", + "names": [] +} diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-src/auth.js b/dist/node_modules/@octokit/auth-unauthenticated/dist-src/auth.js new file mode 100644 index 0000000..1cd418e --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-src/auth.js @@ -0,0 +1,9 @@ +async function auth(reason) { + return { + type: "unauthenticated", + reason + }; +} +export { + auth +}; diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-src/hook.js b/dist/node_modules/@octokit/auth-unauthenticated/dist-src/hook.js new file mode 100644 index 0000000..974af90 --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-src/hook.js @@ -0,0 +1,36 @@ +import { isRateLimitError } from "./is-rate-limit-error"; +import { isAbuseLimitError } from "./is-abuse-limit-error"; +async function hook(reason, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + return request(endpoint).catch((error) => { + if (error.status === 404) { + error.message = `Not found. May be due to lack of authentication. Reason: ${reason}`; + throw error; + } + if (isRateLimitError(error)) { + error.message = `API rate limit exceeded. This maybe caused by the lack of authentication. Reason: ${reason}`; + throw error; + } + if (isAbuseLimitError(error)) { + error.message = `You have triggered an abuse detection mechanism. This maybe caused by the lack of authentication. Reason: ${reason}`; + throw error; + } + if (error.status === 401) { + error.message = `Unauthorized. "${endpoint.method} ${endpoint.url}" failed most likely due to lack of authentication. Reason: ${reason}`; + throw error; + } + if (error.status >= 400 && error.status < 500) { + error.message = error.message.replace( + /\.?$/, + `. 
May be caused by lack of authentication (${reason}).` + ); + } + throw error; + }); +} +export { + hook +}; diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-src/index.js b/dist/node_modules/@octokit/auth-unauthenticated/dist-src/index.js new file mode 100644 index 0000000..5cf3853 --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-src/index.js @@ -0,0 +1,15 @@ +import { auth } from "./auth"; +import { hook } from "./hook"; +const createUnauthenticatedAuth = function createUnauthenticatedAuth2(options) { + if (!options || !options.reason) { + throw new Error( + "[@octokit/auth-unauthenticated] No reason passed to createUnauthenticatedAuth" + ); + } + return Object.assign(auth.bind(null, options.reason), { + hook: hook.bind(null, options.reason) + }); +}; +export { + createUnauthenticatedAuth +}; diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-src/is-abuse-limit-error.js b/dist/node_modules/@octokit/auth-unauthenticated/dist-src/is-abuse-limit-error.js new file mode 100644 index 0000000..eea3169 --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-src/is-abuse-limit-error.js @@ -0,0 +1,10 @@ +const REGEX_ABUSE_LIMIT_MESSAGE = /\babuse\b/i; +function isAbuseLimitError(error) { + if (error.status !== 403) { + return false; + } + return REGEX_ABUSE_LIMIT_MESSAGE.test(error.message); +} +export { + isAbuseLimitError +}; diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-src/is-rate-limit-error.js b/dist/node_modules/@octokit/auth-unauthenticated/dist-src/is-rate-limit-error.js new file mode 100644 index 0000000..477aafc --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-src/is-rate-limit-error.js @@ -0,0 +1,12 @@ +function isRateLimitError(error) { + if (error.status !== 403) { + return false; + } + if (!error.response) { + return false; + } + return error.response.headers["x-ratelimit-remaining"] === "0"; +} +export { + isRateLimitError +}; diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-types/auth.d.ts b/dist/node_modules/@octokit/auth-unauthenticated/dist-types/auth.d.ts new file mode 100644 index 0000000..ef9f628 --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-types/auth.d.ts @@ -0,0 +1,2 @@ +import { Authentication } from "./types"; +export declare function auth(reason: string): Promise; diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-types/hook.d.ts b/dist/node_modules/@octokit/auth-unauthenticated/dist-types/hook.d.ts new file mode 100644 index 0000000..f5f2f45 --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-types/hook.d.ts @@ -0,0 +1,2 @@ +import { AnyResponse, EndpointOptions, RequestInterface, RequestParameters, Route } from "./types"; +export declare function hook(reason: string, request: RequestInterface, route: Route | EndpointOptions, parameters?: RequestParameters): Promise; diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-types/index.d.ts b/dist/node_modules/@octokit/auth-unauthenticated/dist-types/index.d.ts new file mode 100644 index 0000000..bb4372d --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-types/index.d.ts @@ -0,0 +1,7 @@ +import { StrategyInterface, Options, Authentication } from "./types"; +export type Types = { + StrategyOptions: Options; + AuthOptions: never; + Authentication: Authentication; +}; +export declare const createUnauthenticatedAuth: StrategyInterface; diff --git 
a/dist/node_modules/@octokit/auth-unauthenticated/dist-types/is-abuse-limit-error.d.ts b/dist/node_modules/@octokit/auth-unauthenticated/dist-types/is-abuse-limit-error.d.ts new file mode 100644 index 0000000..3687db9 --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-types/is-abuse-limit-error.d.ts @@ -0,0 +1,2 @@ +import { RequestError } from "@octokit/request-error"; +export declare function isAbuseLimitError(error: RequestError): boolean; diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-types/is-rate-limit-error.d.ts b/dist/node_modules/@octokit/auth-unauthenticated/dist-types/is-rate-limit-error.d.ts new file mode 100644 index 0000000..2e3dd47 --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-types/is-rate-limit-error.d.ts @@ -0,0 +1,2 @@ +import { RequestError } from "@octokit/request-error"; +export declare function isRateLimitError(error: RequestError): boolean; diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-types/types.d.ts b/dist/node_modules/@octokit/auth-unauthenticated/dist-types/types.d.ts new file mode 100644 index 0000000..ecc123d --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-types/types.d.ts @@ -0,0 +1,18 @@ +import * as OctokitTypes from "@octokit/types"; +export type AnyResponse = OctokitTypes.OctokitResponse; +export type StrategyInterface = OctokitTypes.StrategyInterface<[ + Options +], [ +], Authentication>; +export type EndpointDefaults = OctokitTypes.EndpointDefaults; +export type EndpointOptions = OctokitTypes.EndpointOptions; +export type RequestParameters = OctokitTypes.RequestParameters; +export type RequestInterface = OctokitTypes.RequestInterface; +export type Route = OctokitTypes.Route; +export type Options = { + reason: string; +}; +export type Authentication = { + type: "unauthenticated"; + reason: string; +}; diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-web/index.js b/dist/node_modules/@octokit/auth-unauthenticated/dist-web/index.js new file mode 100644 index 0000000..1caee7e --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-web/index.js @@ -0,0 +1,75 @@ +// pkg/dist-src/auth.js +async function auth(reason) { + return { + type: "unauthenticated", + reason + }; +} + +// pkg/dist-src/is-rate-limit-error.js +function isRateLimitError(error) { + if (error.status !== 403) { + return false; + } + if (!error.response) { + return false; + } + return error.response.headers["x-ratelimit-remaining"] === "0"; +} + +// pkg/dist-src/is-abuse-limit-error.js +var REGEX_ABUSE_LIMIT_MESSAGE = /\babuse\b/i; +function isAbuseLimitError(error) { + if (error.status !== 403) { + return false; + } + return REGEX_ABUSE_LIMIT_MESSAGE.test(error.message); +} + +// pkg/dist-src/hook.js +async function hook(reason, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + return request(endpoint).catch((error) => { + if (error.status === 404) { + error.message = `Not found. May be due to lack of authentication. Reason: ${reason}`; + throw error; + } + if (isRateLimitError(error)) { + error.message = `API rate limit exceeded. This maybe caused by the lack of authentication. Reason: ${reason}`; + throw error; + } + if (isAbuseLimitError(error)) { + error.message = `You have triggered an abuse detection mechanism. This maybe caused by the lack of authentication. Reason: ${reason}`; + throw error; + } + if (error.status === 401) { + error.message = `Unauthorized. 
"${endpoint.method} ${endpoint.url}" failed most likely due to lack of authentication. Reason: ${reason}`; + throw error; + } + if (error.status >= 400 && error.status < 500) { + error.message = error.message.replace( + /\.?$/, + `. May be caused by lack of authentication (${reason}).` + ); + } + throw error; + }); +} + +// pkg/dist-src/index.js +var createUnauthenticatedAuth = function createUnauthenticatedAuth2(options) { + if (!options || !options.reason) { + throw new Error( + "[@octokit/auth-unauthenticated] No reason passed to createUnauthenticatedAuth" + ); + } + return Object.assign(auth.bind(null, options.reason), { + hook: hook.bind(null, options.reason) + }); +}; +export { + createUnauthenticatedAuth +}; diff --git a/dist/node_modules/@octokit/auth-unauthenticated/dist-web/index.js.map b/dist/node_modules/@octokit/auth-unauthenticated/dist-web/index.js.map new file mode 100644 index 0000000..cd181e7 --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/auth.js", "../dist-src/is-rate-limit-error.js", "../dist-src/is-abuse-limit-error.js", "../dist-src/hook.js", "../dist-src/index.js"], + "sourcesContent": ["async function auth(reason) {\n return {\n type: \"unauthenticated\",\n reason\n };\n}\nexport {\n auth\n};\n", "function isRateLimitError(error) {\n if (error.status !== 403) {\n return false;\n }\n if (!error.response) {\n return false;\n }\n return error.response.headers[\"x-ratelimit-remaining\"] === \"0\";\n}\nexport {\n isRateLimitError\n};\n", "const REGEX_ABUSE_LIMIT_MESSAGE = /\\babuse\\b/i;\nfunction isAbuseLimitError(error) {\n if (error.status !== 403) {\n return false;\n }\n return REGEX_ABUSE_LIMIT_MESSAGE.test(error.message);\n}\nexport {\n isAbuseLimitError\n};\n", "import { isRateLimitError } from \"./is-rate-limit-error\";\nimport { isAbuseLimitError } from \"./is-abuse-limit-error\";\nasync function hook(reason, request, route, parameters) {\n const endpoint = request.endpoint.merge(\n route,\n parameters\n );\n return request(endpoint).catch((error) => {\n if (error.status === 404) {\n error.message = `Not found. May be due to lack of authentication. Reason: ${reason}`;\n throw error;\n }\n if (isRateLimitError(error)) {\n error.message = `API rate limit exceeded. This maybe caused by the lack of authentication. Reason: ${reason}`;\n throw error;\n }\n if (isAbuseLimitError(error)) {\n error.message = `You have triggered an abuse detection mechanism. This maybe caused by the lack of authentication. Reason: ${reason}`;\n throw error;\n }\n if (error.status === 401) {\n error.message = `Unauthorized. \"${endpoint.method} ${endpoint.url}\" failed most likely due to lack of authentication. Reason: ${reason}`;\n throw error;\n }\n if (error.status >= 400 && error.status < 500) {\n error.message = error.message.replace(\n /\\.?$/,\n `. 
May be caused by lack of authentication (${reason}).`\n );\n }\n throw error;\n });\n}\nexport {\n hook\n};\n", "import { auth } from \"./auth\";\nimport { hook } from \"./hook\";\nconst createUnauthenticatedAuth = function createUnauthenticatedAuth2(options) {\n if (!options || !options.reason) {\n throw new Error(\n \"[@octokit/auth-unauthenticated] No reason passed to createUnauthenticatedAuth\"\n );\n }\n return Object.assign(auth.bind(null, options.reason), {\n hook: hook.bind(null, options.reason)\n });\n};\nexport {\n createUnauthenticatedAuth\n};\n"], + "mappings": ";AAAA,eAAe,KAAK,QAAQ;AAC1B,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,EACF;AACF;;;ACLA,SAAS,iBAAiB,OAAO;AAC/B,MAAI,MAAM,WAAW,KAAK;AACxB,WAAO;AAAA,EACT;AACA,MAAI,CAAC,MAAM,UAAU;AACnB,WAAO;AAAA,EACT;AACA,SAAO,MAAM,SAAS,QAAQ,uBAAuB,MAAM;AAC7D;;;ACRA,IAAM,4BAA4B;AAClC,SAAS,kBAAkB,OAAO;AAChC,MAAI,MAAM,WAAW,KAAK;AACxB,WAAO;AAAA,EACT;AACA,SAAO,0BAA0B,KAAK,MAAM,OAAO;AACrD;;;ACJA,eAAe,KAAK,QAAQ,SAAS,OAAO,YAAY;AACtD,QAAM,WAAW,QAAQ,SAAS;AAAA,IAChC;AAAA,IACA;AAAA,EACF;AACA,SAAO,QAAQ,QAAQ,EAAE,MAAM,CAAC,UAAU;AACxC,QAAI,MAAM,WAAW,KAAK;AACxB,YAAM,UAAU,4DAA4D;AAC5E,YAAM;AAAA,IACR;AACA,QAAI,iBAAiB,KAAK,GAAG;AAC3B,YAAM,UAAU,qFAAqF;AACrG,YAAM;AAAA,IACR;AACA,QAAI,kBAAkB,KAAK,GAAG;AAC5B,YAAM,UAAU,6GAA6G;AAC7H,YAAM;AAAA,IACR;AACA,QAAI,MAAM,WAAW,KAAK;AACxB,YAAM,UAAU,kBAAkB,SAAS,UAAU,SAAS,kEAAkE;AAChI,YAAM;AAAA,IACR;AACA,QAAI,MAAM,UAAU,OAAO,MAAM,SAAS,KAAK;AAC7C,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B;AAAA,QACA,8CAA8C;AAAA,MAChD;AAAA,IACF;AACA,UAAM;AAAA,EACR,CAAC;AACH;;;AC9BA,IAAM,4BAA4B,SAAS,2BAA2B,SAAS;AAC7E,MAAI,CAAC,WAAW,CAAC,QAAQ,QAAQ;AAC/B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,SAAO,OAAO,OAAO,KAAK,KAAK,MAAM,QAAQ,MAAM,GAAG;AAAA,IACpD,MAAM,KAAK,KAAK,MAAM,QAAQ,MAAM;AAAA,EACtC,CAAC;AACH;", + "names": [] +} diff --git a/dist/node_modules/@octokit/auth-unauthenticated/package.json b/dist/node_modules/@octokit/auth-unauthenticated/package.json new file mode 100644 index 0000000..04d04a0 --- /dev/null +++ b/dist/node_modules/@octokit/auth-unauthenticated/package.json @@ -0,0 +1,47 @@ +{ + "name": "@octokit/auth-unauthenticated", + "publishConfig": { + "access": "public" + }, + "version": "3.0.5", + "description": "GitHub API token authentication for browsers and Node.js", + "repository": "github:octokit/auth-unauthenticated.js", + "keywords": [ + "github", + "octokit", + "authentication", + "api" + ], + "author": "Gregor Martynus (https://github.com/gr2m)", + "license": "MIT", + "dependencies": { + "@octokit/request-error": "^3.0.0", + "@octokit/types": "^9.0.0" + }, + "devDependencies": { + "@octokit/core": "^4.0.0", + "@octokit/request": "^6.0.0", + "@octokit/tsconfig": "^1.0.2", + "@tsconfig/node10": "^1.0.3", + "@types/jest": "^29.0.0", + "esbuild": "^0.17.19", + "fetch-mock": "^9.10.7", + "glob": "^10.2.5", + "jest": "^29.0.0", + "prettier": "2.8.8", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "source": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/core/LICENSE b/dist/node_modules/@octokit/core/LICENSE new file mode 100644 index 0000000..ef2c18e --- /dev/null +++ b/dist/node_modules/@octokit/core/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the 
"Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/dist/node_modules/@octokit/core/README.md b/dist/node_modules/@octokit/core/README.md new file mode 100644 index 0000000..50eea08 --- /dev/null +++ b/dist/node_modules/@octokit/core/README.md @@ -0,0 +1,448 @@ +# core.js + +> Extendable client for GitHub's REST & GraphQL APIs + +[![@latest](https://img.shields.io/npm/v/@octokit/core.svg)](https://www.npmjs.com/package/@octokit/core) +[![Build Status](https://github.com/octokit/core.js/workflows/Test/badge.svg)](https://github.com/octokit/core.js/actions?query=workflow%3ATest+branch%3Amain) + + + +- [Usage](#usage) + - [REST API example](#rest-api-example) + - [GraphQL example](#graphql-example) +- [Options](#options) +- [Defaults](#defaults) +- [Authentication](#authentication) +- [Logging](#logging) +- [Hooks](#hooks) +- [Plugins](#plugins) +- [Build your own Octokit with Plugins and Defaults](#build-your-own-octokit-with-plugins-and-defaults) +- [LICENSE](#license) + + + +If you need a minimalistic library to utilize GitHub's [REST API](https://developer.github.com/v3/) and [GraphQL API](https://developer.github.com/v4/) which you can extend with plugins as needed, then `@octokit/core` is a great starting point. + +If you don't need the Plugin API then using [`@octokit/request`](https://github.com/octokit/request.js/) or [`@octokit/graphql`](https://github.com/octokit/graphql.js/) directly is a good alternative. + +## Usage + + + + + + +
+Browsers
+
+Load @octokit/core directly from esm.sh
+
+```html
+<script type="module">
+  import { Octokit } from "https://esm.sh/@octokit/core";
+</script>
+```
+
+Node + + +Install with npm install @octokit/core + +```js +const { Octokit } = require("@octokit/core"); +// or: import { Octokit } from "@octokit/core"; +``` + +
+ +### REST API example + +```js +// Create a personal access token at https://github.com/settings/tokens/new?scopes=repo +const octokit = new Octokit({ auth: `personal-access-token123` }); + +const response = await octokit.request("GET /orgs/{org}/repos", { + org: "octokit", + type: "private", +}); +``` + +See [`@octokit/request`](https://github.com/octokit/request.js) for full documentation of the `.request` method. + +### GraphQL example + +```js +const octokit = new Octokit({ auth: `secret123` }); + +const response = await octokit.graphql( + `query ($login: String!) { + organization(login: $login) { + repositories(privacy: PRIVATE) { + totalCount + } + } + }`, + { login: "octokit" } +); +``` + +See [`@octokit/graphql`](https://github.com/octokit/graphql.js) for full documentation of the `.graphql` method. + +## Options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ options.authStrategy + + Function + + Defaults to @octokit/auth-token. See Authentication below for examples. +
+ options.auth + + String or Object + + See Authentication below for examples. +
+ options.baseUrl + + String + + +When using with GitHub Enterprise Server, set `options.baseUrl` to the root URL of the API. For example, if your GitHub Enterprise Server's hostname is `github.acme-inc.com`, then set `options.baseUrl` to `https://github.acme-inc.com/api/v3`. Example + +```js +const octokit = new Octokit({ + baseUrl: "https://github.acme-inc.com/api/v3", +}); +``` + +
+ options.previews + + Array of Strings + + +Some REST API endpoints require preview headers to be set, or enable +additional features. Preview headers can be set on a per-request basis, e.g. + +```js +octokit.request("POST /repos/{owner}/{repo}/pulls", { + mediaType: { + previews: ["shadow-cat"], + }, + owner, + repo, + title: "My pull request", + base: "main", + head: "my-feature", + draft: true, +}); +``` + +You can also set previews globally, by setting the `options.previews` option on the constructor. Example: + +```js +const octokit = new Octokit({ + previews: ["shadow-cat"], +}); +``` + +
+ options.request + + Object + + +Set a default request timeout (`options.request.timeout`) or an [`http(s).Agent`](https://nodejs.org/api/http.html#http_class_http_agent) e.g. for proxy usage (Node only, `options.request.agent`). + +There are more `options.request.*` options, see [`@octokit/request` options](https://github.com/octokit/request.js#request). `options.request` can also be set on a per-request basis. + +
+ options.timeZone + + String + + +Sets the `Time-Zone` header which defines a timezone according to the [list of names from the Olson database](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones). + +```js +const octokit = new Octokit({ + timeZone: "America/Los_Angeles", +}); +``` + +The time zone header will determine the timezone used for generating the timestamp when creating commits. See [GitHub's Timezones documentation](https://developer.github.com/v3/#timezones). + +
+ options.userAgent + + String + + +A custom user agent string for your app or library. Example + +```js +const octokit = new Octokit({ + userAgent: "my-app/v1.2.3", +}); +``` + +
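+
+For illustration, a minimal sketch that combines several of the options above in a single constructor call. The token, hostname, time zone, and timeout are placeholder values, not defaults.
+
+```js
+const { Octokit } = require("@octokit/core");
+
+const octokit = new Octokit({
+  auth: "personal-access-token123",
+  baseUrl: "https://github.acme-inc.com/api/v3",
+  userAgent: "my-app/v1.2.3",
+  timeZone: "America/Los_Angeles",
+  // options.request.* is passed through to the request layer, e.g. a timeout
+  request: { timeout: 5000 },
+});
+```
+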
+ +## Defaults + +You can create a new Octokit class with customized default options. + +```js +const MyOctokit = Octokit.defaults({ + auth: "personal-access-token123", + baseUrl: "https://github.acme-inc.com/api/v3", + userAgent: "my-app/v1.2.3", +}); +const octokit1 = new MyOctokit(); +const octokit2 = new MyOctokit(); +``` + +If you pass additional options to your new constructor, the options will be merged shallowly. + +```js +const MyOctokit = Octokit.defaults({ + foo: { + opt1: 1, + }, +}); +const octokit = new MyOctokit({ + foo: { + opt2: 1, + }, +}); +// options will be { foo: { opt2: 1 }} +``` + +If you need a deep or conditional merge, you can pass a function instead. + +```js +const MyOctokit = Octokit.defaults((options) => { + return { + foo: Object.assign({}, options.foo, { opt1: 1 }), + }; +}); +const octokit = new MyOctokit({ + foo: { opt2: 1 }, +}); +// options will be { foo: { opt1: 1, opt2: 1 }} +``` + +Be careful about mutating the `options` object in the `Octokit.defaults` callback, as it can have unforeseen consequences. + +## Authentication + +Authentication is optional for some REST API endpoints accessing public data, but is required for GraphQL queries. Using authentication also increases your [API rate limit](https://developer.github.com/v3/#rate-limiting). + +By default, Octokit authenticates using the [token authentication strategy](https://github.com/octokit/auth-token.js). Pass in a token using `options.auth`. It can be a personal access token, an OAuth token, an installation access token or a JSON Web Token for GitHub App authentication. The `Authorization` header will be set according to the type of token. + +```js +import { Octokit } from "@octokit/core"; + +const octokit = new Octokit({ + auth: "mypersonalaccesstoken123", +}); + +const { data } = await octokit.request("/user"); +``` + +To use a different authentication strategy, set `options.authStrategy`. A list of authentication strategies is available at [octokit/authentication-strategies.js](https://github.com/octokit/authentication-strategies.js/#readme). + +Example + +```js +import { Octokit } from "@octokit/core"; +import { createAppAuth } from "@octokit/auth-app"; + +const appOctokit = new Octokit({ + authStrategy: createAppAuth, + auth: { + appId: 123, + privateKey: process.env.PRIVATE_KEY, + }, +}); + +const { data } = await appOctokit.request("/app"); +``` + +The `.auth()` method returned by the current authentication strategy can be accessed at `octokit.auth()`. Example + +```js +const { token } = await appOctokit.auth({ + type: "installation", + installationId: 123, +}); +``` + +## Logging + +There are four built-in log methods + +1. `octokit.log.debug(message[, additionalInfo])` +1. `octokit.log.info(message[, additionalInfo])` +1. `octokit.log.warn(message[, additionalInfo])` +1. `octokit.log.error(message[, additionalInfo])` + +They can be configured using the [`log` client option](client-options). By default, `octokit.log.debug()` and `octokit.log.info()` are no-ops, while the other two call `console.warn()` and `console.error()` respectively. + +This is useful if you build reusable [plugins](#plugins). + +If you would like to make the log level configurable using an environment variable or external option, we recommend the [console-log-level](https://github.com/watson/console-log-level) package. Example + +```js +const octokit = new Octokit({ + log: require("console-log-level")({ level: "info" }), +}); +``` + +## Hooks + +You can customize Octokit's request lifecycle with hooks. 
+ +```js +octokit.hook.before("request", async (options) => { + validate(options); +}); +octokit.hook.after("request", async (response, options) => { + console.log(`${options.method} ${options.url}: ${response.status}`); +}); +octokit.hook.error("request", async (error, options) => { + if (error.status === 304) { + return findInCache(error.response.headers.etag); + } + + throw error; +}); +octokit.hook.wrap("request", async (request, options) => { + // add logic before, after, catch errors or replace the request altogether + return request(options); +}); +``` + +See [before-after-hook](https://github.com/gr2m/before-after-hook#readme) for more documentation on hooks. + +## Plugins + +Octokit’s functionality can be extended using plugins. The `Octokit.plugin()` method accepts a plugin (or many) and returns a new constructor. + +A plugin is a function which gets two arguments: + +1. the current instance +2. the options passed to the constructor. + +In order to extend `octokit`'s API, the plugin must return an object with the new methods. + +```js +// index.js +const { Octokit } = require("@octokit/core") +const MyOctokit = Octokit.plugin( + require("./lib/my-plugin"), + require("octokit-plugin-example") +); + +const octokit = new MyOctokit({ greeting: "Moin moin" }); +octokit.helloWorld(); // logs "Moin moin, world!" +octokit.request("GET /"); // logs "GET / - 200 in 123ms" + +// lib/my-plugin.js +module.exports = (octokit, options = { greeting: "Hello" }) => { + // hook into the request lifecycle + octokit.hook.wrap("request", async (request, options) => { + const time = Date.now(); + const response = await request(options); + console.log( + `${options.method} ${options.url} – ${response.status} in ${Date.now() - + time}ms` + ); + return response; + }); + + // add a custom method + return { + helloWorld: () => console.log(`${options.greeting}, world!`); + } +}; +``` + +## Build your own Octokit with Plugins and Defaults + +You can build your own Octokit class with preset default options and plugins. In fact, this is mostly how the `@octokit/` modules work, such as [`@octokit/action`](https://github.com/octokit/action.js): + +```js +const { Octokit } = require("@octokit/core"); +const MyActionOctokit = Octokit.plugin( + require("@octokit/plugin-paginate-rest").paginateRest, + require("@octokit/plugin-throttling").throttling, + require("@octokit/plugin-retry").retry +).defaults({ + throttle: { + onAbuseLimit: (retryAfter, options) => { + /* ... */ + }, + onRateLimit: (retryAfter, options) => { + /* ... 
*/ + }, + }, + authStrategy: require("@octokit/auth-action").createActionAuth, + userAgent: `my-octokit-action/v1.2.3`, +}); + +const octokit = new MyActionOctokit(); +const installations = await octokit.paginate("GET /app/installations"); +``` + +## LICENSE + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/core/dist-node/index.js b/dist/node_modules/@octokit/core/dist-node/index.js new file mode 100644 index 0000000..78ba108 --- /dev/null +++ b/dist/node_modules/@octokit/core/dist-node/index.js @@ -0,0 +1,157 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + Octokit: () => Octokit +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = require("universal-user-agent"); +var import_before_after_hook = require("before-after-hook"); +var import_request = require("@octokit/request"); +var import_graphql = require("@octokit/graphql"); +var import_auth_token = require("@octokit/auth-token"); + +// pkg/dist-src/version.js +var VERSION = "4.2.4"; + +// pkg/dist-src/index.js +var Octokit = class { + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + static plugin(...newPlugins) { + var _a; + const currentPlugins = this.plugins; + const NewOctokit = (_a = class extends this { + }, _a.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ), _a); + return NewOctokit; + } + constructor(options = {}) { + const hook = new import_before_after_hook.Collection(); + const requestDefaults = { + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = [ + options.userAgent, + `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + ].filter(Boolean).join(" "); + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: () => { + }, + info: () => { + }, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + const auth = (0, import_auth_token.createTokenAuth)(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. 
+ octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; + } + const classConstructor = this.constructor; + classConstructor.plugins.forEach((plugin) => { + Object.assign(this, plugin(this, options)); + }); + } +}; +Octokit.VERSION = VERSION; +Octokit.plugins = []; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + Octokit +}); diff --git a/dist/node_modules/@octokit/core/dist-node/index.js.map b/dist/node_modules/@octokit/core/dist-node/index.js.map new file mode 100644 index 0000000..ed08ba0 --- /dev/null +++ b/dist/node_modules/@octokit/core/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js"], + "sourcesContent": ["import { getUserAgent } from \"universal-user-agent\";\nimport { Collection } from \"before-after-hook\";\nimport { request } from \"@octokit/request\";\nimport { graphql, withCustomRequest } from \"@octokit/graphql\";\nimport { createTokenAuth } from \"@octokit/auth-token\";\nimport { VERSION } from \"./version\";\nclass Octokit {\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n super(\n Object.assign(\n {},\n defaults,\n options,\n options.userAgent && defaults.userAgent ? {\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null\n )\n );\n }\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n static plugin(...newPlugins) {\n var _a;\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {\n }, _a.plugins = currentPlugins.concat(\n newPlugins.filter((plugin) => !currentPlugins.includes(plugin))\n ), _a);\n return NewOctokit;\n }\n constructor(options = {}) {\n const hook = new Collection();\n const requestDefaults = {\n baseUrl: request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n };\n requestDefaults.headers[\"user-agent\"] = [\n options.userAgent,\n `octokit-core.js/${VERSION} ${getUserAgent()}`\n ].filter(Boolean).join(\" \");\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n this.request = request.defaults(requestDefaults);\n this.graphql = withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign(\n {\n debug: () => {\n },\n info: () => {\n },\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n },\n options.log\n );\n this.hook = hook;\n if (!options.authStrategy) {\n if (!options.auth) {\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n const auth = createTokenAuth(options.auth);\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const { authStrategy, ...otherOptions } = options;\n const auth = authStrategy(\n Object.assign(\n {\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // 
to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n },\n options.auth\n )\n );\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach((plugin) => {\n Object.assign(this, plugin(this, options));\n });\n }\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\nexport {\n Octokit\n};\n", "const VERSION = \"4.2.4\";\nexport {\n VERSION\n};\n"], + "mappings": ";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAA6B;AAC7B,+BAA2B;AAC3B,qBAAwB;AACxB,qBAA2C;AAC3C,wBAAgC;;;ACJhC,IAAM,UAAU;;;ADMhB,IAAM,UAAN,MAAc;AAAA,EACZ,OAAO,SAAS,UAAU;AACxB,UAAM,sBAAsB,cAAc,KAAK;AAAA,MAC7C,eAAe,MAAM;AACnB,cAAM,UAAU,KAAK,CAAC,KAAK,CAAC;AAC5B,YAAI,OAAO,aAAa,YAAY;AAClC,gBAAM,SAAS,OAAO,CAAC;AACvB;AAAA,QACF;AACA;AAAA,UACE,OAAO;AAAA,YACL,CAAC;AAAA,YACD;AAAA,YACA;AAAA,YACA,QAAQ,aAAa,SAAS,YAAY;AAAA,cACxC,WAAW,GAAG,QAAQ,aAAa,SAAS;AAAA,YAC9C,IAAI;AAAA,UACN;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,UAAU,YAAY;AAC3B,QAAI;AACJ,UAAM,iBAAiB,KAAK;AAC5B,UAAM,cAAc,KAAK,cAAc,KAAK;AAAA,IAC5C,GAAG,GAAG,UAAU,eAAe;AAAA,MAC7B,WAAW,OAAO,CAAC,WAAW,CAAC,eAAe,SAAS,MAAM,CAAC;AAAA,IAChE,GAAG;AACH,WAAO;AAAA,EACT;AAAA,EACA,YAAY,UAAU,CAAC,GAAG;AACxB,UAAM,OAAO,IAAI,oCAAW;AAC5B,UAAM,kBAAkB;AAAA,MACtB,SAAS,uBAAQ,SAAS,SAAS;AAAA,MACnC,SAAS,CAAC;AAAA,MACV,SAAS,OAAO,OAAO,CAAC,GAAG,QAAQ,SAAS;AAAA;AAAA,QAE1C,MAAM,KAAK,KAAK,MAAM,SAAS;AAAA,MACjC,CAAC;AAAA,MACD,WAAW;AAAA,QACT,UAAU,CAAC;AAAA,QACX,QAAQ;AAAA,MACV;AAAA,IACF;AACA,oBAAgB,QAAQ,YAAY,IAAI;AAAA,MACtC,QAAQ;AAAA,MACR,mBAAmB,eAAW,0CAAa;AAAA,IAC7C,EAAE,OAAO,OAAO,EAAE,KAAK,GAAG;AAC1B,QAAI,QAAQ,SAAS;AACnB,sBAAgB,UAAU,QAAQ;AAAA,IACpC;AACA,QAAI,QAAQ,UAAU;AACpB,sBAAgB,UAAU,WAAW,QAAQ;AAAA,IAC/C;AACA,QAAI,QAAQ,UAAU;AACpB,sBAAgB,QAAQ,WAAW,IAAI,QAAQ;AAAA,IACjD;AACA,SAAK,UAAU,uBAAQ,SAAS,eAAe;AAC/C,SAAK,cAAU,kCAAkB,KAAK,OAAO,EAAE,SAAS,eAAe;AACvE,SAAK,MAAM,OAAO;AAAA,MAChB;AAAA,QACE,OAAO,MAAM;AAAA,QACb;AAAA,QACA,MAAM,MAAM;AAAA,QACZ;AAAA,QACA,MAAM,QAAQ,KAAK,KAAK,OAAO;AAAA,QAC/B,OAAO,QAAQ,MAAM,KAAK,OAAO;AAAA,MACnC;AAAA,MACA,QAAQ;AAAA,IACV;AACA,SAAK,OAAO;AACZ,QAAI,CAAC,QAAQ,cAAc;AACzB,UAAI,CAAC,QAAQ,MAAM;AACjB,aAAK,OAAO,aAAa;AAAA,UACvB,MAAM;AAAA,QACR;AAAA,MACF,OAAO;AACL,cAAM,WAAO,mCAAgB,QAAQ,IAAI;AACzC,aAAK,KAAK,WAAW,KAAK,IAAI;AAC9B,aAAK,OAAO;AAAA,MACd;AAAA,IACF,OAAO;AACL,YAAM,EAAE,cAAc,GAAG,aAAa,IAAI;AAC1C,YAAM,OAAO;AAAA,QACX,OAAO;AAAA,UACL;AAAA,YACE,SAAS,KAAK;AAAA,YACd,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAMV,SAAS;AAAA,YACT,gBAAgB;AAAA,UAClB;AAAA,UACA,QAAQ;AAAA,QACV;AAAA,MACF;AACA,WAAK,KAAK,WAAW,KAAK,IAAI;AAC9B,WAAK,OAAO;AAAA,IACd;AACA,UAAM,mBAAmB,KAAK;AAC9B,qBAAiB,QAAQ,QAAQ,CAAC,WAAW;AAC3C,aAAO,OAAO,MAAM,OAAO,MAAM,OAAO,CAAC;AAAA,IAC3C,CAAC;AAAA,EACH;AACF;AACA,QAAQ,UAAU;AAClB,QAAQ,UAAU,CAAC;", + "names": [] +} diff --git a/dist/node_modules/@octokit/core/dist-src/index.js b/dist/node_modules/@octokit/core/dist-src/index.js new file mode 100644 index 0000000..ed23a48 --- /dev/null +++ b/dist/node_modules/@octokit/core/dist-src/index.js @@ -0,0 +1,127 @@ +import { getUserAgent } from "universal-user-agent"; +import { Collection } from "before-after-hook"; +import { request } from "@octokit/request"; +import { graphql, withCustomRequest } from "@octokit/graphql"; +import { createTokenAuth } from 
"@octokit/auth-token"; +import { VERSION } from "./version"; +class Octokit { + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + static plugin(...newPlugins) { + var _a; + const currentPlugins = this.plugins; + const NewOctokit = (_a = class extends this { + }, _a.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ), _a); + return NewOctokit; + } + constructor(options = {}) { + const hook = new Collection(); + const requestDefaults = { + baseUrl: request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = [ + options.userAgent, + `octokit-core.js/${VERSION} ${getUserAgent()}` + ].filter(Boolean).join(" "); + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = request.defaults(requestDefaults); + this.graphql = withCustomRequest(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: () => { + }, + info: () => { + }, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + const auth = createTokenAuth(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. 
+ octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; + } + const classConstructor = this.constructor; + classConstructor.plugins.forEach((plugin) => { + Object.assign(this, plugin(this, options)); + }); + } +} +Octokit.VERSION = VERSION; +Octokit.plugins = []; +export { + Octokit +}; diff --git a/dist/node_modules/@octokit/core/dist-src/version.js b/dist/node_modules/@octokit/core/dist-src/version.js new file mode 100644 index 0000000..6b36c09 --- /dev/null +++ b/dist/node_modules/@octokit/core/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "4.2.4"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/core/dist-types/index.d.ts b/dist/node_modules/@octokit/core/dist-types/index.d.ts new file mode 100644 index 0000000..fa5e88f --- /dev/null +++ b/dist/node_modules/@octokit/core/dist-types/index.d.ts @@ -0,0 +1,30 @@ +import type { HookCollection } from "before-after-hook"; +import { request } from "@octokit/request"; +import { graphql } from "@octokit/graphql"; +import type { Constructor, Hooks, OctokitOptions, OctokitPlugin, ReturnTypeOf, UnionToIntersection } from "./types"; +export declare class Octokit { + static VERSION: string; + static defaults>(this: S, defaults: OctokitOptions | Function): S; + static plugins: OctokitPlugin[]; + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + static plugin & { + plugins: any[]; + }, T extends OctokitPlugin[]>(this: S, ...newPlugins: T): S & Constructor>>; + constructor(options?: OctokitOptions); + request: typeof request; + graphql: typeof graphql; + log: { + debug: (message: string, additionalInfo?: object) => any; + info: (message: string, additionalInfo?: object) => any; + warn: (message: string, additionalInfo?: object) => any; + error: (message: string, additionalInfo?: object) => any; + [key: string]: any; + }; + hook: HookCollection; + auth: (...args: unknown[]) => Promise; +} diff --git a/dist/node_modules/@octokit/core/dist-types/types.d.ts b/dist/node_modules/@octokit/core/dist-types/types.d.ts new file mode 100644 index 0000000..d2b50b8 --- /dev/null +++ b/dist/node_modules/@octokit/core/dist-types/types.d.ts @@ -0,0 +1,44 @@ +import * as OctokitTypes from "@octokit/types"; +import { RequestError } from "@octokit/request-error"; +import { Octokit } from "."; +export type RequestParameters = OctokitTypes.RequestParameters; +export interface OctokitOptions { + authStrategy?: any; + auth?: any; + userAgent?: string; + previews?: string[]; + baseUrl?: string; + log?: { + debug: (message: string) => unknown; + info: (message: string) => unknown; + warn: (message: string) => unknown; + error: (message: string) => unknown; + }; + request?: OctokitTypes.RequestRequestOptions; + timeZone?: string; + [option: string]: any; +} +export type Constructor = new (...args: any[]) => T; +export type ReturnTypeOf = T extends AnyFunction ? ReturnType : T extends AnyFunction[] ? UnionToIntersection, void>> : never; +/** + * @author https://stackoverflow.com/users/2887218/jcalz + * @see https://stackoverflow.com/a/50375286/10325032 + */ +export type UnionToIntersection = (Union extends any ? (argument: Union) => void : never) extends (argument: infer Intersection) => void ? 
Intersection : never; +type AnyFunction = (...args: any) => any; +export type OctokitPlugin = (octokit: Octokit, options: OctokitOptions) => { + [key: string]: any; +} | void; +export type Hooks = { + request: { + Options: Required; + Result: OctokitTypes.OctokitResponse; + Error: RequestError | Error; + }; + [key: string]: { + Options: unknown; + Result: unknown; + Error: unknown; + }; +}; +export {}; diff --git a/dist/node_modules/@octokit/core/dist-types/version.d.ts b/dist/node_modules/@octokit/core/dist-types/version.d.ts new file mode 100644 index 0000000..b8a0794 --- /dev/null +++ b/dist/node_modules/@octokit/core/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "4.2.4"; diff --git a/dist/node_modules/@octokit/core/dist-web/index.js b/dist/node_modules/@octokit/core/dist-web/index.js new file mode 100644 index 0000000..5a984d2 --- /dev/null +++ b/dist/node_modules/@octokit/core/dist-web/index.js @@ -0,0 +1,132 @@ +// pkg/dist-src/index.js +import { getUserAgent } from "universal-user-agent"; +import { Collection } from "before-after-hook"; +import { request } from "@octokit/request"; +import { graphql, withCustomRequest } from "@octokit/graphql"; +import { createTokenAuth } from "@octokit/auth-token"; + +// pkg/dist-src/version.js +var VERSION = "4.2.4"; + +// pkg/dist-src/index.js +var Octokit = class { + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + static plugin(...newPlugins) { + var _a; + const currentPlugins = this.plugins; + const NewOctokit = (_a = class extends this { + }, _a.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ), _a); + return NewOctokit; + } + constructor(options = {}) { + const hook = new Collection(); + const requestDefaults = { + baseUrl: request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = [ + options.userAgent, + `octokit-core.js/${VERSION} ${getUserAgent()}` + ].filter(Boolean).join(" "); + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = request.defaults(requestDefaults); + this.graphql = withCustomRequest(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: () => { + }, + info: () => { + }, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + const auth = createTokenAuth(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; + } + const classConstructor = this.constructor; + classConstructor.plugins.forEach((plugin) => { + Object.assign(this, plugin(this, options)); + }); + } +}; +Octokit.VERSION = VERSION; +Octokit.plugins = []; +export { + Octokit +}; diff --git a/dist/node_modules/@octokit/core/dist-web/index.js.map b/dist/node_modules/@octokit/core/dist-web/index.js.map new file mode 100644 index 0000000..538c019 --- /dev/null +++ b/dist/node_modules/@octokit/core/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js"], + "sourcesContent": ["import { getUserAgent } from \"universal-user-agent\";\nimport { Collection } from \"before-after-hook\";\nimport { request } from \"@octokit/request\";\nimport { graphql, withCustomRequest } from \"@octokit/graphql\";\nimport { createTokenAuth } from \"@octokit/auth-token\";\nimport { VERSION } from \"./version\";\nclass Octokit {\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n super(\n Object.assign(\n {},\n defaults,\n options,\n options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null\n )\n );\n }\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n static plugin(...newPlugins) {\n var _a;\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {\n }, _a.plugins = currentPlugins.concat(\n newPlugins.filter((plugin) => !currentPlugins.includes(plugin))\n ), _a);\n return NewOctokit;\n }\n constructor(options = {}) {\n const hook = new Collection();\n const requestDefaults = {\n baseUrl: request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n };\n requestDefaults.headers[\"user-agent\"] = [\n options.userAgent,\n `octokit-core.js/${VERSION} ${getUserAgent()}`\n ].filter(Boolean).join(\" \");\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n this.request = request.defaults(requestDefaults);\n this.graphql = withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign(\n {\n debug: () => {\n },\n info: () => {\n },\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n },\n options.log\n );\n this.hook = hook;\n if (!options.authStrategy) {\n if (!options.auth) {\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n const auth = createTokenAuth(options.auth);\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const { authStrategy, ...otherOptions } = options;\n const auth = authStrategy(\n Object.assign(\n {\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. 
The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n },\n options.auth\n )\n );\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach((plugin) => {\n Object.assign(this, plugin(this, options));\n });\n }\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\nexport {\n Octokit\n};\n", "const VERSION = \"4.2.4\";\nexport {\n VERSION\n};\n"], + "mappings": ";AAAA,SAAS,oBAAoB;AAC7B,SAAS,kBAAkB;AAC3B,SAAS,eAAe;AACxB,SAAS,SAAS,yBAAyB;AAC3C,SAAS,uBAAuB;;;ACJhC,IAAM,UAAU;;;ADMhB,IAAM,UAAN,MAAc;AAAA,EACZ,OAAO,SAAS,UAAU;AACxB,UAAM,sBAAsB,cAAc,KAAK;AAAA,MAC7C,eAAe,MAAM;AACnB,cAAM,UAAU,KAAK,CAAC,KAAK,CAAC;AAC5B,YAAI,OAAO,aAAa,YAAY;AAClC,gBAAM,SAAS,OAAO,CAAC;AACvB;AAAA,QACF;AACA;AAAA,UACE,OAAO;AAAA,YACL,CAAC;AAAA,YACD;AAAA,YACA;AAAA,YACA,QAAQ,aAAa,SAAS,YAAY;AAAA,cACxC,WAAW,GAAG,QAAQ,aAAa,SAAS;AAAA,YAC9C,IAAI;AAAA,UACN;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,UAAU,YAAY;AAC3B,QAAI;AACJ,UAAM,iBAAiB,KAAK;AAC5B,UAAM,cAAc,KAAK,cAAc,KAAK;AAAA,IAC5C,GAAG,GAAG,UAAU,eAAe;AAAA,MAC7B,WAAW,OAAO,CAAC,WAAW,CAAC,eAAe,SAAS,MAAM,CAAC;AAAA,IAChE,GAAG;AACH,WAAO;AAAA,EACT;AAAA,EACA,YAAY,UAAU,CAAC,GAAG;AACxB,UAAM,OAAO,IAAI,WAAW;AAC5B,UAAM,kBAAkB;AAAA,MACtB,SAAS,QAAQ,SAAS,SAAS;AAAA,MACnC,SAAS,CAAC;AAAA,MACV,SAAS,OAAO,OAAO,CAAC,GAAG,QAAQ,SAAS;AAAA;AAAA,QAE1C,MAAM,KAAK,KAAK,MAAM,SAAS;AAAA,MACjC,CAAC;AAAA,MACD,WAAW;AAAA,QACT,UAAU,CAAC;AAAA,QACX,QAAQ;AAAA,MACV;AAAA,IACF;AACA,oBAAgB,QAAQ,YAAY,IAAI;AAAA,MACtC,QAAQ;AAAA,MACR,mBAAmB,WAAW,aAAa;AAAA,IAC7C,EAAE,OAAO,OAAO,EAAE,KAAK,GAAG;AAC1B,QAAI,QAAQ,SAAS;AACnB,sBAAgB,UAAU,QAAQ;AAAA,IACpC;AACA,QAAI,QAAQ,UAAU;AACpB,sBAAgB,UAAU,WAAW,QAAQ;AAAA,IAC/C;AACA,QAAI,QAAQ,UAAU;AACpB,sBAAgB,QAAQ,WAAW,IAAI,QAAQ;AAAA,IACjD;AACA,SAAK,UAAU,QAAQ,SAAS,eAAe;AAC/C,SAAK,UAAU,kBAAkB,KAAK,OAAO,EAAE,SAAS,eAAe;AACvE,SAAK,MAAM,OAAO;AAAA,MAChB;AAAA,QACE,OAAO,MAAM;AAAA,QACb;AAAA,QACA,MAAM,MAAM;AAAA,QACZ;AAAA,QACA,MAAM,QAAQ,KAAK,KAAK,OAAO;AAAA,QAC/B,OAAO,QAAQ,MAAM,KAAK,OAAO;AAAA,MACnC;AAAA,MACA,QAAQ;AAAA,IACV;AACA,SAAK,OAAO;AACZ,QAAI,CAAC,QAAQ,cAAc;AACzB,UAAI,CAAC,QAAQ,MAAM;AACjB,aAAK,OAAO,aAAa;AAAA,UACvB,MAAM;AAAA,QACR;AAAA,MACF,OAAO;AACL,cAAM,OAAO,gBAAgB,QAAQ,IAAI;AACzC,aAAK,KAAK,WAAW,KAAK,IAAI;AAC9B,aAAK,OAAO;AAAA,MACd;AAAA,IACF,OAAO;AACL,YAAM,EAAE,cAAc,GAAG,aAAa,IAAI;AAC1C,YAAM,OAAO;AAAA,QACX,OAAO;AAAA,UACL;AAAA,YACE,SAAS,KAAK;AAAA,YACd,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAMV,SAAS;AAAA,YACT,gBAAgB;AAAA,UAClB;AAAA,UACA,QAAQ;AAAA,QACV;AAAA,MACF;AACA,WAAK,KAAK,WAAW,KAAK,IAAI;AAC9B,WAAK,OAAO;AAAA,IACd;AACA,UAAM,mBAAmB,KAAK;AAC9B,qBAAiB,QAAQ,QAAQ,CAAC,WAAW;AAC3C,aAAO,OAAO,MAAM,OAAO,MAAM,OAAO,CAAC;AAAA,IAC3C,CAAC;AAAA,EACH;AACF;AACA,QAAQ,UAAU;AAClB,QAAQ,UAAU,CAAC;", + "names": [] +} diff --git a/dist/node_modules/@octokit/core/package.json b/dist/node_modules/@octokit/core/package.json new file mode 100644 index 0000000..7ecd7ae --- /dev/null +++ b/dist/node_modules/@octokit/core/package.json @@ -0,0 +1,59 @@ +{ + "name": "@octokit/core", + "version": "4.2.4", + "publishConfig": { + "access": "public" + }, + "description": "Extendable client for GitHub's REST & GraphQL APIs", + "repository": "github:octokit/core.js", + "keywords": [ + "octokit", + "github", + "api", + "sdk", + "toolkit" + ], + "author": "Gregor Martynus (https://github.com/gr2m)", + "license": "MIT", + "dependencies": { + "@octokit/auth-token": "^3.0.0", + 
"@octokit/graphql": "^5.0.0", + "@octokit/request": "^6.0.0", + "@octokit/request-error": "^3.0.0", + "@octokit/types": "^9.0.0", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + }, + "devDependencies": { + "@octokit/auth": "^3.0.1", + "@octokit/tsconfig": "^2.0.0", + "@types/fetch-mock": "^7.3.1", + "@types/jest": "^29.0.0", + "@types/lolex": "^5.1.0", + "@types/node": "^18.0.0", + "esbuild": "^0.18.0", + "fetch-mock": "^9.0.0", + "glob": "^10.2.5", + "http-proxy-agent": "^7.0.0", + "jest": "^29.0.0", + "lolex": "^6.0.0", + "prettier": "2.8.8", + "proxy": "^2.0.0", + "semantic-release": "^21.0.0", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "source": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/endpoint/LICENSE b/dist/node_modules/@octokit/endpoint/LICENSE new file mode 100644 index 0000000..af5366d --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/dist/node_modules/@octokit/endpoint/README.md b/dist/node_modules/@octokit/endpoint/README.md new file mode 100644 index 0000000..c7b5232 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/README.md @@ -0,0 +1,421 @@ +# endpoint.js + +> Turns GitHub REST API endpoints into generic request options + +[![@latest](https://img.shields.io/npm/v/@octokit/endpoint.svg)](https://www.npmjs.com/package/@octokit/endpoint) +[![Build Status](https://github.com/octokit/endpoint.js/workflows/Test/badge.svg)](https://github.com/octokit/endpoint.js/actions/workflows/test.yml?query=branch%3Amain) + +`@octokit/endpoint` combines [GitHub REST API routes](https://developer.github.com/v3/) with your parameters and turns them into generic request options that can be used in any request library. 
+ + + + + +- [Usage](#usage) +- [API](#api) + - [`endpoint(route, options)` or `endpoint(options)`](#endpointroute-options-or-endpointoptions) + - [`endpoint.defaults()`](#endpointdefaults) + - [`endpoint.DEFAULTS`](#endpointdefaults) + - [`endpoint.merge(route, options)` or `endpoint.merge(options)`](#endpointmergeroute-options-or-endpointmergeoptions) + - [`endpoint.parse()`](#endpointparse) +- [Special cases](#special-cases) + - [The `data` parameter – set request body directly](#the-data-parameter-%E2%80%93-set-request-body-directly) + - [Set parameters for both the URL/query and the request body](#set-parameters-for-both-the-urlquery-and-the-request-body) +- [LICENSE](#license) + + + +## Usage + + + + + + +
+Browsers
+
+Load @octokit/endpoint directly from cdn.skypack.dev
+
+```html
+<script type="module">
+  import { endpoint } from "https://cdn.skypack.dev/@octokit/endpoint";
+</script>
+```
+
+Node + + +Install with npm install @octokit/endpoint + +```js +const { endpoint } = require("@octokit/endpoint"); +// or: import { endpoint } from "@octokit/endpoint"; +``` + +
+ +Example for [List organization repositories](https://developer.github.com/v3/repos/#list-organization-repositories) + +```js +const requestOptions = endpoint("GET /orgs/{org}/repos", { + headers: { + authorization: "token 0000000000000000000000000000000000000001", + }, + org: "octokit", + type: "private", +}); +``` + +The resulting `requestOptions` looks as follows + +```json +{ + "method": "GET", + "url": "https://api.github.com/orgs/octokit/repos?type=private", + "headers": { + "accept": "application/vnd.github.v3+json", + "authorization": "token 0000000000000000000000000000000000000001", + "user-agent": "octokit/endpoint.js v1.2.3" + } +} +``` + +You can pass `requestOptions` to common request libraries + +```js +const { url, ...options } = requestOptions; +// using with fetch (https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) +fetch(url, options); +// using with request (https://github.com/request/request) +request(requestOptions); +// using with got (https://github.com/sindresorhus/got) +got[options.method](url, options); +// using with axios +axios(requestOptions); +``` + +## API + +### `endpoint(route, options)` or `endpoint(options)` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ route + + String + + If set, it has to be a string consisting of URL and the request method, e.g., GET /orgs/{org}. If it’s set to a URL, only the method defaults to GET. +
+ options.method + + String + + Required unless route is set. Any supported http verb. Defaults to GET. +
+ options.url + + String + + Required unless route is set. A path or full URL which may contain :variable or {variable} placeholders, + e.g., /orgs/{org}/repos. The url is parsed using url-template. +
+ options.baseUrl + + String + + Defaults to https://api.github.com. +
+ options.headers + + Object + + Custom headers. Passed headers are merged with defaults:
+ headers['user-agent'] defaults to octokit-endpoint.js/1.2.3 (where 1.2.3 is the released version).
+ headers['accept'] defaults to application/vnd.github.v3+json.
+
+ options.mediaType.format + + String + + Media type param, such as raw, diff, or text+json. See Media Types. Setting options.mediaType.format will amend the headers.accept value. +
+ options.mediaType.previews + + Array of Strings + + Name of previews, such as mercy, symmetra, or scarlet-witch. See API Previews. If options.mediaType.previews was set as default, the new previews will be merged into the default ones. Setting options.mediaType.previews will amend the headers.accept value. options.mediaType.previews will be merged with an existing array set using .defaults(). +
+ options.data + + Any + + Set request body directly instead of setting it to JSON based on additional parameters. See "The data parameter" below. +
+ options.request + + Object + + Pass custom meta information for the request. The request object will be returned as is. +
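+
+As a rough sketch of how the `mediaType` options above change the generated `accept` header (the owner, repository, and preview name below are illustrative placeholders; the preview name is taken from the examples in this table):
+
+```js
+const { endpoint } = require("@octokit/endpoint");
+
+const options = endpoint("GET /repos/{owner}/{repo}/topics", {
+  owner: "octokit",
+  repo: "endpoint.js",
+  mediaType: { previews: ["mercy"] },
+});
+// options.headers.accept should now be something like
+// "application/vnd.github.mercy-preview+json"
+```
+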
+ +All other options will be passed depending on the `method` and `url` options. + +1. If the option key has a placeholder in the `url`, it will be used as the replacement. For example, if the passed options are `{url: '/orgs/{org}/repos', org: 'foo'}` the returned `options.url` is `https://api.github.com/orgs/foo/repos`. +2. If the `method` is `GET` or `HEAD`, the option is passed as a query parameter. +3. Otherwise, the parameter is passed in the request body as a JSON key. + +**Result** + +`endpoint()` is a synchronous method and returns an object with the following keys: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| key | type | description |
+| --- | --- | --- |
+| `method` | String | The http method. Always lowercase. |
+| `url` | String | The url with placeholders replaced with passed parameters. |
+| `headers` | Object | All header names are lowercased. |
+| `body` | Any | The request body if one is present. Only for PATCH, POST, PUT, DELETE requests. |
+| `request` | Object | Request meta option, it will be returned as it was passed into `endpoint()` |
+ +### `endpoint.defaults()` + +Override or set default options. Example: + +```js +const request = require("request"); +const myEndpoint = require("@octokit/endpoint").defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api/v3", + headers: { + "user-agent": "myApp/1.2.3", + authorization: `token 0000000000000000000000000000000000000001`, + }, + org: "my-project", + per_page: 100, +}); + +request(myEndpoint(`GET /orgs/{org}/repos`)); +``` + +You can call `.defaults()` again on the returned method, the defaults will cascade. + +```js +const myProjectEndpoint = endpoint.defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api/v3", + headers: { + "user-agent": "myApp/1.2.3", + }, + org: "my-project", +}); +const myProjectEndpointWithAuth = myProjectEndpoint.defaults({ + headers: { + authorization: `token 0000000000000000000000000000000000000001`, + }, +}); +``` + +`myProjectEndpointWithAuth` now defaults the `baseUrl`, `headers['user-agent']`, +`org` and `headers['authorization']` on top of `headers['accept']` that is set +by the global default. + +### `endpoint.DEFAULTS` + +The current default options. + +```js +endpoint.DEFAULTS.baseUrl; // https://api.github.com +const myEndpoint = endpoint.defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api/v3", +}); +myEndpoint.DEFAULTS.baseUrl; // https://github-enterprise.acme-inc.com/api/v3 +``` + +### `endpoint.merge(route, options)` or `endpoint.merge(options)` + +Get the defaulted endpoint options, but without parsing them into request options: + +```js +const myProjectEndpoint = endpoint.defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api/v3", + headers: { + "user-agent": "myApp/1.2.3", + }, + org: "my-project", +}); +myProjectEndpoint.merge("GET /orgs/{org}/repos", { + headers: { + authorization: `token 0000000000000000000000000000000000000001`, + }, + org: "my-secret-project", + type: "private", +}); + +// { +// baseUrl: 'https://github-enterprise.acme-inc.com/api/v3', +// method: 'GET', +// url: '/orgs/{org}/repos', +// headers: { +// accept: 'application/vnd.github.v3+json', +// authorization: `token 0000000000000000000000000000000000000001`, +// 'user-agent': 'myApp/1.2.3' +// }, +// org: 'my-secret-project', +// type: 'private' +// } +``` + +### `endpoint.parse()` + +Stateless method to turn endpoint options into request options. Calling +`endpoint(options)` is the same as calling `endpoint.parse(endpoint.merge(options))`. + +## Special cases + + + +### The `data` parameter – set request body directly + +Some endpoints such as [Render a Markdown document in raw mode](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode) don’t have parameters that are sent as request body keys, instead, the request body needs to be set directly. In these cases, set the `data` parameter. + +```js +const options = endpoint("POST /markdown/raw", { + data: "Hello world github/linguist#1 **cool**, and #1!", + headers: { + accept: "text/html;charset=utf-8", + "content-type": "text/plain", + }, +}); + +// options is +// { +// method: 'post', +// url: 'https://api.github.com/markdown/raw', +// headers: { +// accept: 'text/html;charset=utf-8', +// 'content-type': 'text/plain', +// 'user-agent': userAgent +// }, +// body: 'Hello world github/linguist#1 **cool**, and #1!' +// } +``` + +### Set parameters for both the URL/query and the request body + +There are API endpoints that accept both query parameters as well as a body. 
In that case, you need to add the query parameters as templates to `options.url`, as defined in the [RFC 6570 URI Template specification](https://tools.ietf.org/html/rfc6570). + +Example + +```js +endpoint( + "POST https://uploads.github.com/repos/octocat/Hello-World/releases/1/assets{?name,label}", + { + name: "example.zip", + label: "short description", + headers: { + "content-type": "text/plain", + "content-length": 14, + authorization: `token 0000000000000000000000000000000000000001`, + }, + data: "Hello, world!", + } +); +``` + +## LICENSE + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/endpoint/dist-node/index.js b/dist/node_modules/@octokit/endpoint/dist-node/index.js new file mode 100644 index 0000000..004b23b --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-node/index.js @@ -0,0 +1,355 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + endpoint: () => endpoint +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/util/lowercase-keys.js +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +// pkg/dist-src/util/merge-deep.js +var import_is_plain_object = require("is-plain-object"); +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if ((0, import_is_plain_object.isPlainObject)(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; +} + +// pkg/dist-src/util/remove-undefined-properties.js +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; + } + } + return obj; +} + +// pkg/dist-src/merge.js +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? 
{ method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); + } + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter((preview) => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map( + (preview) => preview.replace(/-preview/, "") + ); + return mergedOptions; +} + +// pkg/dist-src/util/add-query-parameters.js +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} + +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +// pkg/dist-src/util/omit.js +function omit(object, keysToOmit) { + return Object.keys(object).filter((option) => !keysToOmit.includes(option)).reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); +} + +// pkg/dist-src/util/url-template.js +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + return part; + }).join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} +function isDefined(value) { + return value !== void 0 && value !== null; +} +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? 
key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + } + ); +} + +// pkg/dist-src/parse.js +function parse(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (preview) => preview.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + } + } + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? { request: options.request } : null + ); +} + +// pkg/dist-src/endpoint-with-defaults.js +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse + }); +} + +// pkg/dist-src/defaults.js +var import_universal_user_agent = require("universal-user-agent"); + +// pkg/dist-src/version.js +var VERSION = "7.0.6"; + +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "", + previews: [] + } +}; + +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + endpoint +}); diff --git a/dist/node_modules/@octokit/endpoint/dist-node/index.js.map b/dist/node_modules/@octokit/endpoint/dist-node/index.js.map new file mode 100644 index 0000000..4c3c17b --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/util/lowercase-keys.js", "../dist-src/util/merge-deep.js", "../dist-src/util/remove-undefined-properties.js", "../dist-src/merge.js", "../dist-src/util/add-query-parameters.js", "../dist-src/util/extract-url-variable-names.js", "../dist-src/util/omit.js", "../dist-src/util/url-template.js", "../dist-src/parse.js", "../dist-src/endpoint-with-defaults.js", "../dist-src/with-defaults.js", "../dist-src/defaults.js", "../dist-src/version.js"], + "sourcesContent": ["import { withDefaults } from \"./with-defaults\";\nimport { DEFAULTS } from \"./defaults\";\nconst endpoint = withDefaults(null, DEFAULTS);\nexport {\n endpoint\n};\n", "function lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\nexport {\n lowercaseKeys\n};\n", "import { isPlainObject } from \"is-plain-object\";\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach((key) => {\n if (isPlainObject(options[key])) {\n if (!(key in defaults))\n Object.assign(result, { [key]: options[key] });\n else\n result[key] = mergeDeep(defaults[key], 
options[key]);\n } else {\n Object.assign(result, { [key]: options[key] });\n }\n });\n return result;\n}\nexport {\n mergeDeep\n};\n", "function removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === void 0) {\n delete obj[key];\n }\n }\n return obj;\n}\nexport {\n removeUndefinedProperties\n};\n", "import { lowercaseKeys } from \"./util/lowercase-keys\";\nimport { mergeDeep } from \"./util/merge-deep\";\nimport { removeUndefinedProperties } from \"./util/remove-undefined-properties\";\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? { method, url } : { url: method }, options);\n } else {\n options = Object.assign({}, route);\n }\n options.headers = lowercaseKeys(options.headers);\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter((preview) => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(\n (preview) => preview.replace(/-preview/, \"\")\n );\n return mergedOptions;\n}\nexport {\n merge\n};\n", "function addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? \"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return url + separator + names.map((name) => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\nexport {\n addQueryParameters\n};\n", "const urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\nexport {\n extractUrlVariableNames\n};\n", "function omit(object, keysToOmit) {\n return Object.keys(object).filter((option) => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\nexport {\n omit\n};\n", "function encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n }).join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function(c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== void 0 && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key], result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(\n encodeValue(operator, value, isKeyOperator(operator) ? key : \"\")\n );\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function(value2) {\n result.push(\n encodeValue(operator, value2, isKeyOperator(operator) ? key : \"\")\n );\n });\n } else {\n Object.keys(value).forEach(function(k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function(value2) {\n tmp.push(encodeValue(operator, value2));\n });\n } else {\n Object.keys(value).forEach(function(k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(\n /\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g,\n function(_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function(variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n }\n );\n}\nexport {\n parseUrl\n};\n", "import { addQueryParameters } from \"./util/add-query-parameters\";\nimport { extractUrlVariableNames } from \"./util/extract-url-variable-names\";\nimport { omit } from \"./util/omit\";\nimport { parseUrl } from \"./util/url-template\";\nfunction parse(options) {\n let method = options.method.toUpperCase();\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"mediaType\"\n ]);\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n headers.accept = headers.accept.split(/,/).map(\n (preview) => preview.replace(\n /application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/,\n `application/vnd$1$2.${options.mediaType.format}`\n )\n ).join(\",\");\n }\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n }\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n }\n }\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n return Object.assign(\n { method, url, headers },\n typeof body !== \"undefined\" ? { body } : null,\n options.request ? 
{ request: options.request } : null\n );\n}\nexport {\n parse\n};\n", "import { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\nexport {\n endpointWithDefaults\n};\n", "import { endpointWithDefaults } from \"./endpoint-with-defaults\";\nimport { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\nexport {\n withDefaults\n};\n", "import { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nconst userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`;\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\nexport {\n DEFAULTS\n};\n", "const VERSION = \"7.0.6\";\nexport {\n VERSION\n};\n"], + "mappings": ";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,SAAS,cAAc,QAAQ;AAC7B,MAAI,CAAC,QAAQ;AACX,WAAO,CAAC;AAAA,EACV;AACA,SAAO,OAAO,KAAK,MAAM,EAAE,OAAO,CAAC,QAAQ,QAAQ;AACjD,WAAO,IAAI,YAAY,CAAC,IAAI,OAAO,GAAG;AACtC,WAAO;AAAA,EACT,GAAG,CAAC,CAAC;AACP;;;ACRA,6BAA8B;AAC9B,SAAS,UAAU,UAAU,SAAS;AACpC,QAAM,SAAS,OAAO,OAAO,CAAC,GAAG,QAAQ;AACzC,SAAO,KAAK,OAAO,EAAE,QAAQ,CAAC,QAAQ;AACpC,YAAI,sCAAc,QAAQ,GAAG,CAAC,GAAG;AAC/B,UAAI,EAAE,OAAO;AACX,eAAO,OAAO,QAAQ,EAAE,CAAC,GAAG,GAAG,QAAQ,GAAG,EAAE,CAAC;AAAA;AAE7C,eAAO,GAAG,IAAI,UAAU,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAC;AAAA,IACvD,OAAO;AACL,aAAO,OAAO,QAAQ,EAAE,CAAC,GAAG,GAAG,QAAQ,GAAG,EAAE,CAAC;AAAA,IAC/C;AAAA,EACF,CAAC;AACD,SAAO;AACT;;;ACdA,SAAS,0BAA0B,KAAK;AACtC,aAAW,OAAO,KAAK;AACrB,QAAI,IAAI,GAAG,MAAM,QAAQ;AACvB,aAAO,IAAI,GAAG;AAAA,IAChB;AAAA,EACF;AACA,SAAO;AACT;;;ACJA,SAAS,MAAM,UAAU,OAAO,SAAS;AACvC,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI,CAAC,QAAQ,GAAG,IAAI,MAAM,MAAM,GAAG;AACnC,cAAU,OAAO,OAAO,MAAM,EAAE,QAAQ,IAAI,IAAI,EAAE,KAAK,OAAO,GAAG,OAAO;AAAA,EAC1E,OAAO;AACL,cAAU,OAAO,OAAO,CAAC,GAAG,KAAK;AAAA,EACnC;AACA,UAAQ,UAAU,cAAc,QAAQ,OAAO;AAC/C,4BAA0B,OAAO;AACjC,4BAA0B,QAAQ,OAAO;AACzC,QAAM,gBAAgB,UAAU,YAAY,CAAC,GAAG,OAAO;AACvD,MAAI,YAAY,SAAS,UAAU,SAAS,QAAQ;AAClD,kBAAc,UAAU,WAAW,SAAS,UAAU,SAAS,OAAO,CAAC,YAAY,CAAC,cAAc,UAAU,SAAS,SAAS,OAAO,CAAC,EAAE,OAAO,cAAc,UAAU,QAAQ;AAAA,EACjL;AACA,gBAAc,UAAU,WAAW,cAAc,UAAU,SAAS;AAAA,IAClE,CAAC,YAAY,QAAQ,QAAQ,YAAY,EAAE;AAAA,EAC7C;AACA,SAAO;AACT;;;ACrBA,SAAS,mBAAmB,KAAK,YAAY;AAC3C,QAAM,YAAY,KAAK,KAAK,GAAG,IAAI,MAAM;AACzC,QAAM,QAAQ,OAAO,KAAK,UAAU;AACpC,MAAI,MAAM,WAAW,GAAG;AACtB,WAAO;AAAA,EACT;AACA,SAAO,MAAM,YAAY,MAAM,IAAI,CAAC,SAAS;AAC3C,QAAI,SAAS,KAAK;AAChB,aAAO,OAAO,WAAW,EAAE,MAAM,GAAG,EAAE,IAAI,kBAAkB,EAAE,KAAK,GAAG;AAAA,IACxE;AACA,WAAO,GAAG,QAAQ,mBAAmB,WAAW,IAAI,CAAC;AAAA,EACvD,CAAC,EAAE,KAAK,GAAG;AACb;;;ACZA,IAAM,mBAAmB;AACzB,SAAS,eAAe,cAAc;AACpC,SAAO,aAAa,QAAQ,cAAc,EAAE,EAAE,MAAM,GAAG;AACzD;AACA,SAAS,wBAAwB,KAAK;AACpC,QAAM,UAAU,IAAI,MAAM,gBAAgB;AAC1C,MAAI,CAAC,SAAS;AACZ,WAAO,CAAC;AAAA,EACV;AACA,SAAO,QAAQ,IAAI,cAAc,EAAE,OAAO,CAAC,GAAG,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;AACrE;;;ACVA,SAAS,KAAK,QAAQ,YAAY;AAChC,SAAO,OAAO,KAAK,MAAM,EAAE,OAAO,CAAC,WAAW,CAAC,WAAW,SAAS,MAAM,CAAC,EAAE,OAAO,CAAC,KAAK,QAAQ;AAC/F,QAAI,GAAG,IAAI,OAAO,GAAG;AACrB,WAAO;AAAA,EACT,GAAG,CAAC,CAAC;AACP;;;ACLA,SAAS,eAAe,KAAK;AAC3B,SAAO,
IAAI,MAAM,oBAAoB,EAAE,IAAI,SAAS,MAAM;AACxD,QAAI,CAAC,eAAe,KAAK,IAAI,GAAG;AAC9B,aAAO,UAAU,IAAI,EAAE,QAAQ,QAAQ,GAAG,EAAE,QAAQ,QAAQ,GAAG;AAAA,IACjE;AACA,WAAO;AAAA,EACT,CAAC,EAAE,KAAK,EAAE;AACZ;AACA,SAAS,iBAAiB,KAAK;AAC7B,SAAO,mBAAmB,GAAG,EAAE,QAAQ,YAAY,SAAS,GAAG;AAC7D,WAAO,MAAM,EAAE,WAAW,CAAC,EAAE,SAAS,EAAE,EAAE,YAAY;AAAA,EACxD,CAAC;AACH;AACA,SAAS,YAAY,UAAU,OAAO,KAAK;AACzC,UAAQ,aAAa,OAAO,aAAa,MAAM,eAAe,KAAK,IAAI,iBAAiB,KAAK;AAC7F,MAAI,KAAK;AACP,WAAO,iBAAiB,GAAG,IAAI,MAAM;AAAA,EACvC,OAAO;AACL,WAAO;AAAA,EACT;AACF;AACA,SAAS,UAAU,OAAO;AACxB,SAAO,UAAU,UAAU,UAAU;AACvC;AACA,SAAS,cAAc,UAAU;AAC/B,SAAO,aAAa,OAAO,aAAa,OAAO,aAAa;AAC9D;AACA,SAAS,UAAU,SAAS,UAAU,KAAK,UAAU;AACnD,MAAI,QAAQ,QAAQ,GAAG,GAAG,SAAS,CAAC;AACpC,MAAI,UAAU,KAAK,KAAK,UAAU,IAAI;AACpC,QAAI,OAAO,UAAU,YAAY,OAAO,UAAU,YAAY,OAAO,UAAU,WAAW;AACxF,cAAQ,MAAM,SAAS;AACvB,UAAI,YAAY,aAAa,KAAK;AAChC,gBAAQ,MAAM,UAAU,GAAG,SAAS,UAAU,EAAE,CAAC;AAAA,MACnD;AACA,aAAO;AAAA,QACL,YAAY,UAAU,OAAO,cAAc,QAAQ,IAAI,MAAM,EAAE;AAAA,MACjE;AAAA,IACF,OAAO;AACL,UAAI,aAAa,KAAK;AACpB,YAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,gBAAM,OAAO,SAAS,EAAE,QAAQ,SAAS,QAAQ;AAC/C,mBAAO;AAAA,cACL,YAAY,UAAU,QAAQ,cAAc,QAAQ,IAAI,MAAM,EAAE;AAAA,YAClE;AAAA,UACF,CAAC;AAAA,QACH,OAAO;AACL,iBAAO,KAAK,KAAK,EAAE,QAAQ,SAAS,GAAG;AACrC,gBAAI,UAAU,MAAM,CAAC,CAAC,GAAG;AACvB,qBAAO,KAAK,YAAY,UAAU,MAAM,CAAC,GAAG,CAAC,CAAC;AAAA,YAChD;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,OAAO;AACL,cAAM,MAAM,CAAC;AACb,YAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,gBAAM,OAAO,SAAS,EAAE,QAAQ,SAAS,QAAQ;AAC/C,gBAAI,KAAK,YAAY,UAAU,MAAM,CAAC;AAAA,UACxC,CAAC;AAAA,QACH,OAAO;AACL,iBAAO,KAAK,KAAK,EAAE,QAAQ,SAAS,GAAG;AACrC,gBAAI,UAAU,MAAM,CAAC,CAAC,GAAG;AACvB,kBAAI,KAAK,iBAAiB,CAAC,CAAC;AAC5B,kBAAI,KAAK,YAAY,UAAU,MAAM,CAAC,EAAE,SAAS,CAAC,CAAC;AAAA,YACrD;AAAA,UACF,CAAC;AAAA,QACH;AACA,YAAI,cAAc,QAAQ,GAAG;AAC3B,iBAAO,KAAK,iBAAiB,GAAG,IAAI,MAAM,IAAI,KAAK,GAAG,CAAC;AAAA,QACzD,WAAW,IAAI,WAAW,GAAG;AAC3B,iBAAO,KAAK,IAAI,KAAK,GAAG,CAAC;AAAA,QAC3B;AAAA,MACF;AAAA,IACF;AAAA,EACF,OAAO;AACL,QAAI,aAAa,KAAK;AACpB,UAAI,UAAU,KAAK,GAAG;AACpB,eAAO,KAAK,iBAAiB,GAAG,CAAC;AAAA,MACnC;AAAA,IACF,WAAW,UAAU,OAAO,aAAa,OAAO,aAAa,MAAM;AACjE,aAAO,KAAK,iBAAiB,GAAG,IAAI,GAAG;AAAA,IACzC,WAAW,UAAU,IAAI;AACvB,aAAO,KAAK,EAAE;AAAA,IAChB;AAAA,EACF;AACA,SAAO;AACT;AACA,SAAS,SAAS,UAAU;AAC1B,SAAO;AAAA,IACL,QAAQ,OAAO,KAAK,MAAM,QAAQ;AAAA,EACpC;AACF;AACA,SAAS,OAAO,UAAU,SAAS;AACjC,MAAI,YAAY,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,GAAG;AAClD,SAAO,SAAS;AAAA,IACd;AAAA,IACA,SAAS,GAAG,YAAY,SAAS;AAC/B,UAAI,YAAY;AACd,YAAI,WAAW;AACf,cAAM,SAAS,CAAC;AAChB,YAAI,UAAU,QAAQ,WAAW,OAAO,CAAC,CAAC,MAAM,IAAI;AAClD,qBAAW,WAAW,OAAO,CAAC;AAC9B,uBAAa,WAAW,OAAO,CAAC;AAAA,QAClC;AACA,mBAAW,MAAM,IAAI,EAAE,QAAQ,SAAS,UAAU;AAChD,cAAI,MAAM,4BAA4B,KAAK,QAAQ;AACnD,iBAAO,KAAK,UAAU,SAAS,UAAU,IAAI,CAAC,GAAG,IAAI,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC;AAAA,QACpE,CAAC;AACD,YAAI,YAAY,aAAa,KAAK;AAChC,cAAI,YAAY;AAChB,cAAI,aAAa,KAAK;AACpB,wBAAY;AAAA,UACd,WAAW,aAAa,KAAK;AAC3B,wBAAY;AAAA,UACd;AACA,kBAAQ,OAAO,WAAW,IAAI,WAAW,MAAM,OAAO,KAAK,SAAS;AAAA,QACtE,OAAO;AACL,iBAAO,OAAO,KAAK,GAAG;AAAA,QACxB;AAAA,MACF,OAAO;AACL,eAAO,eAAe,OAAO;AAAA,MAC/B;AAAA,IACF;AAAA,EACF;AACF;;;ACxHA,SAAS,MAAM,SAAS;AACtB,MAAI,SAAS,QAAQ,OAAO,YAAY;AACxC,MAAI,OAAO,QAAQ,OAAO,KAAK,QAAQ,gBAAgB,MAAM;AAC7D,MAAI,UAAU,OAAO,OAAO,CAAC,GAAG,QAAQ,OAAO;AAC/C,MAAI;AACJ,MAAI,aAAa,KAAK,SAAS;AAAA,IAC7B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACD,QAAM,mBAAmB,wBAAwB,GAAG;AACpD,QAAM,SAAS,GAAG,EAAE,OAAO,UAAU;AACrC,MAAI,CAAC,QAAQ,KAAK,GAAG,GAAG;AACtB,UAAM,QAAQ,UAAU;AAAA,EAC1B;AACA,QAAM,oBAAoB,OAAO,KAAK,OAAO,EAAE,OAAO,CAAC,WAAW,iBAAiB,SAAS,MAAM,CAAC,EAAE,OAAO,SAAS;AACrH,QAAM,sBAAsB,KAAK,YAAY,iBAAiB;A
AC9D,QAAM,kBAAkB,6BAA6B,KAAK,QAAQ,MAAM;AACxE,MAAI,CAAC,iBAAiB;AACpB,QAAI,QAAQ,UAAU,QAAQ;AAC5B,cAAQ,SAAS,QAAQ,OAAO,MAAM,GAAG,EAAE;AAAA,QACzC,CAAC,YAAY,QAAQ;AAAA,UACnB;AAAA,UACA,uBAAuB,QAAQ,UAAU;AAAA,QAC3C;AAAA,MACF,EAAE,KAAK,GAAG;AAAA,IACZ;AACA,QAAI,QAAQ,UAAU,SAAS,QAAQ;AACrC,YAAM,2BAA2B,QAAQ,OAAO,MAAM,qBAAqB,KAAK,CAAC;AACjF,cAAQ,SAAS,yBAAyB,OAAO,QAAQ,UAAU,QAAQ,EAAE,IAAI,CAAC,YAAY;AAC5F,cAAM,SAAS,QAAQ,UAAU,SAAS,IAAI,QAAQ,UAAU,WAAW;AAC3E,eAAO,0BAA0B,kBAAkB;AAAA,MACrD,CAAC,EAAE,KAAK,GAAG;AAAA,IACb;AAAA,EACF;AACA,MAAI,CAAC,OAAO,MAAM,EAAE,SAAS,MAAM,GAAG;AACpC,UAAM,mBAAmB,KAAK,mBAAmB;AAAA,EACnD,OAAO;AACL,QAAI,UAAU,qBAAqB;AACjC,aAAO,oBAAoB;AAAA,IAC7B,OAAO;AACL,UAAI,OAAO,KAAK,mBAAmB,EAAE,QAAQ;AAC3C,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,QAAQ,cAAc,KAAK,OAAO,SAAS,aAAa;AAC3D,YAAQ,cAAc,IAAI;AAAA,EAC5B;AACA,MAAI,CAAC,SAAS,KAAK,EAAE,SAAS,MAAM,KAAK,OAAO,SAAS,aAAa;AACpE,WAAO;AAAA,EACT;AACA,SAAO,OAAO;AAAA,IACZ,EAAE,QAAQ,KAAK,QAAQ;AAAA,IACvB,OAAO,SAAS,cAAc,EAAE,KAAK,IAAI;AAAA,IACzC,QAAQ,UAAU,EAAE,SAAS,QAAQ,QAAQ,IAAI;AAAA,EACnD;AACF;;;AC9DA,SAAS,qBAAqB,UAAU,OAAO,SAAS;AACtD,SAAO,MAAM,MAAM,UAAU,OAAO,OAAO,CAAC;AAC9C;;;ACDA,SAAS,aAAa,aAAa,aAAa;AAC9C,QAAMA,YAAW,MAAM,aAAa,WAAW;AAC/C,QAAMC,YAAW,qBAAqB,KAAK,MAAMD,SAAQ;AACzD,SAAO,OAAO,OAAOC,WAAU;AAAA,IAC7B,UAAAD;AAAA,IACA,UAAU,aAAa,KAAK,MAAMA,SAAQ;AAAA,IAC1C,OAAO,MAAM,KAAK,MAAMA,SAAQ;AAAA,IAChC;AAAA,EACF,CAAC;AACH;;;ACZA,kCAA6B;;;ACA7B,IAAM,UAAU;;;ADEhB,IAAM,YAAY,uBAAuB,eAAW,0CAAa;AACjE,IAAM,WAAW;AAAA,EACf,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,SAAS;AAAA,IACP,QAAQ;AAAA,IACR,cAAc;AAAA,EAChB;AAAA,EACA,WAAW;AAAA,IACT,QAAQ;AAAA,IACR,UAAU,CAAC;AAAA,EACb;AACF;;;AZZA,IAAM,WAAW,aAAa,MAAM,QAAQ;", + "names": ["DEFAULTS", "endpoint"] +} diff --git a/dist/node_modules/@octokit/endpoint/dist-src/defaults.js b/dist/node_modules/@octokit/endpoint/dist-src/defaults.js new file mode 100644 index 0000000..69da4dd --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/defaults.js @@ -0,0 +1,18 @@ +import { getUserAgent } from "universal-user-agent"; +import { VERSION } from "./version"; +const userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`; +const DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "", + previews: [] + } +}; +export { + DEFAULTS +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-src/endpoint-with-defaults.js b/dist/node_modules/@octokit/endpoint/dist-src/endpoint-with-defaults.js new file mode 100644 index 0000000..a96e8ce --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/endpoint-with-defaults.js @@ -0,0 +1,8 @@ +import { merge } from "./merge"; +import { parse } from "./parse"; +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} +export { + endpointWithDefaults +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-src/index.js b/dist/node_modules/@octokit/endpoint/dist-src/index.js new file mode 100644 index 0000000..b1d45e3 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/index.js @@ -0,0 +1,6 @@ +import { withDefaults } from "./with-defaults"; +import { DEFAULTS } from "./defaults"; +const endpoint = withDefaults(null, DEFAULTS); +export { + endpoint +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-src/merge.js b/dist/node_modules/@octokit/endpoint/dist-src/merge.js new file mode 100644 index 0000000..ed7559c --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/merge.js @@ -0,0 +1,25 @@ 
+import { lowercaseKeys } from "./util/lowercase-keys"; +import { mergeDeep } from "./util/merge-deep"; +import { removeUndefinedProperties } from "./util/remove-undefined-properties"; +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); + } + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter((preview) => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map( + (preview) => preview.replace(/-preview/, "") + ); + return mergedOptions; +} +export { + merge +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-src/parse.js b/dist/node_modules/@octokit/endpoint/dist-src/parse.js new file mode 100644 index 0000000..ca1d638 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/parse.js @@ -0,0 +1,68 @@ +import { addQueryParameters } from "./util/add-query-parameters"; +import { extractUrlVariableNames } from "./util/extract-url-variable-names"; +import { omit } from "./util/omit"; +import { parseUrl } from "./util/url-template"; +function parse(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (preview) => preview.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + } + } + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? 
{ body } : null, + options.request ? { request: options.request } : null + ); +} +export { + parse +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-src/util/add-query-parameters.js b/dist/node_modules/@octokit/endpoint/dist-src/util/add-query-parameters.js new file mode 100644 index 0000000..6bdf736 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/util/add-query-parameters.js @@ -0,0 +1,16 @@ +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} +export { + addQueryParameters +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-src/util/extract-url-variable-names.js b/dist/node_modules/@octokit/endpoint/dist-src/util/extract-url-variable-names.js new file mode 100644 index 0000000..1d75bb9 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/util/extract-url-variable-names.js @@ -0,0 +1,14 @@ +const urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} +export { + extractUrlVariableNames +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-src/util/lowercase-keys.js b/dist/node_modules/@octokit/endpoint/dist-src/util/lowercase-keys.js new file mode 100644 index 0000000..9bd7bb9 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/util/lowercase-keys.js @@ -0,0 +1,12 @@ +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} +export { + lowercaseKeys +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-src/util/merge-deep.js b/dist/node_modules/@octokit/endpoint/dist-src/util/merge-deep.js new file mode 100644 index 0000000..1185e48 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/util/merge-deep.js @@ -0,0 +1,18 @@ +import { isPlainObject } from "is-plain-object"; +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; +} +export { + mergeDeep +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-src/util/omit.js b/dist/node_modules/@octokit/endpoint/dist-src/util/omit.js new file mode 100644 index 0000000..11b467b --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/util/omit.js @@ -0,0 +1,9 @@ +function omit(object, keysToOmit) { + return Object.keys(object).filter((option) => !keysToOmit.includes(option)).reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); +} +export { + omit +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-src/util/remove-undefined-properties.js b/dist/node_modules/@octokit/endpoint/dist-src/util/remove-undefined-properties.js new file mode 100644 
index 0000000..8653027 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/util/remove-undefined-properties.js @@ -0,0 +1,11 @@ +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; + } + } + return obj; +} +export { + removeUndefinedProperties +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-src/util/url-template.js b/dist/node_modules/@octokit/endpoint/dist-src/util/url-template.js new file mode 100644 index 0000000..e1abdd5 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/util/url-template.js @@ -0,0 +1,128 @@ +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + return part; + }).join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} +function isDefined(value) { + return value !== void 0 && value !== null; +} +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? 
key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? 
operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + } + ); +} +export { + parseUrl +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-src/version.js b/dist/node_modules/@octokit/endpoint/dist-src/version.js new file mode 100644 index 0000000..e711fc1 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "7.0.6"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-src/with-defaults.js b/dist/node_modules/@octokit/endpoint/dist-src/with-defaults.js new file mode 100644 index 0000000..6d508f0 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-src/with-defaults.js @@ -0,0 +1,16 @@ +import { endpointWithDefaults } from "./endpoint-with-defaults"; +import { merge } from "./merge"; +import { parse } from "./parse"; +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS = merge(oldDefaults, newDefaults); + const endpoint = endpointWithDefaults.bind(null, DEFAULTS); + return Object.assign(endpoint, { + DEFAULTS, + defaults: withDefaults.bind(null, DEFAULTS), + merge: merge.bind(null, DEFAULTS), + parse + }); +} +export { + withDefaults +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-types/defaults.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/defaults.d.ts new file mode 100644 index 0000000..d65e889 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/defaults.d.ts @@ -0,0 +1,2 @@ +import type { EndpointDefaults } from "@octokit/types"; +export declare const DEFAULTS: EndpointDefaults; diff --git a/dist/node_modules/@octokit/endpoint/dist-types/endpoint-with-defaults.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/endpoint-with-defaults.d.ts new file mode 100644 index 0000000..c36732e --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/endpoint-with-defaults.d.ts @@ -0,0 +1,3 @@ +import type { EndpointOptions, RequestParameters, Route } from "@octokit/types"; +import { DEFAULTS } from "./defaults"; +export declare function endpointWithDefaults(defaults: typeof DEFAULTS, route: Route | EndpointOptions, options?: RequestParameters): import("@octokit/types").RequestOptions; diff --git a/dist/node_modules/@octokit/endpoint/dist-types/index.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/index.d.ts new file mode 100644 index 0000000..1ede136 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/index.d.ts @@ -0,0 +1 @@ +export declare const endpoint: import("@octokit/types").EndpointInterface; diff --git a/dist/node_modules/@octokit/endpoint/dist-types/merge.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/merge.d.ts new file mode 100644 index 0000000..f583981 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/merge.d.ts @@ -0,0 +1,2 @@ +import type { EndpointDefaults, RequestParameters, Route } from "@octokit/types"; +export declare function merge(defaults: EndpointDefaults | null, route?: Route | RequestParameters, options?: RequestParameters): EndpointDefaults; diff --git a/dist/node_modules/@octokit/endpoint/dist-types/parse.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/parse.d.ts new file mode 100644 index 0000000..5d0927a --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/parse.d.ts @@ -0,0 +1,2 @@ +import type { EndpointDefaults, RequestOptions } from "@octokit/types"; +export declare function parse(options: EndpointDefaults): RequestOptions; diff --git 
a/dist/node_modules/@octokit/endpoint/dist-types/util/add-query-parameters.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/util/add-query-parameters.d.ts new file mode 100644 index 0000000..4b192ac --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/util/add-query-parameters.d.ts @@ -0,0 +1,4 @@ +export declare function addQueryParameters(url: string, parameters: { + [x: string]: string | undefined; + q?: string; +}): string; diff --git a/dist/node_modules/@octokit/endpoint/dist-types/util/extract-url-variable-names.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/util/extract-url-variable-names.d.ts new file mode 100644 index 0000000..93586d4 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/util/extract-url-variable-names.d.ts @@ -0,0 +1 @@ +export declare function extractUrlVariableNames(url: string): string[]; diff --git a/dist/node_modules/@octokit/endpoint/dist-types/util/lowercase-keys.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/util/lowercase-keys.d.ts new file mode 100644 index 0000000..1daf307 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/util/lowercase-keys.d.ts @@ -0,0 +1,5 @@ +export declare function lowercaseKeys(object?: { + [key: string]: any; +}): { + [key: string]: any; +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-types/util/merge-deep.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/util/merge-deep.d.ts new file mode 100644 index 0000000..914411c --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/util/merge-deep.d.ts @@ -0,0 +1 @@ +export declare function mergeDeep(defaults: any, options: any): object; diff --git a/dist/node_modules/@octokit/endpoint/dist-types/util/omit.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/util/omit.d.ts new file mode 100644 index 0000000..06927d6 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/util/omit.d.ts @@ -0,0 +1,5 @@ +export declare function omit(object: { + [key: string]: any; +}, keysToOmit: string[]): { + [key: string]: any; +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-types/util/remove-undefined-properties.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/util/remove-undefined-properties.d.ts new file mode 100644 index 0000000..92d8d85 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/util/remove-undefined-properties.d.ts @@ -0,0 +1 @@ +export declare function removeUndefinedProperties(obj: any): any; diff --git a/dist/node_modules/@octokit/endpoint/dist-types/util/url-template.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/util/url-template.d.ts new file mode 100644 index 0000000..5d967ca --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/util/url-template.d.ts @@ -0,0 +1,3 @@ +export declare function parseUrl(template: string): { + expand: (context: object) => string; +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-types/version.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/version.d.ts new file mode 100644 index 0000000..12819c5 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "7.0.6"; diff --git a/dist/node_modules/@octokit/endpoint/dist-types/with-defaults.d.ts b/dist/node_modules/@octokit/endpoint/dist-types/with-defaults.d.ts new file mode 100644 index 0000000..e09354c --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-types/with-defaults.d.ts @@ -0,0 +1,2 @@ +import type { EndpointInterface, RequestParameters, EndpointDefaults } from 
"@octokit/types"; +export declare function withDefaults(oldDefaults: EndpointDefaults | null, newDefaults: RequestParameters): EndpointInterface; diff --git a/dist/node_modules/@octokit/endpoint/dist-web/index.js b/dist/node_modules/@octokit/endpoint/dist-web/index.js new file mode 100644 index 0000000..d2e98ea --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-web/index.js @@ -0,0 +1,328 @@ +// pkg/dist-src/util/lowercase-keys.js +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +// pkg/dist-src/util/merge-deep.js +import { isPlainObject } from "is-plain-object"; +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; +} + +// pkg/dist-src/util/remove-undefined-properties.js +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; + } + } + return obj; +} + +// pkg/dist-src/merge.js +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); + } + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter((preview) => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map( + (preview) => preview.replace(/-preview/, "") + ); + return mergedOptions; +} + +// pkg/dist-src/util/add-query-parameters.js +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? 
"&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} + +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +// pkg/dist-src/util/omit.js +function omit(object, keysToOmit) { + return Object.keys(object).filter((option) => !keysToOmit.includes(option)).reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); +} + +// pkg/dist-src/util/url-template.js +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + return part; + }).join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} +function isDefined(value) { + return value !== void 0 && value !== null; +} +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? 
key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + } + ); +} + +// pkg/dist-src/parse.js +function parse(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (preview) => preview.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + } + } + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? { request: options.request } : null + ); +} + +// pkg/dist-src/endpoint-with-defaults.js +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse + }); +} + +// pkg/dist-src/defaults.js +import { getUserAgent } from "universal-user-agent"; + +// pkg/dist-src/version.js +var VERSION = "7.0.6"; + +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "", + previews: [] + } +}; + +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); +export { + endpoint +}; diff --git a/dist/node_modules/@octokit/endpoint/dist-web/index.js.map b/dist/node_modules/@octokit/endpoint/dist-web/index.js.map new file mode 100644 index 0000000..d0f95a3 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/util/lowercase-keys.js", "../dist-src/util/merge-deep.js", "../dist-src/util/remove-undefined-properties.js", "../dist-src/merge.js", "../dist-src/util/add-query-parameters.js", "../dist-src/util/extract-url-variable-names.js", "../dist-src/util/omit.js", "../dist-src/util/url-template.js", "../dist-src/parse.js", "../dist-src/endpoint-with-defaults.js", "../dist-src/with-defaults.js", "../dist-src/defaults.js", "../dist-src/version.js", "../dist-src/index.js"], + "sourcesContent": ["function lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\nexport {\n lowercaseKeys\n};\n", "import { isPlainObject } from \"is-plain-object\";\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach((key) => {\n if (isPlainObject(options[key])) {\n if (!(key in defaults))\n Object.assign(result, { [key]: options[key] });\n else\n result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, { [key]: options[key] });\n }\n });\n return result;\n}\nexport {\n mergeDeep\n};\n", "function removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === void 0) {\n delete obj[key];\n }\n }\n return obj;\n}\nexport {\n 
removeUndefinedProperties\n};\n", "import { lowercaseKeys } from \"./util/lowercase-keys\";\nimport { mergeDeep } from \"./util/merge-deep\";\nimport { removeUndefinedProperties } from \"./util/remove-undefined-properties\";\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? { method, url } : { url: method }, options);\n } else {\n options = Object.assign({}, route);\n }\n options.headers = lowercaseKeys(options.headers);\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter((preview) => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(\n (preview) => preview.replace(/-preview/, \"\")\n );\n return mergedOptions;\n}\nexport {\n merge\n};\n", "function addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? \"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return url + separator + names.map((name) => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\nexport {\n addQueryParameters\n};\n", "const urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\nexport {\n extractUrlVariableNames\n};\n", "function omit(object, keysToOmit) {\n return Object.keys(object).filter((option) => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\nexport {\n omit\n};\n", "function encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n }).join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function(c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? encodeReserved(value) : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== void 0 && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key], result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(\n encodeValue(operator, value, isKeyOperator(operator) ? 
key : \"\")\n );\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function(value2) {\n result.push(\n encodeValue(operator, value2, isKeyOperator(operator) ? key : \"\")\n );\n });\n } else {\n Object.keys(value).forEach(function(k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function(value2) {\n tmp.push(encodeValue(operator, value2));\n });\n } else {\n Object.keys(value).forEach(function(k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(\n /\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g,\n function(_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function(variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n }\n );\n}\nexport {\n parseUrl\n};\n", "import { addQueryParameters } from \"./util/add-query-parameters\";\nimport { extractUrlVariableNames } from \"./util/extract-url-variable-names\";\nimport { omit } from \"./util/omit\";\nimport { parseUrl } from \"./util/url-template\";\nfunction parse(options) {\n let method = options.method.toUpperCase();\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"mediaType\"\n ]);\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n headers.accept = headers.accept.split(/,/).map(\n (preview) => preview.replace(\n /application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/,\n `application/vnd$1$2.${options.mediaType.format}`\n )\n ).join(\",\");\n }\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n }\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n }\n }\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n return Object.assign(\n { method, url, headers },\n typeof body !== \"undefined\" ? { body } : null,\n options.request ? 
{ request: options.request } : null\n );\n}\nexport {\n parse\n};\n", "import { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\nexport {\n endpointWithDefaults\n};\n", "import { endpointWithDefaults } from \"./endpoint-with-defaults\";\nimport { merge } from \"./merge\";\nimport { parse } from \"./parse\";\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\nexport {\n withDefaults\n};\n", "import { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nconst userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`;\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\nexport {\n DEFAULTS\n};\n", "const VERSION = \"7.0.6\";\nexport {\n VERSION\n};\n", "import { withDefaults } from \"./with-defaults\";\nimport { DEFAULTS } from \"./defaults\";\nconst endpoint = withDefaults(null, DEFAULTS);\nexport {\n endpoint\n};\n"], + "mappings": ";AAAA,SAAS,cAAc,QAAQ;AAC7B,MAAI,CAAC,QAAQ;AACX,WAAO,CAAC;AAAA,EACV;AACA,SAAO,OAAO,KAAK,MAAM,EAAE,OAAO,CAAC,QAAQ,QAAQ;AACjD,WAAO,IAAI,YAAY,CAAC,IAAI,OAAO,GAAG;AACtC,WAAO;AAAA,EACT,GAAG,CAAC,CAAC;AACP;;;ACRA,SAAS,qBAAqB;AAC9B,SAAS,UAAU,UAAU,SAAS;AACpC,QAAM,SAAS,OAAO,OAAO,CAAC,GAAG,QAAQ;AACzC,SAAO,KAAK,OAAO,EAAE,QAAQ,CAAC,QAAQ;AACpC,QAAI,cAAc,QAAQ,GAAG,CAAC,GAAG;AAC/B,UAAI,EAAE,OAAO;AACX,eAAO,OAAO,QAAQ,EAAE,CAAC,GAAG,GAAG,QAAQ,GAAG,EAAE,CAAC;AAAA;AAE7C,eAAO,GAAG,IAAI,UAAU,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAC;AAAA,IACvD,OAAO;AACL,aAAO,OAAO,QAAQ,EAAE,CAAC,GAAG,GAAG,QAAQ,GAAG,EAAE,CAAC;AAAA,IAC/C;AAAA,EACF,CAAC;AACD,SAAO;AACT;;;ACdA,SAAS,0BAA0B,KAAK;AACtC,aAAW,OAAO,KAAK;AACrB,QAAI,IAAI,GAAG,MAAM,QAAQ;AACvB,aAAO,IAAI,GAAG;AAAA,IAChB;AAAA,EACF;AACA,SAAO;AACT;;;ACJA,SAAS,MAAM,UAAU,OAAO,SAAS;AACvC,MAAI,OAAO,UAAU,UAAU;AAC7B,QAAI,CAAC,QAAQ,GAAG,IAAI,MAAM,MAAM,GAAG;AACnC,cAAU,OAAO,OAAO,MAAM,EAAE,QAAQ,IAAI,IAAI,EAAE,KAAK,OAAO,GAAG,OAAO;AAAA,EAC1E,OAAO;AACL,cAAU,OAAO,OAAO,CAAC,GAAG,KAAK;AAAA,EACnC;AACA,UAAQ,UAAU,cAAc,QAAQ,OAAO;AAC/C,4BAA0B,OAAO;AACjC,4BAA0B,QAAQ,OAAO;AACzC,QAAM,gBAAgB,UAAU,YAAY,CAAC,GAAG,OAAO;AACvD,MAAI,YAAY,SAAS,UAAU,SAAS,QAAQ;AAClD,kBAAc,UAAU,WAAW,SAAS,UAAU,SAAS,OAAO,CAAC,YAAY,CAAC,cAAc,UAAU,SAAS,SAAS,OAAO,CAAC,EAAE,OAAO,cAAc,UAAU,QAAQ;AAAA,EACjL;AACA,gBAAc,UAAU,WAAW,cAAc,UAAU,SAAS;AAAA,IAClE,CAAC,YAAY,QAAQ,QAAQ,YAAY,EAAE;AAAA,EAC7C;AACA,SAAO;AACT;;;ACrBA,SAAS,mBAAmB,KAAK,YAAY;AAC3C,QAAM,YAAY,KAAK,KAAK,GAAG,IAAI,MAAM;AACzC,QAAM,QAAQ,OAAO,KAAK,UAAU;AACpC,MAAI,MAAM,WAAW,GAAG;AACtB,WAAO;AAAA,EACT;AACA,SAAO,MAAM,YAAY,MAAM,IAAI,CAAC,SAAS;AAC3C,QAAI,SAAS,KAAK;AAChB,aAAO,OAAO,WAAW,EAAE,MAAM,GAAG,EAAE,IAAI,kBAAkB,EAAE,KAAK,GAAG;AAAA,IACxE;AACA,WAAO,GAAG,QAAQ,mBAAmB,WAAW,IAAI,CAAC;AAAA,EACvD,CAAC,EAAE,KAAK,GAAG;AACb;;;ACZA,IAAM,mBAAmB;AACzB,SAAS,eAAe,cAAc;AACpC,SAAO,aAAa,QAAQ,cAAc,EAAE,EAAE,MAAM,GAAG;AACzD;AACA,SAAS,wBAAwB,KAAK;AACpC,QAAM,UAAU,IAAI,MAAM,gBAAgB;AAC1C,MAAI,CAAC,SAAS;AACZ,WAAO,CAAC;AAAA,EACV;AACA,SAAO,QAAQ,IAAI,cAAc,EAAE,OAAO,CAAC,GAAG,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;AACrE;;;ACVA,SAAS,KAAK,QAAQ,YAAY;AAChC,SAAO,OAAO,KAAK,MAAM,EAAE,OAAO,CAAC,WAAW,CAAC,WAAW,SAAS,MAAM,CAAC,EA
AE,OAAO,CAAC,KAAK,QAAQ;AAC/F,QAAI,GAAG,IAAI,OAAO,GAAG;AACrB,WAAO;AAAA,EACT,GAAG,CAAC,CAAC;AACP;;;ACLA,SAAS,eAAe,KAAK;AAC3B,SAAO,IAAI,MAAM,oBAAoB,EAAE,IAAI,SAAS,MAAM;AACxD,QAAI,CAAC,eAAe,KAAK,IAAI,GAAG;AAC9B,aAAO,UAAU,IAAI,EAAE,QAAQ,QAAQ,GAAG,EAAE,QAAQ,QAAQ,GAAG;AAAA,IACjE;AACA,WAAO;AAAA,EACT,CAAC,EAAE,KAAK,EAAE;AACZ;AACA,SAAS,iBAAiB,KAAK;AAC7B,SAAO,mBAAmB,GAAG,EAAE,QAAQ,YAAY,SAAS,GAAG;AAC7D,WAAO,MAAM,EAAE,WAAW,CAAC,EAAE,SAAS,EAAE,EAAE,YAAY;AAAA,EACxD,CAAC;AACH;AACA,SAAS,YAAY,UAAU,OAAO,KAAK;AACzC,UAAQ,aAAa,OAAO,aAAa,MAAM,eAAe,KAAK,IAAI,iBAAiB,KAAK;AAC7F,MAAI,KAAK;AACP,WAAO,iBAAiB,GAAG,IAAI,MAAM;AAAA,EACvC,OAAO;AACL,WAAO;AAAA,EACT;AACF;AACA,SAAS,UAAU,OAAO;AACxB,SAAO,UAAU,UAAU,UAAU;AACvC;AACA,SAAS,cAAc,UAAU;AAC/B,SAAO,aAAa,OAAO,aAAa,OAAO,aAAa;AAC9D;AACA,SAAS,UAAU,SAAS,UAAU,KAAK,UAAU;AACnD,MAAI,QAAQ,QAAQ,GAAG,GAAG,SAAS,CAAC;AACpC,MAAI,UAAU,KAAK,KAAK,UAAU,IAAI;AACpC,QAAI,OAAO,UAAU,YAAY,OAAO,UAAU,YAAY,OAAO,UAAU,WAAW;AACxF,cAAQ,MAAM,SAAS;AACvB,UAAI,YAAY,aAAa,KAAK;AAChC,gBAAQ,MAAM,UAAU,GAAG,SAAS,UAAU,EAAE,CAAC;AAAA,MACnD;AACA,aAAO;AAAA,QACL,YAAY,UAAU,OAAO,cAAc,QAAQ,IAAI,MAAM,EAAE;AAAA,MACjE;AAAA,IACF,OAAO;AACL,UAAI,aAAa,KAAK;AACpB,YAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,gBAAM,OAAO,SAAS,EAAE,QAAQ,SAAS,QAAQ;AAC/C,mBAAO;AAAA,cACL,YAAY,UAAU,QAAQ,cAAc,QAAQ,IAAI,MAAM,EAAE;AAAA,YAClE;AAAA,UACF,CAAC;AAAA,QACH,OAAO;AACL,iBAAO,KAAK,KAAK,EAAE,QAAQ,SAAS,GAAG;AACrC,gBAAI,UAAU,MAAM,CAAC,CAAC,GAAG;AACvB,qBAAO,KAAK,YAAY,UAAU,MAAM,CAAC,GAAG,CAAC,CAAC;AAAA,YAChD;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF,OAAO;AACL,cAAM,MAAM,CAAC;AACb,YAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,gBAAM,OAAO,SAAS,EAAE,QAAQ,SAAS,QAAQ;AAC/C,gBAAI,KAAK,YAAY,UAAU,MAAM,CAAC;AAAA,UACxC,CAAC;AAAA,QACH,OAAO;AACL,iBAAO,KAAK,KAAK,EAAE,QAAQ,SAAS,GAAG;AACrC,gBAAI,UAAU,MAAM,CAAC,CAAC,GAAG;AACvB,kBAAI,KAAK,iBAAiB,CAAC,CAAC;AAC5B,kBAAI,KAAK,YAAY,UAAU,MAAM,CAAC,EAAE,SAAS,CAAC,CAAC;AAAA,YACrD;AAAA,UACF,CAAC;AAAA,QACH;AACA,YAAI,cAAc,QAAQ,GAAG;AAC3B,iBAAO,KAAK,iBAAiB,GAAG,IAAI,MAAM,IAAI,KAAK,GAAG,CAAC;AAAA,QACzD,WAAW,IAAI,WAAW,GAAG;AAC3B,iBAAO,KAAK,IAAI,KAAK,GAAG,CAAC;AAAA,QAC3B;AAAA,MACF;AAAA,IACF;AAAA,EACF,OAAO;AACL,QAAI,aAAa,KAAK;AACpB,UAAI,UAAU,KAAK,GAAG;AACpB,eAAO,KAAK,iBAAiB,GAAG,CAAC;AAAA,MACnC;AAAA,IACF,WAAW,UAAU,OAAO,aAAa,OAAO,aAAa,MAAM;AACjE,aAAO,KAAK,iBAAiB,GAAG,IAAI,GAAG;AAAA,IACzC,WAAW,UAAU,IAAI;AACvB,aAAO,KAAK,EAAE;AAAA,IAChB;AAAA,EACF;AACA,SAAO;AACT;AACA,SAAS,SAAS,UAAU;AAC1B,SAAO;AAAA,IACL,QAAQ,OAAO,KAAK,MAAM,QAAQ;AAAA,EACpC;AACF;AACA,SAAS,OAAO,UAAU,SAAS;AACjC,MAAI,YAAY,CAAC,KAAK,KAAK,KAAK,KAAK,KAAK,KAAK,GAAG;AAClD,SAAO,SAAS;AAAA,IACd;AAAA,IACA,SAAS,GAAG,YAAY,SAAS;AAC/B,UAAI,YAAY;AACd,YAAI,WAAW;AACf,cAAM,SAAS,CAAC;AAChB,YAAI,UAAU,QAAQ,WAAW,OAAO,CAAC,CAAC,MAAM,IAAI;AAClD,qBAAW,WAAW,OAAO,CAAC;AAC9B,uBAAa,WAAW,OAAO,CAAC;AAAA,QAClC;AACA,mBAAW,MAAM,IAAI,EAAE,QAAQ,SAAS,UAAU;AAChD,cAAI,MAAM,4BAA4B,KAAK,QAAQ;AACnD,iBAAO,KAAK,UAAU,SAAS,UAAU,IAAI,CAAC,GAAG,IAAI,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC;AAAA,QACpE,CAAC;AACD,YAAI,YAAY,aAAa,KAAK;AAChC,cAAI,YAAY;AAChB,cAAI,aAAa,KAAK;AACpB,wBAAY;AAAA,UACd,WAAW,aAAa,KAAK;AAC3B,wBAAY;AAAA,UACd;AACA,kBAAQ,OAAO,WAAW,IAAI,WAAW,MAAM,OAAO,KAAK,SAAS;AAAA,QACtE,OAAO;AACL,iBAAO,OAAO,KAAK,GAAG;AAAA,QACxB;AAAA,MACF,OAAO;AACL,eAAO,eAAe,OAAO;AAAA,MAC/B;AAAA,IACF;AAAA,EACF;AACF;;;ACxHA,SAAS,MAAM,SAAS;AACtB,MAAI,SAAS,QAAQ,OAAO,YAAY;AACxC,MAAI,OAAO,QAAQ,OAAO,KAAK,QAAQ,gBAAgB,MAAM;AAC7D,MAAI,UAAU,OAAO,OAAO,CAAC,GAAG,QAAQ,OAAO;AAC/C,MAAI;AACJ,MAAI,aAAa,KAAK,SAAS;AAAA,IAC7B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACD,QAAM,mBAAmB,wBAAwB,GAAG;AACpD,QAAM,SAAS,GAAG,EAAE,OAAO,UAAU;AACrC,MAAI,CAAC,QAAQ,KAAK,GAAG,GAAG;AACtB,UAAM,QAAQ,UAAU;AAAA,EAC
1B;AACA,QAAM,oBAAoB,OAAO,KAAK,OAAO,EAAE,OAAO,CAAC,WAAW,iBAAiB,SAAS,MAAM,CAAC,EAAE,OAAO,SAAS;AACrH,QAAM,sBAAsB,KAAK,YAAY,iBAAiB;AAC9D,QAAM,kBAAkB,6BAA6B,KAAK,QAAQ,MAAM;AACxE,MAAI,CAAC,iBAAiB;AACpB,QAAI,QAAQ,UAAU,QAAQ;AAC5B,cAAQ,SAAS,QAAQ,OAAO,MAAM,GAAG,EAAE;AAAA,QACzC,CAAC,YAAY,QAAQ;AAAA,UACnB;AAAA,UACA,uBAAuB,QAAQ,UAAU;AAAA,QAC3C;AAAA,MACF,EAAE,KAAK,GAAG;AAAA,IACZ;AACA,QAAI,QAAQ,UAAU,SAAS,QAAQ;AACrC,YAAM,2BAA2B,QAAQ,OAAO,MAAM,qBAAqB,KAAK,CAAC;AACjF,cAAQ,SAAS,yBAAyB,OAAO,QAAQ,UAAU,QAAQ,EAAE,IAAI,CAAC,YAAY;AAC5F,cAAM,SAAS,QAAQ,UAAU,SAAS,IAAI,QAAQ,UAAU,WAAW;AAC3E,eAAO,0BAA0B,kBAAkB;AAAA,MACrD,CAAC,EAAE,KAAK,GAAG;AAAA,IACb;AAAA,EACF;AACA,MAAI,CAAC,OAAO,MAAM,EAAE,SAAS,MAAM,GAAG;AACpC,UAAM,mBAAmB,KAAK,mBAAmB;AAAA,EACnD,OAAO;AACL,QAAI,UAAU,qBAAqB;AACjC,aAAO,oBAAoB;AAAA,IAC7B,OAAO;AACL,UAAI,OAAO,KAAK,mBAAmB,EAAE,QAAQ;AAC3C,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,QAAQ,cAAc,KAAK,OAAO,SAAS,aAAa;AAC3D,YAAQ,cAAc,IAAI;AAAA,EAC5B;AACA,MAAI,CAAC,SAAS,KAAK,EAAE,SAAS,MAAM,KAAK,OAAO,SAAS,aAAa;AACpE,WAAO;AAAA,EACT;AACA,SAAO,OAAO;AAAA,IACZ,EAAE,QAAQ,KAAK,QAAQ;AAAA,IACvB,OAAO,SAAS,cAAc,EAAE,KAAK,IAAI;AAAA,IACzC,QAAQ,UAAU,EAAE,SAAS,QAAQ,QAAQ,IAAI;AAAA,EACnD;AACF;;;AC9DA,SAAS,qBAAqB,UAAU,OAAO,SAAS;AACtD,SAAO,MAAM,MAAM,UAAU,OAAO,OAAO,CAAC;AAC9C;;;ACDA,SAAS,aAAa,aAAa,aAAa;AAC9C,QAAMA,YAAW,MAAM,aAAa,WAAW;AAC/C,QAAMC,YAAW,qBAAqB,KAAK,MAAMD,SAAQ;AACzD,SAAO,OAAO,OAAOC,WAAU;AAAA,IAC7B,UAAAD;AAAA,IACA,UAAU,aAAa,KAAK,MAAMA,SAAQ;AAAA,IAC1C,OAAO,MAAM,KAAK,MAAMA,SAAQ;AAAA,IAChC;AAAA,EACF,CAAC;AACH;;;ACZA,SAAS,oBAAoB;;;ACA7B,IAAM,UAAU;;;ADEhB,IAAM,YAAY,uBAAuB,WAAW,aAAa;AACjE,IAAM,WAAW;AAAA,EACf,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,SAAS;AAAA,IACP,QAAQ;AAAA,IACR,cAAc;AAAA,EAChB;AAAA,EACA,WAAW;AAAA,IACT,QAAQ;AAAA,IACR,UAAU,CAAC;AAAA,EACb;AACF;;;AEZA,IAAM,WAAW,aAAa,MAAM,QAAQ;", + "names": ["DEFAULTS", "endpoint"] +} diff --git a/dist/node_modules/@octokit/endpoint/package.json b/dist/node_modules/@octokit/endpoint/package.json new file mode 100644 index 0000000..eeae264 --- /dev/null +++ b/dist/node_modules/@octokit/endpoint/package.json @@ -0,0 +1,46 @@ +{ + "name": "@octokit/endpoint", + "version": "7.0.6", + "publishConfig": { + "access": "public" + }, + "description": "Turns REST API endpoints into generic request options", + "repository": "github:octokit/endpoint.js", + "keywords": [ + "octokit", + "github", + "api", + "rest" + ], + "author": "Gregor Martynus (https://github.com/gr2m)", + "license": "MIT", + "devDependencies": { + "@octokit/tsconfig": "^2.0.0", + "@types/jest": "^29.0.0", + "esbuild": "^0.17.19", + "glob": "^10.2.7", + "jest": "^29.0.0", + "prettier": "2.8.8", + "semantic-release": "^21.0.0", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "dependencies": { + "@octokit/types": "^9.0.0", + "is-plain-object": "^5.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "browser": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "module": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/graphql/LICENSE b/dist/node_modules/@octokit/graphql/LICENSE new file mode 100644 index 0000000..af5366d --- /dev/null +++ b/dist/node_modules/@octokit/graphql/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in 
the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/dist/node_modules/@octokit/graphql/README.md b/dist/node_modules/@octokit/graphql/README.md new file mode 100644 index 0000000..aee921a --- /dev/null +++ b/dist/node_modules/@octokit/graphql/README.md @@ -0,0 +1,409 @@ +# graphql.js + +> GitHub GraphQL API client for browsers and Node + +[![@latest](https://img.shields.io/npm/v/@octokit/graphql.svg)](https://www.npmjs.com/package/@octokit/graphql) +[![Build Status](https://github.com/octokit/graphql.js/workflows/Test/badge.svg)](https://github.com/octokit/graphql.js/actions?query=workflow%3ATest+branch%3Amain) + + + +- [Usage](#usage) + - [Send a simple query](#send-a-simple-query) + - [Authentication](#authentication) + - [Variables](#variables) + - [Pass query together with headers and variables](#pass-query-together-with-headers-and-variables) + - [Use with GitHub Enterprise](#use-with-github-enterprise) + - [Use custom `@octokit/request` instance](#use-custom-octokitrequest-instance) +- [TypeScript](#typescript) + - [Additional Types](#additional-types) +- [Errors](#errors) +- [Partial responses](#partial-responses) +- [Writing tests](#writing-tests) +- [License](#license) + + + +## Usage + + + + + + +
+Browsers
+
+
+Load `@octokit/graphql` directly from [cdn.skypack.dev](https://cdn.skypack.dev)
+
+```html
+<script type="module">
+  import { graphql } from "https://cdn.skypack.dev/@octokit/graphql";
+</script>
+```
+
+
+Node
+
+
+Install with `npm install @octokit/graphql`
+
+```js
+const { graphql } = require("@octokit/graphql");
+// or: import { graphql } from "@octokit/graphql";
+```
+
+
+ +### Send a simple query + +```js +const { repository } = await graphql( + ` + { + repository(owner: "octokit", name: "graphql.js") { + issues(last: 3) { + edges { + node { + title + } + } + } + } + } + `, + { + headers: { + authorization: `token secret123`, + }, + } +); +``` + +### Authentication + +The simplest way to authenticate a request is to set the `Authorization` header, e.g. to a [personal access token](https://github.com/settings/tokens/). + +```js +const graphqlWithAuth = graphql.defaults({ + headers: { + authorization: `token secret123`, + }, +}); +const { repository } = await graphqlWithAuth(` + { + repository(owner: "octokit", name: "graphql.js") { + issues(last: 3) { + edges { + node { + title + } + } + } + } + } +`); +``` + +For more complex authentication strategies such as GitHub Apps or Basic, we recommend the according authentication library exported by [`@octokit/auth`](https://github.com/octokit/auth.js). + +```js +const { createAppAuth } = require("@octokit/auth-app"); +const auth = createAppAuth({ + appId: process.env.APP_ID, + privateKey: process.env.PRIVATE_KEY, + installationId: 123, +}); +const graphqlWithAuth = graphql.defaults({ + request: { + hook: auth.hook, + }, +}); + +const { repository } = await graphqlWithAuth( + `{ + repository(owner: "octokit", name: "graphql.js") { + issues(last: 3) { + edges { + node { + title + } + } + } + } + }` +); +``` + +### Variables + +⚠️ Do not use [template literals](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals) in the query strings as they make your code vulnerable to query injection attacks (see [#2](https://github.com/octokit/graphql.js/issues/2)). Use variables instead: + +```js +const { lastIssues } = await graphql( + ` + query lastIssues($owner: String!, $repo: String!, $num: Int = 3) { + repository(owner: $owner, name: $repo) { + issues(last: $num) { + edges { + node { + title + } + } + } + } + } + `, + { + owner: "octokit", + repo: "graphql.js", + headers: { + authorization: `token secret123`, + }, + } +); +``` + +### Pass query together with headers and variables + +```js +const { graphql } = require("@octokit/graphql"); +const { lastIssues } = await graphql({ + query: `query lastIssues($owner: String!, $repo: String!, $num: Int = 3) { + repository(owner:$owner, name:$repo) { + issues(last:$num) { + edges { + node { + title + } + } + } + } + }`, + owner: "octokit", + repo: "graphql.js", + headers: { + authorization: `token secret123`, + }, +}); +``` + +### Use with GitHub Enterprise + +```js +let { graphql } = require("@octokit/graphql"); +graphql = graphql.defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api", + headers: { + authorization: `token secret123`, + }, +}); +const { repository } = await graphql(` + { + repository(owner: "acme-project", name: "acme-repo") { + issues(last: 3) { + edges { + node { + title + } + } + } + } + } +`); +``` + +### Use custom `@octokit/request` instance + +```js +const { request } = require("@octokit/request"); +const { withCustomRequest } = require("@octokit/graphql"); + +let requestCounter = 0; +const myRequest = request.defaults({ + headers: { + authorization: "bearer secret123", + }, + request: { + hook(request, options) { + requestCounter++; + return request(options); + }, + }, +}); +const myGraphql = withCustomRequest(myRequest); +await request("/"); +await myGraphql(` + { + repository(owner: "acme-project", name: "acme-repo") { + issues(last: 3) { + edges { + node { + title + } + } + } + } + } +`); +// requestCounter 
is now 2 +``` + +## TypeScript + +`@octokit/graphql` is exposing proper types for its usage with TypeScript projects. + +### Additional Types + +Additionally, `GraphQlQueryResponseData` has been exposed to users: + +```ts +import type { GraphQlQueryResponseData } from "@octokit/graphql"; +``` + +## Errors + +In case of a GraphQL error, `error.message` is set to a combined message describing all errors returned by the endpoint. +All errors can be accessed at `error.errors`. `error.request` has the request options such as query, variables and headers set for easier debugging. + +```js +let { graphql, GraphqlResponseError } = require("@octokit/graphql"); +graphql = graphql.defaults({ + headers: { + authorization: `token secret123`, + }, +}); +const query = `{ + viewer { + bioHtml + } +}`; + +try { + const result = await graphql(query); +} catch (error) { + if (error instanceof GraphqlResponseError) { + // do something with the error, allowing you to detect a graphql response error, + // compared to accidentally catching unrelated errors. + + // server responds with an object like the following (as an example) + // class GraphqlResponseError { + // "headers": { + // "status": "403", + // }, + // "data": null, + // "errors": [{ + // "message": "Field 'bioHtml' doesn't exist on type 'User'", + // "locations": [{ + // "line": 3, + // "column": 5 + // }] + // }] + // } + + console.log("Request failed:", error.request); // { query, variables: {}, headers: { authorization: 'token secret123' } } + console.log(error.message); // Field 'bioHtml' doesn't exist on type 'User' + } else { + // handle non-GraphQL error + } +} +``` + +## Partial responses + +A GraphQL query may respond with partial data accompanied by errors. In this case we will throw an error but the partial data will still be accessible through `error.data` + +```js +let { graphql } = require("@octokit/graphql"); +graphql = graphql.defaults({ + headers: { + authorization: `token secret123`, + }, +}); +const query = `{ + repository(name: "probot", owner: "probot") { + name + ref(qualifiedName: "master") { + target { + ... on Commit { + history(first: 25, after: "invalid cursor") { + nodes { + message + } + } + } + } + } + } +}`; + +try { + const result = await graphql(query); +} catch (error) { + // server responds with + // { + // "data": { + // "repository": { + // "name": "probot", + // "ref": null + // } + // }, + // "errors": [ + // { + // "type": "INVALID_CURSOR_ARGUMENTS", + // "path": [ + // "repository", + // "ref", + // "target", + // "history" + // ], + // "locations": [ + // { + // "line": 7, + // "column": 11 + // } + // ], + // "message": "`invalid cursor` does not appear to be a valid cursor." + // } + // ] + // } + + console.log("Request failed:", error.request); // { query, variables: {}, headers: { authorization: 'token secret123' } } + console.log(error.message); // `invalid cursor` does not appear to be a valid cursor. + console.log(error.data); // { repository: { name: 'probot', ref: null } } +} +``` + +## Writing tests + +You can pass a replacement for [the built-in fetch implementation](https://github.com/bitinn/node-fetch) as `request.fetch` option. 
For example, using [fetch-mock](http://www.wheresrhys.co.uk/fetch-mock/) works great to write tests + +```js +const assert = require("assert"); +const fetchMock = require("fetch-mock/es5/server"); + +const { graphql } = require("@octokit/graphql"); + +graphql("{ viewer { login } }", { + headers: { + authorization: "token secret123", + }, + request: { + fetch: fetchMock + .sandbox() + .post("https://api.github.com/graphql", (url, options) => { + assert.strictEqual(options.headers.authorization, "token secret123"); + assert.strictEqual( + options.body, + '{"query":"{ viewer { login } }"}', + "Sends correct query" + ); + return { data: {} }; + }), + }, +}); +``` + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/graphql/dist-node/index.js b/dist/node_modules/@octokit/graphql/dist-node/index.js new file mode 100644 index 0000000..fa5b81c --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-node/index.js @@ -0,0 +1,146 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + GraphqlResponseError: () => GraphqlResponseError, + graphql: () => graphql2, + withCustomRequest: () => withCustomRequest +}); +module.exports = __toCommonJS(dist_src_exports); +var import_request = require("@octokit/request"); +var import_universal_user_agent = require("universal-user-agent"); + +// pkg/dist-src/version.js +var VERSION = "5.0.6"; + +// pkg/dist-src/error.js +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +} +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } +}; + +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`) + ); + } + } + const parsedOptions = typeof query === "string" ? 
Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); + } + return response.data.data; + }); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); +} + +// pkg/dist-src/index.js +var graphql2 = withDefaults(import_request.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + GraphqlResponseError, + graphql, + withCustomRequest +}); diff --git a/dist/node_modules/@octokit/graphql/dist-node/index.js.map b/dist/node_modules/@octokit/graphql/dist-node/index.js.map new file mode 100644 index 0000000..ef6a9c4 --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js", "../dist-src/error.js", "../dist-src/graphql.js", "../dist-src/with-defaults.js"], + "sourcesContent": ["import { request } from \"@octokit/request\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport { withDefaults } from \"./with-defaults\";\nconst graphql = withDefaults(request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nimport { GraphqlResponseError } from \"./error\";\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\nexport {\n GraphqlResponseError,\n graphql,\n withCustomRequest\n};\n", "const VERSION = \"5.0.6\";\nexport {\n VERSION\n};\n", "function _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\n` + data.errors.map((e) => ` - ${e.message}`).join(\"\\n\");\n}\nclass GraphqlResponseError extends Error {\n constructor(request, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\";\n this.errors = response.errors;\n this.data = response.data;\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n}\nexport {\n GraphqlResponseError\n};\n", "import { 
GraphqlResponseError } from \"./error\";\nconst NON_VARIABLE_OPTIONS = [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n \"request\",\n \"query\",\n \"mediaType\"\n];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(\n new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`)\n );\n }\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))\n continue;\n return Promise.reject(\n new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`)\n );\n }\n }\n const parsedOptions = typeof query === \"string\" ? Object.assign({ query }, options) : query;\n const requestOptions = Object.keys(\n parsedOptions\n ).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request(requestOptions).then((response) => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlResponseError(\n requestOptions,\n headers,\n response.data\n );\n }\n return response.data.data;\n });\n}\nexport {\n graphql\n};\n", "import { graphql } from \"./graphql\";\nfunction withDefaults(request, newDefaults) {\n const newRequest = request.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: newRequest.endpoint\n });\n}\nexport {\n withDefaults\n};\n"], + "mappings": 
";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA,iBAAAA;AAAA,EAAA;AAAA;AAAA;AAAA,qBAAwB;AACxB,kCAA6B;;;ACD7B,IAAM,UAAU;;;ACAhB,SAAS,+BAA+B,MAAM;AAC5C,SAAO;AAAA,IACL,KAAK,OAAO,IAAI,CAAC,MAAM,MAAM,EAAE,SAAS,EAAE,KAAK,IAAI;AACvD;AACA,IAAM,uBAAN,cAAmC,MAAM;AAAA,EACvC,YAAYC,UAAS,SAAS,UAAU;AACtC,UAAM,+BAA+B,QAAQ,CAAC;AAC9C,SAAK,UAAUA;AACf,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,OAAO;AACZ,SAAK,SAAS,SAAS;AACvB,SAAK,OAAO,SAAS;AACrB,QAAI,MAAM,mBAAmB;AAC3B,YAAM,kBAAkB,MAAM,KAAK,WAAW;AAAA,IAChD;AAAA,EACF;AACF;;;AChBA,IAAM,uBAAuB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AACA,IAAM,6BAA6B,CAAC,SAAS,UAAU,KAAK;AAC5D,IAAM,uBAAuB;AAC7B,SAAS,QAAQC,UAAS,OAAO,SAAS;AACxC,MAAI,SAAS;AACX,QAAI,OAAO,UAAU,YAAY,WAAW,SAAS;AACnD,aAAO,QAAQ;AAAA,QACb,IAAI,MAAM,4DAA4D;AAAA,MACxE;AAAA,IACF;AACA,eAAW,OAAO,SAAS;AACzB,UAAI,CAAC,2BAA2B,SAAS,GAAG;AAC1C;AACF,aAAO,QAAQ;AAAA,QACb,IAAI,MAAM,uBAAuB,sCAAsC;AAAA,MACzE;AAAA,IACF;AAAA,EACF;AACA,QAAM,gBAAgB,OAAO,UAAU,WAAW,OAAO,OAAO,EAAE,MAAM,GAAG,OAAO,IAAI;AACtF,QAAM,iBAAiB,OAAO;AAAA,IAC5B;AAAA,EACF,EAAE,OAAO,CAAC,QAAQ,QAAQ;AACxB,QAAI,qBAAqB,SAAS,GAAG,GAAG;AACtC,aAAO,GAAG,IAAI,cAAc,GAAG;AAC/B,aAAO;AAAA,IACT;AACA,QAAI,CAAC,OAAO,WAAW;AACrB,aAAO,YAAY,CAAC;AAAA,IACtB;AACA,WAAO,UAAU,GAAG,IAAI,cAAc,GAAG;AACzC,WAAO;AAAA,EACT,GAAG,CAAC,CAAC;AACL,QAAM,UAAU,cAAc,WAAWA,SAAQ,SAAS,SAAS;AACnE,MAAI,qBAAqB,KAAK,OAAO,GAAG;AACtC,mBAAe,MAAM,QAAQ,QAAQ,sBAAsB,cAAc;AAAA,EAC3E;AACA,SAAOA,SAAQ,cAAc,EAAE,KAAK,CAAC,aAAa;AAChD,QAAI,SAAS,KAAK,QAAQ;AACxB,YAAM,UAAU,CAAC;AACjB,iBAAW,OAAO,OAAO,KAAK,SAAS,OAAO,GAAG;AAC/C,gBAAQ,GAAG,IAAI,SAAS,QAAQ,GAAG;AAAA,MACrC;AACA,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,QACA,SAAS;AAAA,MACX;AAAA,IACF;AACA,WAAO,SAAS,KAAK;AAAA,EACvB,CAAC;AACH;;;AC1DA,SAAS,aAAaC,UAAS,aAAa;AAC1C,QAAM,aAAaA,SAAQ,SAAS,WAAW;AAC/C,QAAM,SAAS,CAAC,OAAO,YAAY;AACjC,WAAO,QAAQ,YAAY,OAAO,OAAO;AAAA,EAC3C;AACA,SAAO,OAAO,OAAO,QAAQ;AAAA,IAC3B,UAAU,aAAa,KAAK,MAAM,UAAU;AAAA,IAC5C,UAAU,WAAW;AAAA,EACvB,CAAC;AACH;;;AJNA,IAAMC,WAAU,aAAa,wBAAS;AAAA,EACpC,SAAS;AAAA,IACP,cAAc,sBAAsB,eAAW,0CAAa;AAAA,EAC9D;AAAA,EACA,QAAQ;AAAA,EACR,KAAK;AACP,CAAC;AAED,SAAS,kBAAkB,eAAe;AACxC,SAAO,aAAa,eAAe;AAAA,IACjC,QAAQ;AAAA,IACR,KAAK;AAAA,EACP,CAAC;AACH;", + "names": ["graphql", "request", "request", "request", "graphql"] +} diff --git a/dist/node_modules/@octokit/graphql/dist-src/error.js b/dist/node_modules/@octokit/graphql/dist-src/error.js new file mode 100644 index 0000000..dd0f4e6 --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-src/error.js @@ -0,0 +1,21 @@ +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +} +class GraphqlResponseError extends Error { + constructor(request, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } +} +export { + GraphqlResponseError +}; diff --git a/dist/node_modules/@octokit/graphql/dist-src/graphql.js b/dist/node_modules/@octokit/graphql/dist-src/graphql.js new file mode 100644 index 0000000..e423f3d --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-src/graphql.js @@ -0,0 +1,63 @@ +import { GraphqlResponseError } from "./error"; +const NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +const 
FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`) + ); + } + } + const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); + } + return response.data.data; + }); +} +export { + graphql +}; diff --git a/dist/node_modules/@octokit/graphql/dist-src/index.js b/dist/node_modules/@octokit/graphql/dist-src/index.js new file mode 100644 index 0000000..4e7deb5 --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-src/index.js @@ -0,0 +1,23 @@ +import { request } from "@octokit/request"; +import { getUserAgent } from "universal-user-agent"; +import { VERSION } from "./version"; +import { withDefaults } from "./with-defaults"; +const graphql = withDefaults(request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${getUserAgent()}` + }, + method: "POST", + url: "/graphql" +}); +import { GraphqlResponseError } from "./error"; +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} +export { + GraphqlResponseError, + graphql, + withCustomRequest +}; diff --git a/dist/node_modules/@octokit/graphql/dist-src/version.js b/dist/node_modules/@octokit/graphql/dist-src/version.js new file mode 100644 index 0000000..e41c386 --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "5.0.6"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/graphql/dist-src/with-defaults.js b/dist/node_modules/@octokit/graphql/dist-src/with-defaults.js new file mode 100644 index 0000000..0676209 --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-src/with-defaults.js @@ -0,0 +1,14 @@ +import { graphql } from "./graphql"; +function withDefaults(request, newDefaults) { + const newRequest = request.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); +} +export { + withDefaults +}; diff --git a/dist/node_modules/@octokit/graphql/dist-types/error.d.ts b/dist/node_modules/@octokit/graphql/dist-types/error.d.ts new file mode 100644 index 0000000..e921bfa --- /dev/null +++ 
b/dist/node_modules/@octokit/graphql/dist-types/error.d.ts @@ -0,0 +1,13 @@ +import type { ResponseHeaders } from "@octokit/types"; +import type { GraphQlEndpointOptions, GraphQlQueryResponse } from "./types"; +type ServerResponseData = Required>; +export declare class GraphqlResponseError extends Error { + readonly request: GraphQlEndpointOptions; + readonly headers: ResponseHeaders; + readonly response: ServerResponseData; + name: string; + readonly errors: GraphQlQueryResponse["errors"]; + readonly data: ResponseData; + constructor(request: GraphQlEndpointOptions, headers: ResponseHeaders, response: ServerResponseData); +} +export {}; diff --git a/dist/node_modules/@octokit/graphql/dist-types/graphql.d.ts b/dist/node_modules/@octokit/graphql/dist-types/graphql.d.ts new file mode 100644 index 0000000..376e5df --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-types/graphql.d.ts @@ -0,0 +1,3 @@ +import { request as Request } from "@octokit/request"; +import type { RequestParameters, GraphQlQueryResponseData } from "./types"; +export declare function graphql(request: typeof Request, query: string | RequestParameters, options?: RequestParameters): Promise; diff --git a/dist/node_modules/@octokit/graphql/dist-types/index.d.ts b/dist/node_modules/@octokit/graphql/dist-types/index.d.ts new file mode 100644 index 0000000..c9b22c9 --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { request } from "@octokit/request"; +export declare const graphql: import("./types").graphql; +export type { GraphQlQueryResponseData } from "./types"; +export { GraphqlResponseError } from "./error"; +export declare function withCustomRequest(customRequest: typeof request): import("./types").graphql; diff --git a/dist/node_modules/@octokit/graphql/dist-types/types.d.ts b/dist/node_modules/@octokit/graphql/dist-types/types.d.ts new file mode 100644 index 0000000..ceba025 --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-types/types.d.ts @@ -0,0 +1,55 @@ +import type { EndpointOptions, RequestParameters as RequestParametersType, EndpointInterface } from "@octokit/types"; +export type GraphQlEndpointOptions = EndpointOptions & { + variables?: { + [key: string]: unknown; + }; +}; +export type RequestParameters = RequestParametersType; +export type Query = string; +export interface graphql { + /** + * Sends a GraphQL query request based on endpoint options + * The GraphQL query must be specified in `options`. + * + * @param {object} endpoint Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (options: RequestParameters): GraphQlResponse; + /** + * Sends a GraphQL query request based on endpoint options + * + * @param {string} query GraphQL query. Example: `'query { viewer { login } }'`. + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. 
+ */ + (query: Query, parameters?: RequestParameters): GraphQlResponse; + /** + * Returns a new `endpoint` with updated route and parameters + */ + defaults: (newDefaults: RequestParameters) => graphql; + /** + * Octokit endpoint API, see {@link https://github.com/octokit/endpoint.js|@octokit/endpoint} + */ + endpoint: EndpointInterface; +} +export type GraphQlResponse = Promise; +export type GraphQlQueryResponseData = { + [key: string]: any; +}; +export type GraphQlQueryResponse = { + data: ResponseData; + errors?: [ + { + type: string; + message: string; + path: [string]; + extensions: { + [key: string]: any; + }; + locations: [ + { + line: number; + column: number; + } + ]; + } + ]; +}; diff --git a/dist/node_modules/@octokit/graphql/dist-types/version.d.ts b/dist/node_modules/@octokit/graphql/dist-types/version.d.ts new file mode 100644 index 0000000..2fc0fc4 --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "5.0.6"; diff --git a/dist/node_modules/@octokit/graphql/dist-types/with-defaults.d.ts b/dist/node_modules/@octokit/graphql/dist-types/with-defaults.d.ts new file mode 100644 index 0000000..28f22dc --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-types/with-defaults.d.ts @@ -0,0 +1,3 @@ +import { request as Request } from "@octokit/request"; +import type { graphql as ApiInterface, RequestParameters } from "./types"; +export declare function withDefaults(request: typeof Request, newDefaults: RequestParameters): ApiInterface; diff --git a/dist/node_modules/@octokit/graphql/dist-web/index.js b/dist/node_modules/@octokit/graphql/dist-web/index.js new file mode 100644 index 0000000..6a36528 --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-web/index.js @@ -0,0 +1,119 @@ +// pkg/dist-src/index.js +import { request } from "@octokit/request"; +import { getUserAgent } from "universal-user-agent"; + +// pkg/dist-src/version.js +var VERSION = "5.0.6"; + +// pkg/dist-src/error.js +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +} +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } +}; + +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`) + ); + } + } + const parsedOptions = typeof query === "string" ? 
Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); + } + return response.data.data; + }); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); +} + +// pkg/dist-src/index.js +var graphql2 = withDefaults(request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${getUserAgent()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} +export { + GraphqlResponseError, + graphql2 as graphql, + withCustomRequest +}; diff --git a/dist/node_modules/@octokit/graphql/dist-web/index.js.map b/dist/node_modules/@octokit/graphql/dist-web/index.js.map new file mode 100644 index 0000000..9534050 --- /dev/null +++ b/dist/node_modules/@octokit/graphql/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js", "../dist-src/error.js", "../dist-src/graphql.js", "../dist-src/with-defaults.js"], + "sourcesContent": ["import { request } from \"@octokit/request\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport { withDefaults } from \"./with-defaults\";\nconst graphql = withDefaults(request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nimport { GraphqlResponseError } from \"./error\";\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\nexport {\n GraphqlResponseError,\n graphql,\n withCustomRequest\n};\n", "const VERSION = \"5.0.6\";\nexport {\n VERSION\n};\n", "function _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\n` + data.errors.map((e) => ` - ${e.message}`).join(\"\\n\");\n}\nclass GraphqlResponseError extends Error {\n constructor(request, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\";\n this.errors = response.errors;\n this.data = response.data;\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n}\nexport {\n GraphqlResponseError\n};\n", "import { GraphqlResponseError } from \"./error\";\nconst NON_VARIABLE_OPTIONS = [\n \"method\",\n \"baseUrl\",\n \"url\",\n \"headers\",\n 
\"request\",\n \"query\",\n \"mediaType\"\n];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(\n new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`)\n );\n }\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))\n continue;\n return Promise.reject(\n new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`)\n );\n }\n }\n const parsedOptions = typeof query === \"string\" ? Object.assign({ query }, options) : query;\n const requestOptions = Object.keys(\n parsedOptions\n ).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request(requestOptions).then((response) => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlResponseError(\n requestOptions,\n headers,\n response.data\n );\n }\n return response.data.data;\n });\n}\nexport {\n graphql\n};\n", "import { graphql } from \"./graphql\";\nfunction withDefaults(request, newDefaults) {\n const newRequest = request.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: newRequest.endpoint\n });\n}\nexport {\n withDefaults\n};\n"], + "mappings": 
";AAAA,SAAS,eAAe;AACxB,SAAS,oBAAoB;;;ACD7B,IAAM,UAAU;;;ACAhB,SAAS,+BAA+B,MAAM;AAC5C,SAAO;AAAA,IACL,KAAK,OAAO,IAAI,CAAC,MAAM,MAAM,EAAE,SAAS,EAAE,KAAK,IAAI;AACvD;AACA,IAAM,uBAAN,cAAmC,MAAM;AAAA,EACvC,YAAYA,UAAS,SAAS,UAAU;AACtC,UAAM,+BAA+B,QAAQ,CAAC;AAC9C,SAAK,UAAUA;AACf,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,OAAO;AACZ,SAAK,SAAS,SAAS;AACvB,SAAK,OAAO,SAAS;AACrB,QAAI,MAAM,mBAAmB;AAC3B,YAAM,kBAAkB,MAAM,KAAK,WAAW;AAAA,IAChD;AAAA,EACF;AACF;;;AChBA,IAAM,uBAAuB;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AACA,IAAM,6BAA6B,CAAC,SAAS,UAAU,KAAK;AAC5D,IAAM,uBAAuB;AAC7B,SAAS,QAAQC,UAAS,OAAO,SAAS;AACxC,MAAI,SAAS;AACX,QAAI,OAAO,UAAU,YAAY,WAAW,SAAS;AACnD,aAAO,QAAQ;AAAA,QACb,IAAI,MAAM,4DAA4D;AAAA,MACxE;AAAA,IACF;AACA,eAAW,OAAO,SAAS;AACzB,UAAI,CAAC,2BAA2B,SAAS,GAAG;AAC1C;AACF,aAAO,QAAQ;AAAA,QACb,IAAI,MAAM,uBAAuB,sCAAsC;AAAA,MACzE;AAAA,IACF;AAAA,EACF;AACA,QAAM,gBAAgB,OAAO,UAAU,WAAW,OAAO,OAAO,EAAE,MAAM,GAAG,OAAO,IAAI;AACtF,QAAM,iBAAiB,OAAO;AAAA,IAC5B;AAAA,EACF,EAAE,OAAO,CAAC,QAAQ,QAAQ;AACxB,QAAI,qBAAqB,SAAS,GAAG,GAAG;AACtC,aAAO,GAAG,IAAI,cAAc,GAAG;AAC/B,aAAO;AAAA,IACT;AACA,QAAI,CAAC,OAAO,WAAW;AACrB,aAAO,YAAY,CAAC;AAAA,IACtB;AACA,WAAO,UAAU,GAAG,IAAI,cAAc,GAAG;AACzC,WAAO;AAAA,EACT,GAAG,CAAC,CAAC;AACL,QAAM,UAAU,cAAc,WAAWA,SAAQ,SAAS,SAAS;AACnE,MAAI,qBAAqB,KAAK,OAAO,GAAG;AACtC,mBAAe,MAAM,QAAQ,QAAQ,sBAAsB,cAAc;AAAA,EAC3E;AACA,SAAOA,SAAQ,cAAc,EAAE,KAAK,CAAC,aAAa;AAChD,QAAI,SAAS,KAAK,QAAQ;AACxB,YAAM,UAAU,CAAC;AACjB,iBAAW,OAAO,OAAO,KAAK,SAAS,OAAO,GAAG;AAC/C,gBAAQ,GAAG,IAAI,SAAS,QAAQ,GAAG;AAAA,MACrC;AACA,YAAM,IAAI;AAAA,QACR;AAAA,QACA;AAAA,QACA,SAAS;AAAA,MACX;AAAA,IACF;AACA,WAAO,SAAS,KAAK;AAAA,EACvB,CAAC;AACH;;;AC1DA,SAAS,aAAaC,UAAS,aAAa;AAC1C,QAAM,aAAaA,SAAQ,SAAS,WAAW;AAC/C,QAAM,SAAS,CAAC,OAAO,YAAY;AACjC,WAAO,QAAQ,YAAY,OAAO,OAAO;AAAA,EAC3C;AACA,SAAO,OAAO,OAAO,QAAQ;AAAA,IAC3B,UAAU,aAAa,KAAK,MAAM,UAAU;AAAA,IAC5C,UAAU,WAAW;AAAA,EACvB,CAAC;AACH;;;AJNA,IAAMC,WAAU,aAAa,SAAS;AAAA,EACpC,SAAS;AAAA,IACP,cAAc,sBAAsB,WAAW,aAAa;AAAA,EAC9D;AAAA,EACA,QAAQ;AAAA,EACR,KAAK;AACP,CAAC;AAED,SAAS,kBAAkB,eAAe;AACxC,SAAO,aAAa,eAAe;AAAA,IACjC,QAAQ;AAAA,IACR,KAAK;AAAA,EACP,CAAC;AACH;", + "names": ["request", "request", "request", "graphql"] +} diff --git a/dist/node_modules/@octokit/graphql/package.json b/dist/node_modules/@octokit/graphql/package.json new file mode 100644 index 0000000..e90187b --- /dev/null +++ b/dist/node_modules/@octokit/graphql/package.json @@ -0,0 +1,48 @@ +{ + "name": "@octokit/graphql", + "version": "5.0.6", + "publishConfig": { + "access": "public" + }, + "description": "GitHub GraphQL API client for browsers and Node", + "repository": "github:octokit/graphql.js", + "keywords": [ + "octokit", + "github", + "api", + "graphql" + ], + "author": "Gregor Martynus (https://github.com/gr2m)", + "license": "MIT", + "dependencies": { + "@octokit/request": "^6.0.0", + "@octokit/types": "^9.0.0", + "universal-user-agent": "^6.0.0" + }, + "devDependencies": { + "@octokit/tsconfig": "^1.0.2", + "@types/fetch-mock": "^7.2.5", + "@types/jest": "^29.0.0", + "@types/node": "^18.0.0", + "esbuild": "^0.17.19", + "fetch-mock": "^9.0.0", + "glob": "^10.2.6", + "jest": "^29.0.0", + "prettier": "2.8.8", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "source": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/oauth-app/LICENSE 
b/dist/node_modules/@octokit/oauth-app/LICENSE new file mode 100644 index 0000000..d7d5927 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/LICENSE @@ -0,0 +1,7 @@ +MIT License Copyright (c) 2020 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/dist/node_modules/@octokit/oauth-app/README.md b/dist/node_modules/@octokit/oauth-app/README.md new file mode 100644 index 0000000..5c2251c --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/README.md @@ -0,0 +1,1242 @@ +# oauth-app.js + +> GitHub OAuth toolset for Node.js + +[![@latest](https://img.shields.io/npm/v/@octokit/oauth-app.svg)](https://www.npmjs.com/package/@octokit/oauth-app) +[![Build Status](https://github.com/octokit/oauth-app.js/workflows/Test/badge.svg)](https://github.com/octokit/oauth-app.js/actions?workflow=Test) + +
+ Table of contents + + + +- [oauth-app.js](#oauth-appjs) + - [Usage](#usage) + - [For OAuth Apps](#for-oauth-apps) + - [For GitHub Apps](#for-github-apps) + - [Examples](#examples) + - [`OAuthApp.defaults(options)`](#oauthappdefaultsoptions) + - [Constructor options](#constructor-options) + - [`app.on(eventName, eventHandler)`](#apponeventname-eventhandler) + - [`app.octokit`](#appoctokit) + - [`app.getUserOctokit(options)`](#appgetuseroctokitoptions) + - [`app.getWebFlowAuthorizationUrl(options)`](#appgetwebflowauthorizationurloptions) + - [`app.createToken(options)`](#appcreatetokenoptions) + - [For OAuth Web flow](#for-oauth-web-flow) + - [For OAuth Device flow](#for-oauth-device-flow) + - [`app.checkToken(options)`](#appchecktokenoptions) + - [`app.resetToken(options)`](#appresettokenoptions) + - [`app.refreshToken(options)`](#apprefreshtokenoptions) + - [`app.scopeToken(options)`](#appscopetokenoptions) + - [`app.deleteToken(options)`](#appdeletetokenoptions) + - [`app.deleteAuthorization(options)`](#appdeleteauthorizationoptions) + - [Middlewares](#middlewares) + - [`createNodeMiddleware(app, options)`](#createnodemiddlewareapp-options) + - [`createWebWorkerHandler(app, options)`](#createwebworkerhandlerapp-options) + - [`createAWSLambdaAPIGatewayV2Handler(app, options)`](#createawslambdaapigatewayv2handlerapp-options) + - [Build Custom Middlewares](#build-custom-middlewares) + - [Contributing](#contributing) + - [License](#license) + + + +
+ +## Usage + + + + + + +
+ +Browsers + + + +`@octokit/oauth-app` is not meant for browser usage. + +
+ +Node + + + +Install with `npm install @octokit/oauth-app` + +
+ +### For OAuth Apps + +```js +const { OAuthApp, createNodeMiddleware } = require("@octokit/oauth-app"); + +const app = new OAuthApp({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", +}); + +app.on("token", async ({ token, octokit }) => { + const { data } = await octokit.request("GET /user"); + console.log(`Token retrieved for ${data.login}`); +}); + +require("http").createServer(createNodeMiddleware(app)).listen(3000); +// can now receive user authorization callbacks at /api/github/oauth/callback +// See all endpoints at https://github.com/octokit/oauth-app.js#middlewares +``` + +### For GitHub Apps + +GitHub Apps do not support `scopes`. If the GitHub App has expiring user tokens enabled, the token used for the `octokit` instance will be refreshed automatically, and the additional refresh-releated properties will be passed to the `"token"` event handler. + +```js +const { OAuthApp, createNodeMiddleware } = require("@octokit/oauth-app"); + +const app = new OAuthApp({ + clientType: "github-app", + clientId: "lv1.1234567890abcdef", + clientSecret: "1234567890abcdef1234567890abcdef12345678", +}); + +app.on("token", async ({ token, octokit, expiresAt }) => { + const { data } = await octokit.request("GET /user"); + console.log(`Token retrieved for ${data.login}`); +}); + +require("http").createServer(createNodeMiddleware(app)).listen(3000); +// can now receive user authorization callbacks at /api/github/oauth/callback +// See all endpoints at https://github.com/octokit/oauth-app.js#middlewares +``` + +## Examples + +- Node server with static files served from `public/` folder, hosted on Glitch: +- Serverless functions, hosted on [Zeit's now](https://zeit.co/): +- Serverless functions, hosted on AWS (via [begin.com](https://begin.com/)): + +## `OAuthApp.defaults(options)` + +Create a new `OAuthApp` with custom defaults for the [constructor options](#constructor-options) + +```js +const MyOAuthApp = OAuthApp.defaults({ + Octokit: MyOctokit, +}); +const app = new MyOAuthApp({ clientId, clientSecret }); +// app.octokit is now an instance of MyOctokit +``` + +## Constructor options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ clientId + + number + + Required. Find Client ID on the app’s about page in settings. +
+ clientSecret + + number + + Required. Find Client Secret on the app’s about page in settings. +
+ clientType + + string + + Either "oauth-app" or "github-app". Defaults to "oauth-app". +
+ allowSignup + + boolean + + Sets the default value for app.getWebFlowAuthorizationUrl(options). +
+ redirectUrl + + string + + The URL in your application where users will be sent after authorization. See Redirect URLs in GitHub’s Developer Guide. +
+ defaultScopes + + Array of strings + + +Only relevant when `clientType` is set to `"oauth-app"`. + +Sets the default `scopes` value for `app.getWebFlowAuthorizationUrl(options)`. See [available scopes](https://docs.github.com/en/developers/apps/scopes-for-oauth-apps#available-scopes) + +
+ log + + object + + Used for internal logging. Defaults to console. +
+ Octokit
+
+ Constructor
+
+
+You can pass in your own Octokit constructor with custom defaults and plugins. The Octokit Constructor must use an authentication strategy that is compatible with [`@octokit/auth-oauth-app`](https://github.com/octokit/auth-oauth-app.js/#readme).
+
+For usage with GitHub Enterprise Server, set `baseUrl` to the hostname + `/api/v3`. Example:
+
+```js
+const { Octokit } = require("@octokit/core");
+new OAuthApp({
+  clientId: "1234567890abcdef1234",
+  clientSecret: "1234567890abcdef1234567890abcdef12345678",
+  Octokit: Octokit.defaults({
+    baseUrl: "https://ghe.my-company.com/api/v3",
+  }),
+});
+```
+
+Defaults to `@octokit/oauth-app`'s own `Octokit` constructor, which can be imported separately from `OAuthApp`. It's [`@octokit/core`](https://github.com/octokit/core.js) with the [`@octokit/auth-oauth-user`](https://github.com/octokit/auth-oauth-user.js/#readme) authentication strategy.
+
+ +## `app.on(eventName, eventHandler)` + +Called whenever a new OAuth access token is created for a user. It accepts two parameters, an event name and a function with one argument + +```js +app.on("token.created", async (context) => { + const { data } = await context.octokit.request("GET /user"); + app.log.info(`New token created for ${data.login}`); +}); +``` + +The `eventName` can be one of (or an array of) + +- `token.created` +- `token.reset` +- `token.refreshed` (GitHub Apps only) +- `token.scoped` (GitHub Apps only) +- `token.deleted` +- `authorization.deleted` + +All event handlers are awaited before continuing. + +`context` can have the following properties + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ property + + type + + description +
+ context.name + + string + + Name of the event. One of: token, authorization +
+ context.action + + string + + Action of the event. One of: created, reset, deleted +
+ context.authentication + + object + + +The OAuth authentication object. See https://github.com/octokit/auth-oauth-user.js/#authentication-object + +
+ context.octokit + + Octokit instance + + +Authenticated instance using the `Octokit` option passed to the constructor and [`@octokit/auth-oauth-user`](https://github.com/octokit/auth-oauth-user.js/#readme) as authentication strategy. + +The `octokit` instance is unauthenticated for `"token.deleted"` and `"authorization.deleted"` events. + +
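+
+Because `eventName` also accepts an array, a single handler can cover several events. A minimal sketch (the handler body is illustrative only):
+
+```js
+app.on(["token.created", "token.reset"], async ({ name, action, octokit }) => {
+  const { data } = await octokit.request("GET /user");
+  console.log(`${name}.${action} for ${data.login}`);
+});
+```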
+
+## `app.octokit`
+
+Octokit instance with [OAuth App authentication](https://github.com/octokit/auth-oauth-app.js/#readme). Uses the `Octokit` constructor option.
+
+## `app.getUserOctokit(options)`
+
+```js
+const octokit = await app.getUserOctokit({ code: "code123" });
+```
+
+`options` are the same as in [`app.createToken(options)`](#appcreatetokenoptions).
+
+The `octokit` instance is authorized using the user access token if the app is an OAuth App, and a user-to-server token if the app is a GitHub App. If the token expires, it will be refreshed automatically.
+
+## `app.getWebFlowAuthorizationUrl(options)`
+
+Returns an object with all options and a `url` property, which is the authorization URL. See https://github.com/octokit/oauth-methods.js/#getwebflowauthorizationurl
+
+```js
+const { url } = app.getWebFlowAuthorizationUrl({
+  state: "state123",
+  scopes: ["repo"],
+});
+```
+
+ name + + type + + description +
+ redirectUrl + + string + + The URL in your application where users will be sent after authorization. See Redirect URLs in GitHub’s Developer Guide. +
+ login + + string + + Suggests a specific account to use for signing in and authorizing the app. +
+ scopes + + array of strings + + An array of scope names (or: space-delimited list of scopes). If not provided, scope defaults to an empty list for users that have not authorized any scopes for the application. For users who have authorized scopes for the application, the user won't be shown the OAuth authorization page with the list of scopes. Instead, this step of the flow will automatically complete with the set of scopes the user has authorized for the application. For example, if a user has already performed the web flow twice and has authorized one token with user scope and another token with repo scope, a third web flow that does not provide a scope will receive a token with user and repo scope. +
+ state + + string + + An unguessable random string. It is used to protect against cross-site request forgery attacks. + Defaults to Math.random().toString(36).substr(2). +
+ allowSignup + + boolean + + Whether or not unauthenticated users will be offered an option to sign up for GitHub during the OAuth flow. The default is true. Use false in the case that a policy prohibits signups. +
+ +## `app.createToken(options)` + +The method can be used for both, the [OAuth Web Flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#web-application-flow) and the [OAuth Device Flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#device-flow). + +### For OAuth Web flow + +For the web flow, you have to pass the `code` from URL redirect described in [step 2](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#2-users-are-redirected-back-to-your-site-by-github). + +```js +const { token } = await app.createToken({ + code: "code123", +}); +``` + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ code + + string + + Required. Pass the code that was passed as ?code query parameter in the authorization redirect URL. +
+ state + + string + + Required. Pass the state that was passed as ?state query parameter in the authorization redirect URL. +
+
+Resolves with a [user authentication object](https://github.com/octokit/auth-oauth-app.js#authentication-object).
+
+### For OAuth Device flow
+
+For the device flow, you have to pass an `onVerification` callback function, which prompts the user to enter the received user code at the received authorization URL. A complete call is sketched after the options table below.
+
+ name + + type + + description +
+ onVerification
+
+ function
+
+
+**Required**. A function that is called once the device and user codes were retrieved.
+
+The `onVerification()` callback can be used to pause until the user completes step 2, which might result in a better user experience.
+
+```js
+const auth = createOAuthUserAuth({
+  clientId: "1234567890abcdef1234",
+  clientSecret: "1234567890abcdef1234567890abcdef12345678",
+  // `async` so the callback can await user input; `prompt` stands in for your own input helper
+  async onVerification(verification) {
+    console.log("Open %s", verification.verification_uri);
+    console.log("Enter code: %s", verification.user_code);
+
+    await prompt("press enter when you are ready to continue");
+  },
+});
+```
+
+ scopes + + array of strings + + +Only relevant if `app.type` is `"oauth-app"`. Scopes are not supported by GitHub apps. + +Array of OAuth scope names that the user access token should be granted. Defaults to no scopes (`[]`). + +
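+
+Putting it together, a minimal device-flow sketch (assuming `app.createToken()` resolves with `{ authentication }` as described below):
+
+```js
+const { authentication } = await app.createToken({
+  async onVerification(verification) {
+    // Ask the user to open the verification URL and enter the one-time code
+    console.log("Open %s", verification.verification_uri);
+    console.log("Enter code: %s", verification.user_code);
+  },
+});
+console.log("Token created: %s", authentication.token);
+```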
+
+Resolves with a [user authentication object](https://github.com/octokit/auth-oauth-app.js#authentication-object).
+
+## `app.checkToken(options)`
+
+```js
+try {
+  // rename the destructured `app` property so it does not shadow the OAuthApp instance
+  const { created_at, app: checkedApp, user } = await app.checkToken({ token });
+  console.log(
+    `token valid, created on %s by %s for %s`,
+    created_at,
+    user.login,
+    checkedApp.name
+  );
+} catch (error) {
+  // token invalid or request error
+}
+```
+
+ name + + type + + description +
+ token + + string + + Required. +
+ +Resolves with response body from ["Check a token" request](https://docs.github.com/en/rest/reference/apps#check-a-token) with an additional `authentication` property which is a [user authentication object](https://github.com/octokit/auth-oauth-app.js#authentication-object). + +## `app.resetToken(options)` + +```js +const { data, authentication } = await app.resetToken({ + token: "token123", +}); +// "token123" is no longer valid. Use `token` instead +``` + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ token + + string + + Required. +
+ +Resolves with response body from ["Reset a token" request](https://docs.github.com/en/rest/reference/apps#reset-a-token) with an additional `authentication` property which is a [user authentication object](https://github.com/octokit/auth-oauth-app.js#authentication-object). + +## `app.refreshToken(options)` + +Expiring tokens are only supported by GitHub Apps, and only if expiring user tokens are enabled. + +```js +const { data, authentication } = await app.refreshToken({ + refreshToken: "refreshtoken123", +}); +``` + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ refreshToken + + string + + Required. +
+ +Resolves with response body from ["Renewing a user token with a refresh token" request](https://docs.github.com/en/developers/apps/refreshing-user-to-server-access-tokens#renewing-a-user-token-with-a-refresh-token) (JSON) with an additional `authentication` property which is a [user authentication object](https://github.com/octokit/auth-oauth-app.js#authentication-object). + +## `app.scopeToken(options)` + +Scoping a token is only supported by GitHub Apps. "Scoping" in this context means to limit access to a selected installation, with a subset of repositories and permissions. + +```js +const { data, authentication } = await app.scopeToken({ + clientType: "github-app", + clientId: "lv1.1234567890abcdef", + clientSecret: "1234567890abcdef12347890abcdef12345678", + token: "usertoken123", + target: "octokit", + repositories: ["oauth-app.js"], + permissions: { + issues: "write", + }, +}); +``` + +Options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ target + + string + + Required unless targetId is set. The name of the user or organization to scope the user-to-server access token to. +
+ targetId + + integer + + Required unless target is set. The ID of the user or organization to scope the user-to-server access token to. +
+ repositories + + array of strings + + The list of repository names to scope the user-to-server access token to. repositories may not be specified if repository_ids is specified. +
+ repository_ids
+
+ array of integers
+
+ The list of repository IDs to scope the user-to-server access token to. repository_ids may not be specified if repositories is specified.
+
+ permissions + + object + + The permissions granted to the user-to-server access token. See GitHub App Permissions. +
+ +Resolves with response body from ["Create a scoped access token" request](https://docs.github.com/en/rest/reference/apps#create-a-scoped-access-token) with an additional `authentication` property which is a [user authentication object](https://github.com/octokit/auth-oauth-app.js#authentication-object). + +## `app.deleteToken(options)` + +```js +await app.deleteToken({ + token: "token123", +}); +// "token123" is no longer valid. +``` + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ token + + string + + Required. +
+ +Resolves with response body from ["Delete a token" request](https://docs.github.com/en/rest/reference/apps#delete-an-app-token). + +## `app.deleteAuthorization(options)` + +```js +await app.deleteAuthorization({ + token: "token123", +}); +// "token123" is no longer valid, and no tokens can be created until the app gets re-authorized. +``` + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ token + + string + + Required. +
+ +Resolves with response body from ["Delete an app authorization" request](https://docs.github.com/en/rest/reference/apps#delete-an-app-authorization). + +## Middlewares + +A middleware is a method or set of methods to handle requests for common environments. + +By default, all middlewares expose the following routes + +| Route | Route Description | +| --------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `GET /api/github/oauth/login` | Redirects to GitHub's authorization endpoint. Accepts optional `?state` and `?scopes` query parameters. `?scopes` is a comma-separated list of [supported OAuth scope names](https://docs.github.com/en/developers/apps/scopes-for-oauth-apps#available-scopes) | +| `GET /api/github/oauth/callback` | The client's redirect endpoint. This is where the `token` event gets triggered | +| `POST /api/github/oauth/token` | Exchange an authorization code for an OAuth Access token. If successful, the `token` event gets triggered. | +| `GET /api/github/oauth/token` | Check if token is valid. Must authenticate using token in `Authorization` header. Uses GitHub's [`POST /applications/{client_id}/token`](https://docs.github.com/en/rest/reference/apps#check-a-token) endpoint | +| `PATCH /api/github/oauth/token` | Resets a token (invalidates current one, returns new token). Must authenticate using token in `Authorization` header. Uses GitHub's [`PATCH /applications/{client_id}/token`](https://docs.github.com/en/rest/reference/apps#reset-a-token) endpoint. | +| `PATCH /api/github/oauth/refresh-token` | Refreshes an expiring token (invalidates current one, returns new access token and refresh token). Must authenticate using token in `Authorization` header. Uses GitHub's [`POST https://github.com/login/oauth/access_token`](https://docs.github.com/en/developers/apps/refreshing-user-to-server-access-tokens#renewing-a-user-token-with-a-refresh-token) OAuth endpoint. | +| `POST /api/github/oauth/token/scoped` | Creates a scoped token (does not invalidate the current one). Must authenticate using token in `Authorization` header. Uses GitHub's [`POST /applications/{client_id}/token/scoped`](https://docs.github.com/en/rest/reference/apps#create-a-scoped-access-token) endpoint. | +| `DELETE /api/github/oauth/token` | Invalidates current token, basically the equivalent of a logout. Must authenticate using token in `Authorization` header. | +| `DELETE /api/github/oauth/grant` | Revokes the user's grant, basically the equivalent of an uninstall. must authenticate using token in `Authorization` header. | + +### `createNodeMiddleware(app, options)` + +Native http server middleware for Node.js + +```js +const { OAuthApp, createNodeMiddleware } = require("@octokit/oauth-app"); +const app = new OAuthApp({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", +}); + +const middleware = createNodeMiddleware(app, { + pathPrefix: "/api/github/oauth", +}); + +require("http").createServer(middleware).listen(3000); +// can now receive user authorization callbacks at /api/github/oauth/callback +``` + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ app + + OAuthApp instance + + Required. +
+ options.pathPrefix + + string + + +All exposed paths will be prefixed with the provided prefix. Defaults to `"/api/github/oauth"` + +
+ options.onUnhandledRequest __deprecated__ + + function + + +Defaults to + +```js +function onUnhandledRequest(request, response) { + response.writeHead(404, { + "content-type": "application/json", + }); + response.end( + JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}`, + }) + ); +} +``` + +
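+
+For illustration, a minimal client-side sketch of exchanging an authorization code through the middleware's `POST /api/github/oauth/token` route (assuming the server above listens on port 3000 with the default path prefix, and that a global `fetch` is available):
+
+```js
+const response = await fetch("http://localhost:3000/api/github/oauth/token", {
+  method: "POST",
+  headers: { "content-type": "application/json" },
+  body: JSON.stringify({ code: "code123" }),
+});
+// the middleware responds with the createToken result (minus the client secret)
+const { authentication } = await response.json();
+console.log("token: %s", authentication.token);
+```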
+ +### `createWebWorkerHandler(app, options)` + +Event handler for web worker environments (Cloudflare workers or Deno). + +```js +// worker.js +import { OAuthApp, createWebWorkerHandler } from "@octokit/oauth-app"; +const app = new OAuthApp({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", +}); + +const handleRequest = createWebWorkerHandler(app, { + pathPrefix: "/api/github/oauth", +}); + +addEventListener("fetch", (event) => { + event.respondWith(handleRequest(event.request)); +}); +// can now receive user authorization callbacks at /api/github/oauth/callback +``` + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ app + + OAuthApp instance + + Required. +
+ options.pathPrefix + + string + + +All exposed paths will be prefixed with the provided prefix. Defaults to `"/api/github/oauth"` + +
+ options.onUnhandledRequest __deprecated__ + + function + Defaults to + +```js +function onUnhandledRequest(request) { + return new Response( + JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}`, + }), + { + status: 404, + headers: { "content-type": "application/json" }, + } + ); +} +``` + +
+ +### `createAWSLambdaAPIGatewayV2Handler(app, options)` + +Event handler for AWS Lambda using API Gateway V2 HTTP integration. + +```js +// worker.js +import { + OAuthApp, + createAWSLambdaAPIGatewayV2Handler, +} from "@octokit/oauth-app"; +const app = new OAuthApp({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef1234567890abcdef12345678", +}); + +export const handler = createAWSLambdaAPIGatewayV2Handler(app, { + pathPrefix: "/api/github/oauth", +}); + +// can now receive user authorization callbacks at /api/github/oauth/callback +``` + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ app + + OAuthApp instance + + Required. +
+ options.pathPrefix + + string + + +All exposed paths will be prefixed with the provided prefix. Defaults to `"/api/github/oauth"` + +
+ options.onUnhandledRequest __deprecated__
+
+ function
+ Defaults to:
+
+```js
+function onUnhandledRequest(event) {
+  // returns an API Gateway V2 response object
+  return {
+    statusCode: 404,
+    headers: { "content-type": "application/json" },
+    body: JSON.stringify({
+      error: `Unknown route: [METHOD] [URL]`,
+    }),
+  };
+}
+```
+
+
+### Build Custom Middlewares
+
+When the above middlewares do not meet your needs, you can build your own
+using the exported `handleRequest` function.
+
+The [`handleRequest`](./src/middleware/handle-request.ts) function is an abstract HTTP handler that accepts an `OctokitRequest` and returns an `OctokitResponse` if the request matches any predefined route.
+
+> Different environments (e.g., Node.js, Cloudflare Workers, Deno, etc.) expose different APIs when processing HTTP requests (e.g., [`IncomingMessage`](https://nodejs.org/api/http.html#http_class_http_incomingmessage) for Node.js, [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) for Cloudflare Workers, etc.). Two HTTP-related types ([`OctokitRequest` and `OctokitResponse`](./src/middleware/types.ts)) are generalized to make an abstract HTTP handler possible.
+
+To share the behavior and capability with the existing Node.js middleware (and be compatible with the [OAuth user authentication strategy in the browser](https://github.com/octokit/auth-oauth-user-client.js)), it is better to implement your HTTP handler/middleware on top of the `handleRequest` function.
+
+The `handleRequest` function takes three parameters (a minimal sketch follows the table below):
+
+ name + + type + + description +
+ app + + OAuthApp instance + + Required. +
+ options.pathPrefix + + string + + +All exposed paths will be prefixed with the provided prefix. Defaults to `"/api/github/oauth"` + +
+ request + + OctokitRequest + + Generalized HTTP request in `OctokitRequest` type. +
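+
+For example, a custom handler for a Fetch-API-style environment might look like the following sketch. The `Request`/`Response` types, the handler name, and the 404 fallback are assumptions; adapt the parse/send helpers to your environment:
+
+```js
+import { OAuthApp, handleRequest } from "@octokit/oauth-app";
+
+const app = new OAuthApp({
+  clientType: "oauth-app",
+  clientId: "1234567890abcdef1234",
+  clientSecret: "1234567890abcdef1234567890abcdef12345678",
+});
+
+// 1. parse the environment's request into an OctokitRequest
+function parseRequest(request) {
+  return {
+    method: request.method,
+    url: request.url,
+    headers: Object.fromEntries(request.headers.entries()),
+    text: () => request.text(),
+  };
+}
+
+// 2. render an OctokitResponse in the environment's response type
+function sendResponse(octokitResponse) {
+  return new Response(octokitResponse.text, {
+    status: octokitResponse.status,
+    headers: octokitResponse.headers,
+  });
+}
+
+// 3. parse, delegate to handleRequest, render (or fall through)
+export async function myOAuthHandler(request) {
+  const octokitResponse = await handleRequest(
+    app,
+    { pathPrefix: "/api/github/oauth" },
+    parseRequest(request)
+  );
+  return octokitResponse
+    ? sendResponse(octokitResponse)
+    : new Response("Not found", { status: 404 });
+}
+```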
+ +Implementing an HTTP handler/middleware for a certain environment involves three steps: + +1. Write a function to parse the HTTP request (e.g., `IncomingMessage` in Node.js) into an `OctokitRequest` object. See [`node/parse-request.ts`](.src/middleware/node/parse-request.ts) for reference. +2. Write a function to render an `OctokitResponse` object (e.g., as `ServerResponse` in Node.js). See [`node/send-response.ts`](.src/middleware/node/send-response.ts) for reference. +3. Expose an HTTP handler/middleware in the dialect of the environment which performs three steps: + 1. Parse the HTTP request using (1). + 2. Process the `OctokitRequest` object using `handleRequest`. + 3. Render the `OctokitResponse` object using (2). + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/oauth-app/dist-node/index.js b/dist/node_modules/@octokit/oauth-app/dist-node/index.js new file mode 100644 index 0000000..6ea7a87 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-node/index.js @@ -0,0 +1,856 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + OAuthApp: () => OAuthApp, + createAWSLambdaAPIGatewayV2Handler: () => createAWSLambdaAPIGatewayV2Handler, + createCloudflareHandler: () => createCloudflareHandler, + createNodeMiddleware: () => createNodeMiddleware, + createWebWorkerHandler: () => createWebWorkerHandler, + handleRequest: () => handleRequest +}); +module.exports = __toCommonJS(dist_src_exports); +var import_auth_oauth_app = require("@octokit/auth-oauth-app"); + +// pkg/dist-src/version.js +var VERSION = "4.2.4"; + +// pkg/dist-src/add-event-handler.js +function addEventHandler(state, eventName, eventHandler) { + if (Array.isArray(eventName)) { + for (const singleEventName of eventName) { + addEventHandler(state, singleEventName, eventHandler); + } + return; + } + if (!state.eventHandlers[eventName]) { + state.eventHandlers[eventName] = []; + } + state.eventHandlers[eventName].push(eventHandler); +} + +// pkg/dist-src/oauth-app-octokit.js +var import_core = require("@octokit/core"); +var import_universal_user_agent = require("universal-user-agent"); +var OAuthAppOctokit = import_core.Octokit.defaults({ + userAgent: `octokit-oauth-app.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` +}); + +// pkg/dist-src/methods/get-user-octokit.js +var import_auth_oauth_user = require("@octokit/auth-oauth-user"); + +// pkg/dist-src/emit-event.js +async function emitEvent(state, context) { + const { name, action } = context; + if (state.eventHandlers[`${name}.${action}`]) { + for (const eventHandler of state.eventHandlers[`${name}.${action}`]) { + await eventHandler(context); + } + } + if (state.eventHandlers[name]) { + for (const eventHandler of state.eventHandlers[name]) { + await eventHandler(context); + } + } +} + +// pkg/dist-src/methods/get-user-octokit.js +async function getUserOctokitWithState(state, options) { + return state.octokit.auth({ + type: "oauth-user", + ...options, + async factory(options2) { + const octokit = new state.Octokit({ + authStrategy: import_auth_oauth_user.createOAuthUserAuth, + auth: options2 + }); + const authentication = await octokit.auth({ + type: "get" + }); + await emitEvent(state, { + name: "token", + action: "created", + token: authentication.token, + scopes: authentication.scopes, + authentication, + octokit + }); + return octokit; + } + }); +} + +// pkg/dist-src/methods/get-web-flow-authorization-url.js +var OAuthMethods = __toESM(require("@octokit/oauth-methods")); +function getWebFlowAuthorizationUrlWithState(state, options) { + const optionsWithDefaults = { + clientId: state.clientId, + request: state.octokit.request, + ...options, + allowSignup: state.allowSignup ?? options.allowSignup, + redirectUrl: options.redirectUrl ?? state.redirectUrl, + scopes: options.scopes ?? 
state.defaultScopes + }; + return OAuthMethods.getWebFlowAuthorizationUrl({ + clientType: state.clientType, + ...optionsWithDefaults + }); +} + +// pkg/dist-src/methods/create-token.js +var OAuthAppAuth = __toESM(require("@octokit/auth-oauth-app")); +async function createTokenWithState(state, options) { + const authentication = await state.octokit.auth({ + type: "oauth-user", + ...options + }); + await emitEvent(state, { + name: "token", + action: "created", + token: authentication.token, + scopes: authentication.scopes, + authentication, + octokit: new state.Octokit({ + authStrategy: OAuthAppAuth.createOAuthUserAuth, + auth: { + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: authentication.token, + scopes: authentication.scopes, + refreshToken: authentication.refreshToken, + expiresAt: authentication.expiresAt, + refreshTokenExpiresAt: authentication.refreshTokenExpiresAt + } + }) + }); + return { authentication }; +} + +// pkg/dist-src/methods/check-token.js +var OAuthMethods2 = __toESM(require("@octokit/oauth-methods")); +async function checkTokenWithState(state, options) { + const result = await OAuthMethods2.checkToken({ + // @ts-expect-error not worth the extra code to appease TS + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.octokit.request, + ...options + }); + Object.assign(result.authentication, { type: "token", tokenType: "oauth" }); + return result; +} + +// pkg/dist-src/methods/reset-token.js +var OAuthMethods3 = __toESM(require("@octokit/oauth-methods")); +var import_auth_oauth_user2 = require("@octokit/auth-oauth-user"); +async function resetTokenWithState(state, options) { + const optionsWithDefaults = { + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.octokit.request, + ...options + }; + if (state.clientType === "oauth-app") { + const response2 = await OAuthMethods3.resetToken({ + clientType: "oauth-app", + ...optionsWithDefaults + }); + const authentication2 = Object.assign(response2.authentication, { + type: "token", + tokenType: "oauth" + }); + await emitEvent(state, { + name: "token", + action: "reset", + token: response2.authentication.token, + scopes: response2.authentication.scopes || void 0, + authentication: authentication2, + octokit: new state.Octokit({ + authStrategy: import_auth_oauth_user2.createOAuthUserAuth, + auth: { + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: response2.authentication.token, + scopes: response2.authentication.scopes + } + }) + }); + return { ...response2, authentication: authentication2 }; + } + const response = await OAuthMethods3.resetToken({ + clientType: "github-app", + ...optionsWithDefaults + }); + const authentication = Object.assign(response.authentication, { + type: "token", + tokenType: "oauth" + }); + await emitEvent(state, { + name: "token", + action: "reset", + token: response.authentication.token, + authentication, + octokit: new state.Octokit({ + authStrategy: import_auth_oauth_user2.createOAuthUserAuth, + auth: { + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: response.authentication.token + } + }) + }); + return { ...response, authentication }; +} + +// pkg/dist-src/methods/refresh-token.js +var OAuthMethods4 = __toESM(require("@octokit/oauth-methods")); +var import_auth_oauth_user3 = require("@octokit/auth-oauth-user"); +async function 
refreshTokenWithState(state, options) { + if (state.clientType === "oauth-app") { + throw new Error( + "[@octokit/oauth-app] app.refreshToken() is not supported for OAuth Apps" + ); + } + const response = await OAuthMethods4.refreshToken({ + clientType: "github-app", + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.octokit.request, + refreshToken: options.refreshToken + }); + const authentication = Object.assign(response.authentication, { + type: "token", + tokenType: "oauth" + }); + await emitEvent(state, { + name: "token", + action: "refreshed", + token: response.authentication.token, + authentication, + octokit: new state.Octokit({ + authStrategy: import_auth_oauth_user3.createOAuthUserAuth, + auth: { + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: response.authentication.token + } + }) + }); + return { ...response, authentication }; +} + +// pkg/dist-src/methods/scope-token.js +var OAuthMethods5 = __toESM(require("@octokit/oauth-methods")); +var import_auth_oauth_user4 = require("@octokit/auth-oauth-user"); +async function scopeTokenWithState(state, options) { + if (state.clientType === "oauth-app") { + throw new Error( + "[@octokit/oauth-app] app.scopeToken() is not supported for OAuth Apps" + ); + } + const response = await OAuthMethods5.scopeToken({ + clientType: "github-app", + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.octokit.request, + ...options + }); + const authentication = Object.assign(response.authentication, { + type: "token", + tokenType: "oauth" + }); + await emitEvent(state, { + name: "token", + action: "scoped", + token: response.authentication.token, + authentication, + octokit: new state.Octokit({ + authStrategy: import_auth_oauth_user4.createOAuthUserAuth, + auth: { + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: response.authentication.token + } + }) + }); + return { ...response, authentication }; +} + +// pkg/dist-src/methods/delete-token.js +var OAuthMethods6 = __toESM(require("@octokit/oauth-methods")); +var import_auth_unauthenticated = require("@octokit/auth-unauthenticated"); +async function deleteTokenWithState(state, options) { + const optionsWithDefaults = { + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.octokit.request, + ...options + }; + const response = state.clientType === "oauth-app" ? await OAuthMethods6.deleteToken({ + clientType: "oauth-app", + ...optionsWithDefaults + }) : ( + // istanbul ignore next + await OAuthMethods6.deleteToken({ + clientType: "github-app", + ...optionsWithDefaults + }) + ); + await emitEvent(state, { + name: "token", + action: "deleted", + token: options.token, + octokit: new state.Octokit({ + authStrategy: import_auth_unauthenticated.createUnauthenticatedAuth, + auth: { + reason: `Handling "token.deleted" event. The access for the token has been revoked.` + } + }) + }); + return response; +} + +// pkg/dist-src/methods/delete-authorization.js +var OAuthMethods7 = __toESM(require("@octokit/oauth-methods")); +var import_auth_unauthenticated2 = require("@octokit/auth-unauthenticated"); +async function deleteAuthorizationWithState(state, options) { + const optionsWithDefaults = { + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.octokit.request, + ...options + }; + const response = state.clientType === "oauth-app" ? 
await OAuthMethods7.deleteAuthorization({ + clientType: "oauth-app", + ...optionsWithDefaults + }) : ( + // istanbul ignore next + await OAuthMethods7.deleteAuthorization({ + clientType: "github-app", + ...optionsWithDefaults + }) + ); + await emitEvent(state, { + name: "token", + action: "deleted", + token: options.token, + octokit: new state.Octokit({ + authStrategy: import_auth_unauthenticated2.createUnauthenticatedAuth, + auth: { + reason: `Handling "token.deleted" event. The access for the token has been revoked.` + } + }) + }); + await emitEvent(state, { + name: "authorization", + action: "deleted", + token: options.token, + octokit: new state.Octokit({ + authStrategy: import_auth_unauthenticated2.createUnauthenticatedAuth, + auth: { + reason: `Handling "authorization.deleted" event. The access for the app has been revoked.` + } + }) + }); + return response; +} + +// pkg/dist-src/middleware/handle-request.js +var import_fromentries = __toESM(require("fromentries")); +async function handleRequest(app, { pathPrefix = "/api/github/oauth" }, request) { + var _a, _b, _c, _d, _e, _f; + if (request.method === "OPTIONS") { + return { + status: 200, + headers: { + "access-control-allow-origin": "*", + "access-control-allow-methods": "*", + "access-control-allow-headers": "Content-Type, User-Agent, Authorization" + } + }; + } + const { pathname } = new URL(request.url, "http://localhost"); + const route = [request.method, pathname].join(" "); + const routes = { + getLogin: `GET ${pathPrefix}/login`, + getCallback: `GET ${pathPrefix}/callback`, + createToken: `POST ${pathPrefix}/token`, + getToken: `GET ${pathPrefix}/token`, + patchToken: `PATCH ${pathPrefix}/token`, + patchRefreshToken: `PATCH ${pathPrefix}/refresh-token`, + scopeToken: `POST ${pathPrefix}/token/scoped`, + deleteToken: `DELETE ${pathPrefix}/token`, + deleteGrant: `DELETE ${pathPrefix}/grant` + }; + if (!Object.values(routes).includes(route)) { + return null; + } + let json; + try { + const text = await request.text(); + json = text ? JSON.parse(text) : {}; + } catch (error) { + return { + status: 400, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify({ + error: "[@octokit/oauth-app] request error" + }) + }; + } + const { searchParams } = new URL(request.url, "http://localhost"); + const query = (0, import_fromentries.default)(searchParams); + const headers = request.headers; + try { + if (route === routes.getLogin) { + const { url } = app.getWebFlowAuthorizationUrl({ + state: query.state, + scopes: query.scopes ? query.scopes.split(",") : void 0, + allowSignup: query.allowSignup ? query.allowSignup === "true" : void 0, + redirectUrl: query.redirectUrl + }); + return { status: 302, headers: { location: url } }; + } + if (route === routes.getCallback) { + if (query.error) { + throw new Error( + `[@octokit/oauth-app] ${query.error} ${query.error_description}` + ); + } + if (!query.code) { + throw new Error('[@octokit/oauth-app] "code" parameter is required'); + } + const { + authentication: { token: token2 } + } = await app.createToken({ + code: query.code + }); + return { + status: 200, + headers: { + "content-type": "text/html" + }, + text: `

Token created successfully

+ +

Your token is: ${token2}. Copy it now as it cannot be shown again.

` + }; + } + if (route === routes.createToken) { + const { code, redirectUrl } = json; + if (!code) { + throw new Error('[@octokit/oauth-app] "code" parameter is required'); + } + const result = await app.createToken({ + code, + redirectUrl + }); + delete result.authentication.clientSecret; + return { + status: 201, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify(result) + }; + } + if (route === routes.getToken) { + const token2 = (_a = headers.authorization) == null ? void 0 : _a.substr("token ".length); + if (!token2) { + throw new Error( + '[@octokit/oauth-app] "Authorization" header is required' + ); + } + const result = await app.checkToken({ + token: token2 + }); + delete result.authentication.clientSecret; + return { + status: 200, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify(result) + }; + } + if (route === routes.patchToken) { + const token2 = (_b = headers.authorization) == null ? void 0 : _b.substr("token ".length); + if (!token2) { + throw new Error( + '[@octokit/oauth-app] "Authorization" header is required' + ); + } + const result = await app.resetToken({ token: token2 }); + delete result.authentication.clientSecret; + return { + status: 200, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify(result) + }; + } + if (route === routes.patchRefreshToken) { + const token2 = (_c = headers.authorization) == null ? void 0 : _c.substr("token ".length); + if (!token2) { + throw new Error( + '[@octokit/oauth-app] "Authorization" header is required' + ); + } + const { refreshToken: refreshToken2 } = json; + if (!refreshToken2) { + throw new Error( + "[@octokit/oauth-app] refreshToken must be sent in request body" + ); + } + const result = await app.refreshToken({ refreshToken: refreshToken2 }); + delete result.authentication.clientSecret; + return { + status: 200, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify(result) + }; + } + if (route === routes.scopeToken) { + const token2 = (_d = headers.authorization) == null ? void 0 : _d.substr("token ".length); + if (!token2) { + throw new Error( + '[@octokit/oauth-app] "Authorization" header is required' + ); + } + const result = await app.scopeToken({ + token: token2, + ...json + }); + delete result.authentication.clientSecret; + return { + status: 200, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify(result) + }; + } + if (route === routes.deleteToken) { + const token2 = (_e = headers.authorization) == null ? void 0 : _e.substr("token ".length); + if (!token2) { + throw new Error( + '[@octokit/oauth-app] "Authorization" header is required' + ); + } + await app.deleteToken({ + token: token2 + }); + return { + status: 204, + headers: { "access-control-allow-origin": "*" } + }; + } + const token = (_f = headers.authorization) == null ? 
void 0 : _f.substr("token ".length); + if (!token) { + throw new Error( + '[@octokit/oauth-app] "Authorization" header is required' + ); + } + await app.deleteAuthorization({ + token + }); + return { + status: 204, + headers: { "access-control-allow-origin": "*" } + }; + } catch (error) { + return { + status: 400, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify({ error: error.message }) + }; + } +} + +// pkg/dist-src/middleware/node/parse-request.js +function parseRequest(request) { + const { method, url, headers } = request; + async function text() { + const text2 = await new Promise((resolve, reject) => { + let bodyChunks = []; + request.on("error", reject).on("data", (chunk) => bodyChunks.push(chunk)).on("end", () => resolve(Buffer.concat(bodyChunks).toString())); + }); + return text2; + } + return { method, url, headers, text }; +} + +// pkg/dist-src/middleware/node/send-response.js +function sendResponse(octokitResponse, response) { + response.writeHead(octokitResponse.status, octokitResponse.headers); + response.end(octokitResponse.text); +} + +// pkg/dist-src/middleware/on-unhandled-request-default.js +function onUnhandledRequestDefault(request) { + return { + status: 404, + headers: { "content-type": "application/json" }, + text: JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}` + }) + }; +} + +// pkg/dist-src/middleware/node/index.js +function onUnhandledRequestDefaultNode(request, response) { + const octokitRequest = parseRequest(request); + const octokitResponse = onUnhandledRequestDefault(octokitRequest); + sendResponse(octokitResponse, response); +} +function createNodeMiddleware(app, { + pathPrefix, + onUnhandledRequest +} = {}) { + if (onUnhandledRequest) { + app.octokit.log.warn( + "[@octokit/oauth-app] `onUnhandledRequest` is deprecated and will be removed from the next major version." + ); + } + onUnhandledRequest ?? (onUnhandledRequest = onUnhandledRequestDefaultNode); + return async function(request, response, next) { + const octokitRequest = parseRequest(request); + const octokitResponse = await handleRequest( + app, + { pathPrefix }, + octokitRequest + ); + if (octokitResponse) { + sendResponse(octokitResponse, response); + } else if (typeof next === "function") { + next(); + } else { + onUnhandledRequest(request, response); + } + }; +} + +// pkg/dist-src/middleware/web-worker/parse-request.js +function parseRequest2(request) { + const headers = Object.fromEntries(request.headers.entries()); + return { + method: request.method, + url: request.url, + headers, + text: () => request.text() + }; +} + +// pkg/dist-src/middleware/web-worker/send-response.js +function sendResponse2(octokitResponse) { + return new Response(octokitResponse.text, { + status: octokitResponse.status, + headers: octokitResponse.headers + }); +} + +// pkg/dist-src/middleware/web-worker/index.js +async function onUnhandledRequestDefaultWebWorker(request) { + const octokitRequest = parseRequest2(request); + const octokitResponse = onUnhandledRequestDefault(octokitRequest); + return sendResponse2(octokitResponse); +} +function createWebWorkerHandler(app, { + pathPrefix, + onUnhandledRequest +} = {}) { + if (onUnhandledRequest) { + app.octokit.log.warn( + "[@octokit/oauth-app] `onUnhandledRequest` is deprecated and will be removed from the next major version." + ); + } + onUnhandledRequest ?? 
(onUnhandledRequest = onUnhandledRequestDefaultWebWorker); + return async function(request) { + const octokitRequest = parseRequest2(request); + const octokitResponse = await handleRequest( + app, + { pathPrefix }, + octokitRequest + ); + return octokitResponse ? sendResponse2(octokitResponse) : await onUnhandledRequest(request); + }; +} +function createCloudflareHandler(...args) { + args[0].octokit.log.warn( + "[@octokit/oauth-app] `createCloudflareHandler` is deprecated, use `createWebWorkerHandler` instead" + ); + return createWebWorkerHandler(...args); +} + +// pkg/dist-src/middleware/aws-lambda/api-gateway-v2-parse-request.js +function parseRequest3(request) { + const { method } = request.requestContext.http; + let url = request.rawPath; + const { stage } = request.requestContext; + if (url.startsWith("/" + stage)) + url = url.substring(stage.length + 1); + if (request.rawQueryString) + url += "?" + request.rawQueryString; + const headers = request.headers; + const text = async () => request.body || ""; + return { method, url, headers, text }; +} + +// pkg/dist-src/middleware/aws-lambda/api-gateway-v2-send-response.js +function sendResponse3(octokitResponse) { + return { + statusCode: octokitResponse.status, + headers: octokitResponse.headers, + body: octokitResponse.text + }; +} + +// pkg/dist-src/middleware/aws-lambda/api-gateway-v2.js +async function onUnhandledRequestDefaultAWSAPIGatewayV2(event) { + const request = parseRequest3(event); + const response = onUnhandledRequestDefault(request); + return sendResponse3(response); +} +function createAWSLambdaAPIGatewayV2Handler(app, { + pathPrefix, + onUnhandledRequest +} = {}) { + if (onUnhandledRequest) { + app.octokit.log.warn( + "[@octokit/oauth-app] `onUnhandledRequest` is deprecated and will be removed from the next major version." + ); + } + onUnhandledRequest ?? (onUnhandledRequest = onUnhandledRequestDefaultAWSAPIGatewayV2); + return async function(event) { + const request = parseRequest3(event); + const response = await handleRequest(app, { pathPrefix }, request); + return response ? 
sendResponse3(response) : onUnhandledRequest(event); + }; +} + +// pkg/dist-src/index.js +var OAuthApp = class { + static defaults(defaults) { + const OAuthAppWithDefaults = class extends this { + constructor(...args) { + super({ + ...defaults, + ...args[0] + }); + } + }; + return OAuthAppWithDefaults; + } + constructor(options) { + const Octokit2 = options.Octokit || OAuthAppOctokit; + this.type = options.clientType || "oauth-app"; + const octokit = new Octokit2({ + authStrategy: import_auth_oauth_app.createOAuthAppAuth, + auth: { + clientType: this.type, + clientId: options.clientId, + clientSecret: options.clientSecret + } + }); + const state = { + clientType: this.type, + clientId: options.clientId, + clientSecret: options.clientSecret, + // @ts-expect-error defaultScopes not permitted for GitHub Apps + defaultScopes: options.defaultScopes || [], + allowSignup: options.allowSignup, + baseUrl: options.baseUrl, + redirectUrl: options.redirectUrl, + log: options.log, + Octokit: Octokit2, + octokit, + eventHandlers: {} + }; + this.on = addEventHandler.bind(null, state); + this.octokit = octokit; + this.getUserOctokit = getUserOctokitWithState.bind(null, state); + this.getWebFlowAuthorizationUrl = getWebFlowAuthorizationUrlWithState.bind( + null, + state + ); + this.createToken = createTokenWithState.bind( + null, + state + ); + this.checkToken = checkTokenWithState.bind( + null, + state + ); + this.resetToken = resetTokenWithState.bind( + null, + state + ); + this.refreshToken = refreshTokenWithState.bind( + null, + state + ); + this.scopeToken = scopeTokenWithState.bind( + null, + state + ); + this.deleteToken = deleteTokenWithState.bind(null, state); + this.deleteAuthorization = deleteAuthorizationWithState.bind(null, state); + } +}; +OAuthApp.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + OAuthApp, + createAWSLambdaAPIGatewayV2Handler, + createCloudflareHandler, + createNodeMiddleware, + createWebWorkerHandler, + handleRequest +}); diff --git a/dist/node_modules/@octokit/oauth-app/dist-node/index.js.map b/dist/node_modules/@octokit/oauth-app/dist-node/index.js.map new file mode 100644 index 0000000..634ef5b --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js", "../dist-src/add-event-handler.js", "../dist-src/oauth-app-octokit.js", "../dist-src/methods/get-user-octokit.js", "../dist-src/emit-event.js", "../dist-src/methods/get-web-flow-authorization-url.js", "../dist-src/methods/create-token.js", "../dist-src/methods/check-token.js", "../dist-src/methods/reset-token.js", "../dist-src/methods/refresh-token.js", "../dist-src/methods/scope-token.js", "../dist-src/methods/delete-token.js", "../dist-src/methods/delete-authorization.js", "../dist-src/middleware/handle-request.js", "../dist-src/middleware/node/parse-request.js", "../dist-src/middleware/node/send-response.js", "../dist-src/middleware/on-unhandled-request-default.js", "../dist-src/middleware/node/index.js", "../dist-src/middleware/web-worker/parse-request.js", "../dist-src/middleware/web-worker/send-response.js", "../dist-src/middleware/web-worker/index.js", "../dist-src/middleware/aws-lambda/api-gateway-v2-parse-request.js", "../dist-src/middleware/aws-lambda/api-gateway-v2-send-response.js", "../dist-src/middleware/aws-lambda/api-gateway-v2.js"], + "sourcesContent": ["import { createOAuthAppAuth } from \"@octokit/auth-oauth-app\";\nimport { 
VERSION } from \"./version\";\nimport { addEventHandler } from \"./add-event-handler\";\nimport { OAuthAppOctokit } from \"./oauth-app-octokit\";\nimport {\n getUserOctokitWithState\n} from \"./methods/get-user-octokit\";\nimport {\n getWebFlowAuthorizationUrlWithState\n} from \"./methods/get-web-flow-authorization-url\";\nimport {\n createTokenWithState\n} from \"./methods/create-token\";\nimport {\n checkTokenWithState\n} from \"./methods/check-token\";\nimport {\n resetTokenWithState\n} from \"./methods/reset-token\";\nimport {\n refreshTokenWithState\n} from \"./methods/refresh-token\";\nimport {\n scopeTokenWithState\n} from \"./methods/scope-token\";\nimport {\n deleteTokenWithState\n} from \"./methods/delete-token\";\nimport {\n deleteAuthorizationWithState\n} from \"./methods/delete-authorization\";\nimport { handleRequest } from \"./middleware/handle-request\";\nimport { createNodeMiddleware } from \"./middleware/node/index\";\nimport {\n createCloudflareHandler,\n createWebWorkerHandler\n} from \"./middleware/web-worker/index\";\nimport { createAWSLambdaAPIGatewayV2Handler } from \"./middleware/aws-lambda/api-gateway-v2\";\nclass OAuthApp {\n static defaults(defaults) {\n const OAuthAppWithDefaults = class extends this {\n constructor(...args) {\n super({\n ...defaults,\n ...args[0]\n });\n }\n };\n return OAuthAppWithDefaults;\n }\n constructor(options) {\n const Octokit = options.Octokit || OAuthAppOctokit;\n this.type = options.clientType || \"oauth-app\";\n const octokit = new Octokit({\n authStrategy: createOAuthAppAuth,\n auth: {\n clientType: this.type,\n clientId: options.clientId,\n clientSecret: options.clientSecret\n }\n });\n const state = {\n clientType: this.type,\n clientId: options.clientId,\n clientSecret: options.clientSecret,\n // @ts-expect-error defaultScopes not permitted for GitHub Apps\n defaultScopes: options.defaultScopes || [],\n allowSignup: options.allowSignup,\n baseUrl: options.baseUrl,\n redirectUrl: options.redirectUrl,\n log: options.log,\n Octokit,\n octokit,\n eventHandlers: {}\n };\n this.on = addEventHandler.bind(null, state);\n this.octokit = octokit;\n this.getUserOctokit = getUserOctokitWithState.bind(null, state);\n this.getWebFlowAuthorizationUrl = getWebFlowAuthorizationUrlWithState.bind(\n null,\n state\n );\n this.createToken = createTokenWithState.bind(\n null,\n state\n );\n this.checkToken = checkTokenWithState.bind(\n null,\n state\n );\n this.resetToken = resetTokenWithState.bind(\n null,\n state\n );\n this.refreshToken = refreshTokenWithState.bind(\n null,\n state\n );\n this.scopeToken = scopeTokenWithState.bind(\n null,\n state\n );\n this.deleteToken = deleteTokenWithState.bind(null, state);\n this.deleteAuthorization = deleteAuthorizationWithState.bind(null, state);\n }\n}\nOAuthApp.VERSION = VERSION;\nexport {\n OAuthApp,\n createAWSLambdaAPIGatewayV2Handler,\n createCloudflareHandler,\n createNodeMiddleware,\n createWebWorkerHandler,\n handleRequest\n};\n", "const VERSION = \"4.2.4\";\nexport {\n VERSION\n};\n", "function addEventHandler(state, eventName, eventHandler) {\n if (Array.isArray(eventName)) {\n for (const singleEventName of eventName) {\n addEventHandler(state, singleEventName, eventHandler);\n }\n return;\n }\n if (!state.eventHandlers[eventName]) {\n state.eventHandlers[eventName] = [];\n }\n state.eventHandlers[eventName].push(eventHandler);\n}\nexport {\n addEventHandler\n};\n", "import { Octokit } from \"@octokit/core\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from 
\"./version\";\nconst OAuthAppOctokit = Octokit.defaults({\n userAgent: `octokit-oauth-app.js/${VERSION} ${getUserAgent()}`\n});\nexport {\n OAuthAppOctokit\n};\n", "import { createOAuthUserAuth } from \"@octokit/auth-oauth-user\";\nimport { emitEvent } from \"../emit-event\";\nasync function getUserOctokitWithState(state, options) {\n return state.octokit.auth({\n type: \"oauth-user\",\n ...options,\n async factory(options2) {\n const octokit = new state.Octokit({\n authStrategy: createOAuthUserAuth,\n auth: options2\n });\n const authentication = await octokit.auth({\n type: \"get\"\n });\n await emitEvent(state, {\n name: \"token\",\n action: \"created\",\n token: authentication.token,\n scopes: authentication.scopes,\n authentication,\n octokit\n });\n return octokit;\n }\n });\n}\nexport {\n getUserOctokitWithState\n};\n", "async function emitEvent(state, context) {\n const { name, action } = context;\n if (state.eventHandlers[`${name}.${action}`]) {\n for (const eventHandler of state.eventHandlers[`${name}.${action}`]) {\n await eventHandler(context);\n }\n }\n if (state.eventHandlers[name]) {\n for (const eventHandler of state.eventHandlers[name]) {\n await eventHandler(context);\n }\n }\n}\nexport {\n emitEvent\n};\n", "import * as OAuthMethods from \"@octokit/oauth-methods\";\nfunction getWebFlowAuthorizationUrlWithState(state, options) {\n const optionsWithDefaults = {\n clientId: state.clientId,\n request: state.octokit.request,\n ...options,\n allowSignup: state.allowSignup ?? options.allowSignup,\n redirectUrl: options.redirectUrl ?? state.redirectUrl,\n scopes: options.scopes ?? state.defaultScopes\n };\n return OAuthMethods.getWebFlowAuthorizationUrl({\n clientType: state.clientType,\n ...optionsWithDefaults\n });\n}\nexport {\n getWebFlowAuthorizationUrlWithState\n};\n", "import * as OAuthAppAuth from \"@octokit/auth-oauth-app\";\nimport { emitEvent } from \"../emit-event\";\nasync function createTokenWithState(state, options) {\n const authentication = await state.octokit.auth({\n type: \"oauth-user\",\n ...options\n });\n await emitEvent(state, {\n name: \"token\",\n action: \"created\",\n token: authentication.token,\n scopes: authentication.scopes,\n authentication,\n octokit: new state.Octokit({\n authStrategy: OAuthAppAuth.createOAuthUserAuth,\n auth: {\n clientType: state.clientType,\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n token: authentication.token,\n scopes: authentication.scopes,\n refreshToken: authentication.refreshToken,\n expiresAt: authentication.expiresAt,\n refreshTokenExpiresAt: authentication.refreshTokenExpiresAt\n }\n })\n });\n return { authentication };\n}\nexport {\n createTokenWithState\n};\n", "import * as OAuthMethods from \"@octokit/oauth-methods\";\nasync function checkTokenWithState(state, options) {\n const result = await OAuthMethods.checkToken({\n // @ts-expect-error not worth the extra code to appease TS\n clientType: state.clientType,\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n request: state.octokit.request,\n ...options\n });\n Object.assign(result.authentication, { type: \"token\", tokenType: \"oauth\" });\n return result;\n}\nexport {\n checkTokenWithState\n};\n", "import * as OAuthMethods from \"@octokit/oauth-methods\";\nimport { emitEvent } from \"../emit-event\";\nimport { createOAuthUserAuth } from \"@octokit/auth-oauth-user\";\nasync function resetTokenWithState(state, options) {\n const optionsWithDefaults = {\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n 
request: state.octokit.request,\n ...options\n };\n if (state.clientType === \"oauth-app\") {\n const response2 = await OAuthMethods.resetToken({\n clientType: \"oauth-app\",\n ...optionsWithDefaults\n });\n const authentication2 = Object.assign(response2.authentication, {\n type: \"token\",\n tokenType: \"oauth\"\n });\n await emitEvent(state, {\n name: \"token\",\n action: \"reset\",\n token: response2.authentication.token,\n scopes: response2.authentication.scopes || void 0,\n authentication: authentication2,\n octokit: new state.Octokit({\n authStrategy: createOAuthUserAuth,\n auth: {\n clientType: state.clientType,\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n token: response2.authentication.token,\n scopes: response2.authentication.scopes\n }\n })\n });\n return { ...response2, authentication: authentication2 };\n }\n const response = await OAuthMethods.resetToken({\n clientType: \"github-app\",\n ...optionsWithDefaults\n });\n const authentication = Object.assign(response.authentication, {\n type: \"token\",\n tokenType: \"oauth\"\n });\n await emitEvent(state, {\n name: \"token\",\n action: \"reset\",\n token: response.authentication.token,\n authentication,\n octokit: new state.Octokit({\n authStrategy: createOAuthUserAuth,\n auth: {\n clientType: state.clientType,\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n token: response.authentication.token\n }\n })\n });\n return { ...response, authentication };\n}\nexport {\n resetTokenWithState\n};\n", "import * as OAuthMethods from \"@octokit/oauth-methods\";\nimport { emitEvent } from \"../emit-event\";\nimport { createOAuthUserAuth } from \"@octokit/auth-oauth-user\";\nasync function refreshTokenWithState(state, options) {\n if (state.clientType === \"oauth-app\") {\n throw new Error(\n \"[@octokit/oauth-app] app.refreshToken() is not supported for OAuth Apps\"\n );\n }\n const response = await OAuthMethods.refreshToken({\n clientType: \"github-app\",\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n request: state.octokit.request,\n refreshToken: options.refreshToken\n });\n const authentication = Object.assign(response.authentication, {\n type: \"token\",\n tokenType: \"oauth\"\n });\n await emitEvent(state, {\n name: \"token\",\n action: \"refreshed\",\n token: response.authentication.token,\n authentication,\n octokit: new state.Octokit({\n authStrategy: createOAuthUserAuth,\n auth: {\n clientType: state.clientType,\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n token: response.authentication.token\n }\n })\n });\n return { ...response, authentication };\n}\nexport {\n refreshTokenWithState\n};\n", "import * as OAuthMethods from \"@octokit/oauth-methods\";\nimport { createOAuthUserAuth } from \"@octokit/auth-oauth-user\";\nimport { emitEvent } from \"../emit-event\";\nasync function scopeTokenWithState(state, options) {\n if (state.clientType === \"oauth-app\") {\n throw new Error(\n \"[@octokit/oauth-app] app.scopeToken() is not supported for OAuth Apps\"\n );\n }\n const response = await OAuthMethods.scopeToken({\n clientType: \"github-app\",\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n request: state.octokit.request,\n ...options\n });\n const authentication = Object.assign(response.authentication, {\n type: \"token\",\n tokenType: \"oauth\"\n });\n await emitEvent(state, {\n name: \"token\",\n action: \"scoped\",\n token: response.authentication.token,\n authentication,\n octokit: new state.Octokit({\n authStrategy: 
createOAuthUserAuth,\n auth: {\n clientType: state.clientType,\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n token: response.authentication.token\n }\n })\n });\n return { ...response, authentication };\n}\nexport {\n scopeTokenWithState\n};\n", "import * as OAuthMethods from \"@octokit/oauth-methods\";\nimport { createUnauthenticatedAuth } from \"@octokit/auth-unauthenticated\";\nimport { emitEvent } from \"../emit-event\";\nasync function deleteTokenWithState(state, options) {\n const optionsWithDefaults = {\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n request: state.octokit.request,\n ...options\n };\n const response = state.clientType === \"oauth-app\" ? await OAuthMethods.deleteToken({\n clientType: \"oauth-app\",\n ...optionsWithDefaults\n }) : (\n // istanbul ignore next\n await OAuthMethods.deleteToken({\n clientType: \"github-app\",\n ...optionsWithDefaults\n })\n );\n await emitEvent(state, {\n name: \"token\",\n action: \"deleted\",\n token: options.token,\n octokit: new state.Octokit({\n authStrategy: createUnauthenticatedAuth,\n auth: {\n reason: `Handling \"token.deleted\" event. The access for the token has been revoked.`\n }\n })\n });\n return response;\n}\nexport {\n deleteTokenWithState\n};\n", "import * as OAuthMethods from \"@octokit/oauth-methods\";\nimport { createUnauthenticatedAuth } from \"@octokit/auth-unauthenticated\";\nimport { emitEvent } from \"../emit-event\";\nasync function deleteAuthorizationWithState(state, options) {\n const optionsWithDefaults = {\n clientId: state.clientId,\n clientSecret: state.clientSecret,\n request: state.octokit.request,\n ...options\n };\n const response = state.clientType === \"oauth-app\" ? await OAuthMethods.deleteAuthorization({\n clientType: \"oauth-app\",\n ...optionsWithDefaults\n }) : (\n // istanbul ignore next\n await OAuthMethods.deleteAuthorization({\n clientType: \"github-app\",\n ...optionsWithDefaults\n })\n );\n await emitEvent(state, {\n name: \"token\",\n action: \"deleted\",\n token: options.token,\n octokit: new state.Octokit({\n authStrategy: createUnauthenticatedAuth,\n auth: {\n reason: `Handling \"token.deleted\" event. The access for the token has been revoked.`\n }\n })\n });\n await emitEvent(state, {\n name: \"authorization\",\n action: \"deleted\",\n token: options.token,\n octokit: new state.Octokit({\n authStrategy: createUnauthenticatedAuth,\n auth: {\n reason: `Handling \"authorization.deleted\" event. 
The access for the app has been revoked.`\n }\n })\n });\n return response;\n}\nexport {\n deleteAuthorizationWithState\n};\n", "import { OAuthApp } from \"../index\";\nimport fromEntries from \"fromentries\";\nasync function handleRequest(app, { pathPrefix = \"/api/github/oauth\" }, request) {\n if (request.method === \"OPTIONS\") {\n return {\n status: 200,\n headers: {\n \"access-control-allow-origin\": \"*\",\n \"access-control-allow-methods\": \"*\",\n \"access-control-allow-headers\": \"Content-Type, User-Agent, Authorization\"\n }\n };\n }\n const { pathname } = new URL(request.url, \"http://localhost\");\n const route = [request.method, pathname].join(\" \");\n const routes = {\n getLogin: `GET ${pathPrefix}/login`,\n getCallback: `GET ${pathPrefix}/callback`,\n createToken: `POST ${pathPrefix}/token`,\n getToken: `GET ${pathPrefix}/token`,\n patchToken: `PATCH ${pathPrefix}/token`,\n patchRefreshToken: `PATCH ${pathPrefix}/refresh-token`,\n scopeToken: `POST ${pathPrefix}/token/scoped`,\n deleteToken: `DELETE ${pathPrefix}/token`,\n deleteGrant: `DELETE ${pathPrefix}/grant`\n };\n if (!Object.values(routes).includes(route)) {\n return null;\n }\n let json;\n try {\n const text = await request.text();\n json = text ? JSON.parse(text) : {};\n } catch (error) {\n return {\n status: 400,\n headers: {\n \"content-type\": \"application/json\",\n \"access-control-allow-origin\": \"*\"\n },\n text: JSON.stringify({\n error: \"[@octokit/oauth-app] request error\"\n })\n };\n }\n const { searchParams } = new URL(request.url, \"http://localhost\");\n const query = fromEntries(searchParams);\n const headers = request.headers;\n try {\n if (route === routes.getLogin) {\n const { url } = app.getWebFlowAuthorizationUrl({\n state: query.state,\n scopes: query.scopes ? query.scopes.split(\",\") : void 0,\n allowSignup: query.allowSignup ? query.allowSignup === \"true\" : void 0,\n redirectUrl: query.redirectUrl\n });\n return { status: 302, headers: { location: url } };\n }\n if (route === routes.getCallback) {\n if (query.error) {\n throw new Error(\n `[@octokit/oauth-app] ${query.error} ${query.error_description}`\n );\n }\n if (!query.code) {\n throw new Error('[@octokit/oauth-app] \"code\" parameter is required');\n }\n const {\n authentication: { token: token2 }\n } = await app.createToken({\n code: query.code\n });\n return {\n status: 200,\n headers: {\n \"content-type\": \"text/html\"\n },\n text: `

<h1>Token created successfully</h1>\n\n<p>Your token is: <strong>${token2}</strong>. Copy it now as it cannot be shown again.</p>
`\n };\n }\n if (route === routes.createToken) {\n const { code, redirectUrl } = json;\n if (!code) {\n throw new Error('[@octokit/oauth-app] \"code\" parameter is required');\n }\n const result = await app.createToken({\n code,\n redirectUrl\n });\n delete result.authentication.clientSecret;\n return {\n status: 201,\n headers: {\n \"content-type\": \"application/json\",\n \"access-control-allow-origin\": \"*\"\n },\n text: JSON.stringify(result)\n };\n }\n if (route === routes.getToken) {\n const token2 = headers.authorization?.substr(\"token \".length);\n if (!token2) {\n throw new Error(\n '[@octokit/oauth-app] \"Authorization\" header is required'\n );\n }\n const result = await app.checkToken({\n token: token2\n });\n delete result.authentication.clientSecret;\n return {\n status: 200,\n headers: {\n \"content-type\": \"application/json\",\n \"access-control-allow-origin\": \"*\"\n },\n text: JSON.stringify(result)\n };\n }\n if (route === routes.patchToken) {\n const token2 = headers.authorization?.substr(\"token \".length);\n if (!token2) {\n throw new Error(\n '[@octokit/oauth-app] \"Authorization\" header is required'\n );\n }\n const result = await app.resetToken({ token: token2 });\n delete result.authentication.clientSecret;\n return {\n status: 200,\n headers: {\n \"content-type\": \"application/json\",\n \"access-control-allow-origin\": \"*\"\n },\n text: JSON.stringify(result)\n };\n }\n if (route === routes.patchRefreshToken) {\n const token2 = headers.authorization?.substr(\"token \".length);\n if (!token2) {\n throw new Error(\n '[@octokit/oauth-app] \"Authorization\" header is required'\n );\n }\n const { refreshToken } = json;\n if (!refreshToken) {\n throw new Error(\n \"[@octokit/oauth-app] refreshToken must be sent in request body\"\n );\n }\n const result = await app.refreshToken({ refreshToken });\n delete result.authentication.clientSecret;\n return {\n status: 200,\n headers: {\n \"content-type\": \"application/json\",\n \"access-control-allow-origin\": \"*\"\n },\n text: JSON.stringify(result)\n };\n }\n if (route === routes.scopeToken) {\n const token2 = headers.authorization?.substr(\"token \".length);\n if (!token2) {\n throw new Error(\n '[@octokit/oauth-app] \"Authorization\" header is required'\n );\n }\n const result = await app.scopeToken({\n token: token2,\n ...json\n });\n delete result.authentication.clientSecret;\n return {\n status: 200,\n headers: {\n \"content-type\": \"application/json\",\n \"access-control-allow-origin\": \"*\"\n },\n text: JSON.stringify(result)\n };\n }\n if (route === routes.deleteToken) {\n const token2 = headers.authorization?.substr(\"token \".length);\n if (!token2) {\n throw new Error(\n '[@octokit/oauth-app] \"Authorization\" header is required'\n );\n }\n await app.deleteToken({\n token: token2\n });\n return {\n status: 204,\n headers: { \"access-control-allow-origin\": \"*\" }\n };\n }\n const token = headers.authorization?.substr(\"token \".length);\n if (!token) {\n throw new Error(\n '[@octokit/oauth-app] \"Authorization\" header is required'\n );\n }\n await app.deleteAuthorization({\n token\n });\n return {\n status: 204,\n headers: { \"access-control-allow-origin\": \"*\" }\n };\n } catch (error) {\n return {\n status: 400,\n headers: {\n \"content-type\": \"application/json\",\n \"access-control-allow-origin\": \"*\"\n },\n text: JSON.stringify({ error: error.message })\n };\n }\n}\nexport {\n handleRequest\n};\n", "function parseRequest(request) {\n const { method, url, headers } = request;\n async function 
text() {\n const text2 = await new Promise((resolve, reject) => {\n let bodyChunks = [];\n request.on(\"error\", reject).on(\"data\", (chunk) => bodyChunks.push(chunk)).on(\"end\", () => resolve(Buffer.concat(bodyChunks).toString()));\n });\n return text2;\n }\n return { method, url, headers, text };\n}\nexport {\n parseRequest\n};\n", "function sendResponse(octokitResponse, response) {\n response.writeHead(octokitResponse.status, octokitResponse.headers);\n response.end(octokitResponse.text);\n}\nexport {\n sendResponse\n};\n", "function onUnhandledRequestDefault(request) {\n return {\n status: 404,\n headers: { \"content-type\": \"application/json\" },\n text: JSON.stringify({\n error: `Unknown route: ${request.method} ${request.url}`\n })\n };\n}\nexport {\n onUnhandledRequestDefault\n};\n", "import { parseRequest } from \"./parse-request\";\nimport { sendResponse } from \"./send-response\";\nimport { onUnhandledRequestDefault } from \"../on-unhandled-request-default\";\nimport { handleRequest } from \"../handle-request\";\nimport { OAuthApp } from \"../../index\";\nfunction onUnhandledRequestDefaultNode(request, response) {\n const octokitRequest = parseRequest(request);\n const octokitResponse = onUnhandledRequestDefault(octokitRequest);\n sendResponse(octokitResponse, response);\n}\nfunction createNodeMiddleware(app, {\n pathPrefix,\n onUnhandledRequest\n} = {}) {\n if (onUnhandledRequest) {\n app.octokit.log.warn(\n \"[@octokit/oauth-app] `onUnhandledRequest` is deprecated and will be removed from the next major version.\"\n );\n }\n onUnhandledRequest ??= onUnhandledRequestDefaultNode;\n return async function(request, response, next) {\n const octokitRequest = parseRequest(request);\n const octokitResponse = await handleRequest(\n app,\n { pathPrefix },\n octokitRequest\n );\n if (octokitResponse) {\n sendResponse(octokitResponse, response);\n } else if (typeof next === \"function\") {\n next();\n } else {\n onUnhandledRequest(request, response);\n }\n };\n}\nexport {\n createNodeMiddleware\n};\n", "function parseRequest(request) {\n const headers = Object.fromEntries(request.headers.entries());\n return {\n method: request.method,\n url: request.url,\n headers,\n text: () => request.text()\n };\n}\nexport {\n parseRequest\n};\n", "function sendResponse(octokitResponse) {\n return new Response(octokitResponse.text, {\n status: octokitResponse.status,\n headers: octokitResponse.headers\n });\n}\nexport {\n sendResponse\n};\n", "import { parseRequest } from \"./parse-request\";\nimport { sendResponse } from \"./send-response\";\nimport { handleRequest } from \"../handle-request\";\nimport { onUnhandledRequestDefault } from \"../on-unhandled-request-default\";\nimport { OAuthApp } from \"../../index\";\nasync function onUnhandledRequestDefaultWebWorker(request) {\n const octokitRequest = parseRequest(request);\n const octokitResponse = onUnhandledRequestDefault(octokitRequest);\n return sendResponse(octokitResponse);\n}\nfunction createWebWorkerHandler(app, {\n pathPrefix,\n onUnhandledRequest\n} = {}) {\n if (onUnhandledRequest) {\n app.octokit.log.warn(\n \"[@octokit/oauth-app] `onUnhandledRequest` is deprecated and will be removed from the next major version.\"\n );\n }\n onUnhandledRequest ??= onUnhandledRequestDefaultWebWorker;\n return async function(request) {\n const octokitRequest = parseRequest(request);\n const octokitResponse = await handleRequest(\n app,\n { pathPrefix },\n octokitRequest\n );\n return octokitResponse ? 
sendResponse(octokitResponse) : await onUnhandledRequest(request);\n };\n}\nfunction createCloudflareHandler(...args) {\n args[0].octokit.log.warn(\n \"[@octokit/oauth-app] `createCloudflareHandler` is deprecated, use `createWebWorkerHandler` instead\"\n );\n return createWebWorkerHandler(...args);\n}\nexport {\n createCloudflareHandler,\n createWebWorkerHandler\n};\n", "function parseRequest(request) {\n const { method } = request.requestContext.http;\n let url = request.rawPath;\n const { stage } = request.requestContext;\n if (url.startsWith(\"/\" + stage))\n url = url.substring(stage.length + 1);\n if (request.rawQueryString)\n url += \"?\" + request.rawQueryString;\n const headers = request.headers;\n const text = async () => request.body || \"\";\n return { method, url, headers, text };\n}\nexport {\n parseRequest\n};\n", "function sendResponse(octokitResponse) {\n return {\n statusCode: octokitResponse.status,\n headers: octokitResponse.headers,\n body: octokitResponse.text\n };\n}\nexport {\n sendResponse\n};\n", "import { parseRequest } from \"./api-gateway-v2-parse-request\";\nimport { sendResponse } from \"./api-gateway-v2-send-response\";\nimport { handleRequest } from \"../handle-request\";\nimport { onUnhandledRequestDefault } from \"../on-unhandled-request-default\";\nimport { OAuthApp } from \"../../index\";\nasync function onUnhandledRequestDefaultAWSAPIGatewayV2(event) {\n const request = parseRequest(event);\n const response = onUnhandledRequestDefault(request);\n return sendResponse(response);\n}\nfunction createAWSLambdaAPIGatewayV2Handler(app, {\n pathPrefix,\n onUnhandledRequest\n} = {}) {\n if (onUnhandledRequest) {\n app.octokit.log.warn(\n \"[@octokit/oauth-app] `onUnhandledRequest` is deprecated and will be removed from the next major version.\"\n );\n }\n onUnhandledRequest ??= onUnhandledRequestDefaultAWSAPIGatewayV2;\n return async function(event) {\n const request = parseRequest(event);\n const response = await handleRequest(app, { pathPrefix }, request);\n return response ? 
sendResponse(response) : onUnhandledRequest(event);\n };\n}\nexport {\n createAWSLambdaAPIGatewayV2Handler\n};\n"], + "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAAmC;;;ACAnC,IAAM,UAAU;;;ACAhB,SAAS,gBAAgB,OAAO,WAAW,cAAc;AACvD,MAAI,MAAM,QAAQ,SAAS,GAAG;AAC5B,eAAW,mBAAmB,WAAW;AACvC,sBAAgB,OAAO,iBAAiB,YAAY;AAAA,IACtD;AACA;AAAA,EACF;AACA,MAAI,CAAC,MAAM,cAAc,SAAS,GAAG;AACnC,UAAM,cAAc,SAAS,IAAI,CAAC;AAAA,EACpC;AACA,QAAM,cAAc,SAAS,EAAE,KAAK,YAAY;AAClD;;;ACXA,kBAAwB;AACxB,kCAA6B;AAE7B,IAAM,kBAAkB,oBAAQ,SAAS;AAAA,EACvC,WAAW,wBAAwB,eAAW,0CAAa;AAC7D,CAAC;;;ACLD,6BAAoC;;;ACApC,eAAe,UAAU,OAAO,SAAS;AACvC,QAAM,EAAE,MAAM,OAAO,IAAI;AACzB,MAAI,MAAM,cAAc,GAAG,QAAQ,QAAQ,GAAG;AAC5C,eAAW,gBAAgB,MAAM,cAAc,GAAG,QAAQ,QAAQ,GAAG;AACnE,YAAM,aAAa,OAAO;AAAA,IAC5B;AAAA,EACF;AACA,MAAI,MAAM,cAAc,IAAI,GAAG;AAC7B,eAAW,gBAAgB,MAAM,cAAc,IAAI,GAAG;AACpD,YAAM,aAAa,OAAO;AAAA,IAC5B;AAAA,EACF;AACF;;;ADVA,eAAe,wBAAwB,OAAO,SAAS;AACrD,SAAO,MAAM,QAAQ,KAAK;AAAA,IACxB,MAAM;AAAA,IACN,GAAG;AAAA,IACH,MAAM,QAAQ,UAAU;AACtB,YAAM,UAAU,IAAI,MAAM,QAAQ;AAAA,QAChC,cAAc;AAAA,QACd,MAAM;AAAA,MACR,CAAC;AACD,YAAM,iBAAiB,MAAM,QAAQ,KAAK;AAAA,QACxC,MAAM;AAAA,MACR,CAAC;AACD,YAAM,UAAU,OAAO;AAAA,QACrB,MAAM;AAAA,QACN,QAAQ;AAAA,QACR,OAAO,eAAe;AAAA,QACtB,QAAQ,eAAe;AAAA,QACvB;AAAA,QACA;AAAA,MACF,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF,CAAC;AACH;;;AEzBA,mBAA8B;AAC9B,SAAS,oCAAoC,OAAO,SAAS;AAC3D,QAAM,sBAAsB;AAAA,IAC1B,UAAU,MAAM;AAAA,IAChB,SAAS,MAAM,QAAQ;AAAA,IACvB,GAAG;AAAA,IACH,aAAa,MAAM,eAAe,QAAQ;AAAA,IAC1C,aAAa,QAAQ,eAAe,MAAM;AAAA,IAC1C,QAAQ,QAAQ,UAAU,MAAM;AAAA,EAClC;AACA,SAAoB,wCAA2B;AAAA,IAC7C,YAAY,MAAM;AAAA,IAClB,GAAG;AAAA,EACL,CAAC;AACH;;;ACdA,mBAA8B;AAE9B,eAAe,qBAAqB,OAAO,SAAS;AAClD,QAAM,iBAAiB,MAAM,MAAM,QAAQ,KAAK;AAAA,IAC9C,MAAM;AAAA,IACN,GAAG;AAAA,EACL,CAAC;AACD,QAAM,UAAU,OAAO;AAAA,IACrB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,eAAe;AAAA,IACtB,QAAQ,eAAe;AAAA,IACvB;AAAA,IACA,SAAS,IAAI,MAAM,QAAQ;AAAA,MACzB,cAA2B;AAAA,MAC3B,MAAM;AAAA,QACJ,YAAY,MAAM;AAAA,QAClB,UAAU,MAAM;AAAA,QAChB,cAAc,MAAM;AAAA,QACpB,OAAO,eAAe;AAAA,QACtB,QAAQ,eAAe;AAAA,QACvB,cAAc,eAAe;AAAA,QAC7B,WAAW,eAAe;AAAA,QAC1B,uBAAuB,eAAe;AAAA,MACxC;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACD,SAAO,EAAE,eAAe;AAC1B;;;AC5BA,IAAAA,gBAA8B;AAC9B,eAAe,oBAAoB,OAAO,SAAS;AACjD,QAAM,SAAS,MAAmB,yBAAW;AAAA;AAAA,IAE3C,YAAY,MAAM;AAAA,IAClB,UAAU,MAAM;AAAA,IAChB,cAAc,MAAM;AAAA,IACpB,SAAS,MAAM,QAAQ;AAAA,IACvB,GAAG;AAAA,EACL,CAAC;AACD,SAAO,OAAO,OAAO,gBAAgB,EAAE,MAAM,SAAS,WAAW,QAAQ,CAAC;AAC1E,SAAO;AACT;;;ACZA,IAAAC,gBAA8B;AAE9B,IAAAC,0BAAoC;AACpC,eAAe,oBAAoB,OAAO,SAAS;AACjD,QAAM,sBAAsB;AAAA,IAC1B,UAAU,MAAM;AAAA,IAChB,cAAc,MAAM;AAAA,IACpB,SAAS,MAAM,QAAQ;AAAA,IACvB,GAAG;AAAA,EACL;AACA,MAAI,MAAM,eAAe,aAAa;AACpC,UAAM,YAAY,MAAmB,yBAAW;AAAA,MAC9C,YAAY;AAAA,MACZ,GAAG;AAAA,IACL,CAAC;AACD,UAAM,kBAAkB,OAAO,OAAO,UAAU,gBAAgB;AAAA,MAC9D,MAAM;AAAA,MACN,WAAW;AAAA,IACb,CAAC;AACD,UAAM,UAAU,OAAO;AAAA,MACrB,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,OAAO,UAAU,eAAe;AAAA,MAChC,QAAQ,UAAU,eAAe,UAAU;AAAA,MAC3C,gBAAgB;AAAA,MAChB,SAAS,IAAI,MAAM,QAAQ;AAAA,QACzB,cAAc;AAAA,QACd,MAAM;AAAA,UACJ,YAAY,MAAM;AAAA,UAClB,UAAU,MAAM;AAAA,UAChB,cAAc,MAAM;AAAA,UACpB,OAAO,UAAU,eAAe;AAAA,UAChC,QAAQ,UAAU,eAAe;AAAA,QACnC;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AACD,WAAO,EAAE,GAAG,WAAW,gBAAgB,gBAAgB;AAAA,EACzD;AACA,QAAM,WAAW,MAAmB,yBAAW;AAAA,IAC7C,YAAY;AAAA,IACZ,GAAG;AAAA,EACL,CAAC;AACD,QAAM,iBAAiB,OAAO,OAAO,SAAS,gBAAgB;AAAA,IAC5D,MAAM;AAAA,IACN,WAAW;AAAA,EACb,CAAC;AACD,QAAM,UAAU,OAAO;AAAA,IACrB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,SAAS,eAAe;AAAA,IAC/B;AAAA,IACA,SAAS,IAAI,MAAM,QAAQ;AAAA,MACzB,cAAc;AAAA,MACd,MAAM;AAAA,QACJ,YAAY,MAAM;AAAA,QAClB,UAAU,MAAM;AAAA,
QAChB,cAAc,MAAM;AAAA,QACpB,OAAO,SAAS,eAAe;AAAA,MACjC;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACD,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;;;AC9DA,IAAAC,gBAA8B;AAE9B,IAAAC,0BAAoC;AACpC,eAAe,sBAAsB,OAAO,SAAS;AACnD,MAAI,MAAM,eAAe,aAAa;AACpC,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,QAAM,WAAW,MAAmB,2BAAa;AAAA,IAC/C,YAAY;AAAA,IACZ,UAAU,MAAM;AAAA,IAChB,cAAc,MAAM;AAAA,IACpB,SAAS,MAAM,QAAQ;AAAA,IACvB,cAAc,QAAQ;AAAA,EACxB,CAAC;AACD,QAAM,iBAAiB,OAAO,OAAO,SAAS,gBAAgB;AAAA,IAC5D,MAAM;AAAA,IACN,WAAW;AAAA,EACb,CAAC;AACD,QAAM,UAAU,OAAO;AAAA,IACrB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,SAAS,eAAe;AAAA,IAC/B;AAAA,IACA,SAAS,IAAI,MAAM,QAAQ;AAAA,MACzB,cAAc;AAAA,MACd,MAAM;AAAA,QACJ,YAAY,MAAM;AAAA,QAClB,UAAU,MAAM;AAAA,QAChB,cAAc,MAAM;AAAA,QACpB,OAAO,SAAS,eAAe;AAAA,MACjC;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACD,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;;;ACpCA,IAAAC,gBAA8B;AAC9B,IAAAC,0BAAoC;AAEpC,eAAe,oBAAoB,OAAO,SAAS;AACjD,MAAI,MAAM,eAAe,aAAa;AACpC,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,QAAM,WAAW,MAAmB,yBAAW;AAAA,IAC7C,YAAY;AAAA,IACZ,UAAU,MAAM;AAAA,IAChB,cAAc,MAAM;AAAA,IACpB,SAAS,MAAM,QAAQ;AAAA,IACvB,GAAG;AAAA,EACL,CAAC;AACD,QAAM,iBAAiB,OAAO,OAAO,SAAS,gBAAgB;AAAA,IAC5D,MAAM;AAAA,IACN,WAAW;AAAA,EACb,CAAC;AACD,QAAM,UAAU,OAAO;AAAA,IACrB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,SAAS,eAAe;AAAA,IAC/B;AAAA,IACA,SAAS,IAAI,MAAM,QAAQ;AAAA,MACzB,cAAc;AAAA,MACd,MAAM;AAAA,QACJ,YAAY,MAAM;AAAA,QAClB,UAAU,MAAM;AAAA,QAChB,cAAc,MAAM;AAAA,QACpB,OAAO,SAAS,eAAe;AAAA,MACjC;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACD,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;;;ACpCA,IAAAC,gBAA8B;AAC9B,kCAA0C;AAE1C,eAAe,qBAAqB,OAAO,SAAS;AAClD,QAAM,sBAAsB;AAAA,IAC1B,UAAU,MAAM;AAAA,IAChB,cAAc,MAAM;AAAA,IACpB,SAAS,MAAM,QAAQ;AAAA,IACvB,GAAG;AAAA,EACL;AACA,QAAM,WAAW,MAAM,eAAe,cAAc,MAAmB,0BAAY;AAAA,IACjF,YAAY;AAAA,IACZ,GAAG;AAAA,EACL,CAAC;AAAA;AAAA,IAEC,MAAmB,0BAAY;AAAA,MAC7B,YAAY;AAAA,MACZ,GAAG;AAAA,IACL,CAAC;AAAA;AAEH,QAAM,UAAU,OAAO;AAAA,IACrB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,QAAQ;AAAA,IACf,SAAS,IAAI,MAAM,QAAQ;AAAA,MACzB,cAAc;AAAA,MACd,MAAM;AAAA,QACJ,QAAQ;AAAA,MACV;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACD,SAAO;AACT;;;AChCA,IAAAC,gBAA8B;AAC9B,IAAAC,+BAA0C;AAE1C,eAAe,6BAA6B,OAAO,SAAS;AAC1D,QAAM,sBAAsB;AAAA,IAC1B,UAAU,MAAM;AAAA,IAChB,cAAc,MAAM;AAAA,IACpB,SAAS,MAAM,QAAQ;AAAA,IACvB,GAAG;AAAA,EACL;AACA,QAAM,WAAW,MAAM,eAAe,cAAc,MAAmB,kCAAoB;AAAA,IACzF,YAAY;AAAA,IACZ,GAAG;AAAA,EACL,CAAC;AAAA;AAAA,IAEC,MAAmB,kCAAoB;AAAA,MACrC,YAAY;AAAA,MACZ,GAAG;AAAA,IACL,CAAC;AAAA;AAEH,QAAM,UAAU,OAAO;AAAA,IACrB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,QAAQ;AAAA,IACf,SAAS,IAAI,MAAM,QAAQ;AAAA,MACzB,cAAc;AAAA,MACd,MAAM;AAAA,QACJ,QAAQ;AAAA,MACV;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACD,QAAM,UAAU,OAAO;AAAA,IACrB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO,QAAQ;AAAA,IACf,SAAS,IAAI,MAAM,QAAQ;AAAA,MACzB,cAAc;AAAA,MACd,MAAM;AAAA,QACJ,QAAQ;AAAA,MACV;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACD,SAAO;AACT;;;AC1CA,yBAAwB;AACxB,eAAe,cAAc,KAAK,EAAE,aAAa,oBAAoB,GAAG,SAAS;AAFjF;AAGE,MAAI,QAAQ,WAAW,WAAW;AAChC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,+BAA+B;AAAA,QAC/B,gCAAgC;AAAA,QAChC,gCAAgC;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AACA,QAAM,EAAE,SAAS,IAAI,IAAI,IAAI,QAAQ,KAAK,kBAAkB;AAC5D,QAAM,QAAQ,CAAC,QAAQ,QAAQ,QAAQ,EAAE,KAAK,GAAG;AACjD,QAAM,SAAS;AAAA,IACb,UAAU,OAAO;AAAA,IACjB,aAAa,OAAO;AAAA,IACpB,aAAa,QAAQ;AAAA,IACrB,UAAU,OAAO;AAAA,IACjB,YAAY,SAAS;AAAA,IACrB,mBAAmB,SAAS;AAAA,IAC5B,YAAY,QAAQ;AAAA,IACpB,aAAa,UAAU;AAAA,IACvB,aAAa,UAAU;AAAA,EACzB;AACA,MAAI,CAAC,OAAO,OAAO,MAAM,EAAE,SAAS,KAAK,GAAG;AAC1C,WAAO;AAAA,EACT;AACA,MAAI;AACJ,MAAI;AACF,UAAM,OAAO,MAAM,QAAQ,KAAK;AAChC,WAAO,OAAO,KAAK,MAAM,IAAI,IAAI,CAAC;AAAA,EACpC,SAAS,OAAP;AACA,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,+BA
A+B;AAAA,MACjC;AAAA,MACA,MAAM,KAAK,UAAU;AAAA,QACnB,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAAA,EACF;AACA,QAAM,EAAE,aAAa,IAAI,IAAI,IAAI,QAAQ,KAAK,kBAAkB;AAChE,QAAM,YAAQ,mBAAAC,SAAY,YAAY;AACtC,QAAM,UAAU,QAAQ;AACxB,MAAI;AACF,QAAI,UAAU,OAAO,UAAU;AAC7B,YAAM,EAAE,IAAI,IAAI,IAAI,2BAA2B;AAAA,QAC7C,OAAO,MAAM;AAAA,QACb,QAAQ,MAAM,SAAS,MAAM,OAAO,MAAM,GAAG,IAAI;AAAA,QACjD,aAAa,MAAM,cAAc,MAAM,gBAAgB,SAAS;AAAA,QAChE,aAAa,MAAM;AAAA,MACrB,CAAC;AACD,aAAO,EAAE,QAAQ,KAAK,SAAS,EAAE,UAAU,IAAI,EAAE;AAAA,IACnD;AACA,QAAI,UAAU,OAAO,aAAa;AAChC,UAAI,MAAM,OAAO;AACf,cAAM,IAAI;AAAA,UACR,wBAAwB,MAAM,SAAS,MAAM;AAAA,QAC/C;AAAA,MACF;AACA,UAAI,CAAC,MAAM,MAAM;AACf,cAAM,IAAI,MAAM,mDAAmD;AAAA,MACrE;AACA,YAAM;AAAA,QACJ,gBAAgB,EAAE,OAAO,OAAO;AAAA,MAClC,IAAI,MAAM,IAAI,YAAY;AAAA,QACxB,MAAM,MAAM;AAAA,MACd,CAAC;AACD,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,QAClB;AAAA,QACA,MAAM;AAAA;AAAA,4BAEc;AAAA,MACtB;AAAA,IACF;AACA,QAAI,UAAU,OAAO,aAAa;AAChC,YAAM,EAAE,MAAM,YAAY,IAAI;AAC9B,UAAI,CAAC,MAAM;AACT,cAAM,IAAI,MAAM,mDAAmD;AAAA,MACrE;AACA,YAAM,SAAS,MAAM,IAAI,YAAY;AAAA,QACnC;AAAA,QACA;AAAA,MACF,CAAC;AACD,aAAO,OAAO,eAAe;AAC7B,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,+BAA+B;AAAA,QACjC;AAAA,QACA,MAAM,KAAK,UAAU,MAAM;AAAA,MAC7B;AAAA,IACF;AACA,QAAI,UAAU,OAAO,UAAU;AAC7B,YAAM,UAAS,aAAQ,kBAAR,mBAAuB,OAAO,SAAS;AACtD,UAAI,CAAC,QAAQ;AACX,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,YAAM,SAAS,MAAM,IAAI,WAAW;AAAA,QAClC,OAAO;AAAA,MACT,CAAC;AACD,aAAO,OAAO,eAAe;AAC7B,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,+BAA+B;AAAA,QACjC;AAAA,QACA,MAAM,KAAK,UAAU,MAAM;AAAA,MAC7B;AAAA,IACF;AACA,QAAI,UAAU,OAAO,YAAY;AAC/B,YAAM,UAAS,aAAQ,kBAAR,mBAAuB,OAAO,SAAS;AACtD,UAAI,CAAC,QAAQ;AACX,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,YAAM,SAAS,MAAM,IAAI,WAAW,EAAE,OAAO,OAAO,CAAC;AACrD,aAAO,OAAO,eAAe;AAC7B,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,+BAA+B;AAAA,QACjC;AAAA,QACA,MAAM,KAAK,UAAU,MAAM;AAAA,MAC7B;AAAA,IACF;AACA,QAAI,UAAU,OAAO,mBAAmB;AACtC,YAAM,UAAS,aAAQ,kBAAR,mBAAuB,OAAO,SAAS;AACtD,UAAI,CAAC,QAAQ;AACX,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,YAAM,EAAE,cAAAC,cAAa,IAAI;AACzB,UAAI,CAACA,eAAc;AACjB,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,YAAM,SAAS,MAAM,IAAI,aAAa,EAAE,cAAAA,cAAa,CAAC;AACtD,aAAO,OAAO,eAAe;AAC7B,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,+BAA+B;AAAA,QACjC;AAAA,QACA,MAAM,KAAK,UAAU,MAAM;AAAA,MAC7B;AAAA,IACF;AACA,QAAI,UAAU,OAAO,YAAY;AAC/B,YAAM,UAAS,aAAQ,kBAAR,mBAAuB,OAAO,SAAS;AACtD,UAAI,CAAC,QAAQ;AACX,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,YAAM,SAAS,MAAM,IAAI,WAAW;AAAA,QAClC,OAAO;AAAA,QACP,GAAG;AAAA,MACL,CAAC;AACD,aAAO,OAAO,eAAe;AAC7B,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,+BAA+B;AAAA,QACjC;AAAA,QACA,MAAM,KAAK,UAAU,MAAM;AAAA,MAC7B;AAAA,IACF;AACA,QAAI,UAAU,OAAO,aAAa;AAChC,YAAM,UAAS,aAAQ,kBAAR,mBAAuB,OAAO,SAAS;AACtD,UAAI,CAAC,QAAQ;AACX,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,YAAM,IAAI,YAAY;AAAA,QACpB,OAAO;AAAA,MACT,CAAC;AACD,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,SAAS,EAAE,+BAA+B,IAAI;AAAA,MAChD;AAAA,IACF;AACA,UAAM,SAAQ,aAAQ,kBAAR,mBAAuB,OAAO,SAAS;AACrD,QAAI,CAAC,OAAO;AACV,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AACA,UAAM,IAAI,oBAAoB;AAAA,MAC5B;AAAA,IACF,CAAC;AACD,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,SAAS,EAAE,+BAA+B,IAAI;AAAA,IAChD;AAAA,EACF,SAAS,OAAP;AACA,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,+BAA+B;AAAA,MACjC;AAAA,MACA,MAAM,KAAK,UAAU,EAAE,OAAO,MAAM,QAAQ,CAAC;AAAA,IAC/C;AAAA,EACF;AACF;;;AC9NA,SAAS,aAAa,SAAS;AAC7B,QAAM,EAAE,QAAQ,KAAK,QAAQ,IAAI;AACjC,iBAAe,OAAO;AACpB,UAAM,QAAQ,MAAM,IAAI,QAAQ,CAAC,SAAS,WAAW;AACnD,UAAI,aAAa,CAAC;
AAClB,cAAQ,GAAG,SAAS,MAAM,EAAE,GAAG,QAAQ,CAAC,UAAU,WAAW,KAAK,KAAK,CAAC,EAAE,GAAG,OAAO,MAAM,QAAQ,OAAO,OAAO,UAAU,EAAE,SAAS,CAAC,CAAC;AAAA,IACzI,CAAC;AACD,WAAO;AAAA,EACT;AACA,SAAO,EAAE,QAAQ,KAAK,SAAS,KAAK;AACtC;;;ACVA,SAAS,aAAa,iBAAiB,UAAU;AAC/C,WAAS,UAAU,gBAAgB,QAAQ,gBAAgB,OAAO;AAClE,WAAS,IAAI,gBAAgB,IAAI;AACnC;;;ACHA,SAAS,0BAA0B,SAAS;AAC1C,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,IAC9C,MAAM,KAAK,UAAU;AAAA,MACnB,OAAO,kBAAkB,QAAQ,UAAU,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AACF;;;ACHA,SAAS,8BAA8B,SAAS,UAAU;AACxD,QAAM,iBAAiB,aAAa,OAAO;AAC3C,QAAM,kBAAkB,0BAA0B,cAAc;AAChE,eAAa,iBAAiB,QAAQ;AACxC;AACA,SAAS,qBAAqB,KAAK;AAAA,EACjC;AAAA,EACA;AACF,IAAI,CAAC,GAAG;AACN,MAAI,oBAAoB;AACtB,QAAI,QAAQ,IAAI;AAAA,MACd;AAAA,IACF;AAAA,EACF;AACA,8CAAuB;AACvB,SAAO,eAAe,SAAS,UAAU,MAAM;AAC7C,UAAM,iBAAiB,aAAa,OAAO;AAC3C,UAAM,kBAAkB,MAAM;AAAA,MAC5B;AAAA,MACA,EAAE,WAAW;AAAA,MACb;AAAA,IACF;AACA,QAAI,iBAAiB;AACnB,mBAAa,iBAAiB,QAAQ;AAAA,IACxC,WAAW,OAAO,SAAS,YAAY;AACrC,WAAK;AAAA,IACP,OAAO;AACL,yBAAmB,SAAS,QAAQ;AAAA,IACtC;AAAA,EACF;AACF;;;ACnCA,SAASC,cAAa,SAAS;AAC7B,QAAM,UAAU,OAAO,YAAY,QAAQ,QAAQ,QAAQ,CAAC;AAC5D,SAAO;AAAA,IACL,QAAQ,QAAQ;AAAA,IAChB,KAAK,QAAQ;AAAA,IACb;AAAA,IACA,MAAM,MAAM,QAAQ,KAAK;AAAA,EAC3B;AACF;;;ACRA,SAASC,cAAa,iBAAiB;AACrC,SAAO,IAAI,SAAS,gBAAgB,MAAM;AAAA,IACxC,QAAQ,gBAAgB;AAAA,IACxB,SAAS,gBAAgB;AAAA,EAC3B,CAAC;AACH;;;ACAA,eAAe,mCAAmC,SAAS;AACzD,QAAM,iBAAiBC,cAAa,OAAO;AAC3C,QAAM,kBAAkB,0BAA0B,cAAc;AAChE,SAAOC,cAAa,eAAe;AACrC;AACA,SAAS,uBAAuB,KAAK;AAAA,EACnC;AAAA,EACA;AACF,IAAI,CAAC,GAAG;AACN,MAAI,oBAAoB;AACtB,QAAI,QAAQ,IAAI;AAAA,MACd;AAAA,IACF;AAAA,EACF;AACA,8CAAuB;AACvB,SAAO,eAAe,SAAS;AAC7B,UAAM,iBAAiBD,cAAa,OAAO;AAC3C,UAAM,kBAAkB,MAAM;AAAA,MAC5B;AAAA,MACA,EAAE,WAAW;AAAA,MACb;AAAA,IACF;AACA,WAAO,kBAAkBC,cAAa,eAAe,IAAI,MAAM,mBAAmB,OAAO;AAAA,EAC3F;AACF;AACA,SAAS,2BAA2B,MAAM;AACxC,OAAK,CAAC,EAAE,QAAQ,IAAI;AAAA,IAClB;AAAA,EACF;AACA,SAAO,uBAAuB,GAAG,IAAI;AACvC;;;ACnCA,SAASC,cAAa,SAAS;AAC7B,QAAM,EAAE,OAAO,IAAI,QAAQ,eAAe;AAC1C,MAAI,MAAM,QAAQ;AAClB,QAAM,EAAE,MAAM,IAAI,QAAQ;AAC1B,MAAI,IAAI,WAAW,MAAM,KAAK;AAC5B,UAAM,IAAI,UAAU,MAAM,SAAS,CAAC;AACtC,MAAI,QAAQ;AACV,WAAO,MAAM,QAAQ;AACvB,QAAM,UAAU,QAAQ;AACxB,QAAM,OAAO,YAAY,QAAQ,QAAQ;AACzC,SAAO,EAAE,QAAQ,KAAK,SAAS,KAAK;AACtC;;;ACXA,SAASC,cAAa,iBAAiB;AACrC,SAAO;AAAA,IACL,YAAY,gBAAgB;AAAA,IAC5B,SAAS,gBAAgB;AAAA,IACzB,MAAM,gBAAgB;AAAA,EACxB;AACF;;;ACDA,eAAe,yCAAyC,OAAO;AAC7D,QAAM,UAAUC,cAAa,KAAK;AAClC,QAAM,WAAW,0BAA0B,OAAO;AAClD,SAAOC,cAAa,QAAQ;AAC9B;AACA,SAAS,mCAAmC,KAAK;AAAA,EAC/C;AAAA,EACA;AACF,IAAI,CAAC,GAAG;AACN,MAAI,oBAAoB;AACtB,QAAI,QAAQ,IAAI;AAAA,MACd;AAAA,IACF;AAAA,EACF;AACA,8CAAuB;AACvB,SAAO,eAAe,OAAO;AAC3B,UAAM,UAAUD,cAAa,KAAK;AAClC,UAAM,WAAW,MAAM,cAAc,KAAK,EAAE,WAAW,GAAG,OAAO;AACjE,WAAO,WAAWC,cAAa,QAAQ,IAAI,mBAAmB,KAAK;AAAA,EACrE;AACF;;;AxBaA,IAAM,WAAN,MAAe;AAAA,EACb,OAAO,SAAS,UAAU;AACxB,UAAM,uBAAuB,cAAc,KAAK;AAAA,MAC9C,eAAe,MAAM;AACnB,cAAM;AAAA,UACJ,GAAG;AAAA,UACH,GAAG,KAAK,CAAC;AAAA,QACX,CAAC;AAAA,MACH;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EACA,YAAY,SAAS;AACnB,UAAMC,WAAU,QAAQ,WAAW;AACnC,SAAK,OAAO,QAAQ,cAAc;AAClC,UAAM,UAAU,IAAIA,SAAQ;AAAA,MAC1B,cAAc;AAAA,MACd,MAAM;AAAA,QACJ,YAAY,KAAK;AAAA,QACjB,UAAU,QAAQ;AAAA,QAClB,cAAc,QAAQ;AAAA,MACxB;AAAA,IACF,CAAC;AACD,UAAM,QAAQ;AAAA,MACZ,YAAY,KAAK;AAAA,MACjB,UAAU,QAAQ;AAAA,MAClB,cAAc,QAAQ;AAAA;AAAA,MAEtB,eAAe,QAAQ,iBAAiB,CAAC;AAAA,MACzC,aAAa,QAAQ;AAAA,MACrB,SAAS,QAAQ;AAAA,MACjB,aAAa,QAAQ;AAAA,MACrB,KAAK,QAAQ;AAAA,MACb,SAAAA;AAAA,MACA;AAAA,MACA,eAAe,CAAC;AAAA,IAClB;AACA,SAAK,KAAK,gBAAgB,KAAK,MAAM,KAAK;AAC1C,SAAK,UAAU;AACf,SAAK,iBAAiB,wBAAwB,KAAK,MAAM,KAAK;AAC9D,SAAK,6BAA6B,oCAAoC;AAAA,MACpE;AAAA,MACA;AAAA,IACF;AACA,SAAK
,cAAc,qBAAqB;AAAA,MACtC;AAAA,MACA;AAAA,IACF;AACA,SAAK,aAAa,oBAAoB;AAAA,MACpC;AAAA,MACA;AAAA,IACF;AACA,SAAK,aAAa,oBAAoB;AAAA,MACpC;AAAA,MACA;AAAA,IACF;AACA,SAAK,eAAe,sBAAsB;AAAA,MACxC;AAAA,MACA;AAAA,IACF;AACA,SAAK,aAAa,oBAAoB;AAAA,MACpC;AAAA,MACA;AAAA,IACF;AACA,SAAK,cAAc,qBAAqB,KAAK,MAAM,KAAK;AACxD,SAAK,sBAAsB,6BAA6B,KAAK,MAAM,KAAK;AAAA,EAC1E;AACF;AACA,SAAS,UAAU;", + "names": ["OAuthMethods", "OAuthMethods", "import_auth_oauth_user", "OAuthMethods", "import_auth_oauth_user", "OAuthMethods", "import_auth_oauth_user", "OAuthMethods", "OAuthMethods", "import_auth_unauthenticated", "fromEntries", "refreshToken", "parseRequest", "sendResponse", "parseRequest", "sendResponse", "parseRequest", "sendResponse", "parseRequest", "sendResponse", "Octokit"] +} diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/add-event-handler.js b/dist/node_modules/@octokit/oauth-app/dist-src/add-event-handler.js new file mode 100644 index 0000000..686abb8 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/add-event-handler.js @@ -0,0 +1,15 @@ +function addEventHandler(state, eventName, eventHandler) { + if (Array.isArray(eventName)) { + for (const singleEventName of eventName) { + addEventHandler(state, singleEventName, eventHandler); + } + return; + } + if (!state.eventHandlers[eventName]) { + state.eventHandlers[eventName] = []; + } + state.eventHandlers[eventName].push(eventHandler); +} +export { + addEventHandler +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/emit-event.js b/dist/node_modules/@octokit/oauth-app/dist-src/emit-event.js new file mode 100644 index 0000000..ee31c40 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/emit-event.js @@ -0,0 +1,16 @@ +async function emitEvent(state, context) { + const { name, action } = context; + if (state.eventHandlers[`${name}.${action}`]) { + for (const eventHandler of state.eventHandlers[`${name}.${action}`]) { + await eventHandler(context); + } + } + if (state.eventHandlers[name]) { + for (const eventHandler of state.eventHandlers[name]) { + await eventHandler(context); + } + } +} +export { + emitEvent +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/index.js b/dist/node_modules/@octokit/oauth-app/dist-src/index.js new file mode 100644 index 0000000..9f2965d --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/index.js @@ -0,0 +1,115 @@ +import { createOAuthAppAuth } from "@octokit/auth-oauth-app"; +import { VERSION } from "./version"; +import { addEventHandler } from "./add-event-handler"; +import { OAuthAppOctokit } from "./oauth-app-octokit"; +import { + getUserOctokitWithState +} from "./methods/get-user-octokit"; +import { + getWebFlowAuthorizationUrlWithState +} from "./methods/get-web-flow-authorization-url"; +import { + createTokenWithState +} from "./methods/create-token"; +import { + checkTokenWithState +} from "./methods/check-token"; +import { + resetTokenWithState +} from "./methods/reset-token"; +import { + refreshTokenWithState +} from "./methods/refresh-token"; +import { + scopeTokenWithState +} from "./methods/scope-token"; +import { + deleteTokenWithState +} from "./methods/delete-token"; +import { + deleteAuthorizationWithState +} from "./methods/delete-authorization"; +import { handleRequest } from "./middleware/handle-request"; +import { createNodeMiddleware } from "./middleware/node/index"; +import { + createCloudflareHandler, + createWebWorkerHandler +} from "./middleware/web-worker/index"; +import { createAWSLambdaAPIGatewayV2Handler } from 
"./middleware/aws-lambda/api-gateway-v2"; +class OAuthApp { + static defaults(defaults) { + const OAuthAppWithDefaults = class extends this { + constructor(...args) { + super({ + ...defaults, + ...args[0] + }); + } + }; + return OAuthAppWithDefaults; + } + constructor(options) { + const Octokit = options.Octokit || OAuthAppOctokit; + this.type = options.clientType || "oauth-app"; + const octokit = new Octokit({ + authStrategy: createOAuthAppAuth, + auth: { + clientType: this.type, + clientId: options.clientId, + clientSecret: options.clientSecret + } + }); + const state = { + clientType: this.type, + clientId: options.clientId, + clientSecret: options.clientSecret, + // @ts-expect-error defaultScopes not permitted for GitHub Apps + defaultScopes: options.defaultScopes || [], + allowSignup: options.allowSignup, + baseUrl: options.baseUrl, + redirectUrl: options.redirectUrl, + log: options.log, + Octokit, + octokit, + eventHandlers: {} + }; + this.on = addEventHandler.bind(null, state); + this.octokit = octokit; + this.getUserOctokit = getUserOctokitWithState.bind(null, state); + this.getWebFlowAuthorizationUrl = getWebFlowAuthorizationUrlWithState.bind( + null, + state + ); + this.createToken = createTokenWithState.bind( + null, + state + ); + this.checkToken = checkTokenWithState.bind( + null, + state + ); + this.resetToken = resetTokenWithState.bind( + null, + state + ); + this.refreshToken = refreshTokenWithState.bind( + null, + state + ); + this.scopeToken = scopeTokenWithState.bind( + null, + state + ); + this.deleteToken = deleteTokenWithState.bind(null, state); + this.deleteAuthorization = deleteAuthorizationWithState.bind(null, state); + } +} +OAuthApp.VERSION = VERSION; +export { + OAuthApp, + createAWSLambdaAPIGatewayV2Handler, + createCloudflareHandler, + createNodeMiddleware, + createWebWorkerHandler, + handleRequest +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/methods/check-token.js b/dist/node_modules/@octokit/oauth-app/dist-src/methods/check-token.js new file mode 100644 index 0000000..4bd68f3 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/methods/check-token.js @@ -0,0 +1,16 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +async function checkTokenWithState(state, options) { + const result = await OAuthMethods.checkToken({ + // @ts-expect-error not worth the extra code to appease TS + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.octokit.request, + ...options + }); + Object.assign(result.authentication, { type: "token", tokenType: "oauth" }); + return result; +} +export { + checkTokenWithState +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/methods/create-token.js b/dist/node_modules/@octokit/oauth-app/dist-src/methods/create-token.js new file mode 100644 index 0000000..d012db9 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/methods/create-token.js @@ -0,0 +1,32 @@ +import * as OAuthAppAuth from "@octokit/auth-oauth-app"; +import { emitEvent } from "../emit-event"; +async function createTokenWithState(state, options) { + const authentication = await state.octokit.auth({ + type: "oauth-user", + ...options + }); + await emitEvent(state, { + name: "token", + action: "created", + token: authentication.token, + scopes: authentication.scopes, + authentication, + octokit: new state.Octokit({ + authStrategy: OAuthAppAuth.createOAuthUserAuth, + auth: { + clientType: state.clientType, + clientId: state.clientId, + clientSecret: 
state.clientSecret, + token: authentication.token, + scopes: authentication.scopes, + refreshToken: authentication.refreshToken, + expiresAt: authentication.expiresAt, + refreshTokenExpiresAt: authentication.refreshTokenExpiresAt + } + }) + }); + return { authentication }; +} +export { + createTokenWithState +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/methods/delete-authorization.js b/dist/node_modules/@octokit/oauth-app/dist-src/methods/delete-authorization.js new file mode 100644 index 0000000..172d2c8 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/methods/delete-authorization.js @@ -0,0 +1,47 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +import { createUnauthenticatedAuth } from "@octokit/auth-unauthenticated"; +import { emitEvent } from "../emit-event"; +async function deleteAuthorizationWithState(state, options) { + const optionsWithDefaults = { + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.octokit.request, + ...options + }; + const response = state.clientType === "oauth-app" ? await OAuthMethods.deleteAuthorization({ + clientType: "oauth-app", + ...optionsWithDefaults + }) : ( + // istanbul ignore next + await OAuthMethods.deleteAuthorization({ + clientType: "github-app", + ...optionsWithDefaults + }) + ); + await emitEvent(state, { + name: "token", + action: "deleted", + token: options.token, + octokit: new state.Octokit({ + authStrategy: createUnauthenticatedAuth, + auth: { + reason: `Handling "token.deleted" event. The access for the token has been revoked.` + } + }) + }); + await emitEvent(state, { + name: "authorization", + action: "deleted", + token: options.token, + octokit: new state.Octokit({ + authStrategy: createUnauthenticatedAuth, + auth: { + reason: `Handling "authorization.deleted" event. The access for the app has been revoked.` + } + }) + }); + return response; +} +export { + deleteAuthorizationWithState +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/methods/delete-token.js b/dist/node_modules/@octokit/oauth-app/dist-src/methods/delete-token.js new file mode 100644 index 0000000..65e36e5 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/methods/delete-token.js @@ -0,0 +1,36 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +import { createUnauthenticatedAuth } from "@octokit/auth-unauthenticated"; +import { emitEvent } from "../emit-event"; +async function deleteTokenWithState(state, options) { + const optionsWithDefaults = { + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.octokit.request, + ...options + }; + const response = state.clientType === "oauth-app" ? await OAuthMethods.deleteToken({ + clientType: "oauth-app", + ...optionsWithDefaults + }) : ( + // istanbul ignore next + await OAuthMethods.deleteToken({ + clientType: "github-app", + ...optionsWithDefaults + }) + ); + await emitEvent(state, { + name: "token", + action: "deleted", + token: options.token, + octokit: new state.Octokit({ + authStrategy: createUnauthenticatedAuth, + auth: { + reason: `Handling "token.deleted" event. 
The access for the token has been revoked.` + } + }) + }); + return response; +} +export { + deleteTokenWithState +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/methods/get-oauth-client-code.js b/dist/node_modules/@octokit/oauth-app/dist-src/methods/get-oauth-client-code.js new file mode 100644 index 0000000..dd686fa --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/methods/get-oauth-client-code.js @@ -0,0 +1,10 @@ +function getOAuthClientCode() { + return `import { Octokit: Core } from "https://cdn.pika.dev/@octokit/core"; + + export const Octokit = Core.defaults({ + oauth: {} + })`; +} +export { + getOAuthClientCode +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/methods/get-user-octokit.js b/dist/node_modules/@octokit/oauth-app/dist-src/methods/get-user-octokit.js new file mode 100644 index 0000000..e5d9739 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/methods/get-user-octokit.js @@ -0,0 +1,29 @@ +import { createOAuthUserAuth } from "@octokit/auth-oauth-user"; +import { emitEvent } from "../emit-event"; +async function getUserOctokitWithState(state, options) { + return state.octokit.auth({ + type: "oauth-user", + ...options, + async factory(options2) { + const octokit = new state.Octokit({ + authStrategy: createOAuthUserAuth, + auth: options2 + }); + const authentication = await octokit.auth({ + type: "get" + }); + await emitEvent(state, { + name: "token", + action: "created", + token: authentication.token, + scopes: authentication.scopes, + authentication, + octokit + }); + return octokit; + } + }); +} +export { + getUserOctokitWithState +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/methods/get-web-flow-authorization-url.js b/dist/node_modules/@octokit/oauth-app/dist-src/methods/get-web-flow-authorization-url.js new file mode 100644 index 0000000..ca6aa16 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/methods/get-web-flow-authorization-url.js @@ -0,0 +1,18 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +function getWebFlowAuthorizationUrlWithState(state, options) { + const optionsWithDefaults = { + clientId: state.clientId, + request: state.octokit.request, + ...options, + allowSignup: state.allowSignup ?? options.allowSignup, + redirectUrl: options.redirectUrl ?? state.redirectUrl, + scopes: options.scopes ?? 
state.defaultScopes + }; + return OAuthMethods.getWebFlowAuthorizationUrl({ + clientType: state.clientType, + ...optionsWithDefaults + }); +} +export { + getWebFlowAuthorizationUrlWithState +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/methods/refresh-token.js b/dist/node_modules/@octokit/oauth-app/dist-src/methods/refresh-token.js new file mode 100644 index 0000000..3b52ab2 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/methods/refresh-token.js @@ -0,0 +1,40 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +import { emitEvent } from "../emit-event"; +import { createOAuthUserAuth } from "@octokit/auth-oauth-user"; +async function refreshTokenWithState(state, options) { + if (state.clientType === "oauth-app") { + throw new Error( + "[@octokit/oauth-app] app.refreshToken() is not supported for OAuth Apps" + ); + } + const response = await OAuthMethods.refreshToken({ + clientType: "github-app", + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.octokit.request, + refreshToken: options.refreshToken + }); + const authentication = Object.assign(response.authentication, { + type: "token", + tokenType: "oauth" + }); + await emitEvent(state, { + name: "token", + action: "refreshed", + token: response.authentication.token, + authentication, + octokit: new state.Octokit({ + authStrategy: createOAuthUserAuth, + auth: { + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: response.authentication.token + } + }) + }); + return { ...response, authentication }; +} +export { + refreshTokenWithState +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/methods/reset-token.js b/dist/node_modules/@octokit/oauth-app/dist-src/methods/reset-token.js new file mode 100644 index 0000000..2ad55af --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/methods/reset-token.js @@ -0,0 +1,66 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +import { emitEvent } from "../emit-event"; +import { createOAuthUserAuth } from "@octokit/auth-oauth-user"; +async function resetTokenWithState(state, options) { + const optionsWithDefaults = { + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.octokit.request, + ...options + }; + if (state.clientType === "oauth-app") { + const response2 = await OAuthMethods.resetToken({ + clientType: "oauth-app", + ...optionsWithDefaults + }); + const authentication2 = Object.assign(response2.authentication, { + type: "token", + tokenType: "oauth" + }); + await emitEvent(state, { + name: "token", + action: "reset", + token: response2.authentication.token, + scopes: response2.authentication.scopes || void 0, + authentication: authentication2, + octokit: new state.Octokit({ + authStrategy: createOAuthUserAuth, + auth: { + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: response2.authentication.token, + scopes: response2.authentication.scopes + } + }) + }); + return { ...response2, authentication: authentication2 }; + } + const response = await OAuthMethods.resetToken({ + clientType: "github-app", + ...optionsWithDefaults + }); + const authentication = Object.assign(response.authentication, { + type: "token", + tokenType: "oauth" + }); + await emitEvent(state, { + name: "token", + action: "reset", + token: response.authentication.token, + authentication, + octokit: new state.Octokit({ + authStrategy: createOAuthUserAuth, + auth: { + clientType: state.clientType, + 
clientId: state.clientId, + clientSecret: state.clientSecret, + token: response.authentication.token + } + }) + }); + return { ...response, authentication }; +} +export { + resetTokenWithState +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/methods/scope-token.js b/dist/node_modules/@octokit/oauth-app/dist-src/methods/scope-token.js new file mode 100644 index 0000000..04b980c --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/methods/scope-token.js @@ -0,0 +1,40 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +import { createOAuthUserAuth } from "@octokit/auth-oauth-user"; +import { emitEvent } from "../emit-event"; +async function scopeTokenWithState(state, options) { + if (state.clientType === "oauth-app") { + throw new Error( + "[@octokit/oauth-app] app.scopeToken() is not supported for OAuth Apps" + ); + } + const response = await OAuthMethods.scopeToken({ + clientType: "github-app", + clientId: state.clientId, + clientSecret: state.clientSecret, + request: state.octokit.request, + ...options + }); + const authentication = Object.assign(response.authentication, { + type: "token", + tokenType: "oauth" + }); + await emitEvent(state, { + name: "token", + action: "scoped", + token: response.authentication.token, + authentication, + octokit: new state.Octokit({ + authStrategy: createOAuthUserAuth, + auth: { + clientType: state.clientType, + clientId: state.clientId, + clientSecret: state.clientSecret, + token: response.authentication.token + } + }) + }); + return { ...response, authentication }; +} +export { + scopeTokenWithState +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/middleware/aws-lambda/api-gateway-v2-parse-request.js b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/aws-lambda/api-gateway-v2-parse-request.js new file mode 100644 index 0000000..f566b2b --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/aws-lambda/api-gateway-v2-parse-request.js @@ -0,0 +1,15 @@ +function parseRequest(request) { + const { method } = request.requestContext.http; + let url = request.rawPath; + const { stage } = request.requestContext; + if (url.startsWith("/" + stage)) + url = url.substring(stage.length + 1); + if (request.rawQueryString) + url += "?" 
+ request.rawQueryString; + const headers = request.headers; + const text = async () => request.body || ""; + return { method, url, headers, text }; +} +export { + parseRequest +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/middleware/aws-lambda/api-gateway-v2-send-response.js b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/aws-lambda/api-gateway-v2-send-response.js new file mode 100644 index 0000000..bcef10f --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/aws-lambda/api-gateway-v2-send-response.js @@ -0,0 +1,10 @@ +function sendResponse(octokitResponse) { + return { + statusCode: octokitResponse.status, + headers: octokitResponse.headers, + body: octokitResponse.text + }; +} +export { + sendResponse +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/middleware/aws-lambda/api-gateway-v2.js b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/aws-lambda/api-gateway-v2.js new file mode 100644 index 0000000..93e44c4 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/aws-lambda/api-gateway-v2.js @@ -0,0 +1,29 @@ +import { parseRequest } from "./api-gateway-v2-parse-request"; +import { sendResponse } from "./api-gateway-v2-send-response"; +import { handleRequest } from "../handle-request"; +import { onUnhandledRequestDefault } from "../on-unhandled-request-default"; +import { OAuthApp } from "../../index"; +async function onUnhandledRequestDefaultAWSAPIGatewayV2(event) { + const request = parseRequest(event); + const response = onUnhandledRequestDefault(request); + return sendResponse(response); +} +function createAWSLambdaAPIGatewayV2Handler(app, { + pathPrefix, + onUnhandledRequest +} = {}) { + if (onUnhandledRequest) { + app.octokit.log.warn( + "[@octokit/oauth-app] `onUnhandledRequest` is deprecated and will be removed from the next major version." + ); + } + onUnhandledRequest ??= onUnhandledRequestDefaultAWSAPIGatewayV2; + return async function(event) { + const request = parseRequest(event); + const response = await handleRequest(app, { pathPrefix }, request); + return response ? 
sendResponse(response) : onUnhandledRequest(event); + }; +} +export { + createAWSLambdaAPIGatewayV2Handler +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/middleware/handle-request.js b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/handle-request.js new file mode 100644 index 0000000..52aa22b --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/handle-request.js @@ -0,0 +1,226 @@ +import { OAuthApp } from "../index"; +import fromEntries from "fromentries"; +async function handleRequest(app, { pathPrefix = "/api/github/oauth" }, request) { + if (request.method === "OPTIONS") { + return { + status: 200, + headers: { + "access-control-allow-origin": "*", + "access-control-allow-methods": "*", + "access-control-allow-headers": "Content-Type, User-Agent, Authorization" + } + }; + } + const { pathname } = new URL(request.url, "http://localhost"); + const route = [request.method, pathname].join(" "); + const routes = { + getLogin: `GET ${pathPrefix}/login`, + getCallback: `GET ${pathPrefix}/callback`, + createToken: `POST ${pathPrefix}/token`, + getToken: `GET ${pathPrefix}/token`, + patchToken: `PATCH ${pathPrefix}/token`, + patchRefreshToken: `PATCH ${pathPrefix}/refresh-token`, + scopeToken: `POST ${pathPrefix}/token/scoped`, + deleteToken: `DELETE ${pathPrefix}/token`, + deleteGrant: `DELETE ${pathPrefix}/grant` + }; + if (!Object.values(routes).includes(route)) { + return null; + } + let json; + try { + const text = await request.text(); + json = text ? JSON.parse(text) : {}; + } catch (error) { + return { + status: 400, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify({ + error: "[@octokit/oauth-app] request error" + }) + }; + } + const { searchParams } = new URL(request.url, "http://localhost"); + const query = fromEntries(searchParams); + const headers = request.headers; + try { + if (route === routes.getLogin) { + const { url } = app.getWebFlowAuthorizationUrl({ + state: query.state, + scopes: query.scopes ? query.scopes.split(",") : void 0, + allowSignup: query.allowSignup ? query.allowSignup === "true" : void 0, + redirectUrl: query.redirectUrl + }); + return { status: 302, headers: { location: url } }; + } + if (route === routes.getCallback) { + if (query.error) { + throw new Error( + `[@octokit/oauth-app] ${query.error} ${query.error_description}` + ); + } + if (!query.code) { + throw new Error('[@octokit/oauth-app] "code" parameter is required'); + } + const { + authentication: { token: token2 } + } = await app.createToken({ + code: query.code + }); + return { + status: 200, + headers: { + "content-type": "text/html" + }, + text: `

<h1>Token created successfully</h1>
+
+<p>Your token is: <strong>${token2}</strong>. Copy it now as it cannot be shown again.</p>
` + }; + } + if (route === routes.createToken) { + const { code, redirectUrl } = json; + if (!code) { + throw new Error('[@octokit/oauth-app] "code" parameter is required'); + } + const result = await app.createToken({ + code, + redirectUrl + }); + delete result.authentication.clientSecret; + return { + status: 201, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify(result) + }; + } + if (route === routes.getToken) { + const token2 = headers.authorization?.substr("token ".length); + if (!token2) { + throw new Error( + '[@octokit/oauth-app] "Authorization" header is required' + ); + } + const result = await app.checkToken({ + token: token2 + }); + delete result.authentication.clientSecret; + return { + status: 200, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify(result) + }; + } + if (route === routes.patchToken) { + const token2 = headers.authorization?.substr("token ".length); + if (!token2) { + throw new Error( + '[@octokit/oauth-app] "Authorization" header is required' + ); + } + const result = await app.resetToken({ token: token2 }); + delete result.authentication.clientSecret; + return { + status: 200, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify(result) + }; + } + if (route === routes.patchRefreshToken) { + const token2 = headers.authorization?.substr("token ".length); + if (!token2) { + throw new Error( + '[@octokit/oauth-app] "Authorization" header is required' + ); + } + const { refreshToken } = json; + if (!refreshToken) { + throw new Error( + "[@octokit/oauth-app] refreshToken must be sent in request body" + ); + } + const result = await app.refreshToken({ refreshToken }); + delete result.authentication.clientSecret; + return { + status: 200, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify(result) + }; + } + if (route === routes.scopeToken) { + const token2 = headers.authorization?.substr("token ".length); + if (!token2) { + throw new Error( + '[@octokit/oauth-app] "Authorization" header is required' + ); + } + const result = await app.scopeToken({ + token: token2, + ...json + }); + delete result.authentication.clientSecret; + return { + status: 200, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify(result) + }; + } + if (route === routes.deleteToken) { + const token2 = headers.authorization?.substr("token ".length); + if (!token2) { + throw new Error( + '[@octokit/oauth-app] "Authorization" header is required' + ); + } + await app.deleteToken({ + token: token2 + }); + return { + status: 204, + headers: { "access-control-allow-origin": "*" } + }; + } + const token = headers.authorization?.substr("token ".length); + if (!token) { + throw new Error( + '[@octokit/oauth-app] "Authorization" header is required' + ); + } + await app.deleteAuthorization({ + token + }); + return { + status: 204, + headers: { "access-control-allow-origin": "*" } + }; + } catch (error) { + return { + status: 400, + headers: { + "content-type": "application/json", + "access-control-allow-origin": "*" + }, + text: JSON.stringify({ error: error.message }) + }; + } +} +export { + handleRequest +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/middleware/node/index.js b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/node/index.js new file mode 100644 index 
0000000..c3909e4 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/node/index.js @@ -0,0 +1,39 @@ +import { parseRequest } from "./parse-request"; +import { sendResponse } from "./send-response"; +import { onUnhandledRequestDefault } from "../on-unhandled-request-default"; +import { handleRequest } from "../handle-request"; +import { OAuthApp } from "../../index"; +function onUnhandledRequestDefaultNode(request, response) { + const octokitRequest = parseRequest(request); + const octokitResponse = onUnhandledRequestDefault(octokitRequest); + sendResponse(octokitResponse, response); +} +function createNodeMiddleware(app, { + pathPrefix, + onUnhandledRequest +} = {}) { + if (onUnhandledRequest) { + app.octokit.log.warn( + "[@octokit/oauth-app] `onUnhandledRequest` is deprecated and will be removed from the next major version." + ); + } + onUnhandledRequest ??= onUnhandledRequestDefaultNode; + return async function(request, response, next) { + const octokitRequest = parseRequest(request); + const octokitResponse = await handleRequest( + app, + { pathPrefix }, + octokitRequest + ); + if (octokitResponse) { + sendResponse(octokitResponse, response); + } else if (typeof next === "function") { + next(); + } else { + onUnhandledRequest(request, response); + } + }; +} +export { + createNodeMiddleware +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/middleware/node/parse-request.js b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/node/parse-request.js new file mode 100644 index 0000000..11ad6b4 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/node/parse-request.js @@ -0,0 +1,14 @@ +function parseRequest(request) { + const { method, url, headers } = request; + async function text() { + const text2 = await new Promise((resolve, reject) => { + let bodyChunks = []; + request.on("error", reject).on("data", (chunk) => bodyChunks.push(chunk)).on("end", () => resolve(Buffer.concat(bodyChunks).toString())); + }); + return text2; + } + return { method, url, headers, text }; +} +export { + parseRequest +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/middleware/node/send-response.js b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/node/send-response.js new file mode 100644 index 0000000..cf4e892 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/node/send-response.js @@ -0,0 +1,7 @@ +function sendResponse(octokitResponse, response) { + response.writeHead(octokitResponse.status, octokitResponse.headers); + response.end(octokitResponse.text); +} +export { + sendResponse +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/middleware/on-unhandled-request-default.js b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/on-unhandled-request-default.js new file mode 100644 index 0000000..26fe84a --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/on-unhandled-request-default.js @@ -0,0 +1,12 @@ +function onUnhandledRequestDefault(request) { + return { + status: 404, + headers: { "content-type": "application/json" }, + text: JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}` + }) + }; +} +export { + onUnhandledRequestDefault +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/middleware/web-worker/index.js b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/web-worker/index.js new file mode 100644 index 0000000..af54fb0 --- /dev/null +++ 
b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/web-worker/index.js @@ -0,0 +1,40 @@ +import { parseRequest } from "./parse-request"; +import { sendResponse } from "./send-response"; +import { handleRequest } from "../handle-request"; +import { onUnhandledRequestDefault } from "../on-unhandled-request-default"; +import { OAuthApp } from "../../index"; +async function onUnhandledRequestDefaultWebWorker(request) { + const octokitRequest = parseRequest(request); + const octokitResponse = onUnhandledRequestDefault(octokitRequest); + return sendResponse(octokitResponse); +} +function createWebWorkerHandler(app, { + pathPrefix, + onUnhandledRequest +} = {}) { + if (onUnhandledRequest) { + app.octokit.log.warn( + "[@octokit/oauth-app] `onUnhandledRequest` is deprecated and will be removed from the next major version." + ); + } + onUnhandledRequest ??= onUnhandledRequestDefaultWebWorker; + return async function(request) { + const octokitRequest = parseRequest(request); + const octokitResponse = await handleRequest( + app, + { pathPrefix }, + octokitRequest + ); + return octokitResponse ? sendResponse(octokitResponse) : await onUnhandledRequest(request); + }; +} +function createCloudflareHandler(...args) { + args[0].octokit.log.warn( + "[@octokit/oauth-app] `createCloudflareHandler` is deprecated, use `createWebWorkerHandler` instead" + ); + return createWebWorkerHandler(...args); +} +export { + createCloudflareHandler, + createWebWorkerHandler +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/middleware/web-worker/parse-request.js b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/web-worker/parse-request.js new file mode 100644 index 0000000..21f15b4 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/web-worker/parse-request.js @@ -0,0 +1,12 @@ +function parseRequest(request) { + const headers = Object.fromEntries(request.headers.entries()); + return { + method: request.method, + url: request.url, + headers, + text: () => request.text() + }; +} +export { + parseRequest +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/middleware/web-worker/send-response.js b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/web-worker/send-response.js new file mode 100644 index 0000000..ff630b9 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/middleware/web-worker/send-response.js @@ -0,0 +1,9 @@ +function sendResponse(octokitResponse) { + return new Response(octokitResponse.text, { + status: octokitResponse.status, + headers: octokitResponse.headers + }); +} +export { + sendResponse +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/oauth-app-octokit.js b/dist/node_modules/@octokit/oauth-app/dist-src/oauth-app-octokit.js new file mode 100644 index 0000000..dd5133a --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/oauth-app-octokit.js @@ -0,0 +1,9 @@ +import { Octokit } from "@octokit/core"; +import { getUserAgent } from "universal-user-agent"; +import { VERSION } from "./version"; +const OAuthAppOctokit = Octokit.defaults({ + userAgent: `octokit-oauth-app.js/${VERSION} ${getUserAgent()}` +}); +export { + OAuthAppOctokit +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-src/version.js b/dist/node_modules/@octokit/oauth-app/dist-src/version.js new file mode 100644 index 0000000..6b36c09 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "4.2.4"; +export { + VERSION +}; diff --git 
a/dist/node_modules/@octokit/oauth-app/dist-types/add-event-handler.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/add-event-handler.d.ts new file mode 100644 index 0000000..ccf0e2f --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/add-event-handler.d.ts @@ -0,0 +1,2 @@ +import type { EventHandler, EventAndActionName, State, ClientType, Options } from "./types"; +export declare function addEventHandler(state: State, eventName: EventAndActionName | EventAndActionName[], eventHandler: EventHandler>): void; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/emit-event.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/emit-event.d.ts new file mode 100644 index 0000000..f4e37ff --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/emit-event.d.ts @@ -0,0 +1,2 @@ +import type { State, EventHandlerContext, ClientType, Options } from "./types"; +export declare function emitEvent(state: State, context: EventHandlerContext>): Promise; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/index.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/index.d.ts new file mode 100644 index 0000000..a3e7c6d --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/index.d.ts @@ -0,0 +1,48 @@ +import { type GetUserOctokitWithStateInterface } from "./methods/get-user-octokit"; +import { type GetWebFlowAuthorizationUrlInterface } from "./methods/get-web-flow-authorization-url"; +import { type CreateTokenInterface } from "./methods/create-token"; +import { type CheckTokenInterface } from "./methods/check-token"; +import { type ResetTokenInterface } from "./methods/reset-token"; +import { type RefreshTokenInterface } from "./methods/refresh-token"; +import { type ScopeTokenInterface } from "./methods/scope-token"; +import { type DeleteTokenInterface } from "./methods/delete-token"; +import { type DeleteAuthorizationInterface } from "./methods/delete-authorization"; +import type { AddEventHandler, ClientType, ClientTypeFromOptions, ConstructorOptions, OctokitTypeFromOptions, Options } from "./types"; +export type { HandlerOptions, OctokitRequest, OctokitResponse, } from "./middleware/types"; +export { handleRequest } from "./middleware/handle-request"; +export { createNodeMiddleware } from "./middleware/node/index"; +export { createCloudflareHandler, createWebWorkerHandler, } from "./middleware/web-worker/index"; +export { createAWSLambdaAPIGatewayV2Handler } from "./middleware/aws-lambda/api-gateway-v2"; +type Constructor = new (...args: any[]) => T; +export declare class OAuthApp = Options<"oauth-app">> { + static VERSION: string; + static defaults, S extends Constructor>>(this: S, defaults: TDefaults): { + new (...args: any[]): { + type: ClientTypeFromOptions; + on: AddEventHandler; + octokit: OctokitTypeFromOptions; + getUserOctokit: GetUserOctokitWithStateInterface>; + getWebFlowAuthorizationUrl: GetWebFlowAuthorizationUrlInterface>; + createToken: CreateTokenInterface>; + checkToken: CheckTokenInterface>; + resetToken: ResetTokenInterface>; + refreshToken: RefreshTokenInterface; + scopeToken: ScopeTokenInterface; + deleteToken: DeleteTokenInterface; + deleteAuthorization: DeleteAuthorizationInterface; + }; + } & S; + constructor(options: ConstructorOptions); + type: ClientTypeFromOptions; + on: AddEventHandler; + octokit: OctokitTypeFromOptions; + getUserOctokit: GetUserOctokitWithStateInterface>; + getWebFlowAuthorizationUrl: GetWebFlowAuthorizationUrlInterface>; + createToken: CreateTokenInterface>; + checkToken: 
CheckTokenInterface>; + resetToken: ResetTokenInterface>; + refreshToken: RefreshTokenInterface; + scopeToken: ScopeTokenInterface; + deleteToken: DeleteTokenInterface; + deleteAuthorization: DeleteAuthorizationInterface; +} diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/methods/check-token.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/methods/check-token.d.ts new file mode 100644 index 0000000..6400b76 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/methods/check-token.d.ts @@ -0,0 +1,14 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +import type { ClientType, State } from "../types"; +export type CheckTokenOptions = { + token: string; +}; +export declare function checkTokenWithState(state: State, options: CheckTokenOptions): Promise; +export interface CheckTokenInterface { + (options: CheckTokenOptions): (TClientType extends "oauth-app" ? Promise : Promise) & { + authentication: { + type: "token"; + tokenType: "oauth"; + }; + }; +} diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/methods/create-token.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/methods/create-token.d.ts new file mode 100644 index 0000000..506c3dc --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/methods/create-token.d.ts @@ -0,0 +1,24 @@ +import * as OAuthAppAuth from "@octokit/auth-oauth-app"; +import type { ClientType, State } from "../types"; +export type CreateTokenWebFlowOptions = Omit; +export type CreateTokenOAuthAppDeviceFlowOptions = Omit; +export type CreateTokenGitHubAppDeviceFlowOptions = Omit; +export declare function createTokenWithState(state: State, options: CreateTokenWebFlowOptions | CreateTokenOAuthAppDeviceFlowOptions | CreateTokenGitHubAppDeviceFlowOptions): Promise<{ + authentication: OAuthAppAuth.OAuthAppUserAuthentication | OAuthAppAuth.GitHubAppUserAuthentication | OAuthAppAuth.GitHubAppUserAuthenticationWithExpiration; +}>; +export interface CreateTokenInterface { + (options: CreateTokenWebFlowOptions): TClientType extends "oauth-app" ? Promise<{ + authentication: OAuthAppAuth.OAuthAppUserAuthentication; + }> : Promise<{ + authentication: OAuthAppAuth.GitHubAppUserAuthentication; + } | { + authentication: OAuthAppAuth.GitHubAppUserAuthenticationWithExpiration; + }>; + (options: TClientType extends "oauth-app" ? CreateTokenOAuthAppDeviceFlowOptions : CreateTokenGitHubAppDeviceFlowOptions): TClientType extends "oauth-app" ? 
Promise<{ + authentication: OAuthAppAuth.OAuthAppUserAuthentication; + }> : Promise<{ + authentication: OAuthAppAuth.GitHubAppUserAuthentication; + } | { + authentication: OAuthAppAuth.GitHubAppUserAuthenticationWithExpiration; + }>; +} diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/methods/delete-authorization.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/methods/delete-authorization.d.ts new file mode 100644 index 0000000..14cdec1 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/methods/delete-authorization.d.ts @@ -0,0 +1,9 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +import type { State } from "../types"; +export type DeleteAuthorizationOptions = { + token: string; +}; +export declare function deleteAuthorizationWithState(state: State, options: DeleteAuthorizationOptions): Promise; +export interface DeleteAuthorizationInterface { + (options: DeleteAuthorizationOptions): Promise; +} diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/methods/delete-token.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/methods/delete-token.d.ts new file mode 100644 index 0000000..14ed1dc --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/methods/delete-token.d.ts @@ -0,0 +1,9 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +import type { State } from "../types"; +export type DeleteTokenOptions = { + token: string; +}; +export declare function deleteTokenWithState(state: State, options: DeleteTokenOptions): Promise; +export interface DeleteTokenInterface { + (options: DeleteTokenOptions): Promise; +} diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/methods/get-oauth-client-code.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/methods/get-oauth-client-code.d.ts new file mode 100644 index 0000000..2a21bde --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/methods/get-oauth-client-code.d.ts @@ -0,0 +1 @@ +export declare function getOAuthClientCode(): string; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/methods/get-user-octokit.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/methods/get-user-octokit.d.ts new file mode 100644 index 0000000..caef1cf --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/methods/get-user-octokit.d.ts @@ -0,0 +1,10 @@ +import type { OAuthAppStrategyOptionsWebFlow, OAuthAppStrategyOptionsDeviceFlow, OAuthAppStrategyOptionsExistingAuthentication, GitHubAppStrategyOptionsWebFlow, GitHubAppStrategyOptionsDeviceFlow, GitHubAppStrategyOptionsExistingAuthentication, GitHubAppStrategyOptionsExistingAuthenticationWithExpiration } from "@octokit/auth-oauth-user"; +import type { State, OctokitInstance, ClientType } from "../types"; +type StateOptions = "clientType" | "clientId" | "clientSecret" | "request"; +export type GetUserOctokitOAuthAppOptions = Omit | Omit | Omit; +export type GetUserOctokitGitHubAppOptions = Omit | Omit | Omit | Omit; +export declare function getUserOctokitWithState(state: State, options: GetUserOctokitOAuthAppOptions | GetUserOctokitGitHubAppOptions): Promise; +export interface GetUserOctokitWithStateInterface { + (options: TClientType extends "oauth-app" ? 
GetUserOctokitOAuthAppOptions : GetUserOctokitGitHubAppOptions): Promise; +} +export {}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/methods/get-web-flow-authorization-url.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/methods/get-web-flow-authorization-url.d.ts new file mode 100644 index 0000000..6088d6a --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/methods/get-web-flow-authorization-url.d.ts @@ -0,0 +1,10 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +import type { ClientType, State } from "../types"; +type StateOptions = "clientType" | "clientId" | "clientSecret" | "request"; +export type GetWebFlowAuthorizationUrlOAuthAppOptions = Omit; +export type GetWebFlowAuthorizationUrlGitHubAppOptions = Omit; +export declare function getWebFlowAuthorizationUrlWithState(state: State, options: any): any; +export interface GetWebFlowAuthorizationUrlInterface { + (options: TClientType extends "oauth-app" ? GetWebFlowAuthorizationUrlOAuthAppOptions : GetWebFlowAuthorizationUrlGitHubAppOptions): TClientType extends "oauth-app" ? OAuthMethods.GetWebFlowAuthorizationUrlOAuthAppResult : OAuthMethods.GetWebFlowAuthorizationUrlGitHubAppResult; +} +export {}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/methods/refresh-token.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/methods/refresh-token.d.ts new file mode 100644 index 0000000..41b3ba9 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/methods/refresh-token.d.ts @@ -0,0 +1,19 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +import type { State } from "../types"; +export type RefreshTokenOptions = { + refreshToken: string; +}; +export declare function refreshTokenWithState(state: State, options: RefreshTokenOptions): Promise; +export interface RefreshTokenInterface { + (options: RefreshTokenOptions): Promise; +} diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/methods/reset-token.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/methods/reset-token.d.ts new file mode 100644 index 0000000..1a7e625 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/methods/reset-token.d.ts @@ -0,0 +1,24 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +import type { ClientType, State } from "../types"; +export type ResetTokenOptions = { + token: string; +}; +export declare function resetTokenWithState(state: State, options: ResetTokenOptions): Promise<(OAuthMethods.ResetTokenOAuthAppResponse | OAuthMethods.ResetTokenGitHubAppResponse) & { + authentication: { + type: "token"; + tokenType: "oauth"; + }; +}>; +export interface ResetTokenInterface { + (options: ResetTokenOptions): TClientType extends "oauth-app" ? 
Promise : Promise; +} diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/methods/scope-token.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/methods/scope-token.d.ts new file mode 100644 index 0000000..b53ab01 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/methods/scope-token.d.ts @@ -0,0 +1,19 @@ +import * as OAuthMethods from "@octokit/oauth-methods"; +import type { State } from "../types"; +type StateOptions = "clientType" | "clientId" | "clientSecret" | "request"; +export type ScopeTokenOptions = Omit; +export declare function scopeTokenWithState(state: State, options: ScopeTokenOptions): Promise; +export interface ScopeTokenInterface { + (options: ScopeTokenOptions): Promise; +} +export {}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/middleware/aws-lambda/api-gateway-v2-parse-request.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/aws-lambda/api-gateway-v2-parse-request.d.ts new file mode 100644 index 0000000..908a542 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/aws-lambda/api-gateway-v2-parse-request.d.ts @@ -0,0 +1,3 @@ +import type { OctokitRequest } from "../types"; +import type { APIGatewayProxyEventV2 } from "aws-lambda"; +export declare function parseRequest(request: APIGatewayProxyEventV2): OctokitRequest; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/middleware/aws-lambda/api-gateway-v2-send-response.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/aws-lambda/api-gateway-v2-send-response.d.ts new file mode 100644 index 0000000..0426e84 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/aws-lambda/api-gateway-v2-send-response.d.ts @@ -0,0 +1,3 @@ +import type { OctokitResponse } from "../types"; +import type { APIGatewayProxyStructuredResultV2 } from "aws-lambda"; +export declare function sendResponse(octokitResponse: OctokitResponse): APIGatewayProxyStructuredResultV2; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/middleware/aws-lambda/api-gateway-v2.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/aws-lambda/api-gateway-v2.d.ts new file mode 100644 index 0000000..21081e6 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/aws-lambda/api-gateway-v2.d.ts @@ -0,0 +1,7 @@ +import type { HandlerOptions } from "../types"; +import { OAuthApp } from "../../index"; +import type { ClientType, Options } from "../../types"; +import type { APIGatewayProxyEventV2, APIGatewayProxyStructuredResultV2 } from "aws-lambda"; +export declare function createAWSLambdaAPIGatewayV2Handler(app: OAuthApp>, { pathPrefix, onUnhandledRequest, }?: HandlerOptions & { + onUnhandledRequest?: (event: APIGatewayProxyEventV2) => Promise; +}): (event: APIGatewayProxyEventV2) => Promise; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/middleware/handle-request.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/handle-request.d.ts new file mode 100644 index 0000000..309efde --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/handle-request.d.ts @@ -0,0 +1,4 @@ +import { OAuthApp } from "../index"; +import type { HandlerOptions, OctokitRequest, OctokitResponse } from "./types"; +import type { ClientType, Options } from "../types"; +export declare function handleRequest(app: OAuthApp>, { pathPrefix }: HandlerOptions, request: OctokitRequest): Promise; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/middleware/node/index.d.ts 
b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/node/index.d.ts new file mode 100644 index 0000000..0a1d048 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/node/index.d.ts @@ -0,0 +1,9 @@ +type IncomingMessage = any; +type ServerResponse = any; +import { OAuthApp } from "../../index"; +import type { HandlerOptions } from "../types"; +import type { ClientType, Options } from "../../types"; +export declare function createNodeMiddleware(app: OAuthApp>, { pathPrefix, onUnhandledRequest, }?: HandlerOptions & { + onUnhandledRequest?: (request: IncomingMessage, response: ServerResponse) => void; +}): (request: IncomingMessage, response: ServerResponse, next?: Function) => Promise; +export {}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/middleware/node/parse-request.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/node/parse-request.d.ts new file mode 100644 index 0000000..2012346 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/node/parse-request.d.ts @@ -0,0 +1,4 @@ +type IncomingMessage = any; +import type { OctokitRequest } from "../types"; +export declare function parseRequest(request: IncomingMessage): OctokitRequest; +export {}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/middleware/node/send-response.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/node/send-response.d.ts new file mode 100644 index 0000000..85e906a --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/node/send-response.d.ts @@ -0,0 +1,4 @@ +type ServerResponse = any; +import type { OctokitResponse } from "../types"; +export declare function sendResponse(octokitResponse: OctokitResponse, response: ServerResponse): void; +export {}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/middleware/on-unhandled-request-default.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/on-unhandled-request-default.d.ts new file mode 100644 index 0000000..f2ccd36 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/on-unhandled-request-default.d.ts @@ -0,0 +1,2 @@ +import type { OctokitRequest, OctokitResponse } from "./types"; +export declare function onUnhandledRequestDefault(request: OctokitRequest): OctokitResponse; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/middleware/types.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/types.d.ts new file mode 100644 index 0000000..71a0138 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/types.d.ts @@ -0,0 +1,14 @@ +export type OctokitRequest = { + method: string; + url: string; + headers: Record; + text: () => Promise; +}; +export type OctokitResponse = { + status: number; + headers?: Record; + text?: string; +}; +export type HandlerOptions = { + pathPrefix?: string; +}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/middleware/web-worker/index.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/web-worker/index.d.ts new file mode 100644 index 0000000..b006c97 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/web-worker/index.d.ts @@ -0,0 +1,8 @@ +import { OAuthApp } from "../../index"; +import type { HandlerOptions } from "../types"; +import type { ClientType, Options } from "../../types"; +export declare function createWebWorkerHandler>(app: OAuthApp, { pathPrefix, onUnhandledRequest, }?: HandlerOptions & { + onUnhandledRequest?: (request: Request) => Response 
| Promise; +}): (request: Request) => Promise; +/** @deprecated */ +export declare function createCloudflareHandler>(...args: Parameters): (request: Request) => Promise; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/middleware/web-worker/parse-request.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/web-worker/parse-request.d.ts new file mode 100644 index 0000000..f48ab1d --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/web-worker/parse-request.d.ts @@ -0,0 +1,2 @@ +import type { OctokitRequest } from "../types"; +export declare function parseRequest(request: Request): OctokitRequest; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/middleware/web-worker/send-response.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/web-worker/send-response.d.ts new file mode 100644 index 0000000..54aad3f --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/middleware/web-worker/send-response.d.ts @@ -0,0 +1,2 @@ +import type { OctokitResponse } from "../types"; +export declare function sendResponse(octokitResponse: OctokitResponse): Response; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/oauth-app-octokit.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/oauth-app-octokit.d.ts new file mode 100644 index 0000000..9f4e220 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/oauth-app-octokit.d.ts @@ -0,0 +1,2 @@ +import { Octokit } from "@octokit/core"; +export declare const OAuthAppOctokit: typeof Octokit; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/types.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/types.d.ts new file mode 100644 index 0000000..0824961 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/types.d.ts @@ -0,0 +1,67 @@ +import { Octokit } from "@octokit/core"; +import type { OAuthAppUserAuthentication, GitHubAppUserAuthentication, GitHubAppUserAuthenticationWithExpiration } from "@octokit/auth-oauth-app"; +import { OAuthAppOctokit } from "./oauth-app-octokit"; +export type ClientType = "oauth-app" | "github-app"; +export type OAuthAppOctokitClassType = typeof OAuthAppOctokit; +export type Scope = string; +export type ClientId = string; +export type ClientSecret = string; +export type Token = string; +export type EventName = "token" | "authorization"; +export type ActionName = "created" | "reset" | "deleted" | "refreshed" | "scoped"; +export type EventAndActionName = "token" | "token.created" | "token.reset" | "token.refreshed" | "token.scoped" | "token.deleted" | "authorization" | "authorization.deleted"; +type CommonOptions = { + clientId?: ClientId; + clientSecret?: ClientSecret; + allowSignup?: boolean; + baseUrl?: string; + redirectUrl?: string; + log?: typeof console; + Octokit?: TOctokit; +}; +export type Options = TClientType extends "oauth-app" ? CommonOptions & { + clientType?: TClientType; + defaultScopes?: Scope[]; +} : CommonOptions & { + clientType?: TClientType; +}; +export type ConstructorOptions> = TOptions & { + clientId: ClientId; + clientSecret: ClientSecret; +}; +export type OctokitTypeFromOptions> = TOptions["Octokit"] extends typeof Octokit ? InstanceType : Octokit; +export type OctokitClassTypeFromOptions> = TOptions["Octokit"] extends typeof Octokit ? TOptions["Octokit"] : typeof Octokit; +export type ClientTypeFromOptions> = TOptions["clientType"] extends "github-app" ? 
"github-app" : "oauth-app"; +export type OctokitInstance = InstanceType; +export type State = { + clientType: ClientType; + clientId: ClientId; + clientSecret: ClientSecret; + defaultScopes: Scope[]; + allowSignup?: boolean; + baseUrl?: string; + redirectUrl?: string; + log?: typeof console; + Octokit: OAuthAppOctokitClassType; + octokit: OctokitInstance; + eventHandlers: { + [key: string]: EventHandler>[]; + }; +}; +export type EventHandlerContext> = ClientTypeFromOptions extends "oauth-app" ? { + name: EventName; + action: ActionName; + token: Token; + scopes?: Scope[]; + octokit: OctokitTypeFromOptions; + authentication?: OAuthAppUserAuthentication | GitHubAppUserAuthentication | GitHubAppUserAuthenticationWithExpiration; +} : { + name: EventName; + action: ActionName; + token: Token; + octokit: OctokitTypeFromOptions; + authentication?: GitHubAppUserAuthentication | GitHubAppUserAuthenticationWithExpiration; +}; +export type EventHandler> = (context: EventHandlerContext) => void; +export type AddEventHandler> = (eventName: EventAndActionName | EventAndActionName[], eventHandler: EventHandler) => void; +export {}; diff --git a/dist/node_modules/@octokit/oauth-app/dist-types/version.d.ts b/dist/node_modules/@octokit/oauth-app/dist-types/version.d.ts new file mode 100644 index 0000000..b8a0794 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "4.2.4"; diff --git a/dist/node_modules/@octokit/oauth-app/package.json b/dist/node_modules/@octokit/oauth-app/package.json new file mode 100644 index 0000000..9c896a7 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-app/package.json @@ -0,0 +1,56 @@ +{ + "name": "@octokit/oauth-app", + "version": "4.2.4", + "description": "GitHub OAuth toolset for Node.js", + "repository": "github:octokit/oauth-app.js", + "keywords": [ + "github", + "api", + "sdk", + "toolkit" + ], + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "dependencies": { + "@octokit/auth-oauth-app": "^5.0.0", + "@octokit/auth-oauth-user": "^2.0.0", + "@octokit/auth-unauthenticated": "^3.0.0", + "@octokit/core": "^4.0.0", + "@octokit/oauth-authorization-url": "^5.0.0", + "@octokit/oauth-methods": "^2.0.0", + "@types/aws-lambda": "^8.10.83", + "fromentries": "^1.3.1", + "universal-user-agent": "^6.0.0" + }, + "devDependencies": { + "@octokit/tsconfig": "^2.0.0", + "@types/jest": "^29.0.0", + "@types/node": "^18.0.0", + "@types/node-fetch": "^2.5.4", + "esbuild": "^0.18.0", + "express": "^4.17.1", + "fetch-mock": "^9.0.0", + "glob": "^10.2.5", + "jest": "^29.0.0", + "nock": "^13.0.0", + "node-fetch": "^2.6.0", + "prettier": "2.8.8", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "publishConfig": { + "access": "public" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "types": "dist-types/index.d.ts", + "source": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/oauth-authorization-url/LICENSE b/dist/node_modules/@octokit/oauth-authorization-url/LICENSE new file mode 100644 index 0000000..ef2c18e --- /dev/null +++ b/dist/node_modules/@octokit/oauth-authorization-url/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal 
+in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/dist/node_modules/@octokit/oauth-authorization-url/README.md b/dist/node_modules/@octokit/oauth-authorization-url/README.md new file mode 100644 index 0000000..b032aa5 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-authorization-url/README.md @@ -0,0 +1,281 @@ +# oauth-authorization-url.js + +> Universal library to retrieve GitHub’s identity URL for the OAuth web flow + +[![@latest](https://img.shields.io/npm/v/@octokit/oauth-authorization-url.svg)](https://www.npmjs.com/package/@octokit/oauth-authorization-url) +[![Build Status](https://github.com/octokit/oauth-authorization-url.js/workflows/Test/badge.svg)](https://github.com/octokit/oauth-authorization-url.js/actions?query=workflow%3ATest+branch%3Amaster) + +See [GitHub’s Developer Guide for the OAuth App web application flow](https://developer.github.com/apps/building-oauth-apps/authorizing-oauth-apps/#web-application-flow). Note that the [OAuth web application flow for GitHub Apps](https://docs.github.com/en/developers/apps/identifying-and-authorizing-users-for-github-apps#web-application-flow) is slightly different. GitHub Apps do not support scopes for its user access tokens (they are called user-to-server tokens for GitHub Apps), instead they inherit the user permissions from the GitHub App's registration and the repository/organization access and permissions from the respective installation. + + + +- [Usage](#usage) + - [For OAuth Apps](#for-oauth-apps) + - [For GitHub Apps](#for-github-apps) +- [Options](#options) +- [Result](#result) +- [Types](#types) +- [License](#license) + + + +## Usage + + + + + + + + + + +
+**Browsers**
+
+Load `@octokit/oauth-authorization-url` directly from [cdn.skypack.dev](https://cdn.skypack.dev)
+
+```html
+<script type="module">
+  import { oauthAuthorizationUrl } from "https://cdn.skypack.dev/@octokit/oauth-authorization-url";
+</script>
+```
+
+**Node**
+
+Install with `npm install @octokit/oauth-authorization-url`
+
+```js
+const { oauthAuthorizationUrl } = require("@octokit/oauth-authorization-url");
+// or: import { oauthAuthorizationUrl } from "@octokit/oauth-authorization-url";
+```
+
+ +### For OAuth Apps + +```js +const { + url, + clientId, + redirectUrl, + login, + scopes, + state, +} = oauthAuthorizationUrl({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + redirectUrl: "https://example.com", + login: "octocat", + scopes: ["repo", "admin:org"], + state: "secret123", +}); +``` + +### For GitHub Apps + +```js +const { url, clientId, redirectUrl, login, state } = oauthAuthorizationUrl({ + clientType: "github-app", + clientId: "lv1.1234567890abcdef", + redirectUrl: "https://example.com", + login: "octocat", + state: "secret123", +}); +``` + +## Options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | description |
+| --- | --- |
+| `clientId` | **Required.** The client ID you received from GitHub when you registered. |
+| `clientType` | Must be set to either `"oauth-app"` or `"github-app"`. Defaults to `"oauth-app"`. |
+| `redirectUrl` | The URL in your application where users will be sent after authorization. See "Redirect URLs" in GitHub's Developer Guide. |
+| `login` | Suggests a specific account to use for signing in and authorizing the app. |
+| `scopes` | Only relevant when `clientType` is set to `"oauth-app"`. An array of scope names (or a space-delimited list of scopes). If not provided, `scope` defaults to an empty list for users that have not authorized any scopes for the application. For users who have authorized scopes for the application, the user won't be shown the OAuth authorization page with the list of scopes. Instead, this step of the flow will automatically complete with the set of scopes the user has authorized for the application. For example, if a user has already performed the web flow twice and has authorized one token with `user` scope and another token with `repo` scope, a third web flow that does not provide a `scope` will receive a token with `user` and `repo` scope. Defaults to `[]` if `clientType` is set to `"oauth-app"`. |
+| `state` | An unguessable random string. It is used to protect against cross-site request forgery attacks. Defaults to `Math.random().toString(36).substr(2)`. |
+| `allowSignup` | Whether or not unauthenticated users will be offered an option to sign up for GitHub during the OAuth flow. Use `false` when a policy prohibits signups. Defaults to `true`. |
+| `baseUrl` | When using GitHub Enterprise Server, set `baseUrl` to the origin, e.g. `https://github.my-enterprise.com`. |
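
To see how these options combine, here is a minimal sketch (not part of the package README; the client ID, redirect URL, and Enterprise hostname are placeholder values):

```js
const { oauthAuthorizationUrl } = require("@octokit/oauth-authorization-url");

// Placeholder values for illustration only
const { url, scopes, state } = oauthAuthorizationUrl({
  clientType: "oauth-app",
  clientId: "1234567890abcdef1234",
  redirectUrl: "https://example.com/callback",
  scopes: "repo admin:org", // a comma- or space-delimited string is normalized to an array
  allowSignup: false,
  baseUrl: "https://github.my-enterprise.com", // GitHub Enterprise Server origin
});

// `scopes` is ["repo", "admin:org"]; `state` is a random string unless one was passed.
// `url` points at https://github.my-enterprise.com/login/oauth/authorize with
// allow_signup, client_id, redirect_uri, scope, and state query parameters.
```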
+
+## Result
+
+`oauthAuthorizationUrl()` returns an object with the following properties:
+
+| name | description |
+| --- | --- |
+| `allowSignup` | Returns `options.allowSignup` if it was set. Defaults to `true`. |
+| `clientType` | Returns `options.clientType`. Defaults to `"oauth-app"`. |
+| `clientId` | Returns `options.clientId`. |
+| `login` | Returns `options.login` if it was set. Defaults to `null`. |
+| `redirectUrl` | Returns `options.redirectUrl` if it was set. Defaults to `null`. |
+| `scopes` | Only set if `options.clientType` is set to `"oauth-app"`. Returns an array of strings. Returns `options.scopes` if it was set, turning the value into an array if a string was passed; otherwise `[]`. |
+| `state` | Returns `options.state` if it was set. Defaults to `Math.random().toString(36).substr(2)`. |
+| `url` | The authorization URL. |
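
As a usage sketch (assumed application code, not part of the library): the returned `state` is typically persisted so it can be compared with the `state` query parameter GitHub sends back to the redirect URL before the `code` is exchanged for a token:

```js
import { oauthAuthorizationUrl } from "@octokit/oauth-authorization-url";

// Hypothetical in-memory store, for illustration only
const pendingStates = new Set();

const { url, state } = oauthAuthorizationUrl({
  clientId: "1234567890abcdef1234",
  redirectUrl: "https://example.com/callback",
});

pendingStates.add(state);
// Redirect the user to `url`. When GitHub calls back with `?code=...&state=...`,
// accept the request only if the returned `state` is in `pendingStates`, then
// exchange the `code` for a token (for example with @octokit/oauth-methods).
```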
+ +## Types + +```ts +import { + ClientType, + OAuthAppOptions, + OAuthAppResult, + GitHubAppOptions, + GitHubAppResult, +} from "@octokit/oauth-authorization-url"; +``` + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/oauth-authorization-url/dist-node/index.js b/dist/node_modules/@octokit/oauth-authorization-url/dist-node/index.js new file mode 100644 index 0000000..a739ce7 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-authorization-url/dist-node/index.js @@ -0,0 +1,54 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function oauthAuthorizationUrl(options) { + const clientType = options.clientType || "oauth-app"; + const baseUrl = options.baseUrl || "https://github.com"; + const result = { + clientType, + allowSignup: options.allowSignup === false ? false : true, + clientId: options.clientId, + login: options.login || null, + redirectUrl: options.redirectUrl || null, + state: options.state || Math.random().toString(36).substr(2), + url: "" + }; + + if (clientType === "oauth-app") { + const scopes = "scopes" in options ? options.scopes : []; + result.scopes = typeof scopes === "string" ? scopes.split(/[,\s]+/).filter(Boolean) : scopes; + } + + result.url = urlBuilderAuthorize(`${baseUrl}/login/oauth/authorize`, result); + return result; +} + +function urlBuilderAuthorize(base, options) { + const map = { + allowSignup: "allow_signup", + clientId: "client_id", + login: "login", + redirectUrl: "redirect_uri", + scopes: "scope", + state: "state" + }; + let url = base; + Object.keys(map) // Filter out keys that are null and remove the url key + .filter(k => options[k] !== null) // Filter out empty scopes array + .filter(k => { + if (k !== "scopes") return true; + if (options.clientType === "github-app") return false; + return !Array.isArray(options[k]) || options[k].length > 0; + }) // Map Array with the proper URL parameter names and change the value to a string using template strings + // @ts-ignore + .map(key => [map[key], `${options[key]}`]) // Finally, build the URL + .forEach(([key, value], index) => { + url += index === 0 ? `?` : "&"; + url += `${key}=${encodeURIComponent(value)}`; + }); + return url; +} + +exports.oauthAuthorizationUrl = oauthAuthorizationUrl; +//# sourceMappingURL=index.js.map diff --git a/dist/node_modules/@octokit/oauth-authorization-url/dist-node/index.js.map b/dist/node_modules/@octokit/oauth-authorization-url/dist-node/index.js.map new file mode 100644 index 0000000..5072026 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-authorization-url/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/index.js"],"sourcesContent":["export function oauthAuthorizationUrl(options) {\n const clientType = options.clientType || \"oauth-app\";\n const baseUrl = options.baseUrl || \"https://github.com\";\n const result = {\n clientType,\n allowSignup: options.allowSignup === false ? false : true,\n clientId: options.clientId,\n login: options.login || null,\n redirectUrl: options.redirectUrl || null,\n state: options.state || Math.random().toString(36).substr(2),\n url: \"\",\n };\n if (clientType === \"oauth-app\") {\n const scopes = \"scopes\" in options ? options.scopes : [];\n result.scopes =\n typeof scopes === \"string\"\n ? 
scopes.split(/[,\\s]+/).filter(Boolean)\n : scopes;\n }\n result.url = urlBuilderAuthorize(`${baseUrl}/login/oauth/authorize`, result);\n return result;\n}\nfunction urlBuilderAuthorize(base, options) {\n const map = {\n allowSignup: \"allow_signup\",\n clientId: \"client_id\",\n login: \"login\",\n redirectUrl: \"redirect_uri\",\n scopes: \"scope\",\n state: \"state\",\n };\n let url = base;\n Object.keys(map)\n // Filter out keys that are null and remove the url key\n .filter((k) => options[k] !== null)\n // Filter out empty scopes array\n .filter((k) => {\n if (k !== \"scopes\")\n return true;\n if (options.clientType === \"github-app\")\n return false;\n return !Array.isArray(options[k]) || options[k].length > 0;\n })\n // Map Array with the proper URL parameter names and change the value to a string using template strings\n // @ts-ignore\n .map((key) => [map[key], `${options[key]}`])\n // Finally, build the URL\n .forEach(([key, value], index) => {\n url += index === 0 ? `?` : \"&\";\n url += `${key}=${encodeURIComponent(value)}`;\n });\n return url;\n}\n"],"names":["oauthAuthorizationUrl","options","clientType","baseUrl","result","allowSignup","clientId","login","redirectUrl","state","Math","random","toString","substr","url","scopes","split","filter","Boolean","urlBuilderAuthorize","base","map","Object","keys","k","Array","isArray","length","key","forEach","value","index","encodeURIComponent"],"mappings":";;;;AAAO,SAASA,qBAAT,CAA+BC,OAA/B,EAAwC;AAC3C,QAAMC,UAAU,GAAGD,OAAO,CAACC,UAAR,IAAsB,WAAzC;AACA,QAAMC,OAAO,GAAGF,OAAO,CAACE,OAAR,IAAmB,oBAAnC;AACA,QAAMC,MAAM,GAAG;AACXF,IAAAA,UADW;AAEXG,IAAAA,WAAW,EAAEJ,OAAO,CAACI,WAAR,KAAwB,KAAxB,GAAgC,KAAhC,GAAwC,IAF1C;AAGXC,IAAAA,QAAQ,EAAEL,OAAO,CAACK,QAHP;AAIXC,IAAAA,KAAK,EAAEN,OAAO,CAACM,KAAR,IAAiB,IAJb;AAKXC,IAAAA,WAAW,EAAEP,OAAO,CAACO,WAAR,IAAuB,IALzB;AAMXC,IAAAA,KAAK,EAAER,OAAO,CAACQ,KAAR,IAAiBC,IAAI,CAACC,MAAL,GAAcC,QAAd,CAAuB,EAAvB,EAA2BC,MAA3B,CAAkC,CAAlC,CANb;AAOXC,IAAAA,GAAG,EAAE;AAPM,GAAf;;AASA,MAAIZ,UAAU,KAAK,WAAnB,EAAgC;AAC5B,UAAMa,MAAM,GAAG,YAAYd,OAAZ,GAAsBA,OAAO,CAACc,MAA9B,GAAuC,EAAtD;AACAX,IAAAA,MAAM,CAACW,MAAP,GACI,OAAOA,MAAP,KAAkB,QAAlB,GACMA,MAAM,CAACC,KAAP,CAAa,QAAb,EAAuBC,MAAvB,CAA8BC,OAA9B,CADN,GAEMH,MAHV;AAIH;;AACDX,EAAAA,MAAM,CAACU,GAAP,GAAaK,mBAAmB,CAAE,GAAEhB,OAAQ,wBAAZ,EAAqCC,MAArC,CAAhC;AACA,SAAOA,MAAP;AACH;;AACD,SAASe,mBAAT,CAA6BC,IAA7B,EAAmCnB,OAAnC,EAA4C;AACxC,QAAMoB,GAAG,GAAG;AACRhB,IAAAA,WAAW,EAAE,cADL;AAERC,IAAAA,QAAQ,EAAE,WAFF;AAGRC,IAAAA,KAAK,EAAE,OAHC;AAIRC,IAAAA,WAAW,EAAE,cAJL;AAKRO,IAAAA,MAAM,EAAE,OALA;AAMRN,IAAAA,KAAK,EAAE;AANC,GAAZ;AAQA,MAAIK,GAAG,GAAGM,IAAV;AACAE,EAAAA,MAAM,CAACC,IAAP,CAAYF,GAAZ;AAAA,GAEKJ,MAFL,CAEaO,CAAD,IAAOvB,OAAO,CAACuB,CAAD,CAAP,KAAe,IAFlC;AAAA,GAIKP,MAJL,CAIaO,CAAD,IAAO;AACf,QAAIA,CAAC,KAAK,QAAV,EACI,OAAO,IAAP;AACJ,QAAIvB,OAAO,CAACC,UAAR,KAAuB,YAA3B,EACI,OAAO,KAAP;AACJ,WAAO,CAACuB,KAAK,CAACC,OAAN,CAAczB,OAAO,CAACuB,CAAD,CAArB,CAAD,IAA8BvB,OAAO,CAACuB,CAAD,CAAP,CAAWG,MAAX,GAAoB,CAAzD;AACH,GAVD;AAYI;AAZJ,GAaKN,GAbL,CAaUO,GAAD,IAAS,CAACP,GAAG,CAACO,GAAD,CAAJ,EAAY,GAAE3B,OAAO,CAAC2B,GAAD,CAAM,EAA3B,CAblB;AAAA,GAeKC,OAfL,CAea,CAAC,CAACD,GAAD,EAAME,KAAN,CAAD,EAAeC,KAAf,KAAyB;AAClCjB,IAAAA,GAAG,IAAIiB,KAAK,KAAK,CAAV,GAAe,GAAf,GAAoB,GAA3B;AACAjB,IAAAA,GAAG,IAAK,GAAEc,GAAI,IAAGI,kBAAkB,CAACF,KAAD,CAAQ,EAA3C;AACH,GAlBD;AAmBA,SAAOhB,GAAP;AACH;;;;"} \ No newline at end of file diff --git a/dist/node_modules/@octokit/oauth-authorization-url/dist-src/index.js b/dist/node_modules/@octokit/oauth-authorization-url/dist-src/index.js new file mode 100644 index 0000000..3a0be85 --- /dev/null +++ 
b/dist/node_modules/@octokit/oauth-authorization-url/dist-src/index.js @@ -0,0 +1,53 @@ +export function oauthAuthorizationUrl(options) { + const clientType = options.clientType || "oauth-app"; + const baseUrl = options.baseUrl || "https://github.com"; + const result = { + clientType, + allowSignup: options.allowSignup === false ? false : true, + clientId: options.clientId, + login: options.login || null, + redirectUrl: options.redirectUrl || null, + state: options.state || Math.random().toString(36).substr(2), + url: "", + }; + if (clientType === "oauth-app") { + const scopes = "scopes" in options ? options.scopes : []; + result.scopes = + typeof scopes === "string" + ? scopes.split(/[,\s]+/).filter(Boolean) + : scopes; + } + result.url = urlBuilderAuthorize(`${baseUrl}/login/oauth/authorize`, result); + return result; +} +function urlBuilderAuthorize(base, options) { + const map = { + allowSignup: "allow_signup", + clientId: "client_id", + login: "login", + redirectUrl: "redirect_uri", + scopes: "scope", + state: "state", + }; + let url = base; + Object.keys(map) + // Filter out keys that are null and remove the url key + .filter((k) => options[k] !== null) + // Filter out empty scopes array + .filter((k) => { + if (k !== "scopes") + return true; + if (options.clientType === "github-app") + return false; + return !Array.isArray(options[k]) || options[k].length > 0; + }) + // Map Array with the proper URL parameter names and change the value to a string using template strings + // @ts-ignore + .map((key) => [map[key], `${options[key]}`]) + // Finally, build the URL + .forEach(([key, value], index) => { + url += index === 0 ? `?` : "&"; + url += `${key}=${encodeURIComponent(value)}`; + }); + return url; +} diff --git a/dist/node_modules/@octokit/oauth-authorization-url/dist-src/types.js b/dist/node_modules/@octokit/oauth-authorization-url/dist-src/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/dist/node_modules/@octokit/oauth-authorization-url/dist-src/types.js @@ -0,0 +1 @@ +export {}; diff --git a/dist/node_modules/@octokit/oauth-authorization-url/dist-types/index.d.ts b/dist/node_modules/@octokit/oauth-authorization-url/dist-types/index.d.ts new file mode 100644 index 0000000..b4efd4a --- /dev/null +++ b/dist/node_modules/@octokit/oauth-authorization-url/dist-types/index.d.ts @@ -0,0 +1,4 @@ +import { OAuthAppOptions, GitHubAppOptions, OAuthAppResult, GitHubAppResult } from "./types"; +export { ClientType, OAuthAppOptions, GitHubAppOptions, OAuthAppResult, GitHubAppResult, } from "./types"; +export declare function oauthAuthorizationUrl(options: OAuthAppOptions): OAuthAppResult; +export declare function oauthAuthorizationUrl(options: GitHubAppOptions): GitHubAppResult; diff --git a/dist/node_modules/@octokit/oauth-authorization-url/dist-types/types.d.ts b/dist/node_modules/@octokit/oauth-authorization-url/dist-types/types.d.ts new file mode 100644 index 0000000..0dd3ad1 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-authorization-url/dist-types/types.d.ts @@ -0,0 +1,39 @@ +export declare type ClientType = "oauth-app" | "github-app"; +export declare type OAuthAppOptions = { + clientId: string; + clientType?: "oauth-app"; + allowSignup?: boolean; + login?: string; + scopes?: string | string[]; + redirectUrl?: string; + state?: string; + baseUrl?: string; +}; +export declare type GitHubAppOptions = { + clientId: string; + clientType: "github-app"; + allowSignup?: boolean; + login?: string; + redirectUrl?: string; + state?: string; + baseUrl?: string; +}; 
+export declare type OAuthAppResult = { + allowSignup: boolean; + clientId: string; + clientType: "oauth-app"; + login: string | null; + redirectUrl: string | null; + scopes: string[]; + state: string; + url: string; +}; +export declare type GitHubAppResult = { + allowSignup: boolean; + clientId: string; + clientType: "github-app"; + login: string | null; + redirectUrl: string | null; + state: string; + url: string; +}; diff --git a/dist/node_modules/@octokit/oauth-authorization-url/dist-web/index.js b/dist/node_modules/@octokit/oauth-authorization-url/dist-web/index.js new file mode 100644 index 0000000..e778a09 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-authorization-url/dist-web/index.js @@ -0,0 +1,56 @@ +function oauthAuthorizationUrl(options) { + const clientType = options.clientType || "oauth-app"; + const baseUrl = options.baseUrl || "https://github.com"; + const result = { + clientType, + allowSignup: options.allowSignup === false ? false : true, + clientId: options.clientId, + login: options.login || null, + redirectUrl: options.redirectUrl || null, + state: options.state || Math.random().toString(36).substr(2), + url: "", + }; + if (clientType === "oauth-app") { + const scopes = "scopes" in options ? options.scopes : []; + result.scopes = + typeof scopes === "string" + ? scopes.split(/[,\s]+/).filter(Boolean) + : scopes; + } + result.url = urlBuilderAuthorize(`${baseUrl}/login/oauth/authorize`, result); + return result; +} +function urlBuilderAuthorize(base, options) { + const map = { + allowSignup: "allow_signup", + clientId: "client_id", + login: "login", + redirectUrl: "redirect_uri", + scopes: "scope", + state: "state", + }; + let url = base; + Object.keys(map) + // Filter out keys that are null and remove the url key + .filter((k) => options[k] !== null) + // Filter out empty scopes array + .filter((k) => { + if (k !== "scopes") + return true; + if (options.clientType === "github-app") + return false; + return !Array.isArray(options[k]) || options[k].length > 0; + }) + // Map Array with the proper URL parameter names and change the value to a string using template strings + // @ts-ignore + .map((key) => [map[key], `${options[key]}`]) + // Finally, build the URL + .forEach(([key, value], index) => { + url += index === 0 ? `?` : "&"; + url += `${key}=${encodeURIComponent(value)}`; + }); + return url; +} + +export { oauthAuthorizationUrl }; +//# sourceMappingURL=index.js.map diff --git a/dist/node_modules/@octokit/oauth-authorization-url/dist-web/index.js.map b/dist/node_modules/@octokit/oauth-authorization-url/dist-web/index.js.map new file mode 100644 index 0000000..cad5e93 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-authorization-url/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/index.js"],"sourcesContent":["export function oauthAuthorizationUrl(options) {\n const clientType = options.clientType || \"oauth-app\";\n const baseUrl = options.baseUrl || \"https://github.com\";\n const result = {\n clientType,\n allowSignup: options.allowSignup === false ? false : true,\n clientId: options.clientId,\n login: options.login || null,\n redirectUrl: options.redirectUrl || null,\n state: options.state || Math.random().toString(36).substr(2),\n url: \"\",\n };\n if (clientType === \"oauth-app\") {\n const scopes = \"scopes\" in options ? options.scopes : [];\n result.scopes =\n typeof scopes === \"string\"\n ? 
scopes.split(/[,\\s]+/).filter(Boolean)\n : scopes;\n }\n result.url = urlBuilderAuthorize(`${baseUrl}/login/oauth/authorize`, result);\n return result;\n}\nfunction urlBuilderAuthorize(base, options) {\n const map = {\n allowSignup: \"allow_signup\",\n clientId: \"client_id\",\n login: \"login\",\n redirectUrl: \"redirect_uri\",\n scopes: \"scope\",\n state: \"state\",\n };\n let url = base;\n Object.keys(map)\n // Filter out keys that are null and remove the url key\n .filter((k) => options[k] !== null)\n // Filter out empty scopes array\n .filter((k) => {\n if (k !== \"scopes\")\n return true;\n if (options.clientType === \"github-app\")\n return false;\n return !Array.isArray(options[k]) || options[k].length > 0;\n })\n // Map Array with the proper URL parameter names and change the value to a string using template strings\n // @ts-ignore\n .map((key) => [map[key], `${options[key]}`])\n // Finally, build the URL\n .forEach(([key, value], index) => {\n url += index === 0 ? `?` : \"&\";\n url += `${key}=${encodeURIComponent(value)}`;\n });\n return url;\n}\n"],"names":[],"mappings":"AAAO,SAAS,qBAAqB,CAAC,OAAO,EAAE;AAC/C,IAAI,MAAM,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,WAAW,CAAC;AACzD,IAAI,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,oBAAoB,CAAC;AAC5D,IAAI,MAAM,MAAM,GAAG;AACnB,QAAQ,UAAU;AAClB,QAAQ,WAAW,EAAE,OAAO,CAAC,WAAW,KAAK,KAAK,GAAG,KAAK,GAAG,IAAI;AACjE,QAAQ,QAAQ,EAAE,OAAO,CAAC,QAAQ;AAClC,QAAQ,KAAK,EAAE,OAAO,CAAC,KAAK,IAAI,IAAI;AACpC,QAAQ,WAAW,EAAE,OAAO,CAAC,WAAW,IAAI,IAAI;AAChD,QAAQ,KAAK,EAAE,OAAO,CAAC,KAAK,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC;AACpE,QAAQ,GAAG,EAAE,EAAE;AACf,KAAK,CAAC;AACN,IAAI,IAAI,UAAU,KAAK,WAAW,EAAE;AACpC,QAAQ,MAAM,MAAM,GAAG,QAAQ,IAAI,OAAO,GAAG,OAAO,CAAC,MAAM,GAAG,EAAE,CAAC;AACjE,QAAQ,MAAM,CAAC,MAAM;AACrB,YAAY,OAAO,MAAM,KAAK,QAAQ;AACtC,kBAAkB,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC;AACxD,kBAAkB,MAAM,CAAC;AACzB,KAAK;AACL,IAAI,MAAM,CAAC,GAAG,GAAG,mBAAmB,CAAC,CAAC,EAAE,OAAO,CAAC,sBAAsB,CAAC,EAAE,MAAM,CAAC,CAAC;AACjF,IAAI,OAAO,MAAM,CAAC;AAClB,CAAC;AACD,SAAS,mBAAmB,CAAC,IAAI,EAAE,OAAO,EAAE;AAC5C,IAAI,MAAM,GAAG,GAAG;AAChB,QAAQ,WAAW,EAAE,cAAc;AACnC,QAAQ,QAAQ,EAAE,WAAW;AAC7B,QAAQ,KAAK,EAAE,OAAO;AACtB,QAAQ,WAAW,EAAE,cAAc;AACnC,QAAQ,MAAM,EAAE,OAAO;AACvB,QAAQ,KAAK,EAAE,OAAO;AACtB,KAAK,CAAC;AACN,IAAI,IAAI,GAAG,GAAG,IAAI,CAAC;AACnB,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC;AACpB;AACA,SAAS,MAAM,CAAC,CAAC,CAAC,KAAK,OAAO,CAAC,CAAC,CAAC,KAAK,IAAI,CAAC;AAC3C;AACA,SAAS,MAAM,CAAC,CAAC,CAAC,KAAK;AACvB,QAAQ,IAAI,CAAC,KAAK,QAAQ;AAC1B,YAAY,OAAO,IAAI,CAAC;AACxB,QAAQ,IAAI,OAAO,CAAC,UAAU,KAAK,YAAY;AAC/C,YAAY,OAAO,KAAK,CAAC;AACzB,QAAQ,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;AACnE,KAAK,CAAC;AACN;AACA;AACA,SAAS,GAAG,CAAC,CAAC,GAAG,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD;AACA,SAAS,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,KAAK,KAAK;AAC1C,QAAQ,GAAG,IAAI,KAAK,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC;AACvC,QAAQ,GAAG,IAAI,CAAC,EAAE,GAAG,CAAC,CAAC,EAAE,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACrD,KAAK,CAAC,CAAC;AACP,IAAI,OAAO,GAAG,CAAC;AACf;;;;"} \ No newline at end of file diff --git a/dist/node_modules/@octokit/oauth-authorization-url/package.json b/dist/node_modules/@octokit/oauth-authorization-url/package.json new file mode 100644 index 0000000..f303097 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-authorization-url/package.json @@ -0,0 +1,41 @@ +{ + "name": "@octokit/oauth-authorization-url", + "description": "Universal library to retrieve 
GitHub’s identity URL for the OAuth web flow", + "version": "5.0.0", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "pika": true, + "sideEffects": false, + "keywords": [ + "octokit", + "github", + "oauth" + ], + "repository": "github:octokit/oauth-authorization-url.js", + "dependencies": {}, + "devDependencies": { + "@pika/pack": "^0.5.0", + "@pika/plugin-build-node": "^0.9.0", + "@pika/plugin-build-web": "^0.9.0", + "@pika/plugin-ts-standard-pkg": "^0.9.0", + "@types/jest": "^27.0.0", + "jest": "^27.0.0", + "pika-plugin-unpkg-field": "^1.0.1", + "ts-jest": "^27.0.0-next.12", + "typescript": "^4.0.2" + }, + "engines": { + "node": ">= 14" + }, + "publishConfig": { + "access": "public" + }, + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "unpkg": "dist-web/index.js" +} diff --git a/dist/node_modules/@octokit/oauth-methods/LICENSE b/dist/node_modules/@octokit/oauth-methods/LICENSE new file mode 100644 index 0000000..57049d0 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/LICENSE @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2021 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/dist/node_modules/@octokit/oauth-methods/README.md b/dist/node_modules/@octokit/oauth-methods/README.md
new file mode 100644
index 0000000..acaac06
--- /dev/null
+++ b/dist/node_modules/@octokit/oauth-methods/README.md
@@ -0,0 +1,1607 @@
+# oauth-methods.js
+
+> Set of stateless request methods to create, check, reset, refresh, and delete user access tokens for OAuth and GitHub Apps
+
+[![@latest](https://img.shields.io/npm/v/@octokit/oauth-methods.svg)](https://www.npmjs.com/package/@octokit/oauth-methods)
+[![Build Status](https://github.com/octokit/oauth-methods.js/workflows/Test/badge.svg)](https://github.com/octokit/oauth-methods.js/actions?query=workflow%3ATest+branch%3Amain)
+
+- [Usage](#usage)
+  - [OAuth Web Flow](#oauth-web-flow)
+  - [OAuth Device Flow](#oauth-device-flow)
+- [Methods](#methods)
+  - [`getWebFlowAuthorizationUrl()`](#getwebflowauthorizationurl)
+  - [`exchangeWebFlowCode()`](#exchangewebflowcode)
+  - [`createDeviceCode()`](#createdevicecode)
+  - [`exchangeDeviceCode()`](#exchangedevicecode)
+  - [`checkToken()`](#checktoken)
+  - [`refreshToken()`](#refreshtoken)
+  - [`scopeToken()`](#scopetoken)
+  - [`resetToken()`](#resettoken)
+  - [`deleteToken()`](#deletetoken)
+  - [`deleteAuthorization()`](#deleteauthorization)
+- [Authentication object](#authentication-object)
+  - [OAuth APP authentication](#oauth-app-authentication)
+  - [GitHub App with non-expiring user authentication](#github-app-with-non-expiring-user-authentication)
+  - [GitHub App with expiring user authentication](#github-app-with-expiring-user-authentication)
+- [Types](#types)
+- [Contributing](#contributing)
+- [License](#license)
+
+The OAuth endpoints related to user access tokens are not all part of GitHub's REST API and they behave slightly differently. The methods exported by `@octokit/oauth-methods` normalize the differences so you don't have to.
+
+## Usage
+
+**Browsers**
+
+`@octokit/oauth-methods` is not meant for browser usage.
+
+Some of the methods will work, but others do not have CORS headers enabled and will fail (`exchangeWebFlowCode()`, `createDeviceCode()`, `exchangeDeviceCode()`, `refreshToken()`). Also, the Client Secret should not be exposed to clients as it can be used for a [Person-in-the-middle attack](https://en.wikipedia.org/wiki/Person-in-the-middle_attack).
+
+**Node**
+
+Install with `npm install @octokit/core @octokit/oauth-methods`
+
+```js
+const {
+  exchangeWebFlowCode,
+  createDeviceCode,
+  exchangeDeviceCode,
+  checkToken,
+  refreshToken,
+  scopeToken,
+  resetToken,
+  deleteToken,
+  deleteAuthorization,
+} = require("@octokit/oauth-methods");
+```
+
+ +### OAuth Web Flow + +After a user granted access to an OAuth App or GitHub App on [Step 1 of GitHub's OAuth Web Flow](https://docs.github.com/en/developers/apps/identifying-and-authorizing-users-for-github-apps#1-request-a-users-github-identity), they get redirected to a URL controlled by your app with a `?code=...` query parameter. + +You can exchange that code for a user access token as described in [Step 2 of GitHub's OAuth Web Flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#2-users-are-redirected-back-to-your-site-by-github). + +Setting `clientType` is required because there are slight differences between `"oauth-app"` and `"github-app"`. Most importantly, GitHub Apps do not support scopes. + +```js +const { data, authentication } = await exchangeWebFlowCode({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef12347890abcdef12345678", + code: "code123", + scopes: ["repo"], +}); +``` + +`data` is the raw response data. `authentication` is a [User Authentication object](#authentication-object). + +### OAuth Device Flow + +In [step 1 of GitHub's OAuth Device Flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#step-1-app-requests-the-device-and-user-verification-codes-from-github), you need to create a device and user code + +```js +const { + data: { device_code, user_code, verification_uri }, +} = await createDeviceCode({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + scopes: ["repo"], +}); +``` + +In [step 2 of GitHub's OAuth Device Flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#step-2-prompt-the-user-to-enter-the-user-code-in-a-browser), the user has to enter `user_code` on `verification_uri` (https://github.com/login/device unless you use GitHub Enterprise Server). + +Once the user entered the code and granted access, you can exchange the `device_code` for a user access token in [step 3 of GitHub's OAuth Device Flow](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#step-3-app-polls-github-to-check-if-the-user-authorized-the-device) + +```js +const { data, authentication } = await exchangeDeviceCode({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + code: device_code, +}); +``` + +`data` is the raw response data. `authentication` is a [User Authentication object](#authentication-object). + +## Methods + +### `getWebFlowAuthorizationUrl()` + +This is a wrapper around [`@octokit/oauth-authorization-url`](https://github.com/octokit/oauth-authorization-url.js#readme) that accepts a `request` option instead of `baseUrl` for consistency with the other OAuth methods. `getWebFlowAuthorizationUrl()` is a synchronous method and does not send any request. + +```js +const { url } = getWebFlowAuthorizationUrl({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + scopes: ["repo"], +}); +``` + +Options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `clientId` | `string` | **Required.** The client ID you received from GitHub when you registered. |
+| `clientType` | `string` | **Required.** Must be set to either `"oauth-app"` or `"github-app"`. |
+| `redirectUrl` | `string` | The URL in your application where users will be sent after authorization. See Redirect URLs in GitHub's Developer Guide. |
+| `login` | `string` | Suggests a specific account to use for signing in and authorizing the app. |
+| `scopes` | `array of strings` | Only relevant if `clientType` is set to `"oauth-app"`. An array of scope names (or: space-delimited list of scopes). If not provided, scope defaults to an empty list for users that have not authorized any scopes for the application. For users who have authorized scopes for the application, the user won't be shown the OAuth authorization page with the list of scopes. Instead, this step of the flow will automatically complete with the set of scopes the user has authorized for the application. For example, if a user has already performed the web flow twice and has authorized one token with user scope and another token with repo scope, a third web flow that does not provide a scope will receive a token with user and repo scope. Defaults to `[]`. |
+| `state` | `string` | An unguessable random string. It is used to protect against cross-site request forgery attacks. Defaults to `Math.random().toString(36).substr(2)`. |
+| `allowSignup` | `boolean` | Whether or not unauthenticated users will be offered an option to sign up for GitHub during the OAuth flow. Use `false` in the case that a policy prohibits signups. Defaults to `true`. |
+| `request` | `function` | You can pass in your own `@octokit/request` instance. For usage with enterprise, set `baseUrl` to the REST API root endpoint. See the example below. |
+
+```js
+const { request } = require("@octokit/request");
+const { url } = getWebFlowAuthorizationUrl({
+  clientType: "oauth-app",
+  clientId: "1234567890abcdef1234",
+  scopes: ["repo"],
+  request: request.defaults({
+    baseUrl: "https://ghe.my-company.com/api/v3",
+  }),
+});
+```
+
+ +The `getWebFlowAuthorizationUrl` method is synchronous and returns an object with the following properties. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `allowSignup` | `boolean` | Returns `options.allowSignup` if it was set. Defaults to `true`. |
+| `clientType` | `string` | Returns `options.clientType`. |
+| `clientId` | `string` | Returns `options.clientId`. |
+| `login` | `string` | Returns `options.login` if it was set. Defaults to `null`. |
+| `redirectUrl` | `string` | Returns `options.redirectUrl` if it was set. Defaults to `null`. |
+| `scopes` | `array of strings` | Only set if `options.clientType` is set to `"oauth-app"`. Returns an array of strings: `options.scopes` if it was set (turned into an array if a string was passed), otherwise `[]`. |
+| `state` | `string` | Returns `options.state` if it was set. Defaults to `Math.random().toString(36).substr(2)`. |
+| `url` | `string` | The authorization URL. |
+
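+As a rough sketch of how the returned `url` and `state` could be wired into a web-flow handler (Express and `express-session` are assumptions used only for illustration; any web framework works):
+
+```js
+// Minimal sketch, assuming an Express server with express-session.
+const express = require("express");
+const session = require("express-session");
+const {
+  getWebFlowAuthorizationUrl,
+  exchangeWebFlowCode,
+} = require("@octokit/oauth-methods");
+
+const app = express();
+app.use(session({ secret: "keyboard cat", resave: false, saveUninitialized: true }));
+
+app.get("/login", (req, res) => {
+  const { url, state } = getWebFlowAuthorizationUrl({
+    clientType: "oauth-app",
+    clientId: "1234567890abcdef1234",
+    scopes: ["repo"],
+  });
+  // remember `state` so it can be verified when GitHub redirects back
+  req.session.oauthState = state;
+  res.redirect(url);
+});
+
+app.get("/callback", async (req, res) => {
+  if (req.query.state !== req.session.oauthState) {
+    return res.status(400).send("state mismatch");
+  }
+  const { authentication } = await exchangeWebFlowCode({
+    clientType: "oauth-app",
+    clientId: "1234567890abcdef1234",
+    clientSecret: "1234567890abcdef12347890abcdef12345678",
+    code: req.query.code,
+  });
+  // authentication.token is the user access token
+  res.send("authenticated");
+});
+```
+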
+ +### `exchangeWebFlowCode()` + +```js +const { data, authentication } = await exchangeWebFlowCode({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef12347890abcdef12345678", + code: "code123", +}); +``` + +Options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `clientType` | `string` | **Required.** Must be set to either `"oauth-app"` or `"github-app"`. |
+| `clientId` | `string` | **Required.** Your app's client ID. |
+| `clientSecret` | `string` | **Required.** One of your app's client secrets. |
+| `code` | `string` | **Required.** The code from GitHub's OAuth flow redirect's `?code=...` query parameter. |
+| `redirectUrl` | `string` | The `redirectUrl` option you passed to `getWebFlowAuthorizationUrl()`. |
+| `request` | `function` | You can pass in your own `@octokit/request` instance. For usage with enterprise, set `baseUrl` to the REST API root endpoint. See the example below. |
+
+```js
+const { request } = require("@octokit/request");
+const { data, authentication } = await exchangeWebFlowCode({
+  clientType: "oauth-app",
+  clientId: "1234567890abcdef1234",
+  clientSecret: "1234567890abcdef12347890abcdef12345678",
+  code: "code123",
+  request: request.defaults({
+    baseUrl: "https://ghe.my-company.com/api/v3",
+  }),
+});
+```
+
+ +Resolves with an [`@octokit/request` response object](https://github.com/octokit/request.js/#request) for [`POST /login/oauth/access_token`](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#response) (JSON) with an additional `authentication` key which is the [authentication object](#authentication-object). + +### `createDeviceCode()` + +```js +const { data, authentication } = await createDeviceCode({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + scopes: ["repo"], +}); +``` + +Options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `clientType` | `string` | **Required.** Must be set to either `"oauth-app"` or `"github-app"`. |
+| `clientId` | `string` | **Required.** Your app's client ID. |
+| `scopes` | `array of strings` | Only permitted if `clientType` is set to `"oauth-app"`; GitHub Apps do not support scopes. Array of [scope names](https://docs.github.com/en/developers/apps/scopes-for-oauth-apps#available-scopes) you want to request for the user access token. |
+| `request` | `function` | You can pass in your own `@octokit/request` instance. For usage with enterprise, set `baseUrl` to the REST API root endpoint. See the example below. |
+
+```js
+const { request } = require("@octokit/request");
+const { data } = await createDeviceCode({
+  clientType: "oauth-app",
+  clientId: "1234567890abcdef1234",
+  scopes: ["repo"],
+  request: request.defaults({
+    baseUrl: "https://ghe.my-company.com/api/v3",
+  }),
+});
+```
+
+ +Resolves with an [`@octokit/request` response object](https://github.com/octokit/request.js/#request) for [`POST https://github.com/login/device/code`](https://docs.github.com/en/developers/apps/authorizing-oauth-apps#response-1) (JSON). + +### `exchangeDeviceCode()` + +```js +const { data, authentication } = await exchangeDeviceCode({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + code: "code123", +}); +``` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `clientType` | `string` | **Required.** Must be set to either `"oauth-app"` or `"github-app"`. |
+| `clientId` | `string` | **Required.** Your app's client ID. |
+| `code` | `string` | **Required.** The `device_code` from the `createDeviceCode()` response. |
+| `request` | `function` | You can pass in your own `@octokit/request` instance. For usage with enterprise, set `baseUrl` to the REST API root endpoint. See the example below. |
+
+```js
+const { request } = require("@octokit/request");
+const { data, authentication } = await exchangeDeviceCode({
+  clientType: "oauth-app",
+  clientId: "1234567890abcdef1234",
+  code: "code123",
+  request: request.defaults({
+    baseUrl: "https://ghe.my-company.com/api/v3",
+  }),
+});
+```
+
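+Because the user needs time to enter the code, step 3 is usually a polling loop. The following is a minimal sketch (the helper name is made up): it assumes `device_code` and `interval` come from `createDeviceCode()`, and that `exchangeDeviceCode()` rejects with a `RequestError` whose `error.response.data.error` carries GitHub's device-flow error codes (`authorization_pending`, `slow_down`, and so on) until the user has authorized.
+
+```js
+const { exchangeDeviceCode } = require("@octokit/oauth-methods");
+
+// Hypothetical polling helper: waits until the user has entered the code.
+async function waitForDeviceAuthorization({ clientId, deviceCode, interval }) {
+  while (true) {
+    // never poll faster than the interval GitHub returned with the device code
+    await new Promise((resolve) => setTimeout(resolve, interval * 1000));
+    try {
+      const { authentication } = await exchangeDeviceCode({
+        clientType: "oauth-app",
+        clientId,
+        code: deviceCode,
+      });
+      return authentication;
+    } catch (error) {
+      const code = error.response && error.response.data.error;
+      if (code === "authorization_pending") continue; // user has not entered the code yet
+      if (code === "slow_down") {
+        interval += 5; // GitHub asks us to back off by 5 seconds
+        continue;
+      }
+      throw error; // expired_token, access_denied, network errors, etc.
+    }
+  }
+}
+```
+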
+ +### `checkToken()` + +```js +const { data, authentication } = await checkToken({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef12347890abcdef12345678", + token: "usertoken123", +}); +``` + +Options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `clientType` | `string` | **Required.** Must be set to either `"oauth-app"` or `"github-app"`. |
+| `clientId` | `string` | **Required.** Your app's client ID. |
+| `clientSecret` | `string` | **Required.** One of your app's client secrets. |
+| `token` | `string` | **Required.** The user access token to check. |
+| `request` | `function` | You can pass in your own `@octokit/request` instance. For usage with enterprise, set `baseUrl` to the REST API root endpoint. See the example below. |
+
+```js
+const { request } = require("@octokit/request");
+const { data, authentication } = await checkToken({
+  clientType: "oauth-app",
+  clientId: "1234567890abcdef1234",
+  clientSecret: "1234567890abcdef12347890abcdef12345678",
+  token: "usertoken123",
+  request: request.defaults({
+    baseUrl: "https://ghe.my-company.com/api/v3",
+  }),
+});
+```
+
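+One practical detail, sketched below as an assumption based on GitHub's REST behavior rather than spelled out here: when the token is unknown or has been revoked, the underlying endpoint responds with `404`, so `checkToken()` rejects with a `RequestError`. The helper name is hypothetical.
+
+```js
+const { checkToken } = require("@octokit/oauth-methods");
+
+// Minimal sketch: treat a 404 from the check-token endpoint as "invalid token".
+async function isTokenValid(options) {
+  try {
+    await checkToken(options);
+    return true;
+  } catch (error) {
+    if (error.status === 404) return false; // token is invalid or was revoked
+    throw error; // anything else (bad app credentials, network errors) is unexpected
+  }
+}
+```
+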
+ +Resolves with an [`@octokit/request` response object](https://github.com/octokit/request.js/#request) for [`POST /applications/{client_id}/token`](https://docs.github.com/en/rest/reference/apps#check-a-token) with an additional `authentication` key which is the [authentication object](#authentication-object). Note that the `authentication` object will not include the keys for expiring authentication. + +### `refreshToken()` + +Expiring user access tokens are currently in preview. You can [enable them for any of your GitHub apps](https://docs.github.com/en/developers/apps/refreshing-user-to-server-access-tokens#configuring-expiring-user-tokens-for-an-existing-github-app). OAuth Apps do not support expiring user access tokens + +When a user access token expires it can be [refreshed using a refresh token](https://docs.github.com/en/developers/apps/refreshing-user-to-server-access-tokens). Refreshing a token invalidates the current user access token. + +```js +const { data, authentication } = await refreshToken({ + clientType: "github-app", + clientId: "lv1.1234567890abcdef", + clientSecret: "1234567890abcdef12347890abcdef12345678", + refreshToken: "r1.refreshtoken123", +}); +``` + +Options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `clientType` | `string` | Must be set to `"github-app"`. |
+| `clientId` | `string` | **Required.** Your app's client ID. |
+| `clientSecret` | `string` | **Required.** One of your app's client secrets. |
+| `refreshToken` | `string` | **Required.** The refresh token that was received alongside the user access token. |
+| `request` | `function` | You can pass in your own `@octokit/request` instance. For usage with enterprise, set `baseUrl` to the REST API root endpoint. See the example below. |
+
+```js
+const { request } = require("@octokit/request");
+const { data, authentication } = await refreshToken({
+  clientType: "github-app",
+  clientId: "lv1.1234567890abcdef",
+  clientSecret: "1234567890abcdef12347890abcdef12345678",
+  refreshToken: "r1.refreshtoken123",
+  request: request.defaults({
+    baseUrl: "https://ghe.my-company.com/api/v3",
+  }),
+});
+```
+
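+A common pattern is to refresh proactively shortly before `expiresAt`. The following is a minimal sketch, assuming `stored` is a persisted authentication object from a GitHub App with token expiration enabled; the helper name and the 5-minute margin are arbitrary choices for illustration.
+
+```js
+const { refreshToken } = require("@octokit/oauth-methods");
+
+// Hypothetical refresh-before-expiry helper.
+async function getFreshToken(stored, { clientId, clientSecret }) {
+  const expiresSoon =
+    new Date(stored.expiresAt).getTime() - Date.now() < 5 * 60 * 1000;
+  if (!expiresSoon) return stored;
+
+  const { authentication } = await refreshToken({
+    clientType: "github-app",
+    clientId,
+    clientSecret,
+    refreshToken: stored.refreshToken,
+  });
+  // Refreshing invalidates the previous token, so persist the new
+  // token / refreshToken / expiresAt values before using them.
+  return authentication;
+}
+```
+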
+ +Resolves with an [`@octokit/request` response object](https://github.com/octokit/request.js/#request) for [`POST /login/oauth/access_token`](https://docs.github.com/en/developers/apps/refreshing-user-to-server-access-tokens#response) with an additional `authentication` key which is the [GitHub App expiring user authentication](#github-app-with-expiring-user-authentication). + +### `scopeToken()` + +```js +const { data, authentication } = await scopeToken({ + clientType: "github-app", + clientId: "lv1.1234567890abcdef", + clientSecret: "1234567890abcdef12347890abcdef12345678", + token: "usertoken123", + target: "octokit", + repositories: ["oauth-methods.js"], + permissions: { + issues: "write", + }, +}); +``` + +Options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `clientType` | `string` | **Required.** Must be set to `"github-app"`. |
+| `clientId` | `string` | **Required.** Your app's client ID. |
+| `clientSecret` | `string` | **Required.** One of your app's client secrets. |
+| `target` | `string` | **Required** unless `targetId` is set. The name of the user or organization to scope the user-to-server access token to. |
+| `targetId` | `integer` | **Required** unless `target` is set. The ID of the user or organization to scope the user-to-server access token to. |
+| `repositories` | `array of strings` | The list of repository names to scope the user-to-server access token to. `repositories` may not be specified if `repository_ids` is specified. |
+| `repository_ids` | `array of integers` | The list of repository IDs to scope the user-to-server access token to. `repository_ids` may not be specified if `repositories` is specified. |
+| `permissions` | `object` | The permissions granted to the user-to-server access token. See GitHub App Permissions. |
+| `request` | `function` | You can pass in your own `@octokit/request` instance. For usage with enterprise, set `baseUrl` to the REST API root endpoint. See the example below. |
+
+```js
+const { request } = require("@octokit/request");
+const { data, authentication } = await scopeToken({
+  clientType: "github-app",
+  clientId: "lv1.1234567890abcdef",
+  token: "usertoken123",
+  target: "octokit",
+  repositories: ["oauth-methods.js"],
+  permissions: {
+    issues: "write",
+  },
+  request: request.defaults({
+    baseUrl: "https://ghe.my-company.com/api/v3",
+  }),
+});
+```
+
+ +Resolves with an [`@octokit/request` response object](https://github.com/octokit/request.js/#request) for [`POST /applications/{client_id}/token/scoped`](https://docs.github.com/en/rest/reference/apps#create-a-scoped-access-token) with an additional `authentication` key which is the new [authentication object](#authentication-object). + +### `resetToken()` + +```js +const { data, authentication } = await resetToken({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef12347890abcdef12345678", + token: "usertoken123", +}); +``` + +Options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `clientType` | `string` | Must be set to `"oauth-app"` or `"github-app"`. |
+| `clientId` | `string` | **Required.** Your app's client ID. |
+| `clientSecret` | `string` | **Required.** One of your app's client secrets. |
+| `token` | `string` | **Required.** The user access token to reset. |
+| `request` | `function` | You can pass in your own `@octokit/request` instance. For usage with enterprise, set `baseUrl` to the REST API root endpoint. See the example below. |
+
+```js
+const { request } = require("@octokit/request");
+const { data, authentication } = await resetToken({
+  clientId: "1234567890abcdef1234",
+  clientSecret: "secret",
+  token: "usertoken123",
+  request: request.defaults({
+    baseUrl: "https://ghe.my-company.com/api/v3",
+  }),
+});
+```
+
+ +Resolves with an [`@octokit/request` response object](https://github.com/octokit/request.js/#request) for [`POST /applications/{client_id}/token`](https://docs.github.com/en/rest/reference/apps#reset-a-token) with an additional `authentication` key which is the new [authentication object](#authentication-object). + +### `deleteToken()` + +```js +const { status } = await deleteToken({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef12347890abcdef12345678", + token: "usertoken123", +}); +``` + +Options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `clientType` | `string` | Must be set to `"oauth-app"` or `"github-app"`. |
+| `clientId` | `string` | **Required.** Your app's client ID. |
+| `clientSecret` | `string` | **Required.** One of your app's client secrets. |
+| `token` | `string` | **Required.** The user access token to delete. |
+| `request` | `function` | You can pass in your own `@octokit/request` instance. For usage with enterprise, set `baseUrl` to the REST API root endpoint. See the example below. |
+
+```js
+const { request } = require("@octokit/request");
+const { data, authentication } = await deleteToken({
+  clientId: "1234567890abcdef1234",
+  clientSecret: "secret",
+  token: "usertoken123",
+  request: request.defaults({
+    baseUrl: "https://ghe.my-company.com/api/v3",
+  }),
+});
+```
+
+ +Resolves with an [`@octokit/request` response object](https://github.com/octokit/request.js/#request) for [`DELETE /applications/{client_id}/token`](https://docs.github.com/en/rest/reference/apps#delete-an-app-token) (which is an empty `204` response). + +### `deleteAuthorization()` + +```js +const { status } = await deleteAuthorization({ + clientType: "oauth-app", + clientId: "1234567890abcdef1234", + clientSecret: "1234567890abcdef12347890abcdef12345678", + token: "usertoken123", +}); +``` + +Options + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `clientType` | `string` | Must be set to `"oauth-app"` or `"github-app"`. |
+| `clientId` | `string` | **Required.** Your app's client ID. |
+| `clientSecret` | `string` | **Required.** One of your app's client secrets. |
+| `token` | `string` | **Required.** A valid user access token for the authorization. |
+| `request` | `function` | You can pass in your own `@octokit/request` instance. For usage with enterprise, set `baseUrl` to the REST API root endpoint. See the example below. |
+
+```js
+const { request } = require("@octokit/request");
+const { data, authentication } = await deleteAuthorization({
+  clientId: "1234567890abcdef1234",
+  clientSecret: "secret",
+  token: "usertoken123",
+  request: request.defaults({
+    baseUrl: "https://ghe.my-company.com/api/v3",
+  }),
+});
+```
+
+
+Resolves with an [`@octokit/request` response object](https://github.com/octokit/request.js/#request) for [`DELETE /applications/{client_id}/grant`](https://docs.github.com/en/rest/reference/apps#delete-an-app-authorization) (which is an empty `204` response).
+
+## Authentication object
+
+The `authentication` object returned by the methods has one of three formats:
+
+1. [OAuth APP authentication token](#oauth-app-authentication)
+1. [GitHub APP non-expiring user authentication token](#github-app-with-non-expiring-user-authentication)
+1. [GitHub APP user authentication token with expiring enabled](#github-app-with-expiring-user-authentication)
+
+The differences are
+
+1. `scopes` is only present for OAuth Apps
+2. `refreshToken`, `expiresAt`, `refreshTokenExpiresAt` are only present for GitHub Apps, and only if token expiration is enabled
+
+Note that the `clientSecret` may not be set when using [`exchangeDeviceCode()`](#exchangedevicecode) as `clientSecret` is not required for the OAuth device flow. A sketch of how to tell the three formats apart follows the tables below.
+
+### OAuth APP authentication
+
+| name | type | description |
+| --- | --- | --- |
+| `clientType` | `string` | `"oauth-app"` |
+| `clientId` | `string` | The app's Client ID |
+| `token` | `string` | The user access token |
+| `scopes` | `array of strings` | Array of scope names enabled for the token |
+
+ +### GitHub App with non-expiring user authentication + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `clientType` | `string` | `"github-app"` |
+| `clientId` | `string` | The app's Client ID |
+| `token` | `string` | The user access token |
+
+ +### GitHub App with expiring user authentication + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `clientType` | `string` | `"github-app"` |
+| `clientId` | `string` | The app's Client ID |
+| `token` | `string` | The user access token |
+| `refreshToken` | `string` | The refresh token |
+| `expiresAt` | `string` | Date timestamp in ISO 8601 standard. Example: `2022-01-01T08:00:00.000Z` |
+| `refreshTokenExpiresAt` | `string` | Date timestamp in ISO 8601 standard. Example: `2021-07-01T00:00:00.000Z` |
+
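+Since all three formats share `clientType`, `clientId`, and `token`, a minimal sketch of telling them apart (the function name is made up; the checks simply mirror the tables above) could look like this:
+
+```js
+// Branch on the shape of an `authentication` object returned by the methods above.
+function describeAuthentication(authentication) {
+  if (authentication.clientType === "oauth-app") {
+    // OAuth App: has `scopes`, token does not expire
+    return `OAuth App token with scopes: ${authentication.scopes.join(", ") || "(none)"}`;
+  }
+  if ("refreshToken" in authentication) {
+    // GitHub App with token expiration enabled
+    return `GitHub App token, expires at ${authentication.expiresAt}`;
+  }
+  // GitHub App with token expiration disabled
+  return "GitHub App token (non-expiring)";
+}
+```
+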
+ +## Types + +```ts +import { + OAuthAppAuthentication, + GitHubAppAuthentication, + GitHubAppAuthenticationWithExpiration, + GetWebFlowAuthorizationUrlOAuthAppOptions, + GetWebFlowAuthorizationUrlGitHubAppOptions, + GetWebFlowAuthorizationUrlOAuthAppResult, + GetWebFlowAuthorizationUrlGitHubAppResult, + CheckTokenOAuthAppOptions, + CheckTokenGitHubAppOptions, + CheckTokenOAuthAppResponse, + CheckTokenGitHubAppResponse, + ExchangeWebFlowCodeOAuthAppOptions, + ExchangeWebFlowCodeGitHubAppOptions, + ExchangeWebFlowCodeOAuthAppResponse, + ExchangeWebFlowCodeGitHubAppResponse, + CreateDeviceCodeOAuthAppOptions, + CreateDeviceCodeGitHubAppOptions, + CreateDeviceCodeDeviceTokenResponse, + ExchangeDeviceCodeOAuthAppOptionsWithoutClientSecret, + ExchangeDeviceCodeOAuthAppOptions, + ExchangeDeviceCodeGitHubAppOptionsWithoutClientSecret, + ExchangeDeviceCodeGitHubAppOptions, + ExchangeDeviceCodeOAuthAppResponse, + ExchangeDeviceCodeOAuthAppResponseWithoutClientSecret, + ExchangeDeviceCodeGitHubAppResponse, + ExchangeDeviceCodeGitHubAppResponseWithoutClientSecret, + RefreshTokenOptions, + RefreshTokenResponse, + ScopeTokenOptions, + ScopeTokenResponse, + ResetTokenOAuthAppOptions, + ResetTokenGitHubAppOptions, + ResetTokenOAuthAppResponse, + ResetTokenGitHubAppResponse, + DeleteTokenOAuthAppOptions, + DeleteTokenGitHubAppOptions, + DeleteTokenResponse, + DeleteAuthorizationOAuthAppOptions, + DeleteAuthorizationGitHubAppOptions, + DeleteAuthorizationResponse, +} from "@octokit/oauth-methods"; +``` + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/oauth-methods/dist-node/index.js b/dist/node_modules/@octokit/oauth-methods/dist-node/index.js new file mode 100644 index 0000000..5e8f578 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-node/index.js @@ -0,0 +1,379 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + VERSION: () => VERSION, + checkToken: () => checkToken, + createDeviceCode: () => createDeviceCode, + deleteAuthorization: () => deleteAuthorization, + deleteToken: () => deleteToken, + exchangeDeviceCode: () => exchangeDeviceCode, + exchangeWebFlowCode: () => exchangeWebFlowCode, + getWebFlowAuthorizationUrl: () => getWebFlowAuthorizationUrl, + refreshToken: () => refreshToken, + resetToken: () => resetToken, + scopeToken: () => scopeToken +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/version.js +var VERSION = "2.0.6"; + +// pkg/dist-src/get-web-flow-authorization-url.js +var import_oauth_authorization_url = require("@octokit/oauth-authorization-url"); +var import_request = require("@octokit/request"); + +// pkg/dist-src/utils.js +var import_request_error = require("@octokit/request-error"); +function requestToOAuthBaseUrl(request) { + const endpointDefaults = request.endpoint.DEFAULTS; + return /^https:\/\/(api\.)?github\.com$/.test(endpointDefaults.baseUrl) ? "https://github.com" : endpointDefaults.baseUrl.replace("/api/v3", ""); +} +async function oauthRequest(request, route, parameters) { + const withOAuthParameters = { + baseUrl: requestToOAuthBaseUrl(request), + headers: { + accept: "application/json" + }, + ...parameters + }; + const response = await request(route, withOAuthParameters); + if ("error" in response.data) { + const error = new import_request_error.RequestError( + `${response.data.error_description} (${response.data.error}, ${response.data.error_uri})`, + 400, + { + request: request.endpoint.merge( + route, + withOAuthParameters + ), + headers: response.headers + } + ); + error.response = response; + throw error; + } + return response; +} + +// pkg/dist-src/get-web-flow-authorization-url.js +function getWebFlowAuthorizationUrl({ + request = import_request.request, + ...options +}) { + const baseUrl = requestToOAuthBaseUrl(request); + return (0, import_oauth_authorization_url.oauthAuthorizationUrl)({ + ...options, + baseUrl + }); +} + +// pkg/dist-src/exchange-web-flow-code.js +var import_request2 = require("@octokit/request"); +async function exchangeWebFlowCode(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + import_request2.request; + const response = await oauthRequest( + request, + "POST /login/oauth/access_token", + { + client_id: options.clientId, + client_secret: options.clientSecret, + code: options.code, + redirect_uri: options.redirectUrl + } + ); + const authentication = { + clientType: options.clientType, + clientId: options.clientId, + clientSecret: options.clientSecret, + token: response.data.access_token, + scopes: response.data.scope.split(/\s+/).filter(Boolean) + }; + if (options.clientType === "github-app") { + if ("refresh_token" in response.data) { + const apiTimeInMs = new Date(response.headers.date).getTime(); + authentication.refreshToken = response.data.refresh_token, authentication.expiresAt = toTimestamp( + apiTimeInMs, + response.data.expires_in + ), authentication.refreshTokenExpiresAt = toTimestamp( + apiTimeInMs, + response.data.refresh_token_expires_in + ); + } + delete authentication.scopes; + } + return { ...response, authentication }; +} +function toTimestamp(apiTimeInMs, 
expirationInSeconds) { + return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString(); +} + +// pkg/dist-src/create-device-code.js +var import_request3 = require("@octokit/request"); +async function createDeviceCode(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + import_request3.request; + const parameters = { + client_id: options.clientId + }; + if ("scopes" in options && Array.isArray(options.scopes)) { + parameters.scope = options.scopes.join(" "); + } + return oauthRequest(request, "POST /login/device/code", parameters); +} + +// pkg/dist-src/exchange-device-code.js +var import_request4 = require("@octokit/request"); +async function exchangeDeviceCode(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + import_request4.request; + const response = await oauthRequest( + request, + "POST /login/oauth/access_token", + { + client_id: options.clientId, + device_code: options.code, + grant_type: "urn:ietf:params:oauth:grant-type:device_code" + } + ); + const authentication = { + clientType: options.clientType, + clientId: options.clientId, + token: response.data.access_token, + scopes: response.data.scope.split(/\s+/).filter(Boolean) + }; + if ("clientSecret" in options) { + authentication.clientSecret = options.clientSecret; + } + if (options.clientType === "github-app") { + if ("refresh_token" in response.data) { + const apiTimeInMs = new Date(response.headers.date).getTime(); + authentication.refreshToken = response.data.refresh_token, authentication.expiresAt = toTimestamp2( + apiTimeInMs, + response.data.expires_in + ), authentication.refreshTokenExpiresAt = toTimestamp2( + apiTimeInMs, + response.data.refresh_token_expires_in + ); + } + delete authentication.scopes; + } + return { ...response, authentication }; +} +function toTimestamp2(apiTimeInMs, expirationInSeconds) { + return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString(); +} + +// pkg/dist-src/check-token.js +var import_request5 = require("@octokit/request"); +var import_btoa_lite = __toESM(require("btoa-lite")); +async function checkToken(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + import_request5.request; + const response = await request("POST /applications/{client_id}/token", { + headers: { + authorization: `basic ${(0, import_btoa_lite.default)( + `${options.clientId}:${options.clientSecret}` + )}` + }, + client_id: options.clientId, + access_token: options.token + }); + const authentication = { + clientType: options.clientType, + clientId: options.clientId, + clientSecret: options.clientSecret, + token: options.token, + scopes: response.data.scopes + }; + if (response.data.expires_at) + authentication.expiresAt = response.data.expires_at; + if (options.clientType === "github-app") { + delete authentication.scopes; + } + return { ...response, authentication }; +} + +// pkg/dist-src/refresh-token.js +var import_request6 = require("@octokit/request"); +async function refreshToken(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + import_request6.request; + const response = await oauthRequest( + request, + "POST /login/oauth/access_token", + { + client_id: options.clientId, + client_secret: options.clientSecret, + grant_type: "refresh_token", + refresh_token: options.refreshToken + } + ); + const apiTimeInMs = new 
Date(response.headers.date).getTime(); + const authentication = { + clientType: "github-app", + clientId: options.clientId, + clientSecret: options.clientSecret, + token: response.data.access_token, + refreshToken: response.data.refresh_token, + expiresAt: toTimestamp3(apiTimeInMs, response.data.expires_in), + refreshTokenExpiresAt: toTimestamp3( + apiTimeInMs, + response.data.refresh_token_expires_in + ) + }; + return { ...response, authentication }; +} +function toTimestamp3(apiTimeInMs, expirationInSeconds) { + return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString(); +} + +// pkg/dist-src/scope-token.js +var import_request7 = require("@octokit/request"); +var import_btoa_lite2 = __toESM(require("btoa-lite")); +async function scopeToken(options) { + const { + request: optionsRequest, + clientType, + clientId, + clientSecret, + token, + ...requestOptions + } = options; + const request = optionsRequest || /* istanbul ignore next: we always pass a custom request in tests */ + import_request7.request; + const response = await request( + "POST /applications/{client_id}/token/scoped", + { + headers: { + authorization: `basic ${(0, import_btoa_lite2.default)(`${clientId}:${clientSecret}`)}` + }, + client_id: clientId, + access_token: token, + ...requestOptions + } + ); + const authentication = Object.assign( + { + clientType, + clientId, + clientSecret, + token: response.data.token + }, + response.data.expires_at ? { expiresAt: response.data.expires_at } : {} + ); + return { ...response, authentication }; +} + +// pkg/dist-src/reset-token.js +var import_request8 = require("@octokit/request"); +var import_btoa_lite3 = __toESM(require("btoa-lite")); +async function resetToken(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + import_request8.request; + const auth = (0, import_btoa_lite3.default)(`${options.clientId}:${options.clientSecret}`); + const response = await request( + "PATCH /applications/{client_id}/token", + { + headers: { + authorization: `basic ${auth}` + }, + client_id: options.clientId, + access_token: options.token + } + ); + const authentication = { + clientType: options.clientType, + clientId: options.clientId, + clientSecret: options.clientSecret, + token: response.data.token, + scopes: response.data.scopes + }; + if (response.data.expires_at) + authentication.expiresAt = response.data.expires_at; + if (options.clientType === "github-app") { + delete authentication.scopes; + } + return { ...response, authentication }; +} + +// pkg/dist-src/delete-token.js +var import_request9 = require("@octokit/request"); +var import_btoa_lite4 = __toESM(require("btoa-lite")); +async function deleteToken(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + import_request9.request; + const auth = (0, import_btoa_lite4.default)(`${options.clientId}:${options.clientSecret}`); + return request( + "DELETE /applications/{client_id}/token", + { + headers: { + authorization: `basic ${auth}` + }, + client_id: options.clientId, + access_token: options.token + } + ); +} + +// pkg/dist-src/delete-authorization.js +var import_request10 = require("@octokit/request"); +var import_btoa_lite5 = __toESM(require("btoa-lite")); +async function deleteAuthorization(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + import_request10.request; + const auth = (0, 
import_btoa_lite5.default)(`${options.clientId}:${options.clientSecret}`); + return request( + "DELETE /applications/{client_id}/grant", + { + headers: { + authorization: `basic ${auth}` + }, + client_id: options.clientId, + access_token: options.token + } + ); +} +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + VERSION, + checkToken, + createDeviceCode, + deleteAuthorization, + deleteToken, + exchangeDeviceCode, + exchangeWebFlowCode, + getWebFlowAuthorizationUrl, + refreshToken, + resetToken, + scopeToken +}); diff --git a/dist/node_modules/@octokit/oauth-methods/dist-node/index.js.map b/dist/node_modules/@octokit/oauth-methods/dist-node/index.js.map new file mode 100644 index 0000000..308aa0a --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js", "../dist-src/get-web-flow-authorization-url.js", "../dist-src/utils.js", "../dist-src/exchange-web-flow-code.js", "../dist-src/create-device-code.js", "../dist-src/exchange-device-code.js", "../dist-src/check-token.js", "../dist-src/refresh-token.js", "../dist-src/scope-token.js", "../dist-src/reset-token.js", "../dist-src/delete-token.js", "../dist-src/delete-authorization.js"], + "sourcesContent": ["import { VERSION } from \"./version\";\nexport * from \"./get-web-flow-authorization-url\";\nexport * from \"./exchange-web-flow-code\";\nexport * from \"./create-device-code\";\nexport * from \"./exchange-device-code\";\nexport * from \"./check-token\";\nexport * from \"./refresh-token\";\nexport * from \"./scope-token\";\nexport * from \"./reset-token\";\nexport * from \"./delete-token\";\nexport * from \"./delete-authorization\";\nexport {\n VERSION\n};\n", "const VERSION = \"2.0.6\";\nexport {\n VERSION\n};\n", "import { oauthAuthorizationUrl } from \"@octokit/oauth-authorization-url\";\nimport { request as defaultRequest } from \"@octokit/request\";\nimport { requestToOAuthBaseUrl } from \"./utils\";\nfunction getWebFlowAuthorizationUrl({\n request = defaultRequest,\n ...options\n}) {\n const baseUrl = requestToOAuthBaseUrl(request);\n return oauthAuthorizationUrl({\n ...options,\n baseUrl\n });\n}\nexport {\n getWebFlowAuthorizationUrl\n};\n", "import { RequestError } from \"@octokit/request-error\";\nfunction requestToOAuthBaseUrl(request) {\n const endpointDefaults = request.endpoint.DEFAULTS;\n return /^https:\\/\\/(api\\.)?github\\.com$/.test(endpointDefaults.baseUrl) ? 
\"https://github.com\" : endpointDefaults.baseUrl.replace(\"/api/v3\", \"\");\n}\nasync function oauthRequest(request, route, parameters) {\n const withOAuthParameters = {\n baseUrl: requestToOAuthBaseUrl(request),\n headers: {\n accept: \"application/json\"\n },\n ...parameters\n };\n const response = await request(route, withOAuthParameters);\n if (\"error\" in response.data) {\n const error = new RequestError(\n `${response.data.error_description} (${response.data.error}, ${response.data.error_uri})`,\n 400,\n {\n request: request.endpoint.merge(\n route,\n withOAuthParameters\n ),\n headers: response.headers\n }\n );\n error.response = response;\n throw error;\n }\n return response;\n}\nexport {\n oauthRequest,\n requestToOAuthBaseUrl\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport { oauthRequest } from \"./utils\";\nasync function exchangeWebFlowCode(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const response = await oauthRequest(\n request,\n \"POST /login/oauth/access_token\",\n {\n client_id: options.clientId,\n client_secret: options.clientSecret,\n code: options.code,\n redirect_uri: options.redirectUrl\n }\n );\n const authentication = {\n clientType: options.clientType,\n clientId: options.clientId,\n clientSecret: options.clientSecret,\n token: response.data.access_token,\n scopes: response.data.scope.split(/\\s+/).filter(Boolean)\n };\n if (options.clientType === \"github-app\") {\n if (\"refresh_token\" in response.data) {\n const apiTimeInMs = new Date(response.headers.date).getTime();\n authentication.refreshToken = response.data.refresh_token, authentication.expiresAt = toTimestamp(\n apiTimeInMs,\n response.data.expires_in\n ), authentication.refreshTokenExpiresAt = toTimestamp(\n apiTimeInMs,\n response.data.refresh_token_expires_in\n );\n }\n delete authentication.scopes;\n }\n return { ...response, authentication };\n}\nfunction toTimestamp(apiTimeInMs, expirationInSeconds) {\n return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString();\n}\nexport {\n exchangeWebFlowCode\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport { oauthRequest } from \"./utils\";\nasync function createDeviceCode(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const parameters = {\n client_id: options.clientId\n };\n if (\"scopes\" in options && Array.isArray(options.scopes)) {\n parameters.scope = options.scopes.join(\" \");\n }\n return oauthRequest(request, \"POST /login/device/code\", parameters);\n}\nexport {\n createDeviceCode\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport { oauthRequest } from \"./utils\";\nasync function exchangeDeviceCode(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const response = await oauthRequest(\n request,\n \"POST /login/oauth/access_token\",\n {\n client_id: options.clientId,\n device_code: options.code,\n grant_type: \"urn:ietf:params:oauth:grant-type:device_code\"\n }\n );\n const authentication = {\n clientType: options.clientType,\n clientId: options.clientId,\n token: response.data.access_token,\n scopes: response.data.scope.split(/\\s+/).filter(Boolean)\n };\n if (\"clientSecret\" in options) {\n authentication.clientSecret = options.clientSecret;\n }\n if (options.clientType 
=== \"github-app\") {\n if (\"refresh_token\" in response.data) {\n const apiTimeInMs = new Date(response.headers.date).getTime();\n authentication.refreshToken = response.data.refresh_token, authentication.expiresAt = toTimestamp(\n apiTimeInMs,\n response.data.expires_in\n ), authentication.refreshTokenExpiresAt = toTimestamp(\n apiTimeInMs,\n response.data.refresh_token_expires_in\n );\n }\n delete authentication.scopes;\n }\n return { ...response, authentication };\n}\nfunction toTimestamp(apiTimeInMs, expirationInSeconds) {\n return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString();\n}\nexport {\n exchangeDeviceCode\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport btoa from \"btoa-lite\";\nasync function checkToken(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const response = await request(\"POST /applications/{client_id}/token\", {\n headers: {\n authorization: `basic ${btoa(\n `${options.clientId}:${options.clientSecret}`\n )}`\n },\n client_id: options.clientId,\n access_token: options.token\n });\n const authentication = {\n clientType: options.clientType,\n clientId: options.clientId,\n clientSecret: options.clientSecret,\n token: options.token,\n scopes: response.data.scopes\n };\n if (response.data.expires_at)\n authentication.expiresAt = response.data.expires_at;\n if (options.clientType === \"github-app\") {\n delete authentication.scopes;\n }\n return { ...response, authentication };\n}\nexport {\n checkToken\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport { oauthRequest } from \"./utils\";\nasync function refreshToken(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const response = await oauthRequest(\n request,\n \"POST /login/oauth/access_token\",\n {\n client_id: options.clientId,\n client_secret: options.clientSecret,\n grant_type: \"refresh_token\",\n refresh_token: options.refreshToken\n }\n );\n const apiTimeInMs = new Date(response.headers.date).getTime();\n const authentication = {\n clientType: \"github-app\",\n clientId: options.clientId,\n clientSecret: options.clientSecret,\n token: response.data.access_token,\n refreshToken: response.data.refresh_token,\n expiresAt: toTimestamp(apiTimeInMs, response.data.expires_in),\n refreshTokenExpiresAt: toTimestamp(\n apiTimeInMs,\n response.data.refresh_token_expires_in\n )\n };\n return { ...response, authentication };\n}\nfunction toTimestamp(apiTimeInMs, expirationInSeconds) {\n return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString();\n}\nexport {\n refreshToken\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport btoa from \"btoa-lite\";\nasync function scopeToken(options) {\n const {\n request: optionsRequest,\n clientType,\n clientId,\n clientSecret,\n token,\n ...requestOptions\n } = options;\n const request = optionsRequest || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const response = await request(\n \"POST /applications/{client_id}/token/scoped\",\n {\n headers: {\n authorization: `basic ${btoa(`${clientId}:${clientSecret}`)}`\n },\n client_id: clientId,\n access_token: token,\n ...requestOptions\n }\n );\n const authentication = Object.assign(\n {\n clientType,\n clientId,\n clientSecret,\n token: response.data.token\n },\n response.data.expires_at ? 
{ expiresAt: response.data.expires_at } : {}\n );\n return { ...response, authentication };\n}\nexport {\n scopeToken\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport btoa from \"btoa-lite\";\nasync function resetToken(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const auth = btoa(`${options.clientId}:${options.clientSecret}`);\n const response = await request(\n \"PATCH /applications/{client_id}/token\",\n {\n headers: {\n authorization: `basic ${auth}`\n },\n client_id: options.clientId,\n access_token: options.token\n }\n );\n const authentication = {\n clientType: options.clientType,\n clientId: options.clientId,\n clientSecret: options.clientSecret,\n token: response.data.token,\n scopes: response.data.scopes\n };\n if (response.data.expires_at)\n authentication.expiresAt = response.data.expires_at;\n if (options.clientType === \"github-app\") {\n delete authentication.scopes;\n }\n return { ...response, authentication };\n}\nexport {\n resetToken\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport btoa from \"btoa-lite\";\nasync function deleteToken(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const auth = btoa(`${options.clientId}:${options.clientSecret}`);\n return request(\n \"DELETE /applications/{client_id}/token\",\n {\n headers: {\n authorization: `basic ${auth}`\n },\n client_id: options.clientId,\n access_token: options.token\n }\n );\n}\nexport {\n deleteToken\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport btoa from \"btoa-lite\";\nasync function deleteAuthorization(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const auth = btoa(`${options.clientId}:${options.clientSecret}`);\n return request(\n \"DELETE /applications/{client_id}/grant\",\n {\n headers: {\n authorization: `basic ${auth}`\n },\n client_id: options.clientId,\n access_token: options.token\n }\n );\n}\nexport {\n deleteAuthorization\n};\n"], + "mappings": 
";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAM,UAAU;;;ACAhB,qCAAsC;AACtC,qBAA0C;;;ACD1C,2BAA6B;AAC7B,SAAS,sBAAsB,SAAS;AACtC,QAAM,mBAAmB,QAAQ,SAAS;AAC1C,SAAO,kCAAkC,KAAK,iBAAiB,OAAO,IAAI,uBAAuB,iBAAiB,QAAQ,QAAQ,WAAW,EAAE;AACjJ;AACA,eAAe,aAAa,SAAS,OAAO,YAAY;AACtD,QAAM,sBAAsB;AAAA,IAC1B,SAAS,sBAAsB,OAAO;AAAA,IACtC,SAAS;AAAA,MACP,QAAQ;AAAA,IACV;AAAA,IACA,GAAG;AAAA,EACL;AACA,QAAM,WAAW,MAAM,QAAQ,OAAO,mBAAmB;AACzD,MAAI,WAAW,SAAS,MAAM;AAC5B,UAAM,QAAQ,IAAI;AAAA,MAChB,GAAG,SAAS,KAAK,sBAAsB,SAAS,KAAK,UAAU,SAAS,KAAK;AAAA,MAC7E;AAAA,MACA;AAAA,QACE,SAAS,QAAQ,SAAS;AAAA,UACxB;AAAA,UACA;AAAA,QACF;AAAA,QACA,SAAS,SAAS;AAAA,MACpB;AAAA,IACF;AACA,UAAM,WAAW;AACjB,UAAM;AAAA,EACR;AACA,SAAO;AACT;;;AD3BA,SAAS,2BAA2B;AAAA,EAClC,UAAU,eAAAA;AAAA,EACV,GAAG;AACL,GAAG;AACD,QAAM,UAAU,sBAAsB,OAAO;AAC7C,aAAO,sDAAsB;AAAA,IAC3B,GAAG;AAAA,IACH;AAAA,EACF,CAAC;AACH;;;AEZA,IAAAC,kBAA0C;AAE1C,eAAe,oBAAoB,SAAS;AAC1C,QAAM,UAAU,QAAQ;AAAA,EACxB,gBAAAC;AACA,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,MACE,WAAW,QAAQ;AAAA,MACnB,eAAe,QAAQ;AAAA,MACvB,MAAM,QAAQ;AAAA,MACd,cAAc,QAAQ;AAAA,IACxB;AAAA,EACF;AACA,QAAM,iBAAiB;AAAA,IACrB,YAAY,QAAQ;AAAA,IACpB,UAAU,QAAQ;AAAA,IAClB,cAAc,QAAQ;AAAA,IACtB,OAAO,SAAS,KAAK;AAAA,IACrB,QAAQ,SAAS,KAAK,MAAM,MAAM,KAAK,EAAE,OAAO,OAAO;AAAA,EACzD;AACA,MAAI,QAAQ,eAAe,cAAc;AACvC,QAAI,mBAAmB,SAAS,MAAM;AACpC,YAAM,cAAc,IAAI,KAAK,SAAS,QAAQ,IAAI,EAAE,QAAQ;AAC5D,qBAAe,eAAe,SAAS,KAAK,eAAe,eAAe,YAAY;AAAA,QACpF;AAAA,QACA,SAAS,KAAK;AAAA,MAChB,GAAG,eAAe,wBAAwB;AAAA,QACxC;AAAA,QACA,SAAS,KAAK;AAAA,MAChB;AAAA,IACF;AACA,WAAO,eAAe;AAAA,EACxB;AACA,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;AACA,SAAS,YAAY,aAAa,qBAAqB;AACrD,SAAO,IAAI,KAAK,cAAc,sBAAsB,GAAG,EAAE,YAAY;AACvE;;;ACvCA,IAAAC,kBAA0C;AAE1C,eAAe,iBAAiB,SAAS;AACvC,QAAM,UAAU,QAAQ;AAAA,EACxB,gBAAAC;AACA,QAAM,aAAa;AAAA,IACjB,WAAW,QAAQ;AAAA,EACrB;AACA,MAAI,YAAY,WAAW,MAAM,QAAQ,QAAQ,MAAM,GAAG;AACxD,eAAW,QAAQ,QAAQ,OAAO,KAAK,GAAG;AAAA,EAC5C;AACA,SAAO,aAAa,SAAS,2BAA2B,UAAU;AACpE;;;ACZA,IAAAC,kBAA0C;AAE1C,eAAe,mBAAmB,SAAS;AACzC,QAAM,UAAU,QAAQ;AAAA,EACxB,gBAAAC;AACA,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,MACE,WAAW,QAAQ;AAAA,MACnB,aAAa,QAAQ;AAAA,MACrB,YAAY;AAAA,IACd;AAAA,EACF;AACA,QAAM,iBAAiB;AAAA,IACrB,YAAY,QAAQ;AAAA,IACpB,UAAU,QAAQ;AAAA,IAClB,OAAO,SAAS,KAAK;AAAA,IACrB,QAAQ,SAAS,KAAK,MAAM,MAAM,KAAK,EAAE,OAAO,OAAO;AAAA,EACzD;AACA,MAAI,kBAAkB,SAAS;AAC7B,mBAAe,eAAe,QAAQ;AAAA,EACxC;AACA,MAAI,QAAQ,eAAe,cAAc;AACvC,QAAI,mBAAmB,SAAS,MAAM;AACpC,YAAM,cAAc,IAAI,KAAK,SAAS,QAAQ,IAAI,EAAE,QAAQ;AAC5D,qBAAe,eAAe,SAAS,KAAK,eAAe,eAAe,YAAYC;AAAA,QACpF;AAAA,QACA,SAAS,KAAK;AAAA,MAChB,GAAG,eAAe,wBAAwBA;AAAA,QACxC;AAAA,QACA,SAAS,KAAK;AAAA,MAChB;AAAA,IACF;AACA,WAAO,eAAe;AAAA,EACxB;AACA,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;AACA,SAASA,aAAY,aAAa,qBAAqB;AACrD,SAAO,IAAI,KAAK,cAAc,sBAAsB,GAAG,EAAE,YAAY;AACvE;;;ACxCA,IAAAC,kBAA0C;AAC1C,uBAAiB;AACjB,eAAe,WAAW,SAAS;AACjC,QAAM,UAAU,QAAQ;AAAA,EACxB,gBAAAC;AACA,QAAM,WAAW,MAAM,QAAQ,wCAAwC;AAAA,IACrE,SAAS;AAAA,MACP,eAAe,aAAS,iBAAAC;AAAA,QACtB,GAAG,QAAQ,YAAY,QAAQ;AAAA,MACjC;AAAA,IACF;AAAA,IACA,WAAW,QAAQ;AAAA,IACnB,cAAc,QAAQ;AAAA,EACxB,CAAC;AACD,QAAM,iBAAiB;AAAA,IACrB,YAAY,QAAQ;AAAA,IACpB,UAAU,QAAQ;AAAA,IAClB,cAAc,QAAQ;AAAA,IACtB,OAAO,QAAQ;AAAA,IACf,QAAQ,SAAS,KAAK;AAAA,EACxB;AACA,MAAI,SAAS,KAAK;AAChB,mBAAe,YAAY,SAAS,KAAK;AAC3C,MAAI,QAAQ,eAAe,cAAc;AACvC,WAAO,eAAe;AAAA,EACxB;AACA,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;;;AC3BA,IAAAC,kBAA0C;AAE1C,eAAe,aAAa,SAAS;AACnC,QAAM,UAAU,QAAQ;AAAA,EACxB,gBAAAC;AACA,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,MACE,WAAW,QAAQ;AAAA,MACnB,eAAe,QAAQ;AAAA,MACvB,YAAY;AAAA,MACZ,eAAe,QAA
Q;AAAA,IACzB;AAAA,EACF;AACA,QAAM,cAAc,IAAI,KAAK,SAAS,QAAQ,IAAI,EAAE,QAAQ;AAC5D,QAAM,iBAAiB;AAAA,IACrB,YAAY;AAAA,IACZ,UAAU,QAAQ;AAAA,IAClB,cAAc,QAAQ;AAAA,IACtB,OAAO,SAAS,KAAK;AAAA,IACrB,cAAc,SAAS,KAAK;AAAA,IAC5B,WAAWC,aAAY,aAAa,SAAS,KAAK,UAAU;AAAA,IAC5D,uBAAuBA;AAAA,MACrB;AAAA,MACA,SAAS,KAAK;AAAA,IAChB;AAAA,EACF;AACA,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;AACA,SAASA,aAAY,aAAa,qBAAqB;AACrD,SAAO,IAAI,KAAK,cAAc,sBAAsB,GAAG,EAAE,YAAY;AACvE;;;AChCA,IAAAC,kBAA0C;AAC1C,IAAAC,oBAAiB;AACjB,eAAe,WAAW,SAAS;AACjC,QAAM;AAAA,IACJ,SAAS;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EACL,IAAI;AACJ,QAAM,UAAU;AAAA,EAChB,gBAAAC;AACA,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA;AAAA,MACE,SAAS;AAAA,QACP,eAAe,aAAS,kBAAAC,SAAK,GAAG,YAAY,cAAc;AAAA,MAC5D;AAAA,MACA,WAAW;AAAA,MACX,cAAc;AAAA,MACd,GAAG;AAAA,IACL;AAAA,EACF;AACA,QAAM,iBAAiB,OAAO;AAAA,IAC5B;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA,OAAO,SAAS,KAAK;AAAA,IACvB;AAAA,IACA,SAAS,KAAK,aAAa,EAAE,WAAW,SAAS,KAAK,WAAW,IAAI,CAAC;AAAA,EACxE;AACA,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;;;AClCA,IAAAC,kBAA0C;AAC1C,IAAAC,oBAAiB;AACjB,eAAe,WAAW,SAAS;AACjC,QAAM,UAAU,QAAQ;AAAA,EACxB,gBAAAC;AACA,QAAM,WAAO,kBAAAC,SAAK,GAAG,QAAQ,YAAY,QAAQ,cAAc;AAC/D,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA;AAAA,MACE,SAAS;AAAA,QACP,eAAe,SAAS;AAAA,MAC1B;AAAA,MACA,WAAW,QAAQ;AAAA,MACnB,cAAc,QAAQ;AAAA,IACxB;AAAA,EACF;AACA,QAAM,iBAAiB;AAAA,IACrB,YAAY,QAAQ;AAAA,IACpB,UAAU,QAAQ;AAAA,IAClB,cAAc,QAAQ;AAAA,IACtB,OAAO,SAAS,KAAK;AAAA,IACrB,QAAQ,SAAS,KAAK;AAAA,EACxB;AACA,MAAI,SAAS,KAAK;AAChB,mBAAe,YAAY,SAAS,KAAK;AAC3C,MAAI,QAAQ,eAAe,cAAc;AACvC,WAAO,eAAe;AAAA,EACxB;AACA,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;;;AC7BA,IAAAC,kBAA0C;AAC1C,IAAAC,oBAAiB;AACjB,eAAe,YAAY,SAAS;AAClC,QAAM,UAAU,QAAQ;AAAA,EACxB,gBAAAC;AACA,QAAM,WAAO,kBAAAC,SAAK,GAAG,QAAQ,YAAY,QAAQ,cAAc;AAC/D,SAAO;AAAA,IACL;AAAA,IACA;AAAA,MACE,SAAS;AAAA,QACP,eAAe,SAAS;AAAA,MAC1B;AAAA,MACA,WAAW,QAAQ;AAAA,MACnB,cAAc,QAAQ;AAAA,IACxB;AAAA,EACF;AACF;;;AChBA,IAAAC,mBAA0C;AAC1C,IAAAC,oBAAiB;AACjB,eAAe,oBAAoB,SAAS;AAC1C,QAAM,UAAU,QAAQ;AAAA,EACxB,iBAAAC;AACA,QAAM,WAAO,kBAAAC,SAAK,GAAG,QAAQ,YAAY,QAAQ,cAAc;AAC/D,SAAO;AAAA,IACL;AAAA,IACA;AAAA,MACE,SAAS;AAAA,QACP,eAAe,SAAS;AAAA,MAC1B;AAAA,MACA,WAAW,QAAQ;AAAA,MACnB,cAAc,QAAQ;AAAA,IACxB;AAAA,EACF;AACF;", + "names": ["defaultRequest", "import_request", "defaultRequest", "import_request", "defaultRequest", "import_request", "defaultRequest", "toTimestamp", "import_request", "defaultRequest", "btoa", "import_request", "defaultRequest", "toTimestamp", "import_request", "import_btoa_lite", "defaultRequest", "btoa", "import_request", "import_btoa_lite", "defaultRequest", "btoa", "import_request", "import_btoa_lite", "defaultRequest", "btoa", "import_request", "import_btoa_lite", "defaultRequest", "btoa"] +} diff --git a/dist/node_modules/@octokit/oauth-methods/dist-src/check-token.js b/dist/node_modules/@octokit/oauth-methods/dist-src/check-token.js new file mode 100644 index 0000000..45ab311 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-src/check-token.js @@ -0,0 +1,31 @@ +import { request as defaultRequest } from "@octokit/request"; +import btoa from "btoa-lite"; +async function checkToken(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest; + const response = await request("POST /applications/{client_id}/token", { + headers: { + authorization: `basic ${btoa( + `${options.clientId}:${options.clientSecret}` + )}` + }, + client_id: options.clientId, + access_token: options.token + }); + const authentication = { + clientType: options.clientType, + 
clientId: options.clientId, + clientSecret: options.clientSecret, + token: options.token, + scopes: response.data.scopes + }; + if (response.data.expires_at) + authentication.expiresAt = response.data.expires_at; + if (options.clientType === "github-app") { + delete authentication.scopes; + } + return { ...response, authentication }; +} +export { + checkToken +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-src/create-device-code.js b/dist/node_modules/@octokit/oauth-methods/dist-src/create-device-code.js new file mode 100644 index 0000000..5340f8c --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-src/create-device-code.js @@ -0,0 +1,16 @@ +import { request as defaultRequest } from "@octokit/request"; +import { oauthRequest } from "./utils"; +async function createDeviceCode(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest; + const parameters = { + client_id: options.clientId + }; + if ("scopes" in options && Array.isArray(options.scopes)) { + parameters.scope = options.scopes.join(" "); + } + return oauthRequest(request, "POST /login/device/code", parameters); +} +export { + createDeviceCode +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-src/delete-authorization.js b/dist/node_modules/@octokit/oauth-methods/dist-src/delete-authorization.js new file mode 100644 index 0000000..0e9b0de --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-src/delete-authorization.js @@ -0,0 +1,20 @@ +import { request as defaultRequest } from "@octokit/request"; +import btoa from "btoa-lite"; +async function deleteAuthorization(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest; + const auth = btoa(`${options.clientId}:${options.clientSecret}`); + return request( + "DELETE /applications/{client_id}/grant", + { + headers: { + authorization: `basic ${auth}` + }, + client_id: options.clientId, + access_token: options.token + } + ); +} +export { + deleteAuthorization +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-src/delete-token.js b/dist/node_modules/@octokit/oauth-methods/dist-src/delete-token.js new file mode 100644 index 0000000..8fe5f55 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-src/delete-token.js @@ -0,0 +1,20 @@ +import { request as defaultRequest } from "@octokit/request"; +import btoa from "btoa-lite"; +async function deleteToken(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest; + const auth = btoa(`${options.clientId}:${options.clientSecret}`); + return request( + "DELETE /applications/{client_id}/token", + { + headers: { + authorization: `basic ${auth}` + }, + client_id: options.clientId, + access_token: options.token + } + ); +} +export { + deleteToken +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-src/exchange-device-code.js b/dist/node_modules/@octokit/oauth-methods/dist-src/exchange-device-code.js new file mode 100644 index 0000000..cb94309 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-src/exchange-device-code.js @@ -0,0 +1,44 @@ +import { request as defaultRequest } from "@octokit/request"; +import { oauthRequest } from "./utils"; +async function exchangeDeviceCode(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest; + const response = 
await oauthRequest( + request, + "POST /login/oauth/access_token", + { + client_id: options.clientId, + device_code: options.code, + grant_type: "urn:ietf:params:oauth:grant-type:device_code" + } + ); + const authentication = { + clientType: options.clientType, + clientId: options.clientId, + token: response.data.access_token, + scopes: response.data.scope.split(/\s+/).filter(Boolean) + }; + if ("clientSecret" in options) { + authentication.clientSecret = options.clientSecret; + } + if (options.clientType === "github-app") { + if ("refresh_token" in response.data) { + const apiTimeInMs = new Date(response.headers.date).getTime(); + authentication.refreshToken = response.data.refresh_token, authentication.expiresAt = toTimestamp( + apiTimeInMs, + response.data.expires_in + ), authentication.refreshTokenExpiresAt = toTimestamp( + apiTimeInMs, + response.data.refresh_token_expires_in + ); + } + delete authentication.scopes; + } + return { ...response, authentication }; +} +function toTimestamp(apiTimeInMs, expirationInSeconds) { + return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString(); +} +export { + exchangeDeviceCode +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-src/exchange-web-flow-code.js b/dist/node_modules/@octokit/oauth-methods/dist-src/exchange-web-flow-code.js new file mode 100644 index 0000000..968ff40 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-src/exchange-web-flow-code.js @@ -0,0 +1,43 @@ +import { request as defaultRequest } from "@octokit/request"; +import { oauthRequest } from "./utils"; +async function exchangeWebFlowCode(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest; + const response = await oauthRequest( + request, + "POST /login/oauth/access_token", + { + client_id: options.clientId, + client_secret: options.clientSecret, + code: options.code, + redirect_uri: options.redirectUrl + } + ); + const authentication = { + clientType: options.clientType, + clientId: options.clientId, + clientSecret: options.clientSecret, + token: response.data.access_token, + scopes: response.data.scope.split(/\s+/).filter(Boolean) + }; + if (options.clientType === "github-app") { + if ("refresh_token" in response.data) { + const apiTimeInMs = new Date(response.headers.date).getTime(); + authentication.refreshToken = response.data.refresh_token, authentication.expiresAt = toTimestamp( + apiTimeInMs, + response.data.expires_in + ), authentication.refreshTokenExpiresAt = toTimestamp( + apiTimeInMs, + response.data.refresh_token_expires_in + ); + } + delete authentication.scopes; + } + return { ...response, authentication }; +} +function toTimestamp(apiTimeInMs, expirationInSeconds) { + return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString(); +} +export { + exchangeWebFlowCode +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-src/get-web-flow-authorization-url.js b/dist/node_modules/@octokit/oauth-methods/dist-src/get-web-flow-authorization-url.js new file mode 100644 index 0000000..9e9b6c6 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-src/get-web-flow-authorization-url.js @@ -0,0 +1,16 @@ +import { oauthAuthorizationUrl } from "@octokit/oauth-authorization-url"; +import { request as defaultRequest } from "@octokit/request"; +import { requestToOAuthBaseUrl } from "./utils"; +function getWebFlowAuthorizationUrl({ + request = defaultRequest, + ...options +}) { + const baseUrl = requestToOAuthBaseUrl(request); + 
return oauthAuthorizationUrl({ + ...options, + baseUrl + }); +} +export { + getWebFlowAuthorizationUrl +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-src/index.js b/dist/node_modules/@octokit/oauth-methods/dist-src/index.js new file mode 100644 index 0000000..d518b43 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-src/index.js @@ -0,0 +1,14 @@ +import { VERSION } from "./version"; +export * from "./get-web-flow-authorization-url"; +export * from "./exchange-web-flow-code"; +export * from "./create-device-code"; +export * from "./exchange-device-code"; +export * from "./check-token"; +export * from "./refresh-token"; +export * from "./scope-token"; +export * from "./reset-token"; +export * from "./delete-token"; +export * from "./delete-authorization"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-src/refresh-token.js b/dist/node_modules/@octokit/oauth-methods/dist-src/refresh-token.js new file mode 100644 index 0000000..5dbef12 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-src/refresh-token.js @@ -0,0 +1,36 @@ +import { request as defaultRequest } from "@octokit/request"; +import { oauthRequest } from "./utils"; +async function refreshToken(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest; + const response = await oauthRequest( + request, + "POST /login/oauth/access_token", + { + client_id: options.clientId, + client_secret: options.clientSecret, + grant_type: "refresh_token", + refresh_token: options.refreshToken + } + ); + const apiTimeInMs = new Date(response.headers.date).getTime(); + const authentication = { + clientType: "github-app", + clientId: options.clientId, + clientSecret: options.clientSecret, + token: response.data.access_token, + refreshToken: response.data.refresh_token, + expiresAt: toTimestamp(apiTimeInMs, response.data.expires_in), + refreshTokenExpiresAt: toTimestamp( + apiTimeInMs, + response.data.refresh_token_expires_in + ) + }; + return { ...response, authentication }; +} +function toTimestamp(apiTimeInMs, expirationInSeconds) { + return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString(); +} +export { + refreshToken +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-src/reset-token.js b/dist/node_modules/@octokit/oauth-methods/dist-src/reset-token.js new file mode 100644 index 0000000..a335de0 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-src/reset-token.js @@ -0,0 +1,33 @@ +import { request as defaultRequest } from "@octokit/request"; +import btoa from "btoa-lite"; +async function resetToken(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest; + const auth = btoa(`${options.clientId}:${options.clientSecret}`); + const response = await request( + "PATCH /applications/{client_id}/token", + { + headers: { + authorization: `basic ${auth}` + }, + client_id: options.clientId, + access_token: options.token + } + ); + const authentication = { + clientType: options.clientType, + clientId: options.clientId, + clientSecret: options.clientSecret, + token: response.data.token, + scopes: response.data.scopes + }; + if (response.data.expires_at) + authentication.expiresAt = response.data.expires_at; + if (options.clientType === "github-app") { + delete authentication.scopes; + } + return { ...response, authentication }; +} +export { + resetToken +}; diff --git 
a/dist/node_modules/@octokit/oauth-methods/dist-src/scope-token.js b/dist/node_modules/@octokit/oauth-methods/dist-src/scope-token.js new file mode 100644 index 0000000..e17d00c --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-src/scope-token.js @@ -0,0 +1,38 @@ +import { request as defaultRequest } from "@octokit/request"; +import btoa from "btoa-lite"; +async function scopeToken(options) { + const { + request: optionsRequest, + clientType, + clientId, + clientSecret, + token, + ...requestOptions + } = options; + const request = optionsRequest || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest; + const response = await request( + "POST /applications/{client_id}/token/scoped", + { + headers: { + authorization: `basic ${btoa(`${clientId}:${clientSecret}`)}` + }, + client_id: clientId, + access_token: token, + ...requestOptions + } + ); + const authentication = Object.assign( + { + clientType, + clientId, + clientSecret, + token: response.data.token + }, + response.data.expires_at ? { expiresAt: response.data.expires_at } : {} + ); + return { ...response, authentication }; +} +export { + scopeToken +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-src/utils.js b/dist/node_modules/@octokit/oauth-methods/dist-src/utils.js new file mode 100644 index 0000000..79940a4 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-src/utils.js @@ -0,0 +1,35 @@ +import { RequestError } from "@octokit/request-error"; +function requestToOAuthBaseUrl(request) { + const endpointDefaults = request.endpoint.DEFAULTS; + return /^https:\/\/(api\.)?github\.com$/.test(endpointDefaults.baseUrl) ? "https://github.com" : endpointDefaults.baseUrl.replace("/api/v3", ""); +} +async function oauthRequest(request, route, parameters) { + const withOAuthParameters = { + baseUrl: requestToOAuthBaseUrl(request), + headers: { + accept: "application/json" + }, + ...parameters + }; + const response = await request(route, withOAuthParameters); + if ("error" in response.data) { + const error = new RequestError( + `${response.data.error_description} (${response.data.error}, ${response.data.error_uri})`, + 400, + { + request: request.endpoint.merge( + route, + withOAuthParameters + ), + headers: response.headers + } + ); + error.response = response; + throw error; + } + return response; +} +export { + oauthRequest, + requestToOAuthBaseUrl +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-src/version.js b/dist/node_modules/@octokit/oauth-methods/dist-src/version.js new file mode 100644 index 0000000..6a407d8 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "2.0.6"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/check-token.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/check-token.d.ts new file mode 100644 index 0000000..6898ba8 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-types/check-token.d.ts @@ -0,0 +1,24 @@ +import type { RequestInterface, Endpoints } from "@octokit/types"; +import type { OAuthAppAuthentication, GitHubAppAuthenticationWithExpirationEnabled, GitHubAppAuthenticationWithExpirationDisabled } from "./types"; +export type CheckTokenOAuthAppOptions = { + clientType: "oauth-app"; + clientId: string; + clientSecret: string; + token: string; + request?: RequestInterface; +}; +export type CheckTokenGitHubAppOptions = { + clientType: "github-app"; + clientId: string; + clientSecret: string; + 
token: string; + request?: RequestInterface; +}; +export type CheckTokenOAuthAppResponse = Endpoints["POST /applications/{client_id}/token"]["response"] & { + authentication: OAuthAppAuthentication; +}; +export type CheckTokenGitHubAppResponse = Endpoints["POST /applications/{client_id}/token"]["response"] & { + authentication: GitHubAppAuthenticationWithExpirationEnabled | GitHubAppAuthenticationWithExpirationDisabled; +}; +export declare function checkToken(options: CheckTokenOAuthAppOptions): Promise; +export declare function checkToken(options: CheckTokenGitHubAppOptions): Promise; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/create-device-code.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/create-device-code.d.ts new file mode 100644 index 0000000..4953a54 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-types/create-device-code.d.ts @@ -0,0 +1,20 @@ +import type { OctokitResponse, RequestInterface } from "@octokit/types"; +export type CreateDeviceCodeOAuthAppOptions = { + clientType: "oauth-app"; + clientId: string; + scopes?: string[]; + request?: RequestInterface; +}; +export type CreateDeviceCodeGitHubAppOptions = { + clientType: "github-app"; + clientId: string; + request?: RequestInterface; +}; +export type CreateDeviceCodeDeviceTokenResponse = OctokitResponse<{ + device_code: string; + user_code: string; + verification_uri: string; + expires_in: number; + interval: number; +}>; +export declare function createDeviceCode(options: CreateDeviceCodeOAuthAppOptions | CreateDeviceCodeGitHubAppOptions): Promise; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/delete-authorization.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/delete-authorization.d.ts new file mode 100644 index 0000000..640c869 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-types/delete-authorization.d.ts @@ -0,0 +1,18 @@ +import type { RequestInterface, Endpoints } from "@octokit/types"; +export type DeleteAuthorizationOAuthAppOptions = { + clientType: "oauth-app"; + clientId: string; + clientSecret: string; + token: string; + request?: RequestInterface; +}; +export type DeleteAuthorizationGitHubAppOptions = { + clientType: "github-app"; + clientId: string; + clientSecret: string; + token: string; + request?: RequestInterface; +}; +export type DeleteAuthorizationResponse = Endpoints["DELETE /applications/{client_id}/grant"]["response"]; +export declare function deleteAuthorization(options: DeleteAuthorizationOAuthAppOptions): Promise; +export declare function deleteAuthorization(options: DeleteAuthorizationGitHubAppOptions): Promise; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/delete-token.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/delete-token.d.ts new file mode 100644 index 0000000..9d49c22 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-types/delete-token.d.ts @@ -0,0 +1,18 @@ +import type { RequestInterface, Endpoints } from "@octokit/types"; +export type DeleteTokenOAuthAppOptions = { + clientType: "oauth-app"; + clientId: string; + clientSecret: string; + token: string; + request?: RequestInterface; +}; +export type DeleteTokenGitHubAppOptions = { + clientType: "github-app"; + clientId: string; + clientSecret: string; + token: string; + request?: RequestInterface; +}; +export type DeleteTokenResponse = Endpoints["DELETE /applications/{client_id}/token"]["response"]; +export declare function deleteToken(options: DeleteTokenOAuthAppOptions): Promise; +export 
declare function deleteToken(options: DeleteTokenGitHubAppOptions): Promise; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/exchange-device-code.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/exchange-device-code.d.ts new file mode 100644 index 0000000..1cbeef9 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-types/exchange-device-code.d.ts @@ -0,0 +1,57 @@ +import type { OctokitResponse, RequestInterface } from "@octokit/types"; +import type { OAuthAppAuthentication, GitHubAppAuthenticationWithExpirationEnabled, GitHubAppAuthenticationWithExpirationDisabled, GitHubAppAuthenticationWithRefreshToken, OAuthAppCreateTokenResponseData, GitHubAppCreateTokenResponseData, GitHubAppCreateTokenWithExpirationResponseData } from "./types"; +export type ExchangeDeviceCodeOAuthAppOptionsWithoutClientSecret = { + clientType: "oauth-app"; + clientId: string; + code: string; + redirectUrl?: string; + state?: string; + request?: RequestInterface; + scopes?: string[]; +}; +export type ExchangeDeviceCodeOAuthAppOptions = ExchangeDeviceCodeOAuthAppOptionsWithoutClientSecret & { + clientSecret: string; +}; +export type ExchangeDeviceCodeGitHubAppOptionsWithoutClientSecret = { + clientType: "github-app"; + clientId: string; + code: string; + redirectUrl?: string; + state?: string; + request?: RequestInterface; +}; +export type ExchangeDeviceCodeGitHubAppOptions = ExchangeDeviceCodeGitHubAppOptionsWithoutClientSecret & { + clientSecret: string; +}; +type OAuthAppAuthenticationWithoutClientSecret = Omit; +type GitHubAppAuthenticationWithoutClientSecret = Omit; +type GitHubAppAuthenticationWithExpirationWithoutClientSecret = Omit; +export type ExchangeDeviceCodeOAuthAppResponse = OctokitResponse & { + authentication: OAuthAppAuthentication; +}; +export type ExchangeDeviceCodeOAuthAppResponseWithoutClientSecret = OctokitResponse & { + authentication: OAuthAppAuthenticationWithoutClientSecret; +}; +export type ExchangeDeviceCodeGitHubAppResponse = OctokitResponse & { + authentication: GitHubAppAuthenticationWithExpirationEnabled | GitHubAppAuthenticationWithExpirationDisabled | GitHubAppAuthenticationWithRefreshToken; +}; +export type ExchangeDeviceCodeGitHubAppResponseWithoutClientSecret = OctokitResponse & { + authentication: GitHubAppAuthenticationWithoutClientSecret | GitHubAppAuthenticationWithExpirationWithoutClientSecret; +}; +/** + * Exchange the code from GitHub's OAuth Web flow for OAuth Apps. + */ +export declare function exchangeDeviceCode(options: ExchangeDeviceCodeOAuthAppOptions): Promise; +/** + * Exchange the code from GitHub's OAuth Web flow for OAuth Apps without clientSecret + */ +export declare function exchangeDeviceCode(options: ExchangeDeviceCodeOAuthAppOptionsWithoutClientSecret): Promise; +/** + * Exchange the code from GitHub's OAuth Web flow for GitHub Apps. `scopes` are not supported by GitHub Apps. + */ +export declare function exchangeDeviceCode(options: ExchangeDeviceCodeGitHubAppOptions): Promise; +/** + * Exchange the code from GitHub's OAuth Web flow for GitHub Apps without using `clientSecret`. `scopes` are not supported by GitHub Apps. 
+ */ +export declare function exchangeDeviceCode(options: ExchangeDeviceCodeGitHubAppOptionsWithoutClientSecret): Promise<ExchangeDeviceCodeGitHubAppResponseWithoutClientSecret>; +export {}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/exchange-web-flow-code.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/exchange-web-flow-code.d.ts new file mode 100644 index 0000000..7b14135 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-types/exchange-web-flow-code.d.ts @@ -0,0 +1,32 @@ +import type { OctokitResponse, RequestInterface } from "@octokit/types"; +import type { OAuthAppAuthentication, GitHubAppAuthenticationWithExpirationEnabled, GitHubAppAuthenticationWithExpirationDisabled, GitHubAppAuthenticationWithRefreshToken, OAuthAppCreateTokenResponseData, GitHubAppCreateTokenResponseData, GitHubAppCreateTokenWithExpirationResponseData } from "./types"; +export type ExchangeWebFlowCodeOAuthAppOptions = { + clientType: "oauth-app"; + clientId: string; + clientSecret: string; + code: string; + redirectUrl?: string; + request?: RequestInterface; +}; +export type ExchangeWebFlowCodeGitHubAppOptions = { + clientType: "github-app"; + clientId: string; + clientSecret: string; + code: string; + redirectUrl?: string; + request?: RequestInterface; +}; +export type ExchangeWebFlowCodeOAuthAppResponse = OctokitResponse<OAuthAppCreateTokenResponseData> & { + authentication: OAuthAppAuthentication; +}; +export type ExchangeWebFlowCodeGitHubAppResponse = OctokitResponse<GitHubAppCreateTokenResponseData | GitHubAppCreateTokenWithExpirationResponseData> & { + authentication: GitHubAppAuthenticationWithExpirationEnabled | GitHubAppAuthenticationWithExpirationDisabled | GitHubAppAuthenticationWithRefreshToken; +}; +/** + * Exchange the code from GitHub's OAuth Web flow for OAuth Apps. + */ +export declare function exchangeWebFlowCode(options: ExchangeWebFlowCodeOAuthAppOptions): Promise<ExchangeWebFlowCodeOAuthAppResponse>; +/** + * Exchange the code from GitHub's OAuth Web flow for GitHub Apps. Note that `scopes` are not supported by GitHub Apps. 
+ */ +export declare function exchangeWebFlowCode(options: ExchangeWebFlowCodeGitHubAppOptions): Promise<ExchangeWebFlowCodeGitHubAppResponse>; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/get-web-flow-authorization-url.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/get-web-flow-authorization-url.d.ts new file mode 100644 index 0000000..26d0f96 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-types/get-web-flow-authorization-url.d.ts @@ -0,0 +1,25 @@ +import type { OAuthAppResult, GitHubAppResult } from "@octokit/oauth-authorization-url"; +import type { RequestInterface } from "@octokit/types"; +export type GetWebFlowAuthorizationUrlOAuthAppOptions = { + clientType: "oauth-app"; + clientId: string; + allowSignup?: boolean; + login?: string; + scopes?: string | string[]; + redirectUrl?: string; + state?: string; + request?: RequestInterface; +}; +export type GetWebFlowAuthorizationUrlGitHubAppOptions = { + clientType: "github-app"; + clientId: string; + allowSignup?: boolean; + login?: string; + redirectUrl?: string; + state?: string; + request?: RequestInterface; +}; +export type GetWebFlowAuthorizationUrlOAuthAppResult = OAuthAppResult; +export type GetWebFlowAuthorizationUrlGitHubAppResult = GitHubAppResult; +export declare function getWebFlowAuthorizationUrl(options: GetWebFlowAuthorizationUrlOAuthAppOptions): OAuthAppResult; +export declare function getWebFlowAuthorizationUrl(options: GetWebFlowAuthorizationUrlGitHubAppOptions): GitHubAppResult; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/index.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/index.d.ts new file mode 100644 index 0000000..7c386bc --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-types/index.d.ts @@ -0,0 +1,12 @@ +export { VERSION } from "./version"; +export * from "./get-web-flow-authorization-url"; +export * from "./exchange-web-flow-code"; +export * from "./create-device-code"; +export * from "./exchange-device-code"; +export * from "./check-token"; +export * from "./refresh-token"; +export * from "./scope-token"; +export * from "./reset-token"; +export * from "./delete-token"; +export * from "./delete-authorization"; +export type { OAuthAppAuthentication, GitHubAppAuthenticationWithExpirationDisabled, GitHubAppAuthenticationWithExpirationEnabled, GitHubAppAuthenticationWithRefreshToken, GitHubAppAuthentication, GitHubAppAuthenticationWithExpiration, } from "./types"; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/refresh-token.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/refresh-token.d.ts new file mode 100644 index 0000000..8792acc --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-types/refresh-token.d.ts @@ -0,0 +1,13 @@ +import type { OctokitResponse, RequestInterface } from "@octokit/types"; +import type { GitHubAppAuthenticationWithRefreshToken, GitHubAppCreateTokenWithExpirationResponseData } from "./types"; +export type RefreshTokenOptions = { + clientType: "github-app"; + clientId: string; + clientSecret: string; + refreshToken: string; + request?: RequestInterface; +}; +export type RefreshTokenResponse = OctokitResponse<GitHubAppCreateTokenWithExpirationResponseData> & { + authentication: GitHubAppAuthenticationWithRefreshToken; +}; +export declare function refreshToken(options: RefreshTokenOptions): Promise<RefreshTokenResponse>; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/reset-token.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/reset-token.d.ts new file mode 100644 index 0000000..cd56182 --- /dev/null +++ 
b/dist/node_modules/@octokit/oauth-methods/dist-types/reset-token.d.ts @@ -0,0 +1,24 @@ +import type { Endpoints, RequestInterface } from "@octokit/types"; +import type { OAuthAppAuthentication, GitHubAppAuthenticationWithExpirationEnabled, GitHubAppAuthenticationWithExpirationDisabled } from "./types"; +export type ResetTokenOAuthAppOptions = { + clientType: "oauth-app"; + clientId: string; + clientSecret: string; + token: string; + request?: RequestInterface; +}; +export type ResetTokenGitHubAppOptions = { + clientType: "github-app"; + clientId: string; + clientSecret: string; + token: string; + request?: RequestInterface; +}; +export type ResetTokenOAuthAppResponse = Endpoints["PATCH /applications/{client_id}/token"]["response"] & { + authentication: OAuthAppAuthentication; +}; +export type ResetTokenGitHubAppResponse = Endpoints["PATCH /applications/{client_id}/token"]["response"] & { + authentication: GitHubAppAuthenticationWithExpirationEnabled | GitHubAppAuthenticationWithExpirationDisabled; +}; +export declare function resetToken(options: ResetTokenOAuthAppOptions): Promise; +export declare function resetToken(options: ResetTokenGitHubAppOptions): Promise; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/scope-token.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/scope-token.d.ts new file mode 100644 index 0000000..445e3e7 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-types/scope-token.d.ts @@ -0,0 +1,29 @@ +import type { RequestInterface, Endpoints } from "@octokit/types"; +import type { GitHubAppAuthenticationWithExpirationEnabled, GitHubAppAuthenticationWithExpirationDisabled } from "./types"; +type CommonOptions = { + clientType: "github-app"; + clientId: string; + clientSecret: string; + token: string; + permissions?: Endpoint["parameters"]["permissions"]; + request?: RequestInterface; +}; +type TargetOption = { + target: string; +}; +type TargetIdOption = { + target_id: number; +}; +type RepositoriesOption = { + repositories?: string[]; +}; +type RepositoryIdsOption = { + repository_ids?: number[]; +}; +type Endpoint = Endpoints["POST /applications/{client_id}/token/scoped"]; +export type ScopeTokenOptions = (CommonOptions & TargetOption & RepositoriesOption) | (CommonOptions & TargetIdOption & RepositoriesOption) | (CommonOptions & TargetOption & RepositoryIdsOption) | (CommonOptions & TargetIdOption & RepositoryIdsOption); +export type ScopeTokenResponse = Endpoint["response"] & { + authentication: GitHubAppAuthenticationWithExpirationEnabled | GitHubAppAuthenticationWithExpirationDisabled; +}; +export declare function scopeToken(options: ScopeTokenOptions): Promise; +export {}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/types.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/types.d.ts new file mode 100644 index 0000000..b461364 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-types/types.d.ts @@ -0,0 +1,58 @@ +export type OAuthAppAuthentication = { + clientType: "oauth-app"; + clientId: string; + clientSecret: string; + token: string; + scopes: string[]; +}; +export type GitHubAppAuthenticationWithExpirationDisabled = { + clientType: "github-app"; + clientId: string; + clientSecret: string; + token: string; +}; +export type GitHubAppAuthenticationWithExpirationEnabled = GitHubAppAuthenticationWithExpirationDisabled & { + expiresAt: string; +}; +export type GitHubAppAuthenticationWithRefreshToken = GitHubAppAuthenticationWithExpirationEnabled & { + refreshToken: string; + 
refreshTokenExpiresAt: string; +}; +/** + * @deprecated Use `GitHubAppAuthenticationWithExpirationDisabled` or + * `GitHubAppAuthenticationWithExpirationEnabled` instead. + */ +export type GitHubAppAuthentication = { + clientType: "github-app"; + clientId: string; + clientSecret: string; + token: string; +}; +/** + * @deprecated Use `GitHubAppAuthenticationWithRefreshToken` instead. + */ +export type GitHubAppAuthenticationWithExpiration = { + clientType: "github-app"; + clientId: string; + clientSecret: string; + token: string; + refreshToken: string; + expiresAt: string; + refreshTokenExpiresAt: string; +}; +export type OAuthAppCreateTokenResponseData = { + access_token: string; + scope: string; + token_type: "bearer"; +}; +export type GitHubAppCreateTokenResponseData = { + access_token: string; + token_type: "bearer"; +}; +export type GitHubAppCreateTokenWithExpirationResponseData = { + access_token: string; + token_type: "bearer"; + expires_in: number; + refresh_token: string; + refresh_token_expires_in: number; +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/utils.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/utils.d.ts new file mode 100644 index 0000000..2e83f70 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-types/utils.d.ts @@ -0,0 +1,3 @@ +import type { RequestInterface } from "@octokit/types"; +export declare function requestToOAuthBaseUrl(request: RequestInterface): string; +export declare function oauthRequest(request: RequestInterface, route: string, parameters: Record): Promise>; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-types/version.d.ts b/dist/node_modules/@octokit/oauth-methods/dist-types/version.d.ts new file mode 100644 index 0000000..2fa9763 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "2.0.6"; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-web/index.js b/dist/node_modules/@octokit/oauth-methods/dist-web/index.js new file mode 100644 index 0000000..fd316f6 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-web/index.js @@ -0,0 +1,332 @@ +// pkg/dist-src/version.js +var VERSION = "2.0.6"; + +// pkg/dist-src/get-web-flow-authorization-url.js +import { oauthAuthorizationUrl } from "@octokit/oauth-authorization-url"; +import { request as defaultRequest } from "@octokit/request"; + +// pkg/dist-src/utils.js +import { RequestError } from "@octokit/request-error"; +function requestToOAuthBaseUrl(request) { + const endpointDefaults = request.endpoint.DEFAULTS; + return /^https:\/\/(api\.)?github\.com$/.test(endpointDefaults.baseUrl) ? 
"https://github.com" : endpointDefaults.baseUrl.replace("/api/v3", ""); +} +async function oauthRequest(request, route, parameters) { + const withOAuthParameters = { + baseUrl: requestToOAuthBaseUrl(request), + headers: { + accept: "application/json" + }, + ...parameters + }; + const response = await request(route, withOAuthParameters); + if ("error" in response.data) { + const error = new RequestError( + `${response.data.error_description} (${response.data.error}, ${response.data.error_uri})`, + 400, + { + request: request.endpoint.merge( + route, + withOAuthParameters + ), + headers: response.headers + } + ); + error.response = response; + throw error; + } + return response; +} + +// pkg/dist-src/get-web-flow-authorization-url.js +function getWebFlowAuthorizationUrl({ + request = defaultRequest, + ...options +}) { + const baseUrl = requestToOAuthBaseUrl(request); + return oauthAuthorizationUrl({ + ...options, + baseUrl + }); +} + +// pkg/dist-src/exchange-web-flow-code.js +import { request as defaultRequest2 } from "@octokit/request"; +async function exchangeWebFlowCode(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest2; + const response = await oauthRequest( + request, + "POST /login/oauth/access_token", + { + client_id: options.clientId, + client_secret: options.clientSecret, + code: options.code, + redirect_uri: options.redirectUrl + } + ); + const authentication = { + clientType: options.clientType, + clientId: options.clientId, + clientSecret: options.clientSecret, + token: response.data.access_token, + scopes: response.data.scope.split(/\s+/).filter(Boolean) + }; + if (options.clientType === "github-app") { + if ("refresh_token" in response.data) { + const apiTimeInMs = new Date(response.headers.date).getTime(); + authentication.refreshToken = response.data.refresh_token, authentication.expiresAt = toTimestamp( + apiTimeInMs, + response.data.expires_in + ), authentication.refreshTokenExpiresAt = toTimestamp( + apiTimeInMs, + response.data.refresh_token_expires_in + ); + } + delete authentication.scopes; + } + return { ...response, authentication }; +} +function toTimestamp(apiTimeInMs, expirationInSeconds) { + return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString(); +} + +// pkg/dist-src/create-device-code.js +import { request as defaultRequest3 } from "@octokit/request"; +async function createDeviceCode(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest3; + const parameters = { + client_id: options.clientId + }; + if ("scopes" in options && Array.isArray(options.scopes)) { + parameters.scope = options.scopes.join(" "); + } + return oauthRequest(request, "POST /login/device/code", parameters); +} + +// pkg/dist-src/exchange-device-code.js +import { request as defaultRequest4 } from "@octokit/request"; +async function exchangeDeviceCode(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest4; + const response = await oauthRequest( + request, + "POST /login/oauth/access_token", + { + client_id: options.clientId, + device_code: options.code, + grant_type: "urn:ietf:params:oauth:grant-type:device_code" + } + ); + const authentication = { + clientType: options.clientType, + clientId: options.clientId, + token: response.data.access_token, + scopes: response.data.scope.split(/\s+/).filter(Boolean) + }; + if ("clientSecret" in 
options) { + authentication.clientSecret = options.clientSecret; + } + if (options.clientType === "github-app") { + if ("refresh_token" in response.data) { + const apiTimeInMs = new Date(response.headers.date).getTime(); + authentication.refreshToken = response.data.refresh_token, authentication.expiresAt = toTimestamp2( + apiTimeInMs, + response.data.expires_in + ), authentication.refreshTokenExpiresAt = toTimestamp2( + apiTimeInMs, + response.data.refresh_token_expires_in + ); + } + delete authentication.scopes; + } + return { ...response, authentication }; +} +function toTimestamp2(apiTimeInMs, expirationInSeconds) { + return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString(); +} + +// pkg/dist-src/check-token.js +import { request as defaultRequest5 } from "@octokit/request"; +import btoa from "btoa-lite"; +async function checkToken(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest5; + const response = await request("POST /applications/{client_id}/token", { + headers: { + authorization: `basic ${btoa( + `${options.clientId}:${options.clientSecret}` + )}` + }, + client_id: options.clientId, + access_token: options.token + }); + const authentication = { + clientType: options.clientType, + clientId: options.clientId, + clientSecret: options.clientSecret, + token: options.token, + scopes: response.data.scopes + }; + if (response.data.expires_at) + authentication.expiresAt = response.data.expires_at; + if (options.clientType === "github-app") { + delete authentication.scopes; + } + return { ...response, authentication }; +} + +// pkg/dist-src/refresh-token.js +import { request as defaultRequest6 } from "@octokit/request"; +async function refreshToken(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest6; + const response = await oauthRequest( + request, + "POST /login/oauth/access_token", + { + client_id: options.clientId, + client_secret: options.clientSecret, + grant_type: "refresh_token", + refresh_token: options.refreshToken + } + ); + const apiTimeInMs = new Date(response.headers.date).getTime(); + const authentication = { + clientType: "github-app", + clientId: options.clientId, + clientSecret: options.clientSecret, + token: response.data.access_token, + refreshToken: response.data.refresh_token, + expiresAt: toTimestamp3(apiTimeInMs, response.data.expires_in), + refreshTokenExpiresAt: toTimestamp3( + apiTimeInMs, + response.data.refresh_token_expires_in + ) + }; + return { ...response, authentication }; +} +function toTimestamp3(apiTimeInMs, expirationInSeconds) { + return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString(); +} + +// pkg/dist-src/scope-token.js +import { request as defaultRequest7 } from "@octokit/request"; +import btoa2 from "btoa-lite"; +async function scopeToken(options) { + const { + request: optionsRequest, + clientType, + clientId, + clientSecret, + token, + ...requestOptions + } = options; + const request = optionsRequest || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest7; + const response = await request( + "POST /applications/{client_id}/token/scoped", + { + headers: { + authorization: `basic ${btoa2(`${clientId}:${clientSecret}`)}` + }, + client_id: clientId, + access_token: token, + ...requestOptions + } + ); + const authentication = Object.assign( + { + clientType, + clientId, + clientSecret, + token: response.data.token + }, + 
response.data.expires_at ? { expiresAt: response.data.expires_at } : {} + ); + return { ...response, authentication }; +} + +// pkg/dist-src/reset-token.js +import { request as defaultRequest8 } from "@octokit/request"; +import btoa3 from "btoa-lite"; +async function resetToken(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest8; + const auth = btoa3(`${options.clientId}:${options.clientSecret}`); + const response = await request( + "PATCH /applications/{client_id}/token", + { + headers: { + authorization: `basic ${auth}` + }, + client_id: options.clientId, + access_token: options.token + } + ); + const authentication = { + clientType: options.clientType, + clientId: options.clientId, + clientSecret: options.clientSecret, + token: response.data.token, + scopes: response.data.scopes + }; + if (response.data.expires_at) + authentication.expiresAt = response.data.expires_at; + if (options.clientType === "github-app") { + delete authentication.scopes; + } + return { ...response, authentication }; +} + +// pkg/dist-src/delete-token.js +import { request as defaultRequest9 } from "@octokit/request"; +import btoa4 from "btoa-lite"; +async function deleteToken(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest9; + const auth = btoa4(`${options.clientId}:${options.clientSecret}`); + return request( + "DELETE /applications/{client_id}/token", + { + headers: { + authorization: `basic ${auth}` + }, + client_id: options.clientId, + access_token: options.token + } + ); +} + +// pkg/dist-src/delete-authorization.js +import { request as defaultRequest10 } from "@octokit/request"; +import btoa5 from "btoa-lite"; +async function deleteAuthorization(options) { + const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */ + defaultRequest10; + const auth = btoa5(`${options.clientId}:${options.clientSecret}`); + return request( + "DELETE /applications/{client_id}/grant", + { + headers: { + authorization: `basic ${auth}` + }, + client_id: options.clientId, + access_token: options.token + } + ); +} +export { + VERSION, + checkToken, + createDeviceCode, + deleteAuthorization, + deleteToken, + exchangeDeviceCode, + exchangeWebFlowCode, + getWebFlowAuthorizationUrl, + refreshToken, + resetToken, + scopeToken +}; diff --git a/dist/node_modules/@octokit/oauth-methods/dist-web/index.js.map b/dist/node_modules/@octokit/oauth-methods/dist-web/index.js.map new file mode 100644 index 0000000..dd0e138 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/version.js", "../dist-src/get-web-flow-authorization-url.js", "../dist-src/utils.js", "../dist-src/exchange-web-flow-code.js", "../dist-src/create-device-code.js", "../dist-src/exchange-device-code.js", "../dist-src/check-token.js", "../dist-src/refresh-token.js", "../dist-src/scope-token.js", "../dist-src/reset-token.js", "../dist-src/delete-token.js", "../dist-src/delete-authorization.js"], + "sourcesContent": ["const VERSION = \"2.0.6\";\nexport {\n VERSION\n};\n", "import { oauthAuthorizationUrl } from \"@octokit/oauth-authorization-url\";\nimport { request as defaultRequest } from \"@octokit/request\";\nimport { requestToOAuthBaseUrl } from \"./utils\";\nfunction getWebFlowAuthorizationUrl({\n request = defaultRequest,\n ...options\n}) {\n const baseUrl = 
requestToOAuthBaseUrl(request);\n return oauthAuthorizationUrl({\n ...options,\n baseUrl\n });\n}\nexport {\n getWebFlowAuthorizationUrl\n};\n", "import { RequestError } from \"@octokit/request-error\";\nfunction requestToOAuthBaseUrl(request) {\n const endpointDefaults = request.endpoint.DEFAULTS;\n return /^https:\\/\\/(api\\.)?github\\.com$/.test(endpointDefaults.baseUrl) ? \"https://github.com\" : endpointDefaults.baseUrl.replace(\"/api/v3\", \"\");\n}\nasync function oauthRequest(request, route, parameters) {\n const withOAuthParameters = {\n baseUrl: requestToOAuthBaseUrl(request),\n headers: {\n accept: \"application/json\"\n },\n ...parameters\n };\n const response = await request(route, withOAuthParameters);\n if (\"error\" in response.data) {\n const error = new RequestError(\n `${response.data.error_description} (${response.data.error}, ${response.data.error_uri})`,\n 400,\n {\n request: request.endpoint.merge(\n route,\n withOAuthParameters\n ),\n headers: response.headers\n }\n );\n error.response = response;\n throw error;\n }\n return response;\n}\nexport {\n oauthRequest,\n requestToOAuthBaseUrl\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport { oauthRequest } from \"./utils\";\nasync function exchangeWebFlowCode(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const response = await oauthRequest(\n request,\n \"POST /login/oauth/access_token\",\n {\n client_id: options.clientId,\n client_secret: options.clientSecret,\n code: options.code,\n redirect_uri: options.redirectUrl\n }\n );\n const authentication = {\n clientType: options.clientType,\n clientId: options.clientId,\n clientSecret: options.clientSecret,\n token: response.data.access_token,\n scopes: response.data.scope.split(/\\s+/).filter(Boolean)\n };\n if (options.clientType === \"github-app\") {\n if (\"refresh_token\" in response.data) {\n const apiTimeInMs = new Date(response.headers.date).getTime();\n authentication.refreshToken = response.data.refresh_token, authentication.expiresAt = toTimestamp(\n apiTimeInMs,\n response.data.expires_in\n ), authentication.refreshTokenExpiresAt = toTimestamp(\n apiTimeInMs,\n response.data.refresh_token_expires_in\n );\n }\n delete authentication.scopes;\n }\n return { ...response, authentication };\n}\nfunction toTimestamp(apiTimeInMs, expirationInSeconds) {\n return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString();\n}\nexport {\n exchangeWebFlowCode\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport { oauthRequest } from \"./utils\";\nasync function createDeviceCode(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const parameters = {\n client_id: options.clientId\n };\n if (\"scopes\" in options && Array.isArray(options.scopes)) {\n parameters.scope = options.scopes.join(\" \");\n }\n return oauthRequest(request, \"POST /login/device/code\", parameters);\n}\nexport {\n createDeviceCode\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport { oauthRequest } from \"./utils\";\nasync function exchangeDeviceCode(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const response = await oauthRequest(\n request,\n \"POST /login/oauth/access_token\",\n {\n client_id: options.clientId,\n device_code: options.code,\n 
grant_type: \"urn:ietf:params:oauth:grant-type:device_code\"\n }\n );\n const authentication = {\n clientType: options.clientType,\n clientId: options.clientId,\n token: response.data.access_token,\n scopes: response.data.scope.split(/\\s+/).filter(Boolean)\n };\n if (\"clientSecret\" in options) {\n authentication.clientSecret = options.clientSecret;\n }\n if (options.clientType === \"github-app\") {\n if (\"refresh_token\" in response.data) {\n const apiTimeInMs = new Date(response.headers.date).getTime();\n authentication.refreshToken = response.data.refresh_token, authentication.expiresAt = toTimestamp(\n apiTimeInMs,\n response.data.expires_in\n ), authentication.refreshTokenExpiresAt = toTimestamp(\n apiTimeInMs,\n response.data.refresh_token_expires_in\n );\n }\n delete authentication.scopes;\n }\n return { ...response, authentication };\n}\nfunction toTimestamp(apiTimeInMs, expirationInSeconds) {\n return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString();\n}\nexport {\n exchangeDeviceCode\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport btoa from \"btoa-lite\";\nasync function checkToken(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const response = await request(\"POST /applications/{client_id}/token\", {\n headers: {\n authorization: `basic ${btoa(\n `${options.clientId}:${options.clientSecret}`\n )}`\n },\n client_id: options.clientId,\n access_token: options.token\n });\n const authentication = {\n clientType: options.clientType,\n clientId: options.clientId,\n clientSecret: options.clientSecret,\n token: options.token,\n scopes: response.data.scopes\n };\n if (response.data.expires_at)\n authentication.expiresAt = response.data.expires_at;\n if (options.clientType === \"github-app\") {\n delete authentication.scopes;\n }\n return { ...response, authentication };\n}\nexport {\n checkToken\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport { oauthRequest } from \"./utils\";\nasync function refreshToken(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const response = await oauthRequest(\n request,\n \"POST /login/oauth/access_token\",\n {\n client_id: options.clientId,\n client_secret: options.clientSecret,\n grant_type: \"refresh_token\",\n refresh_token: options.refreshToken\n }\n );\n const apiTimeInMs = new Date(response.headers.date).getTime();\n const authentication = {\n clientType: \"github-app\",\n clientId: options.clientId,\n clientSecret: options.clientSecret,\n token: response.data.access_token,\n refreshToken: response.data.refresh_token,\n expiresAt: toTimestamp(apiTimeInMs, response.data.expires_in),\n refreshTokenExpiresAt: toTimestamp(\n apiTimeInMs,\n response.data.refresh_token_expires_in\n )\n };\n return { ...response, authentication };\n}\nfunction toTimestamp(apiTimeInMs, expirationInSeconds) {\n return new Date(apiTimeInMs + expirationInSeconds * 1e3).toISOString();\n}\nexport {\n refreshToken\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport btoa from \"btoa-lite\";\nasync function scopeToken(options) {\n const {\n request: optionsRequest,\n clientType,\n clientId,\n clientSecret,\n token,\n ...requestOptions\n } = options;\n const request = optionsRequest || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const response = await 
request(\n \"POST /applications/{client_id}/token/scoped\",\n {\n headers: {\n authorization: `basic ${btoa(`${clientId}:${clientSecret}`)}`\n },\n client_id: clientId,\n access_token: token,\n ...requestOptions\n }\n );\n const authentication = Object.assign(\n {\n clientType,\n clientId,\n clientSecret,\n token: response.data.token\n },\n response.data.expires_at ? { expiresAt: response.data.expires_at } : {}\n );\n return { ...response, authentication };\n}\nexport {\n scopeToken\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport btoa from \"btoa-lite\";\nasync function resetToken(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const auth = btoa(`${options.clientId}:${options.clientSecret}`);\n const response = await request(\n \"PATCH /applications/{client_id}/token\",\n {\n headers: {\n authorization: `basic ${auth}`\n },\n client_id: options.clientId,\n access_token: options.token\n }\n );\n const authentication = {\n clientType: options.clientType,\n clientId: options.clientId,\n clientSecret: options.clientSecret,\n token: response.data.token,\n scopes: response.data.scopes\n };\n if (response.data.expires_at)\n authentication.expiresAt = response.data.expires_at;\n if (options.clientType === \"github-app\") {\n delete authentication.scopes;\n }\n return { ...response, authentication };\n}\nexport {\n resetToken\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport btoa from \"btoa-lite\";\nasync function deleteToken(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const auth = btoa(`${options.clientId}:${options.clientSecret}`);\n return request(\n \"DELETE /applications/{client_id}/token\",\n {\n headers: {\n authorization: `basic ${auth}`\n },\n client_id: options.clientId,\n access_token: options.token\n }\n );\n}\nexport {\n deleteToken\n};\n", "import { request as defaultRequest } from \"@octokit/request\";\nimport btoa from \"btoa-lite\";\nasync function deleteAuthorization(options) {\n const request = options.request || /* istanbul ignore next: we always pass a custom request in tests */\n defaultRequest;\n const auth = btoa(`${options.clientId}:${options.clientSecret}`);\n return request(\n \"DELETE /applications/{client_id}/grant\",\n {\n headers: {\n authorization: `basic ${auth}`\n },\n client_id: options.clientId,\n access_token: options.token\n }\n );\n}\nexport {\n deleteAuthorization\n};\n"], + "mappings": 
";AAAA,IAAM,UAAU;;;ACAhB,SAAS,6BAA6B;AACtC,SAAS,WAAW,sBAAsB;;;ACD1C,SAAS,oBAAoB;AAC7B,SAAS,sBAAsB,SAAS;AACtC,QAAM,mBAAmB,QAAQ,SAAS;AAC1C,SAAO,kCAAkC,KAAK,iBAAiB,OAAO,IAAI,uBAAuB,iBAAiB,QAAQ,QAAQ,WAAW,EAAE;AACjJ;AACA,eAAe,aAAa,SAAS,OAAO,YAAY;AACtD,QAAM,sBAAsB;AAAA,IAC1B,SAAS,sBAAsB,OAAO;AAAA,IACtC,SAAS;AAAA,MACP,QAAQ;AAAA,IACV;AAAA,IACA,GAAG;AAAA,EACL;AACA,QAAM,WAAW,MAAM,QAAQ,OAAO,mBAAmB;AACzD,MAAI,WAAW,SAAS,MAAM;AAC5B,UAAM,QAAQ,IAAI;AAAA,MAChB,GAAG,SAAS,KAAK,sBAAsB,SAAS,KAAK,UAAU,SAAS,KAAK;AAAA,MAC7E;AAAA,MACA;AAAA,QACE,SAAS,QAAQ,SAAS;AAAA,UACxB;AAAA,UACA;AAAA,QACF;AAAA,QACA,SAAS,SAAS;AAAA,MACpB;AAAA,IACF;AACA,UAAM,WAAW;AACjB,UAAM;AAAA,EACR;AACA,SAAO;AACT;;;AD3BA,SAAS,2BAA2B;AAAA,EAClC,UAAU;AAAA,EACV,GAAG;AACL,GAAG;AACD,QAAM,UAAU,sBAAsB,OAAO;AAC7C,SAAO,sBAAsB;AAAA,IAC3B,GAAG;AAAA,IACH;AAAA,EACF,CAAC;AACH;;;AEZA,SAAS,WAAWA,uBAAsB;AAE1C,eAAe,oBAAoB,SAAS;AAC1C,QAAM,UAAU,QAAQ;AAAA,EACxBC;AACA,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,MACE,WAAW,QAAQ;AAAA,MACnB,eAAe,QAAQ;AAAA,MACvB,MAAM,QAAQ;AAAA,MACd,cAAc,QAAQ;AAAA,IACxB;AAAA,EACF;AACA,QAAM,iBAAiB;AAAA,IACrB,YAAY,QAAQ;AAAA,IACpB,UAAU,QAAQ;AAAA,IAClB,cAAc,QAAQ;AAAA,IACtB,OAAO,SAAS,KAAK;AAAA,IACrB,QAAQ,SAAS,KAAK,MAAM,MAAM,KAAK,EAAE,OAAO,OAAO;AAAA,EACzD;AACA,MAAI,QAAQ,eAAe,cAAc;AACvC,QAAI,mBAAmB,SAAS,MAAM;AACpC,YAAM,cAAc,IAAI,KAAK,SAAS,QAAQ,IAAI,EAAE,QAAQ;AAC5D,qBAAe,eAAe,SAAS,KAAK,eAAe,eAAe,YAAY;AAAA,QACpF;AAAA,QACA,SAAS,KAAK;AAAA,MAChB,GAAG,eAAe,wBAAwB;AAAA,QACxC;AAAA,QACA,SAAS,KAAK;AAAA,MAChB;AAAA,IACF;AACA,WAAO,eAAe;AAAA,EACxB;AACA,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;AACA,SAAS,YAAY,aAAa,qBAAqB;AACrD,SAAO,IAAI,KAAK,cAAc,sBAAsB,GAAG,EAAE,YAAY;AACvE;;;ACvCA,SAAS,WAAWC,uBAAsB;AAE1C,eAAe,iBAAiB,SAAS;AACvC,QAAM,UAAU,QAAQ;AAAA,EACxBC;AACA,QAAM,aAAa;AAAA,IACjB,WAAW,QAAQ;AAAA,EACrB;AACA,MAAI,YAAY,WAAW,MAAM,QAAQ,QAAQ,MAAM,GAAG;AACxD,eAAW,QAAQ,QAAQ,OAAO,KAAK,GAAG;AAAA,EAC5C;AACA,SAAO,aAAa,SAAS,2BAA2B,UAAU;AACpE;;;ACZA,SAAS,WAAWC,uBAAsB;AAE1C,eAAe,mBAAmB,SAAS;AACzC,QAAM,UAAU,QAAQ;AAAA,EACxBC;AACA,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,MACE,WAAW,QAAQ;AAAA,MACnB,aAAa,QAAQ;AAAA,MACrB,YAAY;AAAA,IACd;AAAA,EACF;AACA,QAAM,iBAAiB;AAAA,IACrB,YAAY,QAAQ;AAAA,IACpB,UAAU,QAAQ;AAAA,IAClB,OAAO,SAAS,KAAK;AAAA,IACrB,QAAQ,SAAS,KAAK,MAAM,MAAM,KAAK,EAAE,OAAO,OAAO;AAAA,EACzD;AACA,MAAI,kBAAkB,SAAS;AAC7B,mBAAe,eAAe,QAAQ;AAAA,EACxC;AACA,MAAI,QAAQ,eAAe,cAAc;AACvC,QAAI,mBAAmB,SAAS,MAAM;AACpC,YAAM,cAAc,IAAI,KAAK,SAAS,QAAQ,IAAI,EAAE,QAAQ;AAC5D,qBAAe,eAAe,SAAS,KAAK,eAAe,eAAe,YAAYC;AAAA,QACpF;AAAA,QACA,SAAS,KAAK;AAAA,MAChB,GAAG,eAAe,wBAAwBA;AAAA,QACxC;AAAA,QACA,SAAS,KAAK;AAAA,MAChB;AAAA,IACF;AACA,WAAO,eAAe;AAAA,EACxB;AACA,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;AACA,SAASA,aAAY,aAAa,qBAAqB;AACrD,SAAO,IAAI,KAAK,cAAc,sBAAsB,GAAG,EAAE,YAAY;AACvE;;;ACxCA,SAAS,WAAWC,uBAAsB;AAC1C,OAAO,UAAU;AACjB,eAAe,WAAW,SAAS;AACjC,QAAM,UAAU,QAAQ;AAAA,EACxBA;AACA,QAAM,WAAW,MAAM,QAAQ,wCAAwC;AAAA,IACrE,SAAS;AAAA,MACP,eAAe,SAAS;AAAA,QACtB,GAAG,QAAQ,YAAY,QAAQ;AAAA,MACjC;AAAA,IACF;AAAA,IACA,WAAW,QAAQ;AAAA,IACnB,cAAc,QAAQ;AAAA,EACxB,CAAC;AACD,QAAM,iBAAiB;AAAA,IACrB,YAAY,QAAQ;AAAA,IACpB,UAAU,QAAQ;AAAA,IAClB,cAAc,QAAQ;AAAA,IACtB,OAAO,QAAQ;AAAA,IACf,QAAQ,SAAS,KAAK;AAAA,EACxB;AACA,MAAI,SAAS,KAAK;AAChB,mBAAe,YAAY,SAAS,KAAK;AAC3C,MAAI,QAAQ,eAAe,cAAc;AACvC,WAAO,eAAe;AAAA,EACxB;AACA,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;;;AC3BA,SAAS,WAAWC,uBAAsB;AAE1C,eAAe,aAAa,SAAS;AACnC,QAAM,UAAU,QAAQ;AAAA,EACxBC;AACA,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,MACE,WAAW,QAAQ;AAAA,MACnB,eAAe,QAAQ;AAAA,MACvB,YAAY;AAAA,MACZ,eAAe,QAAQ;AAAA,IACzB;AAAA,EACF;AACA,QAAM,cAAc,IAAI,KAAK,SAAS,QAAQ,IAAI,EAAE,QAAQ;AAC5D,QAAM,iBAAiB;AAAA,IACrB
,YAAY;AAAA,IACZ,UAAU,QAAQ;AAAA,IAClB,cAAc,QAAQ;AAAA,IACtB,OAAO,SAAS,KAAK;AAAA,IACrB,cAAc,SAAS,KAAK;AAAA,IAC5B,WAAWC,aAAY,aAAa,SAAS,KAAK,UAAU;AAAA,IAC5D,uBAAuBA;AAAA,MACrB;AAAA,MACA,SAAS,KAAK;AAAA,IAChB;AAAA,EACF;AACA,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;AACA,SAASA,aAAY,aAAa,qBAAqB;AACrD,SAAO,IAAI,KAAK,cAAc,sBAAsB,GAAG,EAAE,YAAY;AACvE;;;AChCA,SAAS,WAAWC,uBAAsB;AAC1C,OAAOC,WAAU;AACjB,eAAe,WAAW,SAAS;AACjC,QAAM;AAAA,IACJ,SAAS;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EACL,IAAI;AACJ,QAAM,UAAU;AAAA,EAChBD;AACA,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA;AAAA,MACE,SAAS;AAAA,QACP,eAAe,SAASC,MAAK,GAAG,YAAY,cAAc;AAAA,MAC5D;AAAA,MACA,WAAW;AAAA,MACX,cAAc;AAAA,MACd,GAAG;AAAA,IACL;AAAA,EACF;AACA,QAAM,iBAAiB,OAAO;AAAA,IAC5B;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA,OAAO,SAAS,KAAK;AAAA,IACvB;AAAA,IACA,SAAS,KAAK,aAAa,EAAE,WAAW,SAAS,KAAK,WAAW,IAAI,CAAC;AAAA,EACxE;AACA,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;;;AClCA,SAAS,WAAWC,uBAAsB;AAC1C,OAAOC,WAAU;AACjB,eAAe,WAAW,SAAS;AACjC,QAAM,UAAU,QAAQ;AAAA,EACxBD;AACA,QAAM,OAAOC,MAAK,GAAG,QAAQ,YAAY,QAAQ,cAAc;AAC/D,QAAM,WAAW,MAAM;AAAA,IACrB;AAAA,IACA;AAAA,MACE,SAAS;AAAA,QACP,eAAe,SAAS;AAAA,MAC1B;AAAA,MACA,WAAW,QAAQ;AAAA,MACnB,cAAc,QAAQ;AAAA,IACxB;AAAA,EACF;AACA,QAAM,iBAAiB;AAAA,IACrB,YAAY,QAAQ;AAAA,IACpB,UAAU,QAAQ;AAAA,IAClB,cAAc,QAAQ;AAAA,IACtB,OAAO,SAAS,KAAK;AAAA,IACrB,QAAQ,SAAS,KAAK;AAAA,EACxB;AACA,MAAI,SAAS,KAAK;AAChB,mBAAe,YAAY,SAAS,KAAK;AAC3C,MAAI,QAAQ,eAAe,cAAc;AACvC,WAAO,eAAe;AAAA,EACxB;AACA,SAAO,EAAE,GAAG,UAAU,eAAe;AACvC;;;AC7BA,SAAS,WAAWC,uBAAsB;AAC1C,OAAOC,WAAU;AACjB,eAAe,YAAY,SAAS;AAClC,QAAM,UAAU,QAAQ;AAAA,EACxBD;AACA,QAAM,OAAOC,MAAK,GAAG,QAAQ,YAAY,QAAQ,cAAc;AAC/D,SAAO;AAAA,IACL;AAAA,IACA;AAAA,MACE,SAAS;AAAA,QACP,eAAe,SAAS;AAAA,MAC1B;AAAA,MACA,WAAW,QAAQ;AAAA,MACnB,cAAc,QAAQ;AAAA,IACxB;AAAA,EACF;AACF;;;AChBA,SAAS,WAAWC,wBAAsB;AAC1C,OAAOC,WAAU;AACjB,eAAe,oBAAoB,SAAS;AAC1C,QAAM,UAAU,QAAQ;AAAA,EACxBD;AACA,QAAM,OAAOC,MAAK,GAAG,QAAQ,YAAY,QAAQ,cAAc;AAC/D,SAAO;AAAA,IACL;AAAA,IACA;AAAA,MACE,SAAS;AAAA,QACP,eAAe,SAAS;AAAA,MAC1B;AAAA,MACA,WAAW,QAAQ;AAAA,MACnB,cAAc,QAAQ;AAAA,IACxB;AAAA,EACF;AACF;", + "names": ["defaultRequest", "defaultRequest", "defaultRequest", "defaultRequest", "defaultRequest", "defaultRequest", "toTimestamp", "defaultRequest", "defaultRequest", "defaultRequest", "toTimestamp", "defaultRequest", "btoa", "defaultRequest", "btoa", "defaultRequest", "btoa", "defaultRequest", "btoa"] +} diff --git a/dist/node_modules/@octokit/oauth-methods/package.json b/dist/node_modules/@octokit/oauth-methods/package.json new file mode 100644 index 0000000..2496767 --- /dev/null +++ b/dist/node_modules/@octokit/oauth-methods/package.json @@ -0,0 +1,53 @@ +{ + "name": "@octokit/oauth-methods", + "version": "2.0.6", + "description": "Set of stateless request methods to create, check, reset, refresh, and delete user access tokens for OAuth and GitHub Apps", + "repository": "https://github.com/octokit/oauth-methods.js", + "keywords": [ + "github", + "api", + "sdk", + "toolkit", + "oauth" + ], + "author": "Gregor Martynus (https://dev.to/gr2m)", + "license": "MIT", + "dependencies": { + "@octokit/oauth-authorization-url": "^5.0.0", + "@octokit/request": "^6.2.3", + "@octokit/request-error": "^3.0.3", + "@octokit/types": "^9.0.0", + "btoa-lite": "^1.0.0" + }, + "devDependencies": { + "@octokit/tsconfig": "^2.0.0", + "@types/btoa-lite": "^1.0.0", + "@types/jest": "^29.0.0", + "@types/node": "^18.0.0", + "esbuild": "^0.17.19", + "fetch-mock": "^9.11.0", + "glob": "^10.2.7", + "jest": "^29.0.0", + "prettier": "2.8.8", + "semantic-release": "^21.0.0", + 
"semantic-release-plugin-update-version-in-files": "^1.1.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "peerDependencies": {}, + "publishConfig": { + "access": "public" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "browser": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "module": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/openapi-types/LICENSE b/dist/node_modules/@octokit/openapi-types/LICENSE new file mode 100644 index 0000000..c61fbbe --- /dev/null +++ b/dist/node_modules/@octokit/openapi-types/LICENSE @@ -0,0 +1,7 @@ +Copyright 2020 Gregor Martynus + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/dist/node_modules/@octokit/openapi-types/README.md b/dist/node_modules/@octokit/openapi-types/README.md new file mode 100644 index 0000000..9da833c --- /dev/null +++ b/dist/node_modules/@octokit/openapi-types/README.md @@ -0,0 +1,17 @@ +# @octokit/openapi-types + +> Generated TypeScript definitions based on GitHub's OpenAPI spec + +This package is continously updated based on [GitHub's OpenAPI specification](https://github.com/github/rest-api-description/) + +## Usage + +```ts +import { components } from "@octokit/openapi-types"; + +type Repository = components["schemas"]["full-repository"]; +``` + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/openapi-types/package.json b/dist/node_modules/@octokit/openapi-types/package.json new file mode 100644 index 0000000..c9d6f21 --- /dev/null +++ b/dist/node_modules/@octokit/openapi-types/package.json @@ -0,0 +1,20 @@ +{ + "name": "@octokit/openapi-types", + "description": "Generated TypeScript definitions based on GitHub's OpenAPI spec for api.github.com", + "repository": { + "type": "git", + "url": "https://github.com/octokit/openapi-types.ts.git", + "directory": "packages/openapi-types" + }, + "publishConfig": { + "access": "public" + }, + "version": "18.1.1", + "main": "", + "types": "types.d.ts", + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "octokit": { + "openapi-version": "12.0.0" + } +} diff --git a/dist/node_modules/@octokit/openapi-types/types.d.ts b/dist/node_modules/@octokit/openapi-types/types.d.ts new file mode 100644 index 0000000..d4dbb33 --- /dev/null +++ b/dist/node_modules/@octokit/openapi-types/types.d.ts @@ -0,0 +1,111421 @@ +/** + * This file was auto-generated by openapi-typescript. + * Do not make direct changes to the file. 
+ */ + +/** OneOf type helpers */ +type Without<T, U> = { [P in Exclude<keyof T, keyof U>]?: never }; +type XOR<T, U> = T | U extends object + ? (Without<T, U> & U) | (Without<U, T> & T) + : T | U; +type OneOf<T extends any[]> = T extends [infer Only] + ? Only + : T extends [infer A, infer B, ...infer Rest] + ? OneOf<[XOR<A, B>, ...Rest]> + : never; + +export interface paths { + "/": { + /** + * GitHub API Root + * @description Get Hypermedia links to resources accessible in GitHub's REST API + */ + get: operations["meta/root"]; + }; + "/app": { + /** + * Get the authenticated app + * @description Returns the GitHub App associated with the authentication credentials used. To see how many app installations are associated with this GitHub App, see the `installations_count` in the response. For more details about your app's installations, see the "[List installations for the authenticated app](https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-authenticated"]; + }; + "/app-manifests/{code}/conversions": { + /** + * Create a GitHub App from a manifest + * @description Use this endpoint to complete the handshake necessary when implementing the [GitHub App Manifest flow](https://docs.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/). When you create a GitHub App with the manifest flow, you receive a temporary `code` used to retrieve the GitHub App's `id`, `pem` (private key), and `webhook_secret`. + */ + post: operations["apps/create-from-manifest"]; + }; + "/app/hook/config": { + /** + * Get a webhook configuration for an app + * @description Returns the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-webhook-config-for-app"]; + /** + * Update a webhook configuration for an app + * @description Updates the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + patch: operations["apps/update-webhook-config-for-app"]; + }; + "/app/hook/deliveries": { + /** + * List deliveries for an app webhook + * @description Returns a list of webhook deliveries for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/list-webhook-deliveries"]; + }; + "/app/hook/deliveries/{delivery_id}": { + /** + * Get a delivery for an app webhook + * @description Returns a delivery for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
+ */ + get: operations["apps/get-webhook-delivery"]; + }; + "/app/hook/deliveries/{delivery_id}/attempts": { + /** + * Redeliver a delivery for an app webhook + * @description Redeliver a delivery for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + post: operations["apps/redeliver-webhook-delivery"]; + }; + "/app/installation-requests": { + /** + * List installation requests for the authenticated app + * @description Lists all the pending installation requests for the authenticated GitHub App. + */ + get: operations["apps/list-installation-requests-for-authenticated-app"]; + }; + "/app/installations": { + /** + * List installations for the authenticated app + * @description You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + * + * The permissions the installation has are included under the `permissions` key. + */ + get: operations["apps/list-installations"]; + }; + "/app/installations/{installation_id}": { + /** + * Get an installation for the authenticated app + * @description Enables an authenticated GitHub App to find an installation's information using the installation id. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-installation"]; + /** + * Delete an installation for the authenticated app + * @description Uninstalls a GitHub App on a user, organization, or business account. If you prefer to temporarily suspend an app's access to your account's resources, then we recommend the "[Suspend an app installation](https://docs.github.com/rest/reference/apps/#suspend-an-app-installation)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + delete: operations["apps/delete-installation"]; + }; + "/app/installations/{installation_id}/access_tokens": { + /** + * Create an installation access token for an app + * @description Creates an installation access token that enables a GitHub App to make authenticated API requests for the app's installation on an organization or individual account. Installation tokens expire one hour from the time you create them. Using an expired token produces a status code of `401 - Unauthorized`, and requires creating a new installation token. By default the installation token has access to all repositories that the installation can access. To restrict the access to specific repositories, you can provide the `repository_ids` when creating the token. When you omit `repository_ids`, the response does not contain the `repositories` key. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + post: operations["apps/create-installation-access-token"]; + }; + "/app/installations/{installation_id}/suspended": { + /** + * Suspend an app installation + * @description Suspends a GitHub App on a user, organization, or business account, which blocks the app from accessing the account's resources. 
When a GitHub App is suspended, the app's access to the GitHub API or webhook events is blocked for that account. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + put: operations["apps/suspend-installation"]; + /** + * Unsuspend an app installation + * @description Removes a GitHub App installation suspension. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + delete: operations["apps/unsuspend-installation"]; + }; + "/applications/{client_id}/grant": { + /** + * Delete an app authorization + * @description OAuth and GitHub application owners can revoke a grant for their application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid OAuth `access_token` as an input parameter and the grant for the token's owner will be deleted. + * Deleting an application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). + */ + delete: operations["apps/delete-authorization"]; + }; + "/applications/{client_id}/token": { + /** + * Check a token + * @description OAuth applications and GitHub applications with OAuth authorizations can use this API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) to use this endpoint, where the username is the application `client_id` and the password is its `client_secret`. Invalid tokens will return `404 NOT FOUND`. + */ + post: operations["apps/check-token"]; + /** + * Delete an app token + * @description OAuth or GitHub application owners can revoke a single token for an OAuth application or a GitHub application with an OAuth authorization. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the application's `client_id` and `client_secret` as the username and password. + */ + delete: operations["apps/delete-token"]; + /** + * Reset a token + * @description OAuth applications and GitHub applications with OAuth authorizations can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. 
+ */ + patch: operations["apps/reset-token"]; + }; + "/applications/{client_id}/token/scoped": { + /** + * Create a scoped access token + * @description Use a non-scoped user-to-server access token to create a repository scoped and/or permission scoped user-to-server access token. You can specify which repositories the token can access and which permissions are granted to the token. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the `client_id` and `client_secret` of the GitHub App as the username and password. Invalid tokens will return `404 NOT FOUND`. + */ + post: operations["apps/scope-token"]; + }; + "/apps/{app_slug}": { + /** + * Get an app + * @description **Note**: The `:app_slug` is just the URL-friendly name of your GitHub App. You can find this on the settings page for your GitHub App (e.g., `https://github.com/settings/apps/:app_slug`). + * + * If the GitHub App you specify is public, you can access this endpoint without authenticating. If the GitHub App you specify is private, you must authenticate with a [personal access token](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + get: operations["apps/get-by-slug"]; + }; + "/codes_of_conduct": { + /** Get all codes of conduct */ + get: operations["codes-of-conduct/get-all-codes-of-conduct"]; + }; + "/codes_of_conduct/{key}": { + /** Get a code of conduct */ + get: operations["codes-of-conduct/get-conduct-code"]; + }; + "/emojis": { + /** + * Get emojis + * @description Lists all the emojis available to use on GitHub. + */ + get: operations["emojis/get"]; + }; + "/enterprises/{enterprise}/dependabot/alerts": { + /** + * List Dependabot alerts for an enterprise + * @description Lists Dependabot alerts for repositories that are owned by the specified enterprise. + * To use this endpoint, you must be a member of the enterprise, and you must use an + * access token with the `repo` scope or `security_events` scope. + * Alerts are only returned for organizations in the enterprise for which you are an organization owner or a security manager. For more information about security managers, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + */ + get: operations["dependabot/list-alerts-for-enterprise"]; + }; + "/enterprises/{enterprise}/secret-scanning/alerts": { + /** + * List secret scanning alerts for an enterprise + * @description Lists secret scanning alerts for eligible repositories in an enterprise, from newest to oldest. + * To use this endpoint, you must be a member of the enterprise, and you must use an access token with the `repo` scope or `security_events` scope. Alerts are only returned for organizations in the enterprise for which you are an organization owner or a [security manager](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization). 
+ */ + get: operations["secret-scanning/list-alerts-for-enterprise"]; + }; + "/events": { + /** + * List public events + * @description We delay the public events feed by five minutes, which means the most recent event returned by the public events API actually occurred at least five minutes ago. + */ + get: operations["activity/list-public-events"]; + }; + "/feeds": { + /** + * Get feeds + * @description GitHub provides several timeline resources in [Atom](http://en.wikipedia.org/wiki/Atom_(standard)) format. The Feeds API lists all the feeds available to the authenticated user: + * + * * **Timeline**: The GitHub global public timeline + * * **User**: The public timeline for any user, using [URI template](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) + * * **Current user public**: The public timeline for the authenticated user + * * **Current user**: The private timeline for the authenticated user + * * **Current user actor**: The private timeline for activity created by the authenticated user + * * **Current user organizations**: The private timeline for the organizations the authenticated user is a member of. + * * **Security advisories**: A collection of public announcements that provide information about security-related vulnerabilities in software on GitHub. + * + * **Note**: Private feeds are only returned when [authenticating via Basic Auth](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) since current feed URIs use the older, non revocable auth tokens. + */ + get: operations["activity/get-feeds"]; + }; + "/gists": { + /** + * List gists for the authenticated user + * @description Lists the authenticated user's gists or if called anonymously, this endpoint returns all public gists: + */ + get: operations["gists/list"]; + /** + * Create a gist + * @description Allows you to add a new gist with one or more files. + * + * **Note:** Don't name your files "gistfile" with a numerical suffix. This is the format of the automatic naming scheme that Gist uses internally. + */ + post: operations["gists/create"]; + }; + "/gists/public": { + /** + * List public gists + * @description List public gists sorted by most recently updated to least recently updated. + * + * Note: With [pagination](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination), you can fetch up to 3000 gists. For example, you can fetch 100 pages with 30 gists per page or 30 pages with 100 gists per page. + */ + get: operations["gists/list-public"]; + }; + "/gists/starred": { + /** + * List starred gists + * @description List the authenticated user's starred gists: + */ + get: operations["gists/list-starred"]; + }; + "/gists/{gist_id}": { + /** Get a gist */ + get: operations["gists/get"]; + /** Delete a gist */ + delete: operations["gists/delete"]; + /** + * Update a gist + * @description Allows you to update a gist's description and to update, delete, or rename gist files. Files from the previous version of the gist that aren't explicitly changed during an edit are unchanged. 
+ */ + patch: operations["gists/update"]; + }; + "/gists/{gist_id}/comments": { + /** List gist comments */ + get: operations["gists/list-comments"]; + /** Create a gist comment */ + post: operations["gists/create-comment"]; + }; + "/gists/{gist_id}/comments/{comment_id}": { + /** Get a gist comment */ + get: operations["gists/get-comment"]; + /** Delete a gist comment */ + delete: operations["gists/delete-comment"]; + /** Update a gist comment */ + patch: operations["gists/update-comment"]; + }; + "/gists/{gist_id}/commits": { + /** List gist commits */ + get: operations["gists/list-commits"]; + }; + "/gists/{gist_id}/forks": { + /** List gist forks */ + get: operations["gists/list-forks"]; + /** Fork a gist */ + post: operations["gists/fork"]; + }; + "/gists/{gist_id}/star": { + /** Check if a gist is starred */ + get: operations["gists/check-is-starred"]; + /** + * Star a gist + * @description Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["gists/star"]; + /** Unstar a gist */ + delete: operations["gists/unstar"]; + }; + "/gists/{gist_id}/{sha}": { + /** Get a gist revision */ + get: operations["gists/get-revision"]; + }; + "/gitignore/templates": { + /** + * Get all gitignore templates + * @description List all templates available to pass as an option when [creating a repository](https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user). + */ + get: operations["gitignore/get-all-templates"]; + }; + "/gitignore/templates/{name}": { + /** + * Get a gitignore template + * @description The API also allows fetching the source of a single template. + * Use the raw [media type](https://docs.github.com/rest/overview/media-types/) to get the raw contents. + */ + get: operations["gitignore/get-template"]; + }; + "/installation/repositories": { + /** + * List repositories accessible to the app installation + * @description List repositories that an app installation can access. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + get: operations["apps/list-repos-accessible-to-installation"]; + }; + "/installation/token": { + /** + * Revoke an installation access token + * @description Revokes the installation token you're using to authenticate as an installation and access this endpoint. + * + * Once an installation token is revoked, the token is invalidated and cannot be used. Other endpoints that require the revoked installation token must have a new installation token to work. You can create a new token using the "[Create an installation access token for an app](https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app)" endpoint. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + delete: operations["apps/revoke-installation-access-token"]; + }; + "/issues": { + /** + * List issues assigned to the authenticated user + * @description List issues assigned to the authenticated user across all visible repositories including owned repositories, member + * repositories, and organization repositories. 
You can use the `filter` query parameter to fetch issues that are not + * necessarily assigned to you. + * + * + * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: operations["issues/list"]; + }; + "/licenses": { + /** Get all commonly used licenses */ + get: operations["licenses/get-all-commonly-used"]; + }; + "/licenses/{license}": { + /** Get a license */ + get: operations["licenses/get"]; + }; + "/markdown": { + /** Render a Markdown document */ + post: operations["markdown/render"]; + }; + "/markdown/raw": { + /** + * Render a Markdown document in raw mode + * @description You must send Markdown as plain text (using a `Content-Type` header of `text/plain` or `text/x-markdown`) to this endpoint, rather than using JSON format. In raw mode, [GitHub Flavored Markdown](https://github.github.com/gfm/) is not supported and Markdown will be rendered in plain format like a README.md file. Markdown content must be 400 KB or less. + */ + post: operations["markdown/render-raw"]; + }; + "/marketplace_listing/accounts/{account_id}": { + /** + * Get a subscription plan for an account + * @description Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/get-subscription-plan-for-account"]; + }; + "/marketplace_listing/plans": { + /** + * List plans + * @description Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/list-plans"]; + }; + "/marketplace_listing/plans/{plan_id}/accounts": { + /** + * List accounts for a plan + * @description Returns user and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. 
OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/list-accounts-for-plan"]; + }; + "/marketplace_listing/stubbed/accounts/{account_id}": { + /** + * Get a subscription plan for an account (stubbed) + * @description Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/get-subscription-plan-for-account-stubbed"]; + }; + "/marketplace_listing/stubbed/plans": { + /** + * List plans (stubbed) + * @description Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/list-plans-stubbed"]; + }; + "/marketplace_listing/stubbed/plans/{plan_id}/accounts": { + /** + * List accounts for a plan (stubbed) + * @description Returns repository and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/list-accounts-for-plan-stubbed"]; + }; + "/meta": { + /** + * Get GitHub meta information + * @description Returns meta information about GitHub, including a list of GitHub's IP addresses. For more information, see "[About GitHub's IP addresses](https://docs.github.com/articles/about-github-s-ip-addresses/)." + * + * The API's response also includes a list of GitHub's domain names. + * + * The values shown in the documentation's response are example values. You must always query the API directly to get the latest values. + * + * **Note:** This endpoint returns both IPv4 and IPv6 addresses. However, not all features support IPv6. You should refer to the specific documentation for each feature to determine if IPv6 is supported. 
+ */ + get: operations["meta/get"]; + }; + "/networks/{owner}/{repo}/events": { + /** List public events for a network of repositories */ + get: operations["activity/list-public-events-for-repo-network"]; + }; + "/notifications": { + /** + * List notifications for the authenticated user + * @description List all notifications for the current user, sorted by most recently updated. + */ + get: operations["activity/list-notifications-for-authenticated-user"]; + /** + * Mark notifications as read + * @description Marks all notifications as "read" for the current user. If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. + */ + put: operations["activity/mark-notifications-as-read"]; + }; + "/notifications/threads/{thread_id}": { + /** + * Get a thread + * @description Gets information about a notification thread. + */ + get: operations["activity/get-thread"]; + /** + * Mark a thread as read + * @description Marks a thread as "read." Marking a thread as "read" is equivalent to clicking a notification in your notification inbox on GitHub: https://github.com/notifications. + */ + patch: operations["activity/mark-thread-as-read"]; + }; + "/notifications/threads/{thread_id}/subscription": { + /** + * Get a thread subscription for the authenticated user + * @description This checks to see if the current user is subscribed to a thread. You can also [get a repository subscription](https://docs.github.com/rest/reference/activity#get-a-repository-subscription). + * + * Note that subscriptions are only generated if a user is participating in a conversation--for example, they've replied to the thread, were **@mentioned**, or manually subscribe to a thread. + */ + get: operations["activity/get-thread-subscription-for-authenticated-user"]; + /** + * Set a thread subscription + * @description If you are watching a repository, you receive notifications for all threads by default. Use this endpoint to ignore future notifications for threads until you comment on the thread or get an **@mention**. + * + * You can also use this endpoint to subscribe to threads that you are currently not receiving notifications for or to subscribed to threads that you have previously ignored. + * + * Unsubscribing from a conversation in a repository that you are not watching is functionally equivalent to the [Delete a thread subscription](https://docs.github.com/rest/reference/activity#delete-a-thread-subscription) endpoint. + */ + put: operations["activity/set-thread-subscription"]; + /** + * Delete a thread subscription + * @description Mutes all future notifications for a conversation until you comment on the thread or get an **@mention**. If you are watching the repository of the thread, you will still receive notifications. To ignore future notifications for a repository you are watching, use the [Set a thread subscription](https://docs.github.com/rest/reference/activity#set-a-thread-subscription) endpoint and set `ignore` to `true`. 
+ */ + delete: operations["activity/delete-thread-subscription"]; + }; + "/octocat": { + /** + * Get Octocat + * @description Get the octocat as ASCII art + */ + get: operations["meta/get-octocat"]; + }; + "/organizations": { + /** + * List organizations + * @description Lists all organizations, in the order that they were created on GitHub. + * + * **Note:** Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api#using-link-headers) to get the URL for the next page of organizations. + */ + get: operations["orgs/list"]; + }; + "/organizations/{org}/personal-access-token-requests": { + /** + * List requests to access organization resources with fine-grained personal access tokens + * @description Lists requests from organization members to access organization resources with a fine-grained personal access token. Only GitHub Apps can call this API, + * using the `organization_personal_access_token_requests: read` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + get: operations["orgs/list-pat-grant-requests"]; + /** + * Review requests to access organization resources with fine-grained personal access tokens + * @description Approves or denies multiple pending requests to access organization resources via a fine-grained personal access token. Only GitHub Apps can call this API, + * using the `organization_personal_access_token_requests: write` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + post: operations["orgs/review-pat-grant-requests-in-bulk"]; + }; + "/organizations/{org}/personal-access-token-requests/{pat_request_id}": { + /** + * Review a request to access organization resources with a fine-grained personal access token + * @description Approves or denies a pending request to access organization resources via a fine-grained personal access token. Only GitHub Apps can call this API, + * using the `organization_personal_access_token_requests: write` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + post: operations["orgs/review-pat-grant-request"]; + }; + "/organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories": { + /** + * List repositories requested to be accessed by a fine-grained personal access token + * @description Lists the repositories a fine-grained personal access token request is requesting access to. Only GitHub Apps can call this API, + * using the `organization_personal_access_token_requests: read` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + get: operations["orgs/list-pat-grant-request-repositories"]; + }; + "/organizations/{org}/personal-access-tokens": { + /** + * List fine-grained personal access tokens with access to organization resources + * @description Lists approved fine-grained personal access tokens owned by organization members that can access organization resources. Only GitHub Apps can call this API, + * using the `organization_personal_access_tokens: read` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. 
+ */ + get: operations["orgs/list-pat-grants"]; + /** + * Update the access to organization resources via fine-grained personal access tokens + * @description Updates the access organization members have to organization resources via fine-grained personal access tokens. Limited to revoking a token's existing access. Only GitHub Apps can call this API, + * using the `organization_personal_access_tokens: write` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + post: operations["orgs/update-pat-accesses"]; + }; + "/organizations/{org}/personal-access-tokens/{pat_id}": { + /** + * Update the access a fine-grained personal access token has to organization resources + * @description Updates the access an organization member has to organization resources via a fine-grained personal access token. Limited to revoking the token's existing access. Limited to revoking a token's existing access. Only GitHub Apps can call this API, + * using the `organization_personal_access_tokens: write` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + post: operations["orgs/update-pat-access"]; + }; + "/organizations/{org}/personal-access-tokens/{pat_id}/repositories": { + /** + * List repositories a fine-grained personal access token has access to + * @description Lists the repositories a fine-grained personal access token has access to. Only GitHub Apps can call this API, + * using the `organization_personal_access_tokens: read` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + get: operations["orgs/list-pat-grant-repositories"]; + }; + "/orgs/{org}": { + /** + * Get an organization + * @description To see many of the organization response values, you need to be an authenticated organization owner with the `admin:org` scope. When the value of `two_factor_requirement_enabled` is `true`, the organization requires all members, billing managers, and outside collaborators to enable [two-factor authentication](https://docs.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/). + * + * GitHub Apps with the `Organization plan` permission can use this endpoint to retrieve information about an organization's GitHub plan. See "[Authenticating with GitHub Apps](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/)" for details. For an example response, see 'Response with GitHub plan information' below." + */ + get: operations["orgs/get"]; + /** + * Delete an organization + * @description Deletes an organization and all its repositories. + * + * The organization login will be unavailable for 90 days after deletion. + * + * Please review the Terms of Service regarding account deletion before using this endpoint: + * + * https://docs.github.com/site-policy/github-terms/github-terms-of-service + */ + delete: operations["orgs/delete"]; + /** + * Update an organization + * @description **Parameter Deprecation Notice:** GitHub will replace and discontinue `members_allowed_repository_creation_type` in favor of more granular permissions. 
The new input parameters are `members_can_create_public_repositories`, `members_can_create_private_repositories` for all organizations and `members_can_create_internal_repositories` for organizations associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see the [blog post](https://developer.github.com/changes/2019-12-03-internal-visibility-changes). + * + * Enables an authenticated organization owner with the `admin:org` scope or the `repo` scope to update the organization's profile and member privileges. + */ + patch: operations["orgs/update"]; + }; + "/orgs/{org}/actions/cache/usage": { + /** + * Get GitHub Actions cache usage for an organization + * @description Gets the total GitHub Actions cache usage for an organization. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_admistration:read` permission to use this endpoint. + */ + get: operations["actions/get-actions-cache-usage-for-org"]; + }; + "/orgs/{org}/actions/cache/usage-by-repository": { + /** + * List repositories with GitHub Actions cache usage for an organization + * @description Lists repositories and their GitHub Actions cache usage for an organization. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_admistration:read` permission to use this endpoint. + */ + get: operations["actions/get-actions-cache-usage-by-repo-for-org"]; + }; + "/orgs/{org}/actions/oidc/customization/sub": { + /** + * Get the customization template for an OIDC subject claim for an organization + * @description Gets the customization template for an OpenID Connect (OIDC) subject claim. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. + * GitHub Apps must have the `organization_administration:write` permission to use this endpoint. + */ + get: operations["oidc/get-oidc-custom-sub-template-for-org"]; + /** + * Set the customization template for an OIDC subject claim for an organization + * @description Creates or updates the customization template for an OpenID Connect (OIDC) subject claim. + * You must authenticate using an access token with the `write:org` scope to use this endpoint. + * GitHub Apps must have the `admin:org` permission to use this endpoint. + */ + put: operations["oidc/update-oidc-custom-sub-template-for-org"]; + }; + "/orgs/{org}/actions/permissions": { + /** + * Get GitHub Actions permissions for an organization + * @description Gets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + get: operations["actions/get-github-actions-permissions-organization"]; + /** + * Set GitHub Actions permissions for an organization + * @description Sets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. 
+ * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/set-github-actions-permissions-organization"]; + }; + "/orgs/{org}/actions/permissions/repositories": { + /** + * List selected repositories enabled for GitHub Actions in an organization + * @description Lists the selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + get: operations["actions/list-selected-repositories-enabled-github-actions-organization"]; + /** + * Set selected repositories enabled for GitHub Actions in an organization + * @description Replaces the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/set-selected-repositories-enabled-github-actions-organization"]; + }; + "/orgs/{org}/actions/permissions/repositories/{repository_id}": { + /** + * Enable a selected repository for GitHub Actions in an organization + * @description Adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/enable-selected-repository-github-actions-organization"]; + /** + * Disable a selected repository for GitHub Actions in an organization + * @description Removes a repository from the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. 
+ */ + delete: operations["actions/disable-selected-repository-github-actions-organization"]; + }; + "/orgs/{org}/actions/permissions/selected-actions": { + /** + * Get allowed actions and reusable workflows for an organization + * @description Gets the selected actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."" + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + get: operations["actions/get-allowed-actions-organization"]; + /** + * Set allowed actions and reusable workflows for an organization + * @description Sets the actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/set-allowed-actions-organization"]; + }; + "/orgs/{org}/actions/permissions/workflow": { + /** + * Get default workflow permissions for an organization + * @description Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, + * as well as whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + get: operations["actions/get-github-actions-default-workflow-permissions-organization"]; + /** + * Set default workflow permissions for an organization + * @description Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, and sets if GitHub Actions + * can submit approving pull request reviews. For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/set-github-actions-default-workflow-permissions-organization"]; + }; + "/orgs/{org}/actions/required_workflows": { + /** + * List required workflows + * @description List all required workflows in an organization. + * + * You must authenticate using an access token with the `read:org` scope to use this endpoint. 
+ * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + get: operations["actions/list-required-workflows"]; + /** + * Create a required workflow + * @description Create a required workflow in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + post: operations["actions/create-required-workflow"]; + }; + "/orgs/{org}/actions/required_workflows/{required_workflow_id}": { + /** + * Get a required workflow + * @description Get a required workflow configured in an organization. + * + * You must authenticate using an access token with the `read:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + get: operations["actions/get-required-workflow"]; + /** + * Delete a required workflow + * @description Deletes a required workflow configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + delete: operations["actions/delete-required-workflow"]; + /** + * Update a required workflow + * @description Update a required workflow in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + patch: operations["actions/update-required-workflow"]; + }; + "/orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories": { + /** + * List selected repositories for a required workflow + * @description Lists the selected repositories that are configured for a required workflow in an organization. To use this endpoint, the required workflow must be configured to run on selected repositories. + * + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + get: operations["actions/list-selected-repositories-required-workflow"]; + /** + * Sets repositories for a required workflow + * @description Sets the repositories for a required workflow that is required for selected repositories. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + put: operations["actions/set-selected-repos-to-required-workflow"]; + }; + "/orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}": { + /** + * Add a repository to a required workflow + * @description Adds a repository to a required workflow. To use this endpoint, the required workflow must be configured to run on selected repositories. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
+ * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + put: operations["actions/add-selected-repo-to-required-workflow"]; + /** + * Remove a selected repository from required workflow + * @description Removes a repository from a required workflow. To use this endpoint, the required workflow must be configured to run on selected repositories. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + delete: operations["actions/remove-selected-repo-from-required-workflow"]; + }; + "/orgs/{org}/actions/runners": { + /** + * List self-hosted runners for an organization + * @description Lists all self-hosted runners configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-self-hosted-runners-for-org"]; + }; + "/orgs/{org}/actions/runners/downloads": { + /** + * List runner applications for an organization + * @description Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-runner-applications-for-org"]; + }; + "/orgs/{org}/actions/runners/generate-jitconfig": { + /** + * Create configuration for a just-in-time runner for an organization + * @description Generates a configuration that can be passed to the runner application at startup. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + post: operations["actions/generate-runner-jitconfig-for-org"]; + }; + "/orgs/{org}/actions/runners/registration-token": { + /** + * Create a registration token for an organization + * @description Returns a token that you can pass to the `config` script. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/octo-org --token TOKEN + * ``` + */ + post: operations["actions/create-registration-token-for-org"]; + }; + "/orgs/{org}/actions/runners/remove-token": { + /** + * Create a remove token for an organization + * @description Returns a token that you can pass to the `config` script to remove a self-hosted runner from an organization. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from an organization, replace `TOKEN` with the remove token provided by this + * endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + post: operations["actions/create-remove-token-for-org"]; + }; + "/orgs/{org}/actions/runners/{runner_id}": { + /** + * Get a self-hosted runner for an organization + * @description Gets a specific self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
+ */ + get: operations["actions/get-self-hosted-runner-for-org"]; + /** + * Delete a self-hosted runner from an organization + * @description Forces the removal of a self-hosted runner from an organization. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/delete-self-hosted-runner-from-org"]; + }; + "/orgs/{org}/actions/runners/{runner_id}/labels": { + /** + * List labels for a self-hosted runner for an organization + * @description Lists all labels for a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-labels-for-self-hosted-runner-for-org"]; + /** + * Set custom labels for a self-hosted runner for an organization + * @description Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + put: operations["actions/set-custom-labels-for-self-hosted-runner-for-org"]; + /** + * Add custom labels to a self-hosted runner for an organization + * @description Add custom labels to a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + post: operations["actions/add-custom-labels-to-self-hosted-runner-for-org"]; + /** + * Remove all custom labels from a self-hosted runner for an organization + * @description Remove all custom labels from a self-hosted runner configured in an + * organization. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/remove-all-custom-labels-from-self-hosted-runner-for-org"]; + }; + "/orgs/{org}/actions/runners/{runner_id}/labels/{name}": { + /** + * Remove a custom label from a self-hosted runner for an organization + * @description Remove a custom label from a self-hosted runner configured + * in an organization. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/remove-custom-label-from-self-hosted-runner-for-org"]; + }; + "/orgs/{org}/actions/secrets": { + /** + * List organization secrets + * @description Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + get: operations["actions/list-org-secrets"]; + }; + "/orgs/{org}/actions/secrets/public-key": { + /** + * Get an organization public key + * @description Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. 
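+ *
+ * #### Example fetching the public key with Octokit
+ *
+ * A minimal illustrative sketch using [`@octokit/core`](https://github.com/octokit/core.js); the organization name and the `GITHUB_TOKEN` environment variable are placeholders, and the token is assumed to carry the `admin:org` scope.
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Fetch the key used to encrypt secret values before creating or updating a secret.
+ * const { data } = await octokit.request("GET /orgs/{org}/actions/secrets/public-key", {
+ *   org: "octo-org",
+ * });
+ *
+ * console.log(data.key_id, data.key);
+ * ```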
+ */ + get: operations["actions/get-org-public-key"]; + }; + "/orgs/{org}/actions/secrets/{secret_name}": { + /** + * Get an organization secret + * @description Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + get: operations["actions/get-org-secret"]; + /** + * Create or update an organization secret + * @description Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to + * use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["actions/create-or-update-org-secret"]; + /** + * Delete an organization secret + * @description Deletes a secret in an organization using the secret name. 
You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + delete: operations["actions/delete-org-secret"]; + }; + "/orgs/{org}/actions/secrets/{secret_name}/repositories": { + /** + * List selected repositories for an organization secret + * @description Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + get: operations["actions/list-selected-repos-for-org-secret"]; + /** + * Set selected repositories for an organization secret + * @description Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + put: operations["actions/set-selected-repos-for-org-secret"]; + }; + "/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}": { + /** + * Add selected repository to an organization secret + * @description Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + put: operations["actions/add-selected-repo-to-org-secret"]; + /** + * Remove selected repository from an organization secret + * @description Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + delete: operations["actions/remove-selected-repo-from-org-secret"]; + }; + "/orgs/{org}/actions/variables": { + /** + * List organization variables + * @description Lists all organization variables. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:read` organization permission to use this endpoint. + */ + get: operations["actions/list-org-variables"]; + /** + * Create an organization variable + * @description Creates an organization variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. 
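+ *
+ * #### Example creating an organization variable with Octokit
+ *
+ * A minimal illustrative sketch using `@octokit/core`; the organization name, variable name, and value are placeholders, and the `name`, `value`, and `visibility` fields follow the request body documented for this endpoint.
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Create an organization-level variable that all repositories can reference.
+ * await octokit.request("POST /orgs/{org}/actions/variables", {
+ *   org: "octo-org",
+ *   name: "DEPLOY_REGION",
+ *   value: "eu-west-1",
+ *   visibility: "all",
+ * });
+ * ```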
+ */ + post: operations["actions/create-org-variable"]; + }; + "/orgs/{org}/actions/variables/{name}": { + /** + * Get an organization variable + * @description Gets a specific variable in an organization. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:read` organization permission to use this endpoint. + */ + get: operations["actions/get-org-variable"]; + /** + * Delete an organization variable + * @description Deletes an organization variable using the variable name. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + delete: operations["actions/delete-org-variable"]; + /** + * Update an organization variable + * @description Updates an organization variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + patch: operations["actions/update-org-variable"]; + }; + "/orgs/{org}/actions/variables/{name}/repositories": { + /** + * List selected repositories for an organization variable + * @description Lists all repositories that can access an organization variable that is available to selected repositories. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:read` organization permission to use this endpoint. + */ + get: operations["actions/list-selected-repos-for-org-variable"]; + /** + * Set selected repositories for an organization variable + * @description Replaces all repositories for an organization variable that is available to selected repositories. Organization variables that are available to selected repositories have their `visibility` field set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + put: operations["actions/set-selected-repos-for-org-variable"]; + }; + "/orgs/{org}/actions/variables/{name}/repositories/{repository_id}": { + /** + * Add selected repository to an organization variable + * @description Adds a repository to an organization variable that is available to selected repositories. Organization variables that are available to selected repositories have their `visibility` field set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + put: operations["actions/add-selected-repo-to-org-variable"]; + /** + * Remove selected repository from an organization variable + * @description Removes a repository from an organization variable that is available to selected repositories. Organization variables that are available to selected repositories have their `visibility` field set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. 
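+ *
+ * #### Example removing a repository from a variable with Octokit
+ *
+ * A minimal illustrative sketch using `@octokit/core`; note that `repository_id` is the numeric repository ID, not the repository name. The organization, variable name, and ID shown are placeholders.
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Revoke one selected repository's access to the organization variable.
+ * await octokit.request("DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}", {
+ *   org: "octo-org",
+ *   name: "DEPLOY_REGION",
+ *   repository_id: 1296269,
+ * });
+ * ```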
+ */
+ delete: operations["actions/remove-selected-repo-from-org-variable"];
+ };
+ "/orgs/{org}/blocks": {
+ /**
+ * List users blocked by an organization
+ * @description List the users blocked by an organization.
+ */
+ get: operations["orgs/list-blocked-users"];
+ };
+ "/orgs/{org}/blocks/{username}": {
+ /** Check if a user is blocked by an organization */
+ get: operations["orgs/check-blocked-user"];
+ /** Block a user from an organization */
+ put: operations["orgs/block-user"];
+ /** Unblock a user from an organization */
+ delete: operations["orgs/unblock-user"];
+ };
+ "/orgs/{org}/code-scanning/alerts": {
+ /**
+ * List code scanning alerts for an organization
+ * @description Lists code scanning alerts for the default branch for all eligible repositories in an organization. Eligible repositories are repositories that are owned by organizations that you own or for which you are a security manager. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)."
+ *
+ * To use this endpoint, you must be an owner or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope.
+ *
+ * For public repositories, you may instead use the `public_repo` scope.
+ *
+ * GitHub Apps must have the `security_events` read permission to use this endpoint.
+ */
+ get: operations["code-scanning/list-alerts-for-org"];
+ };
+ "/orgs/{org}/codespaces": {
+ /**
+ * List codespaces for the organization
+ * @description Lists the codespaces associated with a specified organization.
+ *
+ * You must authenticate using an access token with the `admin:org` scope to use this endpoint.
+ */
+ get: operations["codespaces/list-in-organization"];
+ };
+ "/orgs/{org}/codespaces/billing": {
+ /**
+ * Manage access control for organization codespaces
+ * @description Sets which users can access codespaces in an organization. This is synonymous with granting or revoking codespaces billing permissions for users according to the visibility.
+ * You must authenticate using an access token with the `admin:org` scope to use this endpoint.
+ */
+ put: operations["codespaces/set-codespaces-billing"];
+ };
+ "/orgs/{org}/codespaces/billing/selected_users": {
+ /**
+ * Add users to Codespaces billing for an organization
+ * @description Codespaces for the specified users will be billed to the organization.
+ * To use this endpoint, the billing settings for the organization must be set to `selected_members`. For information on how to change this setting, see [these docs](https://docs.github.com/rest/codespaces/organizations#manage-access-control-for-organization-codespaces). You must authenticate using an access token with the `admin:org` scope to use this endpoint.
+ */
+ post: operations["codespaces/set-codespaces-billing-users"];
+ /**
+ * Removes users from Codespaces billing for an organization
+ * @description Codespaces for the specified users will no longer be billed to the organization.
+ * To use this endpoint, the billing settings for the organization must be set to `selected_members`. For information on how to change this setting, see [these docs](https://docs.github.com/rest/codespaces/organizations#manage-access-control-for-organization-codespaces). You must authenticate using an access token with the `admin:org` scope to use this endpoint.
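+ *
+ * **Example removing users with Octokit**
+ *
+ * A minimal illustrative sketch using `@octokit/core`, assuming the request body takes a `selected_usernames` array as described in the GitHub documentation for this endpoint; the organization and usernames are placeholders.
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Stop billing the organization for these members' codespaces.
+ * await octokit.request("DELETE /orgs/{org}/codespaces/billing/selected_users", {
+ *   org: "octo-org",
+ *   selected_usernames: ["octocat", "hubot"],
+ * });
+ * ```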
+ */ + delete: operations["codespaces/delete-codespaces-billing-users"]; + }; + "/orgs/{org}/codespaces/secrets": { + /** + * List organization secrets + * @description Lists all Codespaces secrets available at the organization-level without revealing their encrypted values. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["codespaces/list-org-secrets"]; + }; + "/orgs/{org}/codespaces/secrets/public-key": { + /** + * Get an organization public key + * @description Gets a public key for an organization, which is required in order to encrypt secrets. You need to encrypt the value of a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["codespaces/get-org-public-key"]; + }; + "/orgs/{org}/codespaces/secrets/{secret_name}": { + /** + * Get an organization secret + * @description Gets an organization secret without revealing its encrypted value. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["codespaces/get-org-secret"]; + /** + * Create or update an organization secret + * @description Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
+ * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["codespaces/create-or-update-org-secret"]; + /** + * Delete an organization secret + * @description Deletes an organization secret using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["codespaces/delete-org-secret"]; + }; + "/orgs/{org}/codespaces/secrets/{secret_name}/repositories": { + /** + * List selected repositories for an organization secret + * @description Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["codespaces/list-selected-repos-for-org-secret"]; + /** + * Set selected repositories for an organization secret + * @description Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + put: operations["codespaces/set-selected-repos-for-org-secret"]; + }; + "/orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}": { + /** + * Add selected repository to an organization secret + * @description Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + put: operations["codespaces/add-selected-repo-to-org-secret"]; + /** + * Remove selected repository from an organization secret + * @description Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["codespaces/remove-selected-repo-from-org-secret"]; + }; + "/orgs/{org}/dependabot/alerts": { + /** + * List Dependabot alerts for an organization + * @description Lists Dependabot alerts for an organization. 
+ * + * To use this endpoint, you must be an owner or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have **Dependabot alerts** read permission to use this endpoint. + */ + get: operations["dependabot/list-alerts-for-org"]; + }; + "/orgs/{org}/dependabot/secrets": { + /** + * List organization secrets + * @description Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + get: operations["dependabot/list-org-secrets"]; + }; + "/orgs/{org}/dependabot/secrets/public-key": { + /** + * Get an organization public key + * @description Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + get: operations["dependabot/get-org-public-key"]; + }; + "/orgs/{org}/dependabot/secrets/{secret_name}": { + /** + * Get an organization secret + * @description Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + get: operations["dependabot/get-org-secret"]; + /** + * Create or update an organization secret + * @description Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization + * permission to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. 
+ * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["dependabot/create-or-update-org-secret"]; + /** + * Delete an organization secret + * @description Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + delete: operations["dependabot/delete-org-secret"]; + }; + "/orgs/{org}/dependabot/secrets/{secret_name}/repositories": { + /** + * List selected repositories for an organization secret + * @description Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + get: operations["dependabot/list-selected-repos-for-org-secret"]; + /** + * Set selected repositories for an organization secret + * @description Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + put: operations["dependabot/set-selected-repos-for-org-secret"]; + }; + "/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}": { + /** + * Add selected repository to an organization secret + * @description Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). 
You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + put: operations["dependabot/add-selected-repo-to-org-secret"]; + /** + * Remove selected repository from an organization secret + * @description Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + delete: operations["dependabot/remove-selected-repo-from-org-secret"]; + }; + "/orgs/{org}/docker/conflicts": { + /** + * Get list of conflicting packages during Docker migration for organization + * @description Lists all packages that are in a specific organization, are readable by the requesting user, and that encountered a conflict during a Docker migration. + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. + */ + get: operations["packages/list-docker-migration-conflicting-packages-for-organization"]; + }; + "/orgs/{org}/events": { + /** List public organization events */ + get: operations["activity/list-public-org-events"]; + }; + "/orgs/{org}/failed_invitations": { + /** + * List failed organization invitations + * @description The return hash contains `failed_at` and `failed_reason` fields which represent the time at which the invitation failed and the reason for the failure. + */ + get: operations["orgs/list-failed-invitations"]; + }; + "/orgs/{org}/hooks": { + /** List organization webhooks */ + get: operations["orgs/list-webhooks"]; + /** + * Create an organization webhook + * @description Here's how you can create a hook that posts payloads in JSON format: + */ + post: operations["orgs/create-webhook"]; + }; + "/orgs/{org}/hooks/{hook_id}": { + /** + * Get an organization webhook + * @description Returns a webhook configured in an organization. To get only the webhook `config` properties, see "[Get a webhook configuration for an organization](/rest/reference/orgs#get-a-webhook-configuration-for-an-organization)." + */ + get: operations["orgs/get-webhook"]; + /** Delete an organization webhook */ + delete: operations["orgs/delete-webhook"]; + /** + * Update an organization webhook + * @description Updates a webhook configured in an organization. When you update a webhook, the `secret` will be overwritten. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for an organization](/rest/reference/orgs#update-a-webhook-configuration-for-an-organization)." + */ + patch: operations["orgs/update-webhook"]; + }; + "/orgs/{org}/hooks/{hook_id}/config": { + /** + * Get a webhook configuration for an organization + * @description Returns the webhook configuration for an organization. To get more information about the webhook, including the `active` state and `events`, use "[Get an organization webhook ](/rest/reference/orgs#get-an-organization-webhook)." 
+ *
+ * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:read` permission.
+ */
+ get: operations["orgs/get-webhook-config-for-org"];
+ /**
+ * Update a webhook configuration for an organization
+ * @description Updates the webhook configuration for an organization. To update more information about the webhook, including the `active` state and `events`, use "[Update an organization webhook](/rest/reference/orgs#update-an-organization-webhook)."
+ *
+ * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:write` permission.
+ */
+ patch: operations["orgs/update-webhook-config-for-org"];
+ };
+ "/orgs/{org}/hooks/{hook_id}/deliveries": {
+ /**
+ * List deliveries for an organization webhook
+ * @description Returns a list of webhook deliveries for a webhook configured in an organization.
+ */
+ get: operations["orgs/list-webhook-deliveries"];
+ };
+ "/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}": {
+ /**
+ * Get a webhook delivery for an organization webhook
+ * @description Returns a delivery for a webhook configured in an organization.
+ */
+ get: operations["orgs/get-webhook-delivery"];
+ };
+ "/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": {
+ /**
+ * Redeliver a delivery for an organization webhook
+ * @description Redeliver a delivery for a webhook configured in an organization.
+ */
+ post: operations["orgs/redeliver-webhook-delivery"];
+ };
+ "/orgs/{org}/hooks/{hook_id}/pings": {
+ /**
+ * Ping an organization webhook
+ * @description This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook.
+ */
+ post: operations["orgs/ping-webhook"];
+ };
+ "/orgs/{org}/installation": {
+ /**
+ * Get an organization installation for the authenticated app
+ * @description Enables an authenticated GitHub App to find the organization's installation information.
+ *
+ * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint.
+ */
+ get: operations["apps/get-org-installation"];
+ };
+ "/orgs/{org}/installations": {
+ /**
+ * List app installations for an organization
+ * @description Lists all GitHub Apps in an organization. The installation count includes all GitHub Apps installed on repositories in the organization. You must be an organization owner with `admin:read` scope to use this endpoint.
+ */
+ get: operations["orgs/list-app-installations"];
+ };
+ "/orgs/{org}/interaction-limits": {
+ /**
+ * Get interaction restrictions for an organization
+ * @description Shows which type of GitHub user can interact with this organization and when the restriction expires. If there are no restrictions, you will see an empty response.
+ */
+ get: operations["interactions/get-restrictions-for-org"];
+ /**
+ * Set interaction restrictions for an organization
+ * @description Temporarily restricts interactions to a certain type of GitHub user in any public repository in the given organization. You must be an organization owner to set these restrictions. Setting the interaction limit at the organization level will overwrite any interaction limits that are set for individual repositories owned by the organization.
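+ *
+ * For example, to limit interactions to existing users for one week (a minimal illustrative sketch using `@octokit/core`; the `limit` and `expiry` values shown are two of the documented options, and the organization name is a placeholder):
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Restrict interactions across all public repositories owned by the organization.
+ * await octokit.request("PUT /orgs/{org}/interaction-limits", {
+ *   org: "octo-org",
+ *   limit: "existing_users",
+ *   expiry: "one_week",
+ * });
+ * ```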
+ */ + put: operations["interactions/set-restrictions-for-org"]; + /** + * Remove interaction restrictions for an organization + * @description Removes all interaction restrictions from public repositories in the given organization. You must be an organization owner to remove restrictions. + */ + delete: operations["interactions/remove-restrictions-for-org"]; + }; + "/orgs/{org}/invitations": { + /** + * List pending organization invitations + * @description The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, or `hiring_manager`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + */ + get: operations["orgs/list-pending-invitations"]; + /** + * Create an organization invitation + * @description Invite people to an organization by using their GitHub user ID or their email address. In order to create invitations in an organization, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["orgs/create-invitation"]; + }; + "/orgs/{org}/invitations/{invitation_id}": { + /** + * Cancel an organization invitation + * @description Cancel an organization invitation. In order to cancel an organization invitation, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). + */ + delete: operations["orgs/cancel-invitation"]; + }; + "/orgs/{org}/invitations/{invitation_id}/teams": { + /** + * List organization invitation teams + * @description List all teams associated with an invitation. In order to see invitations in an organization, the authenticated user must be an organization owner. + */ + get: operations["orgs/list-invitation-teams"]; + }; + "/orgs/{org}/issues": { + /** + * List organization issues assigned to the authenticated user + * @description List issues in an organization assigned to the authenticated user. + * + * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: operations["issues/list-for-org"]; + }; + "/orgs/{org}/members": { + /** + * List organization members + * @description List all users who are members of an organization. If the authenticated user is also a member of this organization then both concealed and public members will be returned. 
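+ *
+ * For example, to fetch one page of members (a minimal illustrative sketch using `@octokit/core`; `per_page` and `page` are the standard pagination parameters, and the organization name is a placeholder):
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Request up to 100 members from the first page of results.
+ * const { data: members } = await octokit.request("GET /orgs/{org}/members", {
+ *   org: "octo-org",
+ *   per_page: 100,
+ *   page: 1,
+ * });
+ *
+ * console.log(members.map((member) => member.login));
+ * ```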
+ */ + get: operations["orgs/list-members"]; + }; + "/orgs/{org}/members/{username}": { + /** + * Check organization membership for a user + * @description Check if a user is, publicly or privately, a member of the organization. + */ + get: operations["orgs/check-membership-for-user"]; + /** + * Remove an organization member + * @description Removing a user from this list will remove them from all teams and they will no longer have any access to the organization's repositories. + */ + delete: operations["orgs/remove-member"]; + }; + "/orgs/{org}/members/{username}/codespaces": { + /** + * List codespaces for a user in organization + * @description Lists the codespaces that a member of an organization has for repositories in that organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["codespaces/get-codespaces-for-user-in-org"]; + }; + "/orgs/{org}/members/{username}/codespaces/{codespace_name}": { + /** + * Delete a codespace from the organization + * @description Deletes a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["codespaces/delete-from-organization"]; + }; + "/orgs/{org}/members/{username}/codespaces/{codespace_name}/stop": { + /** + * Stop a codespace for an organization user + * @description Stops a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + post: operations["codespaces/stop-in-organization"]; + }; + "/orgs/{org}/memberships/{username}": { + /** + * Get organization membership for a user + * @description In order to get a user's membership with an organization, the authenticated user must be an organization member. The `state` parameter in the response can be used to identify the user's membership status. + */ + get: operations["orgs/get-membership-for-user"]; + /** + * Set organization membership for a user + * @description Only authenticated organization owners can add a member to the organization or update the member's role. + * + * * If the authenticated user is _adding_ a member to the organization, the invited user will receive an email inviting them to the organization. The user's [membership status](https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user) will be `pending` until they accept the invitation. + * + * * Authenticated users can _update_ a user's membership by passing the `role` parameter. If the authenticated user changes a member's role to `admin`, the affected user will receive an email notifying them that they've been made an organization owner. If the authenticated user changes an owner's role to `member`, no email will be sent. + * + * **Rate limits** + * + * To prevent abuse, the authenticated user is limited to 50 organization invitations per 24 hour period. If the organization is more than one month old or on a paid plan, the limit is 500 invitations per 24 hour period. + */ + put: operations["orgs/set-membership-for-user"]; + /** + * Remove organization membership for a user + * @description In order to remove a user's membership with an organization, the authenticated user must be an organization owner. + * + * If the specified user is an active member of the organization, this will remove them from the organization. If the specified user has been invited to the organization, this will cancel their invitation. 
The specified user will receive an email notification in both cases. + */ + delete: operations["orgs/remove-membership-for-user"]; + }; + "/orgs/{org}/migrations": { + /** + * List organization migrations + * @description Lists the most recent migrations, including both exports (which can be started through the REST API) and imports (which cannot be started using the REST API). + * + * A list of `repositories` is only returned for export migrations. + */ + get: operations["migrations/list-for-org"]; + /** + * Start an organization migration + * @description Initiates the generation of a migration archive. + */ + post: operations["migrations/start-for-org"]; + }; + "/orgs/{org}/migrations/{migration_id}": { + /** + * Get an organization migration status + * @description Fetches the status of a migration. + * + * The `state` of a migration can be one of the following values: + * + * * `pending`, which means the migration hasn't started yet. + * * `exporting`, which means the migration is in progress. + * * `exported`, which means the migration finished successfully. + * * `failed`, which means the migration failed. + */ + get: operations["migrations/get-status-for-org"]; + }; + "/orgs/{org}/migrations/{migration_id}/archive": { + /** + * Download an organization migration archive + * @description Fetches the URL to a migration archive. + */ + get: operations["migrations/download-archive-for-org"]; + /** + * Delete an organization migration archive + * @description Deletes a previous migration archive. Migration archives are automatically deleted after seven days. + */ + delete: operations["migrations/delete-archive-for-org"]; + }; + "/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock": { + /** + * Unlock an organization repository + * @description Unlocks a repository that was locked for migration. You should unlock each migrated repository and [delete them](https://docs.github.com/rest/repos/repos#delete-a-repository) when the migration is complete and you no longer need the source data. + */ + delete: operations["migrations/unlock-repo-for-org"]; + }; + "/orgs/{org}/migrations/{migration_id}/repositories": { + /** + * List repositories in an organization migration + * @description List all the repositories for this organization migration. + */ + get: operations["migrations/list-repos-for-org"]; + }; + "/orgs/{org}/outside_collaborators": { + /** + * List outside collaborators for an organization + * @description List all users who are outside collaborators of an organization. + */ + get: operations["orgs/list-outside-collaborators"]; + }; + "/orgs/{org}/outside_collaborators/{username}": { + /** + * Convert an organization member to outside collaborator + * @description When an organization member is converted to an outside collaborator, they'll only have access to the repositories that their current team membership allows. The user will no longer be a member of the organization. For more information, see "[Converting an organization member to an outside collaborator](https://docs.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)". Converting an organization member to an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." 
+ */ + put: operations["orgs/convert-member-to-outside-collaborator"]; + /** + * Remove outside collaborator from an organization + * @description Removing a user from this list will remove them from all the organization's repositories. + */ + delete: operations["orgs/remove-outside-collaborator"]; + }; + "/orgs/{org}/packages": { + /** + * List packages for an organization + * @description Lists packages in an organization readable by the user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + get: operations["packages/list-packages-for-organization"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}": { + /** + * Get a package for an organization + * @description Gets a specific package in an organization. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + get: operations["packages/get-package-for-organization"]; + /** + * Delete a package for an organization + * @description Deletes an entire package in an organization. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `delete:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package you want to delete. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + delete: operations["packages/delete-package-for-org"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}/restore": { + /** + * Restore a package for an organization + * @description Restores an entire package in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. 
If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `write:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package you want to restore. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + post: operations["packages/restore-package-for-org"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}/versions": { + /** + * List package versions for a package owned by an organization + * @description Lists package versions for a package owned by an organization. + * + * If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + get: operations["packages/get-all-package-versions-for-package-owned-by-org"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}": { + /** + * Get a package version for an organization + * @description Gets a specific package version in an organization. + * + * You must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + get: operations["packages/get-package-version-for-organization"]; + /** + * Delete package version for an organization + * @description Deletes a specific package version in an organization. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `delete:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. 
For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package whose version you want to delete. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + delete: operations["packages/delete-package-version-for-org"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": { + /** + * Restore package version for an organization + * @description Restores a specific package version in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `write:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package whose version you want to restore. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + post: operations["packages/restore-package-version-for-org"]; + }; + "/orgs/{org}/projects": { + /** + * List organization projects + * @description Lists the projects in an organization. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + get: operations["projects/list-for-org"]; + /** + * Create an organization project + * @description Creates an organization project board. Returns a `410 Gone` status if projects are disabled in the organization or if the organization does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + post: operations["projects/create-for-org"]; + }; + "/orgs/{org}/public_members": { + /** + * List public organization members + * @description Members of an organization can choose to have their membership publicized or not. 
+ */ + get: operations["orgs/list-public-members"]; + }; + "/orgs/{org}/public_members/{username}": { + /** + * Check public organization membership for a user + * @description Check if the provided user is a public member of the organization. + */ + get: operations["orgs/check-public-membership-for-user"]; + /** + * Set public organization membership for the authenticated user + * @description The user can publicize their own membership. (A user cannot publicize the membership for another user.) + * + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["orgs/set-public-membership-for-authenticated-user"]; + /** + * Remove public organization membership for the authenticated user + * @description Removes the public membership for the authenticated user from the specified organization, unless public visibility is enforced by default. + */ + delete: operations["orgs/remove-public-membership-for-authenticated-user"]; + }; + "/orgs/{org}/repos": { + /** + * List organization repositories + * @description Lists repositories for the specified organization. + * + * **Note:** In order to see the `security_and_analysis` block for a repository you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + */ + get: operations["repos/list-for-org"]; + /** + * Create an organization repository + * @description Creates a new repository in the specified organization. The authenticated user must be a member of the organization. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository + */ + post: operations["repos/create-in-org"]; + }; + "/orgs/{org}/rulesets": { + /** + * Get all organization repository rulesets + * @description Get all the repository rulesets for an organization. + */ + get: operations["repos/get-org-rulesets"]; + /** + * Create an organization repository ruleset + * @description Create a repository ruleset for an organization. + */ + post: operations["repos/create-org-ruleset"]; + }; + "/orgs/{org}/rulesets/{ruleset_id}": { + /** + * Get an organization repository ruleset + * @description Get a repository ruleset for an organization. + */ + get: operations["repos/get-org-ruleset"]; + /** + * Update an organization repository ruleset + * @description Update a ruleset for an organization. + */ + put: operations["repos/update-org-ruleset"]; + /** + * Delete an organization repository ruleset + * @description Delete a ruleset for an organization. + */ + delete: operations["repos/delete-org-ruleset"]; + }; + "/orgs/{org}/secret-scanning/alerts": { + /** + * List secret scanning alerts for an organization + * @description Lists secret scanning alerts for eligible repositories in an organization, from newest to oldest. 
+ * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + get: operations["secret-scanning/list-alerts-for-org"]; + }; + "/orgs/{org}/security-managers": { + /** + * List security manager teams + * @description Lists teams that are security managers for an organization. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `read:org` scope. + * + * GitHub Apps must have the `administration` organization read permission to use this endpoint. + */ + get: operations["orgs/list-security-manager-teams"]; + }; + "/orgs/{org}/security-managers/teams/{team_slug}": { + /** + * Add a security manager team + * @description Adds a team as a security manager for an organization. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an administrator for the organization, and you must use an access token with the `write:org` scope. + * + * GitHub Apps must have the `administration` organization read-write permission to use this endpoint. + */ + put: operations["orgs/add-security-manager-team"]; + /** + * Remove a security manager team + * @description Removes the security manager role from a team for an organization. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an administrator for the organization, and you must use an access token with the `admin:org` scope. + * + * GitHub Apps must have the `administration` organization read-write permission to use this endpoint. + */ + delete: operations["orgs/remove-security-manager-team"]; + }; + "/orgs/{org}/settings/billing/actions": { + /** + * Get GitHub Actions billing for an organization + * @description Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `repo` or `admin:org` scope.
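The billing description above corresponds to `GET /orgs/{org}/settings/billing/actions`. A minimal sketch, assuming a token with the `repo` or `admin:org` scope in `GITHUB_TOKEN` and a placeholder organization name:

```
// Fetch the GitHub Actions minutes summary for an organization.
const org = "octo-org"; // placeholder
const res = await fetch(
  `https://api.github.com/orgs/${org}/settings/billing/actions`,
  {
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    },
  },
);
console.log(await res.json()); // summary of free and paid minutes used
```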
+ */ + get: operations["billing/get-github-actions-billing-org"]; + }; + "/orgs/{org}/settings/billing/packages": { + /** + * Get GitHub Packages billing for an organization + * @description Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid storage only applies to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + get: operations["billing/get-github-packages-billing-org"]; + }; + "/orgs/{org}/settings/billing/shared-storage": { + /** + * Get shared storage billing for an organization + * @description Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid storage only applies to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + get: operations["billing/get-shared-storage-billing-org"]; + }; + "/orgs/{org}/teams": { + /** + * List teams + * @description Lists all teams in an organization that are visible to the authenticated user. + */ + get: operations["teams/list"]; + /** + * Create a team + * @description To create a team, the authenticated user must be a member or owner of `{org}`. By default, organization members can create teams. Organization owners can limit team creation to organization owners. For more information, see "[Setting team creation permissions](https://docs.github.com/articles/setting-team-creation-permissions-in-your-organization)." + * + * When you create a new team, you automatically become a team maintainer without explicitly adding yourself to the optional array of `maintainers`. For more information, see "[About teams](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/about-teams)". + */ + post: operations["teams/create"]; + }; + "/orgs/{org}/teams/{team_slug}": { + /** + * Get a team by name + * @description Gets a team using the team's `slug`. To create the `slug`, GitHub replaces special characters in the `name` string, changes all words to lowercase, and replaces spaces with a `-` separator. For example, `"My TEam Näme"` would become `my-team-name`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}`. + */ + get: operations["teams/get-by-name"]; + /** + * Delete a team + * @description To delete a team, the authenticated user must be an organization owner or team maintainer. + * + * If you are an organization owner, deleting a parent team will delete all of its child teams as well. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}`. + */ + delete: operations["teams/delete-in-org"]; + /** + * Update a team + * @description To edit a team, the authenticated user must either be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}`.
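The `teams/update-in-org` operation mapped just below is a `PATCH /orgs/{org}/teams/{team_slug}` with a JSON body; only the fields you send are changed. A hedged sketch with placeholder names, assuming the caller is an organization owner or team maintainer as required above:

```
// Update a team's description (org and team slug are placeholders).
const res = await fetch("https://api.github.com/orgs/octo-org/teams/my-team", {
  method: "PATCH",
  headers: {
    Accept: "application/vnd.github+json",
    Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    "Content-Type": "application/json",
  },
  body: JSON.stringify({ description: "Handles platform tooling" }),
});
console.log(res.status); // a 2xx status indicates the update succeeded
```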
+ */ + patch: operations["teams/update-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions": { + /** + * List discussions + * @description List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions`. + */ + get: operations["teams/list-discussions-in-org"]; + /** + * Create a discussion + * @description Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions`. + */ + post: operations["teams/create-discussion-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": { + /** + * Get a discussion + * @description Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + get: operations["teams/get-discussion-in-org"]; + /** + * Delete a discussion + * @description Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + delete: operations["teams/delete-discussion-in-org"]; + /** + * Update a discussion + * @description Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + patch: operations["teams/update-discussion-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": { + /** + * List discussion comments + * @description List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. 
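These discussion endpoints take JSON bodies. For example, the `teams/create-discussion-in-org` operation described above posts a `title` and `body`; a minimal sketch with placeholder org and team names, assuming a token with the `write:discussion` scope:

```
// Create a team discussion (org and team slug are placeholders).
const res = await fetch(
  "https://api.github.com/orgs/octo-org/teams/my-team/discussions",
  {
    method: "POST",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      title: "Deploy schedule",
      body: "Proposed windows for next week's deploys.",
    }),
  },
);
const discussion = await res.json();
console.log(discussion.number); // discussion number used by the routes below
```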
+ */ + get: operations["teams/list-discussion-comments-in-org"]; + /** + * Create a discussion comment + * @description Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + post: operations["teams/create-discussion-comment-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": { + /** + * Get a discussion comment + * @description Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + get: operations["teams/get-discussion-comment-in-org"]; + /** + * Delete a discussion comment + * @description Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + delete: operations["teams/delete-discussion-comment-in-org"]; + /** + * Update a discussion comment + * @description Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + patch: operations["teams/update-discussion-comment-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": { + /** + * List reactions for a team discussion comment + * @description List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments/). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. 
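The reactions listing described above is a plain `GET` on the comment's `reactions` sub-route. A minimal sketch with placeholder identifiers:

```
// List reactions left on a team discussion comment (all identifiers are placeholders).
const res = await fetch(
  "https://api.github.com/orgs/octo-org/teams/my-team/discussions/1/comments/1/reactions",
  {
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    },
  },
);
const reactions: Array<{ id: number; content: string }> = await res.json();
console.log(reactions.map((r) => r.content)); // e.g. "+1", "heart"
```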
+ */ + get: operations["reactions/list-for-team-discussion-comment-in-org"]; + /** + * Create reaction for a team discussion comment + * @description Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. + */ + post: operations["reactions/create-for-team-discussion-comment-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}": { + /** + * Delete team discussion comment reaction + * @description **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + delete: operations["reactions/delete-for-team-discussion-comment"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": { + /** + * List reactions for a team discussion + * @description List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + get: operations["reactions/list-for-team-discussion-in-org"]; + /** + * Create reaction for a team discussion + * @description Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + post: operations["reactions/create-for-team-discussion-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}": { + /** + * Delete team discussion reaction + * @description **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
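Deleting a reaction, as described above, uses a `reaction_id` obtained from a listing call such as the previous sketch and returns an empty response. A minimal sketch with placeholder identifiers:

```
// Remove a reaction from a team discussion (ids and names are placeholders).
const reactionId = 123456;
const res = await fetch(
  `https://api.github.com/orgs/octo-org/teams/my-team/discussions/1/reactions/${reactionId}`,
  {
    method: "DELETE",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    },
  },
);
console.log(res.status); // 204 indicates the reaction was deleted
```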
+ */ + delete: operations["reactions/delete-for-team-discussion"]; + }; + "/orgs/{org}/teams/{team_slug}/invitations": { + /** + * List pending team invitations + * @description The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/invitations`. + */ + get: operations["teams/list-pending-invitations-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/members": { + /** + * List team members + * @description Team members will include the members of child teams. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + get: operations["teams/list-members-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/memberships/{username}": { + /** + * Get team membership for a user + * @description Team members will include the members of child teams. + * + * To get a user's membership with a team, the team must be visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/memberships/{username}`. + * + * **Note:** + * The response contains the `state` of the membership and the member's `role`. + * + * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). + */ + get: operations["teams/get-membership-for-user-in-org"]; + /** + * Add or update team membership for a user + * @description Adds an organization member to a team. An authenticated organization owner or team maintainer can add organization members to a team. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * An organization owner can add someone who is not part of the team's organization to a team. When an organization owner adds someone to a team who is not an organization member, this endpoint will send an invitation to the person via email. This newly-created membership will be in the "pending" state until the person accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. + * + * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer.
+ * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + put: operations["teams/add-or-update-membership-for-user-in-org"]; + /** + * Remove team membership for a user + * @description To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + delete: operations["teams/remove-membership-for-user-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/projects": { + /** + * List team projects + * @description Lists the organization projects for a team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects`. + */ + get: operations["teams/list-projects-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/projects/{project_id}": { + /** + * Check team permissions for a project + * @description Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + get: operations["teams/check-permissions-for-project-in-org"]; + /** + * Add or update team project permissions + * @description Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + put: operations["teams/add-or-update-project-permissions-in-org"]; + /** + * Remove a project from a team + * @description Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. This endpoint removes the project from the team, but does not delete the project. 
+ * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + delete: operations["teams/remove-project-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/repos": { + /** + * List team repositories + * @description Lists a team's repositories visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos`. + */ + get: operations["teams/list-repos-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": { + /** + * Check team permissions for a repository + * @description Checks whether a team has `admin`, `push`, `maintain`, `triage`, or `pull` permission for a repository. Repositories inherited through a parent team will also be checked. + * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `application/vnd.github.v3.repository+json` accept header. + * + * If a team doesn't have permission for the repository, you will receive a `404 Not Found` response status. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + get: operations["teams/check-permissions-for-repo-in-org"]; + /** + * Add or update team repository permissions + * @description To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + * + * For more information about the permission levels, see "[Repository permission levels for an organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". + */ + put: operations["teams/add-or-update-repo-permissions-in-org"]; + /** + * Remove a repository from a team + * @description If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. This does not delete the repository, it just removes it from the team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + delete: operations["teams/remove-repo-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/teams": { + /** + * List child teams + * @description Lists the child teams of the team specified by `{team_slug}`. 
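The `teams/add-or-update-repo-permissions-in-org` operation declared above takes a JSON body that selects the permission level (`pull`, `triage`, `push`, `maintain`, or `admin`). A minimal sketch with placeholder org, team, and repository names; per the description above, a `422 Unprocessable Entity` is returned if the repository is not owned by the organization:

```
// Grant a team push access to a repository owned by the same organization.
// Org, team, and repository names are placeholders.
const res = await fetch(
  "https://api.github.com/orgs/octo-org/teams/my-team/repos/octo-org/octo-repo",
  {
    method: "PUT",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({ permission: "push" }),
  },
);
console.log(res.status); // 204 on success
```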
+ * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/teams`. + */ + get: operations["teams/list-child-in-org"]; + }; + "/orgs/{org}/{security_product}/{enablement}": { + /** + * Enable or disable a security feature for an organization + * @description Enables or disables the specified security feature for all eligible repositories in an organization. + * + * To use this endpoint, you must be an organization owner or be a member of a team with the security manager role. + * A token with the `write:org` scope is also required. + * + * GitHub Apps must have the `organization_administration:write` permission to use this endpoint. + * + * For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + */ + post: operations["orgs/enable-or-disable-security-product-on-all-org-repos"]; + }; + "/projects/columns/cards/{card_id}": { + /** + * Get a project card + * @description Gets information about a project card. + */ + get: operations["projects/get-card"]; + /** + * Delete a project card + * @description Deletes a project card. + */ + delete: operations["projects/delete-card"]; + /** Update an existing project card */ + patch: operations["projects/update-card"]; + }; + "/projects/columns/cards/{card_id}/moves": { + /** Move a project card */ + post: operations["projects/move-card"]; + }; + "/projects/columns/{column_id}": { + /** + * Get a project column + * @description Gets information about a project column. + */ + get: operations["projects/get-column"]; + /** + * Delete a project column + * @description Deletes a project column. + */ + delete: operations["projects/delete-column"]; + /** Update an existing project column */ + patch: operations["projects/update-column"]; + }; + "/projects/columns/{column_id}/cards": { + /** + * List project cards + * @description Lists the project cards in a project. + */ + get: operations["projects/list-cards"]; + /** Create a project card */ + post: operations["projects/create-card"]; + }; + "/projects/columns/{column_id}/moves": { + /** Move a project column */ + post: operations["projects/move-column"]; + }; + "/projects/{project_id}": { + /** + * Get a project + * @description Gets a project by its `id`. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + get: operations["projects/get"]; + /** + * Delete a project + * @description Deletes a project board. Returns a `404 Not Found` status if projects are disabled. + */ + delete: operations["projects/delete"]; + /** + * Update a project + * @description Updates a project board's information. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + patch: operations["projects/update"]; + }; + "/projects/{project_id}/collaborators": { + /** + * List project collaborators + * @description Lists the collaborators for an organization project. For a project, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners.
You must be an organization owner or a project `admin` to list collaborators. + */ + get: operations["projects/list-collaborators"]; + }; + "/projects/{project_id}/collaborators/{username}": { + /** + * Add project collaborator + * @description Adds a collaborator to an organization project and sets their permission level. You must be an organization owner or a project `admin` to add a collaborator. + */ + put: operations["projects/add-collaborator"]; + /** + * Remove user as a collaborator + * @description Removes a collaborator from an organization project. You must be an organization owner or a project `admin` to remove a collaborator. + */ + delete: operations["projects/remove-collaborator"]; + }; + "/projects/{project_id}/collaborators/{username}/permission": { + /** + * Get project permission for a user + * @description Returns the collaborator's permission level for an organization project. Possible values for the `permission` key: `admin`, `write`, `read`, `none`. You must be an organization owner or a project `admin` to review a user's permission level. + */ + get: operations["projects/get-permission-for-user"]; + }; + "/projects/{project_id}/columns": { + /** + * List project columns + * @description Lists the project columns in a project. + */ + get: operations["projects/list-columns"]; + /** + * Create a project column + * @description Creates a new project column. + */ + post: operations["projects/create-column"]; + }; + "/rate_limit": { + /** + * Get rate limit status for the authenticated user + * @description **Note:** Accessing this endpoint does not count against your REST API rate limit. + * + * **Note:** The `rate` object is deprecated. If you're writing new API client code or updating existing code, you should use the `core` object instead of the `rate` object. The `core` object contains the same information that is present in the `rate` object. + */ + get: operations["rate-limit/get"]; + }; + "/repos/{org}/{repo}/actions/required_workflows": { + /** + * List repository required workflows + * @description Lists the required workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + get: operations["actions/list-repo-required-workflows"]; + }; + "/repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}": { + /** + * Get a required workflow entity for a repository + * @description Gets a specific required workflow present in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + get: operations["actions/get-repo-required-workflow"]; + }; + "/repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing": { + /** + * Get required workflow usage + * @description Gets the number of billable minutes used by a specific required workflow during the current billing cycle. + * + * Billable minutes only apply to required workflows running in private repositories that use GitHub-hosted runners. 
Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)." + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-repo-required-workflow-usage"]; + }; + "/repos/{owner}/{repo}": { + /** + * Get a repository + * @description The `parent` and `source` objects are present when the repository is a fork. `parent` is the repository this repository was forked from, `source` is the ultimate source for the network. + * + * **Note:** In order to see the `security_and_analysis` block for a repository you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + */ + get: operations["repos/get"]; + /** + * Delete a repository + * @description Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required. + * + * If an organization owner has configured the organization to prevent members from deleting organization-owned + * repositories, you will get a `403 Forbidden` response. + */ + delete: operations["repos/delete"]; + /** + * Update a repository + * @description **Note**: To edit a repository's topics, use the [Replace all repository topics](https://docs.github.com/rest/reference/repos#replace-all-repository-topics) endpoint. + */ + patch: operations["repos/update"]; + }; + "/repos/{owner}/{repo}/actions/artifacts": { + /** + * List artifacts for a repository + * @description Lists all artifacts for a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/list-artifacts-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/artifacts/{artifact_id}": { + /** + * Get an artifact + * @description Gets a specific artifact for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-artifact"]; + /** + * Delete an artifact + * @description Deletes an artifact for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + delete: operations["actions/delete-artifact"]; + }; + "/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}": { + /** + * Download an artifact + * @description Gets a redirect URL to download an archive for a repository. This URL expires after 1 minute. 
Look for `Location:` in + * the response header to find the URL for the download. The `:archive_format` must be `zip`. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/download-artifact"]; + }; + "/repos/{owner}/{repo}/actions/cache/usage": { + /** + * Get GitHub Actions cache usage for a repository + * @description Gets GitHub Actions cache usage for a repository. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-actions-cache-usage"]; + }; + "/repos/{owner}/{repo}/actions/caches": { + /** + * List GitHub Actions caches for a repository + * @description Lists the GitHub Actions caches for a repository. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-actions-cache-list"]; + /** + * Delete GitHub Actions caches for a repository (using a cache key) + * @description Deletes one or more GitHub Actions caches for a repository, using a complete cache key. By default, all caches that match the provided key are deleted, but you can optionally provide a Git ref to restrict deletions to caches that match both the provided key and the Git ref. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + delete: operations["actions/delete-actions-cache-by-key"]; + }; + "/repos/{owner}/{repo}/actions/caches/{cache_id}": { + /** + * Delete a GitHub Actions cache for a repository (using a cache ID) + * @description Deletes a GitHub Actions cache for a repository, using a cache ID. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + delete: operations["actions/delete-actions-cache-by-id"]; + }; + "/repos/{owner}/{repo}/actions/jobs/{job_id}": { + /** + * Get a job for a workflow run + * @description Gets a specific job in a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-job-for-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/jobs/{job_id}/logs": { + /** + * Download job logs for a workflow run + * @description Gets a redirect URL to download a plain text file of logs for a workflow job. This link expires after 1 minute. Look + * for `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can + * use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must + * have the `actions:read` permission to use this endpoint. 
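Because the logs endpoint described above answers with a redirect, a client can keep the `302` and read the `Location` header itself. A minimal sketch, assuming a Node 18+ `fetch` where `redirect: "manual"` exposes that header, with placeholder owner, repository, and job id:

```
// Resolve the short-lived download URL for a job's logs without following the redirect.
const res = await fetch(
  "https://api.github.com/repos/octo-org/octo-repo/actions/jobs/123456789/logs",
  {
    redirect: "manual", // keep the 302 so the Location header stays readable
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    },
  },
);
console.log(res.status, res.headers.get("location")); // URL expires after 1 minute
```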
+ */ + get: operations["actions/download-job-logs-for-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/jobs/{job_id}/rerun": { + /** + * Re-run a job from a workflow run + * @description Re-run a job and its dependent jobs in a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + post: operations["actions/re-run-job-for-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/oidc/customization/sub": { + /** + * Get the customization template for an OIDC subject claim for a repository + * @description Gets the customization template for an OpenID Connect (OIDC) subject claim. + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. GitHub Apps must have the `organization_administration:read` permission to use this endpoint. + */ + get: operations["actions/get-custom-oidc-sub-claim-for-repo"]; + /** + * Set the customization template for an OIDC subject claim for a repository + * @description Sets the customization template and `opt-in` or `opt-out` flag for an OpenID Connect (OIDC) subject claim for a repository. + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + put: operations["actions/set-custom-oidc-sub-claim-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/organization-secrets": { + /** + * List repository organization secrets + * @description Lists all organization secrets shared with a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + get: operations["actions/list-repo-organization-secrets"]; + }; + "/repos/{owner}/{repo}/actions/organization-variables": { + /** + * List repository organization variables + * @description Lists all organization variables shared with a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions_variables:read` repository permission to use this endpoint. + */ + get: operations["actions/list-repo-organization-variables"]; + }; + "/repos/{owner}/{repo}/actions/permissions": { + /** + * Get GitHub Actions permissions for a repository + * @description Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions and reusable workflows allowed to run in the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + get: operations["actions/get-github-actions-permissions-repository"]; + /** + * Set GitHub Actions permissions for a repository + * @description Sets the GitHub Actions permissions policy for enabling GitHub Actions and allowed actions and reusable workflows in the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API.
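The policy setter mapped just below is a `PUT /repos/{owner}/{repo}/actions/permissions` whose JSON body carries the `enabled` flag and the `allowed_actions` policy. A minimal sketch with placeholder owner and repository names:

```
// Enable GitHub Actions for a repository and restrict it to selected actions.
const res = await fetch(
  "https://api.github.com/repos/octo-org/octo-repo/actions/permissions",
  {
    method: "PUT",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({ enabled: true, allowed_actions: "selected" }),
  },
);
console.log(res.status); // 204 on success
```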
+ */ + put: operations["actions/set-github-actions-permissions-repository"]; + }; + "/repos/{owner}/{repo}/actions/permissions/access": { + /** + * Get the level of access for workflows outside of the repository + * @description Gets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to private repositories. + * For more information, see "[Allowing access to components in a private repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-a-private-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. + */ + get: operations["actions/get-workflow-access-to-repository"]; + /** + * Set the level of access for workflows outside of the repository + * @description Sets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to private repositories. + * For more information, see "[Allowing access to components in a private repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-a-private-repository)". + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. + */ + put: operations["actions/set-workflow-access-to-repository"]; + }; + "/repos/{owner}/{repo}/actions/permissions/selected-actions": { + /** + * Get allowed actions and reusable workflows for a repository + * @description Gets the settings for selected actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + get: operations["actions/get-allowed-actions-repository"]; + /** + * Set allowed actions and reusable workflows for a repository + * @description Sets the actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + put: operations["actions/set-allowed-actions-repository"]; + }; + "/repos/{owner}/{repo}/actions/permissions/workflow": { + /** + * Get default workflow permissions for a repository + * @description Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, + * as well as if GitHub Actions can submit approving pull request reviews. 
+ * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + get: operations["actions/get-github-actions-default-workflow-permissions-repository"]; + /** + * Set default workflow permissions for a repository + * @description Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, and sets if GitHub Actions + * can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + put: operations["actions/set-github-actions-default-workflow-permissions-repository"]; + }; + "/repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs": { + /** + * List workflow runs for a required workflow + * @description List all workflow runs for a required workflow. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + get: operations["actions/list-required-workflow-runs"]; + }; + "/repos/{owner}/{repo}/actions/runners": { + /** + * List self-hosted runners for a repository + * @description Lists all self-hosted runners configured in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + get: operations["actions/list-self-hosted-runners-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/downloads": { + /** + * List runner applications for a repository + * @description Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + get: operations["actions/list-runner-applications-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/generate-jitconfig": { + /** + * Create configuration for a just-in-time runner for a repository + * @description Generates a configuration that can be passed to the runner application at startup. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + post: operations["actions/generate-runner-jitconfig-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/registration-token": { + /** + * Create a registration token for a repository + * @description Returns a token that you can pass to the `config` script. 
The token expires after one hour. You must authenticate + * using an access token with the `repo` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token TOKEN + * ``` + */ + post: operations["actions/create-registration-token-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/remove-token": { + /** + * Create a remove token for a repository + * @description Returns a token that you can pass to remove a self-hosted runner from a repository. The token expires after one hour. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from a repository, replace TOKEN with the remove token provided by this endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + post: operations["actions/create-remove-token-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/{runner_id}": { + /** + * Get a self-hosted runner for a repository + * @description Gets a specific self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + get: operations["actions/get-self-hosted-runner-for-repo"]; + /** + * Delete a self-hosted runner from a repository + * @description Forces the removal of a self-hosted runner from a repository. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `repo` + * scope to use this endpoint. + */ + delete: operations["actions/delete-self-hosted-runner-from-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/{runner_id}/labels": { + /** + * List labels for a self-hosted runner for a repository + * @description Lists all labels for a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + get: operations["actions/list-labels-for-self-hosted-runner-for-repo"]; + /** + * Set custom labels for a self-hosted runner for a repository + * @description Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + put: operations["actions/set-custom-labels-for-self-hosted-runner-for-repo"]; + /** + * Add custom labels to a self-hosted runner for a repository + * @description Add custom labels to a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + post: operations["actions/add-custom-labels-to-self-hosted-runner-for-repo"]; + /** + * Remove all custom labels from a self-hosted runner for a repository + * @description Remove all custom labels from a self-hosted runner configured in a + * repository. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. 
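The registration-token operation shown above can feed the `./config.sh` example directly. A minimal sketch with placeholder owner and repository names, assuming a token with the `repo` scope in `GITHUB_TOKEN`:

```
// Create a short-lived runner registration token, then hand it to config.sh.
// The returned token expires after one hour.
const res = await fetch(
  "https://api.github.com/repos/octo-org/octo-repo-artifacts/actions/runners/registration-token",
  {
    method: "POST",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    },
  },
);
const { token } = await res.json();
console.log(
  `./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token ${token}`,
);
```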
+ */ + delete: operations["actions/remove-all-custom-labels-from-self-hosted-runner-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}": { + /** + * Remove a custom label from a self-hosted runner for a repository + * @description Remove a custom label from a self-hosted runner configured + * in a repository. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + delete: operations["actions/remove-custom-label-from-self-hosted-runner-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runs": { + /** + * List workflow runs for a repository + * @description Lists all workflow runs for a repository. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/list-workflow-runs-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}": { + /** + * Get a workflow run + * @description Gets a specific workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-workflow-run"]; + /** + * Delete a workflow run + * @description Delete a specific workflow run. Anyone with write access to the repository can use this endpoint. If the repository is + * private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:write` permission to use + * this endpoint. + */ + delete: operations["actions/delete-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/approvals": { + /** + * Get the review history for a workflow run + * @description Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-reviews-for-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/approve": { + /** + * Approve a workflow run for a fork pull request + * @description Approves a workflow run for a pull request from a public fork of a first time contributor. For more information, see ["Approving workflow runs from public forks](https://docs.github.com/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + post: operations["actions/approve-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts": { + /** + * List workflow run artifacts + * @description Lists artifacts for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
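+ *
+ * A minimal sketch of listing artifacts with the built-in `fetch` in Node.js 18+, replacing `OWNER`, `REPO`, and `RUN_ID`, assuming the standard `per_page` pagination parameter and that `GITHUB_TOKEN` holds a token with the `repo` scope for private repositories:
+ *
+ * ```typescript
+ * // Sketch: list artifacts produced by a workflow run.
+ * const response = await fetch(
+ *   "https://api.github.com/repos/OWNER/REPO/actions/runs/RUN_ID/artifacts?per_page=50",
+ *   {
+ *     headers: {
+ *       accept: "application/vnd.github+json",
+ *       authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *     },
+ *   },
+ * );
+ * if (!response.ok) throw new Error(`Unexpected status: ${response.status}`);
+ * // The artifact list is expected under the `artifacts` key of the JSON body.
+ * const { artifacts } = await response.json();
+ * for (const artifact of artifacts) {
+ *   console.log(artifact.name);
+ * }
+ * ```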
+ */ + get: operations["actions/list-workflow-run-artifacts"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}": { + /** + * Get a workflow run attempt + * @description Gets a specific workflow run attempt. Anyone with read access to the repository + * can use this endpoint. If the repository is private you must use an access token + * with the `repo` scope. GitHub Apps must have the `actions:read` permission to + * use this endpoint. + */ + get: operations["actions/get-workflow-run-attempt"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs": { + /** + * List jobs for a workflow run attempt + * @description Lists jobs for a specific workflow run attempt. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + */ + get: operations["actions/list-jobs-for-workflow-run-attempt"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs": { + /** + * Download workflow run attempt logs + * @description Gets a redirect URL to download an archive of log files for a specific workflow run attempt. This link expires after + * 1 minute. Look for `Location:` in the response header to find the URL for the download. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/download-workflow-run-attempt-logs"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/cancel": { + /** + * Cancel a workflow run + * @description Cancels a workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + post: operations["actions/cancel-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule": { + /** + * Review custom deployment protection rules for a workflow run + * @description Approve or reject custom deployment protection rules provided by a GitHub App for a workflow run. For more information, see "[Using environments for deployment](https://docs.github.com/actions/deployment/targeting-different-environments/using-environments-for-deployment)." + * + * **Note:** GitHub Apps can only review their own custom deployment protection rules. + * To approve or reject pending deployments that are waiting for review from a specific person or team, see [`POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments`](/rest/actions/workflow-runs#review-pending-deployments-for-a-workflow-run). + * + * GitHub Apps must have read and write permission for **Deployments** to use this endpoint. + */ + post: operations["actions/review-custom-gates-for-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/jobs": { + /** + * List jobs for a workflow run + * @description Lists jobs for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. 
GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + */ + get: operations["actions/list-jobs-for-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/logs": { + /** + * Download workflow run logs + * @description Gets a redirect URL to download an archive of log files for a workflow run. This link expires after 1 minute. Look for + * `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can use + * this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have + * the `actions:read` permission to use this endpoint. + */ + get: operations["actions/download-workflow-run-logs"]; + /** + * Delete workflow run logs + * @description Deletes all logs for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + delete: operations["actions/delete-workflow-run-logs"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments": { + /** + * Get pending deployments for a workflow run + * @description Get all deployment environments for a workflow run that are waiting for protection rules to pass. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-pending-deployments-for-run"]; + /** + * Review pending deployments for a workflow run + * @description Approve or reject pending deployments that are waiting on approval by a required reviewer. + * + * Required reviewers with read access to the repository contents and deployments can use this endpoint. Required reviewers must authenticate using an access token with the `repo` scope to use this endpoint. + */ + post: operations["actions/review-pending-deployments-for-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/rerun": { + /** + * Re-run a workflow + * @description Re-runs your workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + post: operations["actions/re-run-workflow"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs": { + /** + * Re-run failed jobs from a workflow run + * @description Re-run all of the failed jobs and their dependent jobs in a workflow run using the `id` of the workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + post: operations["actions/re-run-workflow-failed-jobs"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/timing": { + /** + * Get workflow run usage + * @description Gets the number of billable minutes and total run time for a specific workflow run. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. 
For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-workflow-run-usage"]; + }; + "/repos/{owner}/{repo}/actions/secrets": { + /** + * List repository secrets + * @description Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + get: operations["actions/list-repo-secrets"]; + }; + "/repos/{owner}/{repo}/actions/secrets/public-key": { + /** + * Get a repository public key + * @description Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + get: operations["actions/get-repo-public-key"]; + }; + "/repos/{owner}/{repo}/actions/secrets/{secret_name}": { + /** + * Get a repository secret + * @description Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + get: operations["actions/get-repo-secret"]; + /** + * Create or update a repository secret + * @description Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. 
+ * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["actions/create-or-update-repo-secret"]; + /** + * Delete a repository secret + * @description Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + delete: operations["actions/delete-repo-secret"]; + }; + "/repos/{owner}/{repo}/actions/variables": { + /** + * List repository variables + * @description Lists all repository variables. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions_variables:read` repository permission to use this endpoint. + */ + get: operations["actions/list-repo-variables"]; + /** + * Create a repository variable + * @description Creates a repository variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions_variables:write` repository permission to use this endpoint. + */ + post: operations["actions/create-repo-variable"]; + }; + "/repos/{owner}/{repo}/actions/variables/{name}": { + /** + * Get a repository variable + * @description Gets a specific variable in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions_variables:read` repository permission to use this endpoint. + */ + get: operations["actions/get-repo-variable"]; + /** + * Delete a repository variable + * @description Deletes a repository variable using the variable name. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions_variables:write` repository permission to use this endpoint. + */ + delete: operations["actions/delete-repo-variable"]; + /** + * Update a repository variable + * @description Updates a repository variable that you can reference in a GitHub Actions workflow. 
+ * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions_variables:write` repository permission to use this endpoint. + */ + patch: operations["actions/update-repo-variable"]; + }; + "/repos/{owner}/{repo}/actions/workflows": { + /** + * List repository workflows + * @description Lists the workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/list-repo-workflows"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}": { + /** + * Get a workflow + * @description Gets a specific workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-workflow"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable": { + /** + * Disable a workflow + * @description Disables a workflow and sets the `state` of the workflow to `disabled_manually`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + put: operations["actions/disable-workflow"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches": { + /** + * Create a workflow dispatch event + * @description You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must configure your GitHub Actions workflow to run when the [`workflow_dispatch` webhook](/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch) event occurs. The `inputs` are configured in the workflow file. For more information about how to configure the `workflow_dispatch` event in the workflow file, see "[Events that trigger workflows](/actions/reference/events-that-trigger-workflows#workflow_dispatch)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)." + */ + post: operations["actions/create-workflow-dispatch"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable": { + /** + * Enable a workflow + * @description Enables a workflow and sets the `state` of the workflow to `active`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + put: operations["actions/enable-workflow"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs": { + /** + * List workflow runs for a workflow + * @description List all workflow runs for a workflow. 
You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters).
+ *
+ * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope.
+ */
+ get: operations["actions/list-workflow-runs"];
+ };
+ "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing": {
+ /**
+ * Get workflow usage
+ * @description Gets the number of billable minutes used by a specific workflow during the current billing cycle. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)".
+ *
+ * You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
+ */
+ get: operations["actions/get-workflow-usage"];
+ };
+ "/repos/{owner}/{repo}/assignees": {
+ /**
+ * List assignees
+ * @description Lists the [available assignees](https://docs.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) for issues in a repository.
+ */
+ get: operations["issues/list-assignees"];
+ };
+ "/repos/{owner}/{repo}/assignees/{assignee}": {
+ /**
+ * Check if a user can be assigned
+ * @description Checks if a user has permission to be assigned to an issue in this repository.
+ *
+ * If the `assignee` can be assigned to issues in the repository, a `204` status code with no content is returned.
+ *
+ * Otherwise a `404` status code is returned.
+ */
+ get: operations["issues/check-user-can-be-assigned"];
+ };
+ "/repos/{owner}/{repo}/autolinks": {
+ /**
+ * List all autolinks of a repository
+ * @description This returns a list of autolinks configured for the given repository.
+ *
+ * Information about autolinks is only available to repository administrators.
+ */
+ get: operations["repos/list-autolinks"];
+ /**
+ * Create an autolink reference for a repository
+ * @description Users with admin access to the repository can create an autolink.
+ */
+ post: operations["repos/create-autolink"];
+ };
+ "/repos/{owner}/{repo}/autolinks/{autolink_id}": {
+ /**
+ * Get an autolink reference of a repository
+ * @description This returns a single autolink reference by ID that was configured for the given repository.
+ *
+ * Information about autolinks is only available to repository administrators.
+ */
+ get: operations["repos/get-autolink"];
+ /**
+ * Delete an autolink reference from a repository
+ * @description This deletes a single autolink reference by ID that was configured for the given repository.
+ *
+ * Information about autolinks is only available to repository administrators.
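+ *
+ * A minimal sketch of deleting an autolink with the built-in `fetch` in Node.js 18+, replacing `OWNER`, `REPO`, and `AUTOLINK_ID` and assuming `GITHUB_TOKEN` holds a token with admin access to the repository:
+ *
+ * ```typescript
+ * // Sketch: delete a single autolink reference by ID.
+ * const response = await fetch(
+ *   "https://api.github.com/repos/OWNER/REPO/autolinks/AUTOLINK_ID",
+ *   {
+ *     method: "DELETE",
+ *     headers: {
+ *       accept: "application/vnd.github+json",
+ *       authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *     },
+ *   },
+ * );
+ * // A successful deletion is expected to respond with 204 No Content.
+ * if (response.status !== 204) {
+ *   throw new Error(`Unexpected status: ${response.status}`);
+ * }
+ * ```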
+ */ + delete: operations["repos/delete-autolink"]; + }; + "/repos/{owner}/{repo}/automated-security-fixes": { + /** + * Enable automated security fixes + * @description Enables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/articles/configuring-automated-security-fixes)". + */ + put: operations["repos/enable-automated-security-fixes"]; + /** + * Disable automated security fixes + * @description Disables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/articles/configuring-automated-security-fixes)". + */ + delete: operations["repos/disable-automated-security-fixes"]; + }; + "/repos/{owner}/{repo}/branches": { + /** List branches */ + get: operations["repos/list-branches"]; + }; + "/repos/{owner}/{repo}/branches/{branch}": { + /** Get a branch */ + get: operations["repos/get-branch"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection": { + /** + * Get branch protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + get: operations["repos/get-branch-protection"]; + /** + * Update branch protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Protecting a branch requires admin or owner permissions to the repository. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + * + * **Note**: The list of users, apps, and teams in total is limited to 100 items. + */ + put: operations["repos/update-branch-protection"]; + /** + * Delete branch protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + delete: operations["repos/delete-branch-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": { + /** + * Get admin branch protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
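+ *
+ * A minimal sketch of reading the admin enforcement status with the built-in `fetch` in Node.js 18+, replacing `OWNER`, `REPO`, and `BRANCH` and assuming `GITHUB_TOKEN` holds a token with the `repo` scope:
+ *
+ * ```typescript
+ * // Sketch: check whether branch protection is enforced for administrators.
+ * const response = await fetch(
+ *   "https://api.github.com/repos/OWNER/REPO/branches/BRANCH/protection/enforce_admins",
+ *   {
+ *     headers: {
+ *       accept: "application/vnd.github+json",
+ *       authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *     },
+ *   },
+ * );
+ * if (!response.ok) throw new Error(`Unexpected status: ${response.status}`);
+ * // The response is expected to include an `enabled` boolean.
+ * const { enabled } = await response.json();
+ * console.log(`Admin enforcement is ${enabled ? "on" : "off"}`);
+ * ```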
+ */ + get: operations["repos/get-admin-branch-protection"]; + /** + * Set admin branch protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Adding admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + post: operations["repos/set-admin-branch-protection"]; + /** + * Delete admin branch protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removing admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + delete: operations["repos/delete-admin-branch-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": { + /** + * Get pull request review protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + get: operations["repos/get-pull-request-review-protection"]; + /** + * Delete pull request review protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + delete: operations["repos/delete-pull-request-review-protection"]; + /** + * Update pull request review protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating pull request review enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. 
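+ *
+ * A minimal sketch of updating the review requirements with the built-in `fetch` in Node.js 18+, replacing `OWNER`, `REPO`, and `BRANCH`; the body fields shown (`required_approving_review_count`, `dismiss_stale_reviews`) are assumed from this operation's request schema, and `GITHUB_TOKEN` is assumed to hold a token with admin access:
+ *
+ * ```typescript
+ * // Sketch: require two approving reviews and dismiss stale approvals.
+ * const response = await fetch(
+ *   "https://api.github.com/repos/OWNER/REPO/branches/BRANCH/protection/required_pull_request_reviews",
+ *   {
+ *     method: "PATCH",
+ *     headers: {
+ *       accept: "application/vnd.github+json",
+ *       authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *       "content-type": "application/json",
+ *     },
+ *     body: JSON.stringify({
+ *       required_approving_review_count: 2,
+ *       dismiss_stale_reviews: true,
+ *     }),
+ *   },
+ * );
+ * if (!response.ok) throw new Error(`Unexpected status: ${response.status}`);
+ * ```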
+ */ + patch: operations["repos/update-pull-request-review-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": { + /** + * Get commit signature protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to check whether a branch requires signed commits. An enabled status of `true` indicates you must sign commits on this branch. For more information, see [Signing commits with GPG](https://docs.github.com/articles/signing-commits-with-gpg) in GitHub Help. + * + * **Note**: You must enable branch protection to require signed commits. + */ + get: operations["repos/get-commit-signature-protection"]; + /** + * Create commit signature protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to require signed commits on a branch. You must enable branch protection to require signed commits. + */ + post: operations["repos/create-commit-signature-protection"]; + /** + * Delete commit signature protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to disable required signed commits on a branch. You must enable branch protection to require signed commits. + */ + delete: operations["repos/delete-commit-signature-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": { + /** + * Get status checks protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + get: operations["repos/get-status-checks-protection"]; + /** + * Remove status check protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + delete: operations["repos/remove-status-check-protection"]; + /** + * Update status check protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. + */ + patch: operations["repos/update-status-check-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": { + /** + * Get all status check contexts + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + get: operations["repos/get-all-status-check-contexts"]; + /** + * Set status check contexts + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + put: operations["repos/set-status-check-contexts"]; + /** + * Add status check contexts + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + post: operations["repos/add-status-check-contexts"]; + /** + * Remove status check contexts + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + delete: operations["repos/remove-status-check-contexts"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions": { + /** + * Get access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ * + * Lists who has access to this protected branch. + * + * **Note**: Users, apps, and teams `restrictions` are only available for organization-owned repositories. + */ + get: operations["repos/get-access-restrictions"]; + /** + * Delete access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Disables the ability to restrict who can push to this branch. + */ + delete: operations["repos/delete-access-restrictions"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": { + /** + * Get apps with access to the protected branch + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the GitHub Apps that have push access to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + get: operations["repos/get-apps-with-access-to-protected-branch"]; + /** + * Set app access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of apps that have push access to this branch. This removes all apps that previously had push access and grants push access to the new list of apps. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + put: operations["repos/set-app-access-restrictions"]; + /** + * Add app access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified apps push access for this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + post: operations["repos/add-app-access-restrictions"]; + /** + * Remove app access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of an app to push to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + delete: operations["repos/remove-app-access-restrictions"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": { + /** + * Get teams with access to the protected branch + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the teams who have push access to this branch. The list includes child teams. + */ + get: operations["repos/get-teams-with-access-to-protected-branch"]; + /** + * Set team access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of teams that have push access to this branch. This removes all teams that previously had push access and grants push access to the new list of teams. Team restrictions include child teams. + */ + put: operations["repos/set-team-access-restrictions"]; + /** + * Add team access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified teams push access for this branch. You can also give push access to child teams. + */ + post: operations["repos/add-team-access-restrictions"]; + /** + * Remove team access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a team to push to this branch. You can also remove push access for child teams. + */ + delete: operations["repos/remove-team-access-restrictions"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": { + /** + * Get users with access to the protected branch + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the people who have push access to this branch. + */ + get: operations["repos/get-users-with-access-to-protected-branch"]; + /** + * Set user access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of people that have push access to this branch. This removes all people that previously had push access and grants push access to the new list of people. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + put: operations["repos/set-user-access-restrictions"]; + /** + * Add user access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified people push access for this branch. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + post: operations["repos/add-user-access-restrictions"]; + /** + * Remove user access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a user to push to this branch. + * + * | Type | Description | + * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames of the people who should no longer have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + delete: operations["repos/remove-user-access-restrictions"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/rename": { + /** + * Rename a branch + * @description Renames a branch in a repository. + * + * **Note:** Although the API responds immediately, the branch rename process might take some extra time to complete in the background. You won't be able to push to the old branch name while the rename process is in progress. 
For more information, see "[Renaming a branch](https://docs.github.com/github/administering-a-repository/renaming-a-branch)".
+ *
+ * The permissions required to use this endpoint depend on whether you are renaming the default branch.
+ *
+ * To rename a non-default branch:
+ *
+ * * Users must have push access.
+ * * GitHub Apps must have the `contents:write` repository permission.
+ *
+ * To rename the default branch:
+ *
+ * * Users must have admin or owner permissions.
+ * * GitHub Apps must have the `administration:write` repository permission.
+ */
+ post: operations["repos/rename-branch"];
+ };
+ "/repos/{owner}/{repo}/check-runs": {
+ /**
+ * Create a check run
+ * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array.
+ *
+ * Creates a new check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to create check runs.
+ *
+ * In a check suite, GitHub limits the number of check runs with the same name to 1000. Once these check runs exceed 1000, GitHub will start to automatically delete older check runs.
+ */
+ post: operations["checks/create"];
+ };
+ "/repos/{owner}/{repo}/check-runs/{check_run_id}": {
+ /**
+ * Get a check run
+ * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array.
+ *
+ * Gets a single check run using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository.
+ */
+ get: operations["checks/get"];
+ /**
+ * Update a check run
+ * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array.
+ *
+ * Updates a check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to edit check runs.
+ */
+ patch: operations["checks/update"];
+ };
+ "/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations": {
+ /**
+ * List check run annotations
+ * @description Lists annotations for a check run using the check run `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get annotations for a check run. OAuth Apps and authenticated users must have the `repo` scope to get annotations for a check run in a private repository.
+ */
+ get: operations["checks/list-annotations"];
+ };
+ "/repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest": {
+ /**
+ * Rerequest a check run
+ * @description Triggers GitHub to rerequest an existing check run, without pushing new code to a repository. This endpoint will trigger the [`check_run` webhook](https://docs.github.com/webhooks/event-payloads/#check_run) event with the action `rerequested`. When a check run is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared.
+ *
+ * To rerequest a check run, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository.
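+ *
+ * A minimal sketch of rerequesting a check run with the built-in `fetch` in Node.js 18+, replacing `OWNER`, `REPO`, and `CHECK_RUN_ID` and assuming `GITHUB_TOKEN` holds a token with the permissions described above:
+ *
+ * ```typescript
+ * // Sketch: ask GitHub to rerequest an existing check run.
+ * const response = await fetch(
+ *   "https://api.github.com/repos/OWNER/REPO/check-runs/CHECK_RUN_ID/rerequest",
+ *   {
+ *     method: "POST",
+ *     headers: {
+ *       accept: "application/vnd.github+json",
+ *       authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *     },
+ *   },
+ * );
+ * if (!response.ok) throw new Error(`Unexpected status: ${response.status}`);
+ * ```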
+ */ + post: operations["checks/rerequest-run"]; + }; + "/repos/{owner}/{repo}/check-suites": { + /** + * Create a check suite + * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * By default, check suites are automatically created when you create a [check run](https://docs.github.com/rest/reference/checks#check-runs). You only need to use this endpoint for manually creating check suites when you've disabled automatic creation using "[Update repository preferences for check suites](https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites)". Your GitHub App must have the `checks:write` permission to create check suites. + */ + post: operations["checks/create-suite"]; + }; + "/repos/{owner}/{repo}/check-suites/preferences": { + /** + * Update repository preferences for check suites + * @description Changes the default automatic flow when creating check suites. By default, a check suite is automatically created each time code is pushed to a repository. When you disable the automatic creation of check suites, you can manually [Create a check suite](https://docs.github.com/rest/reference/checks#create-a-check-suite). You must have admin permissions in the repository to set preferences for check suites. + */ + patch: operations["checks/set-suites-preferences"]; + }; + "/repos/{owner}/{repo}/check-suites/{check_suite_id}": { + /** + * Get a check suite + * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Gets a single check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + get: operations["checks/get-suite"]; + }; + "/repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs": { + /** + * List check runs in a check suite + * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + get: operations["checks/list-for-suite"]; + }; + "/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest": { + /** + * Rerequest a check suite + * @description Triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. This endpoint will trigger the [`check_suite` webhook](https://docs.github.com/webhooks/event-payloads/#check_suite) event with the action `rerequested`. When a check suite is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. 
+ *
+ * To rerequest a check suite, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository.
+ */
+ post: operations["checks/rerequest-suite"];
+ };
+ "/repos/{owner}/{repo}/code-scanning/alerts": {
+ /**
+ * List code scanning alerts for a repository
+ * @description Lists all open code scanning alerts for the default branch (usually `main`
+ * or `master`). You must use an access token with the `security_events` scope to use
+ * this endpoint with private repos; the `public_repo` scope also grants permission to read
+ * security events on public repos only. GitHub Apps must have the `security_events` read
+ * permission to use this endpoint.
+ *
+ * The response includes a `most_recent_instance` object.
+ * This provides details of the most recent instance of this alert
+ * for the default branch or for the specified Git reference
+ * (if you used `ref` in the request).
+ */
+ get: operations["code-scanning/list-alerts-for-repo"];
+ };
+ "/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}": {
+ /**
+ * Get a code scanning alert
+ * @description Gets a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repos; the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint.
+ */
+ get: operations["code-scanning/get-alert"];
+ /**
+ * Update a code scanning alert
+ * @description Updates the status of a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint.
+ */
+ patch: operations["code-scanning/update-alert"];
+ };
+ "/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances": {
+ /**
+ * List instances of a code scanning alert
+ * @description Lists all instances of the specified code scanning alert.
+ * You must use an access token with the `security_events` scope to use this endpoint with private repos;
+ * the `public_repo` scope also grants permission to read security events on public repos only.
+ * GitHub Apps must have the `security_events` read permission to use this endpoint.
+ */
+ get: operations["code-scanning/list-alert-instances"];
+ };
+ "/repos/{owner}/{repo}/code-scanning/analyses": {
+ /**
+ * List code scanning analyses for a repository
+ * @description Lists the details of all code scanning analyses for a repository,
+ * starting with the most recent.
+ * The response is paginated and you can use the `page` and `per_page` parameters
+ * to list the analyses you're interested in.
+ * By default 30 analyses are listed per page.
+ *
+ * The `rules_count` field in the response gives the number of rules
+ * that were run in the analysis.
+ * For very old analyses this data is not available,
+ * and `0` is returned in this field.
+ *
+ * You must use an access token with the `security_events` scope to use this endpoint with private repos;
+ * the `public_repo` scope also grants permission to read security events on public repos only.
+ * GitHub Apps must have the `security_events` read permission to use this endpoint.
+ *
+ * **Deprecation notice**:
+ * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint.
The example response reflects this change. The tool name can now be found inside the `tool` field. + */ + get: operations["code-scanning/list-recent-analyses"]; + }; + "/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}": { + /** + * Get a code scanning analysis for a repository + * @description Gets a specified code scanning analysis for a repository. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * The default JSON response contains fields that describe the analysis. + * This includes the Git reference and commit SHA to which the analysis relates, + * the datetime of the analysis, the name of the code scanning tool, + * and the number of alerts. + * + * The `rules_count` field in the default response give the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * If you use the Accept header `application/sarif+json`, + * the response contains the analysis data that was uploaded. + * This is formatted as + * [SARIF version 2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html). + */ + get: operations["code-scanning/get-analysis"]; + /** + * Delete a code scanning analysis from a repository + * @description Deletes a specified code scanning analysis from a repository. For + * private repositories, you must use an access token with the `repo` scope. For public repositories, + * you must use an access token with `public_repo` scope. + * GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * You can delete one analysis at a time. + * To delete a series of analyses, start with the most recent analysis and work backwards. + * Conceptually, the process is similar to the undo function in a text editor. + * + * When you list the analyses for a repository, + * one or more will be identified as deletable in the response: + * + * ``` + * "deletable": true + * ``` + * + * An analysis is deletable when it's the most recent in a set of analyses. + * Typically, a repository will have multiple sets of analyses + * for each enabled code scanning tool, + * where a set is determined by a unique combination of analysis values: + * + * * `ref` + * * `tool` + * * `category` + * + * If you attempt to delete an analysis that is not the most recent in a set, + * you'll get a 400 response with the message: + * + * ``` + * Analysis specified is not deletable. + * ``` + * + * The response from a successful `DELETE` operation provides you with + * two alternative URLs for deleting the next analysis in the set: + * `next_analysis_url` and `confirm_delete_url`. + * Use the `next_analysis_url` URL if you want to avoid accidentally deleting the final analysis + * in a set. This is a useful option if you want to preserve at least one analysis + * for the specified tool in your repository. + * Use the `confirm_delete_url` URL if you are content to remove all analyses for a tool. + * When you delete the last analysis in a set, the value of `next_analysis_url` and `confirm_delete_url` + * in the 200 response is `null`. + * + * As an example of the deletion process, + * let's imagine that you added a workflow that configured a particular code scanning tool + * to analyze the code in a repository. 
This tool has added 15 analyses: + * 10 on the default branch, and another 5 on a topic branch. + * You therefore have two separate sets of analyses for this tool. + * You've now decided that you want to remove all of the analyses for the tool. + * To do this you must make 15 separate deletion requests. + * To start, you must find an analysis that's identified as deletable. + * Each set of analyses always has one that's identified as deletable. + * Having found the deletable analysis for one of the two sets, + * delete this analysis and then continue deleting the next analysis in the set until they're all deleted. + * Then repeat the process for the second set. + * The procedure therefore consists of a nested loop: + * + * **Outer loop**: + * * List the analyses for the repository, filtered by tool. + * * Parse this list to find a deletable analysis. If found: + * + * **Inner loop**: + * * Delete the identified analysis. + * * Parse the response for the value of `confirm_delete_url` and, if found, use this in the next iteration. + * + * The above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the `confirm_delete_url` value. Alternatively, you could use the `next_analysis_url` value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely. + */ + delete: operations["code-scanning/delete-analysis"]; + }; + "/repos/{owner}/{repo}/code-scanning/codeql/databases": { + /** + * List CodeQL databases for a repository + * @description Lists the CodeQL databases that are available in a repository. + * + * For private repositories, you must use an access token with the `security_events` scope. + * For public repositories, you can use tokens with the `security_events` or `public_repo` scope. + * GitHub Apps must have the `contents` read permission to use this endpoint. + */ + get: operations["code-scanning/list-codeql-databases"]; + }; + "/repos/{owner}/{repo}/code-scanning/codeql/databases/{language}": { + /** + * Get a CodeQL database for a repository + * @description Gets a CodeQL database for a language in a repository. + * + * By default this endpoint returns JSON metadata about the CodeQL database. To + * download the CodeQL database binary content, set the `Accept` header of the request + * to [`application/zip`](https://docs.github.com/rest/overview/media-types), and make sure + * your HTTP client is configured to follow redirects or use the `Location` header + * to make a second request to get the redirect URL. + * + * For private repositories, you must use an access token with the `security_events` scope. + * For public repositories, you can use tokens with the `security_events` or `public_repo` scope. + * GitHub Apps must have the `contents` read permission to use this endpoint. + */ + get: operations["code-scanning/get-codeql-database"]; + }; + "/repos/{owner}/{repo}/code-scanning/default-setup": { + /** + * Get a code scanning default setup configuration + * @description Gets a code scanning default setup configuration. + * You must use an access token with the `repo` scope to use this endpoint with private repos or the `public_repo` + * scope for public repos. GitHub Apps must have the `repo` write permission to use this endpoint. + */ + get: operations["code-scanning/get-default-setup"]; + /** + * Update a code scanning default setup configuration + * @description Updates a code scanning default setup configuration. 
+ * You must use an access token with the `repo` scope to use this endpoint with private repos or the `public_repo` + * scope for public repos. GitHub Apps must have the `repo` write permission to use this endpoint. + */ + patch: operations["code-scanning/update-default-setup"]; + }; + "/repos/{owner}/{repo}/code-scanning/sarifs": { + /** + * Upload an analysis as SARIF data + * @description Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint for private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * There are two places where you can upload code scanning results. + * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests)." + * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." + * + * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example: + * + * ``` + * gzip -c analysis-data.sarif | base64 -w0 + * ``` + *
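+ * As a hedged sketch (not part of the upstream description), the same compression and encoding can be done from Node/TypeScript before posting the payload; the `commit_sha`, `ref`, and `sarif` request body fields below are assumptions about this operation's parameters, and `GITHUB_TOKEN` is a placeholder for a suitably scoped token:
+ *
+ * ```
+ * import { readFileSync } from 'node:fs'
+ * import { gzipSync } from 'node:zlib'
+ *
+ * const owner = 'octocat' // replace with the repository owner
+ * const repo = 'hello-world' // replace with the repository name
+ *
+ * // gzip-compress the SARIF file and Base64-encode it, as described above
+ * const sarif = gzipSync(readFileSync('analysis-data.sarif')).toString('base64')
+ *
+ * await fetch(`https://api.github.com/repos/${owner}/${repo}/code-scanning/sarifs`, {
+ *   method: 'POST',
+ *   headers: {
+ *     Accept: 'application/vnd.github+json',
+ *     Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *     'Content-Type': 'application/json',
+ *   },
+ *   body: JSON.stringify({
+ *     commit_sha: 'replace-with-the-analyzed-commit-sha',
+ *     ref: 'refs/heads/my-branch',
+ *     sarif,
+ *   }),
+ * })
+ * ```
+ *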
+ * SARIF upload supports a maximum number of entries per the following data objects, and an analysis will be rejected if any of these objects is above its maximum value. For some objects, there are additional values over which the entries will be ignored while keeping the most important entries whenever applicable. + * To get the most out of your analysis when it includes data above the supported limits, try to optimize the analysis configuration. For example, for the CodeQL tool, identify and remove the most noisy queries. + * + * + * | **SARIF data** | **Maximum values** | **Additional limits** | + * |----------------------------------|:------------------:|----------------------------------------------------------------------------------| + * | Runs per file | 20 | | + * | Results per run | 25,000 | Only the top 5,000 results will be included, prioritized by severity. | + * | Rules per run | 25,000 | | + * | Tool extensions per run | 100 | | + * | Thread Flow Locations per result | 10,000 | Only the top 1,000 Thread Flow Locations will be included, using prioritization. | + * | Location per result | 1,000 | Only 100 locations will be included. | + * | Tags per rule | 20 | Only 10 tags will be included. | + * + * + * The `202 Accepted` response includes an `id` value. + * You can use this ID to check the status of the upload by using it in the `/sarifs/{sarif_id}` endpoint. + * For more information, see "[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload)." + */ + post: operations["code-scanning/upload-sarif"]; + }; + "/repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}": { + /** + * Get information about a SARIF upload + * @description Gets information about a SARIF upload, including the status and the URL of the analysis that was uploaded so that you can retrieve details of the analysis. For more information, see "[Get a code scanning analysis for a repository](/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository)." You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + get: operations["code-scanning/get-sarif"]; + }; + "/repos/{owner}/{repo}/codeowners/errors": { + /** + * List CODEOWNERS errors + * @description List any syntax errors that are detected in the CODEOWNERS + * file. + * + * For more information about the correct CODEOWNERS syntax, + * see "[About code owners](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners)." + */ + get: operations["repos/codeowners-errors"]; + }; + "/repos/{owner}/{repo}/codespaces": { + /** + * List codespaces in a repository for the authenticated user + * @description Lists the codespaces associated to a specified repository and the authenticated user. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + get: operations["codespaces/list-in-repository-for-authenticated-user"]; + /** + * Create a codespace in a repository + * @description Creates a codespace owned by the authenticated user in the specified repository. 
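+ *
+ * A minimal hedged sketch (not part of the upstream description) of creating a codespace with `fetch` on Node 18+, using a `codespace`-scoped token exported as `GITHUB_TOKEN`; the optional `ref` body field is an assumption about this operation's parameters:
+ *
+ * ```
+ * const owner = 'octocat' // replace with the repository owner
+ * const repo = 'hello-world' // replace with the repository name
+ *
+ * const codespace = await fetch(`https://api.github.com/repos/${owner}/${repo}/codespaces`, {
+ *   method: 'POST',
+ *   headers: {
+ *     Accept: 'application/vnd.github+json',
+ *     Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *     'Content-Type': 'application/json',
+ *   },
+ *   body: JSON.stringify({ ref: 'main' }), // assumed optional field selecting the branch to open
+ * }).then((res) => res.json())
+ * console.log(codespace)
+ * ```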
+ * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + post: operations["codespaces/create-with-repo-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/codespaces/devcontainers": { + /** + * List devcontainer configurations in a repository for the authenticated user + * @description Lists the devcontainer.json files associated with a specified repository and the authenticated user. These files + * specify launchpoint configurations for codespaces created within the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + get: operations["codespaces/list-devcontainers-in-repository-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/codespaces/machines": { + /** + * List available machine types for a repository + * @description List the machine types available for a given repository based on its configuration. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_metadata` repository permission to use this endpoint. + */ + get: operations["codespaces/repo-machines-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/codespaces/new": { + /** + * Get default attributes for a codespace + * @description Gets the default attributes for codespaces created by the user with the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + get: operations["codespaces/pre-flight-with-repo-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/codespaces/secrets": { + /** + * List repository secrets + * @description Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have write access to the `codespaces_secrets` repository permission to use this endpoint. + */ + get: operations["codespaces/list-repo-secrets"]; + }; + "/repos/{owner}/{repo}/codespaces/secrets/public-key": { + /** + * Get a repository public key + * @description Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have write access to the `codespaces_secrets` repository permission to use this endpoint. + */ + get: operations["codespaces/get-repo-public-key"]; + }; + "/repos/{owner}/{repo}/codespaces/secrets/{secret_name}": { + /** + * Get a repository secret + * @description Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have write access to the `codespaces_secrets` repository permission to use this endpoint. + */ + get: operations["codespaces/get-repo-secret"]; + /** + * Create or update a repository secret + * @description Creates or updates a repository secret with an encrypted value. 
Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have write access to the `codespaces_secrets` + * repository permission to use this endpoint. + * + * #### Example of encrypting a secret using Node.js + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * #### Example of encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example of encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example of encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["codespaces/create-or-update-repo-secret"]; + /** + * Delete a repository secret + * @description Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have write access to the `codespaces_secrets` repository permission to use this endpoint. 
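+ *
+ * A minimal hedged sketch (not part of the upstream description) of deleting a secret with `fetch` on Node 18+, assuming a `repo`-scoped token exported as `GITHUB_TOKEN`:
+ *
+ * ```
+ * const owner = 'octocat' // replace with the repository owner
+ * const repo = 'hello-world' // replace with the repository name
+ * const secretName = 'MY_SECRET' // replace with the secret name
+ *
+ * await fetch(`https://api.github.com/repos/${owner}/${repo}/codespaces/secrets/${secretName}`, {
+ *   method: 'DELETE',
+ *   headers: {
+ *     Accept: 'application/vnd.github+json',
+ *     Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *   },
+ * })
+ * ```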
+ */ + delete: operations["codespaces/delete-repo-secret"]; + }; + "/repos/{owner}/{repo}/collaborators": { + /** + * List repository collaborators + * @description For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * Organization members with write, maintain, or admin privileges on the organization-owned repository can use this endpoint. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + get: operations["repos/list-collaborators"]; + }; + "/repos/{owner}/{repo}/collaborators/{username}": { + /** + * Check if a user is a repository collaborator + * @description For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + get: operations["repos/check-collaborator"]; + /** + * Add a repository collaborator + * @description This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Adding an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." + * + * For more information on permission levels, see "[Repository permission levels for an organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". There are restrictions on which permissions can be granted to organization members when an organization base role is in place. In this case, the permission being given must be equal to or higher than the org base permission. Otherwise, the request will fail with: + * + * ``` + * Cannot assign {member} permission of {role name} + * ``` + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. 
For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * The invitee will receive a notification that they have been invited to the repository, which they must accept or decline. They may do this via the notifications page, the email they receive, or by using the [repository invitations API endpoints](https://docs.github.com/rest/reference/repos#invitations). + * + * **Updating an existing collaborator's permission level** + * + * The endpoint can also be used to change the permissions of an existing collaborator without first removing and re-adding the collaborator. To change the permissions, use the same endpoint and pass a different `permission` parameter. The response will be a `204`, with no other indication that the permission level changed. + * + * **Rate limits** + * + * You are limited to sending 50 invitations to a repository per 24 hour period. Note there is no limit if you are inviting organization members to an organization repository. + */ + put: operations["repos/add-collaborator"]; + /** + * Remove a repository collaborator + * @description Removes a collaborator from a repository. + * + * To use this endpoint, the authenticated user must either be an administrator of the repository or target themselves for removal. + * + * This endpoint also: + * - Cancels any outstanding invitations + * - Unassigns the user from any issues + * - Removes access to organization projects if the user is not an organization member and is not a collaborator on any other organization repositories. + * - Unstars the repository + * - Updates access permissions to packages + * + * Removing a user as a collaborator has the following effects on forks: + * - If the user had access to a fork through their membership to this repository, the user will also be removed from the fork. + * - If the user had their own fork of the repository, the fork will be deleted. + * - If the user still has read access to the repository, open pull requests by this user from a fork will be denied. + * + * **Note**: A user can still have access to the repository through organization permissions like base repository permissions. + * + * Although the API responds immediately, the additional permission updates might take some extra time to complete in the background. + * + * For more information on fork permissions, see "[About permissions and visibility of forks](https://docs.github.com/pull-requests/collaborating-with-pull-requests/working-with-forks/about-permissions-and-visibility-of-forks)". + */ + delete: operations["repos/remove-collaborator"]; + }; + "/repos/{owner}/{repo}/collaborators/{username}/permission": { + /** + * Get repository permissions for a user + * @description Checks the repository permission of a collaborator. The possible repository permissions are `admin`, `write`, `read`, and `none`. + */ + get: operations["repos/get-collaborator-permission-level"]; + }; + "/repos/{owner}/{repo}/comments": { + /** + * List commit comments for a repository + * @description Commit Comments use [these custom media types](https://docs.github.com/rest/reference/repos#custom-media-types). You can read more about the use of media types in the API [here](https://docs.github.com/rest/overview/media-types/). + * + * Comments are ordered by ascending ID.
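+ *
+ * A minimal hedged sketch (not part of the upstream description) of listing these comments with `fetch` on Node 18+, assuming a token exported as `GITHUB_TOKEN`:
+ *
+ * ```
+ * const owner = 'octocat' // replace with the repository owner
+ * const repo = 'hello-world' // replace with the repository name
+ *
+ * const comments = await fetch(`https://api.github.com/repos/${owner}/${repo}/comments?per_page=100`, {
+ *   headers: {
+ *     Accept: 'application/vnd.github+json',
+ *     Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *   },
+ * }).then((res) => res.json())
+ * console.log(comments.length)
+ * ```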
+ */ + get: operations["repos/list-commit-comments-for-repo"]; + }; + "/repos/{owner}/{repo}/comments/{comment_id}": { + /** Get a commit comment */ + get: operations["repos/get-commit-comment"]; + /** Delete a commit comment */ + delete: operations["repos/delete-commit-comment"]; + /** Update a commit comment */ + patch: operations["repos/update-commit-comment"]; + }; + "/repos/{owner}/{repo}/comments/{comment_id}/reactions": { + /** + * List reactions for a commit comment + * @description List the reactions to a [commit comment](https://docs.github.com/rest/reference/repos#comments). + */ + get: operations["reactions/list-for-commit-comment"]; + /** + * Create reaction for a commit comment + * @description Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with an HTTP `200` status means that you already added the reaction type to this commit comment. + */ + post: operations["reactions/create-for-commit-comment"]; + }; + "/repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}": { + /** + * Delete a commit comment reaction + * @description **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). + */ + delete: operations["reactions/delete-for-commit-comment"]; + }; + "/repos/{owner}/{repo}/commits": { + /** + * List commits + * @description **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. 
| + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["repos/list-commits"]; + }; + "/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head": { + /** + * List branches for HEAD commit + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Returns all branches where the given commit SHA is the HEAD, or latest commit for the branch. + */ + get: operations["repos/list-branches-for-head-commit"]; + }; + "/repos/{owner}/{repo}/commits/{commit_sha}/comments": { + /** + * List commit comments + * @description Use the `:commit_sha` to specify the commit that will have its comments listed. + */ + get: operations["repos/list-comments-for-commit"]; + /** + * Create a commit comment + * @description Create a comment for a commit using its `:commit_sha`. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["repos/create-commit-comment"]; + }; + "/repos/{owner}/{repo}/commits/{commit_sha}/pulls": { + /** + * List pull requests associated with a commit + * @description Lists the merged pull request that introduced the commit to the repository. If the commit is not present in the default branch, will only return open pull requests associated with the commit. + */ + get: operations["repos/list-pull-requests-associated-with-commit"]; + }; + "/repos/{owner}/{repo}/commits/{ref}": { + /** + * Get a commit + * @description Returns the contents of a single commit reference. You must have `read` access for the repository to use this endpoint. + * + * **Note:** If there are more than 300 files in the commit diff, the response will include pagination link headers for the remaining files, up to a limit of 3000 files. Each page contains the static commit information, and the only changes are to the file listing. + * + * You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch `diff` and `patch` formats. Diffs with binary data will have no `patch` property. + * + * To return only the SHA-1 hash of the commit reference, you can provide the `sha` custom [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) in the `Accept` header. You can use this endpoint to check if a remote reference's SHA-1 hash is the same as your local reference's SHA-1 hash by providing the local SHA-1 reference as the ETag. 
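+ *
+ * As a hedged sketch of that check (not part of the upstream description), assuming the `sha` media type is requested as `application/vnd.github.sha` and a token is exported as `GITHUB_TOKEN`, a `304 Not Modified` response means the remote reference still matches the local SHA supplied as the ETag:
+ *
+ * ```
+ * const owner = 'octocat' // replace with the repository owner
+ * const repo = 'hello-world' // replace with the repository name
+ * const localSha = 'replace-with-your-local-commit-sha'
+ *
+ * const res = await fetch(`https://api.github.com/repos/${owner}/${repo}/commits/main`, {
+ *   headers: {
+ *     Accept: 'application/vnd.github.sha',
+ *     Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *     'If-None-Match': `"${localSha}"`,
+ *   },
+ * })
+ * console.log(res.status === 304 ? 'local ref is up to date' : 'remote ref has moved')
+ * ```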
+ * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["repos/get-commit"]; + }; + "/repos/{owner}/{repo}/commits/{ref}/check-runs": { + /** + * List check runs for a Git reference + * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a commit ref. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + get: operations["checks/list-for-ref"]; + }; + "/repos/{owner}/{repo}/commits/{ref}/check-suites": { + /** + * List check suites for a Git reference + * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Lists check suites for a commit `ref`. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to list check suites. 
OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + get: operations["checks/list-suites-for-ref"]; + }; + "/repos/{owner}/{repo}/commits/{ref}/status": { + /** + * Get the combined status for a specific reference + * @description Users with pull access in a repository can access a combined view of commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. + * + * + * Additionally, a combined `state` is returned. The `state` is one of: + * + * * **failure** if any of the contexts report as `error` or `failure` + * * **pending** if there are no statuses or a context is `pending` + * * **success** if the latest status for all contexts is `success` + */ + get: operations["repos/get-combined-status-for-ref"]; + }; + "/repos/{owner}/{repo}/commits/{ref}/statuses": { + /** + * List commit statuses for a reference + * @description Users with pull access in a repository can view commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. Statuses are returned in reverse chronological order. The first status in the list will be the latest one. + * + * This resource is also available via a legacy route: `GET /repos/:owner/:repo/statuses/:ref`. + */ + get: operations["repos/list-commit-statuses-for-ref"]; + }; + "/repos/{owner}/{repo}/community/profile": { + /** + * Get community profile metrics + * @description Returns all community profile metrics for a repository. The repository cannot be a fork. + * + * The returned metrics include an overall health score, the repository description, the presence of documentation, the + * detected code of conduct, the detected license, and the presence of ISSUE\_TEMPLATE, PULL\_REQUEST\_TEMPLATE, + * README, and CONTRIBUTING files. + * + * The `health_percentage` score is defined as a percentage of how many of + * these four documents are present: README, CONTRIBUTING, LICENSE, and + * CODE_OF_CONDUCT. For example, if all four documents are present, then + * the `health_percentage` is `100`. If only one is present, then the + * `health_percentage` is `25`. + * + * `content_reports_enabled` is only returned for organization-owned repositories. + */ + get: operations["repos/get-community-profile-metrics"]; + }; + "/repos/{owner}/{repo}/compare/{basehead}": { + /** + * Compare two commits + * @description Compares two commits against one another. You can compare branches in the same repository, or you can compare branches that exist in different repositories within the same repository network, including fork branches. For more information about how to view a repository's network, see "[Understanding connections between repositories](https://docs.github.com/repositories/viewing-activity-and-data-for-your-repository/understanding-connections-between-repositories)." + * + * This endpoint is equivalent to running the `git log BASE..HEAD` command, but it returns commits in a different order. The `git log BASE..HEAD` command returns commits in reverse chronological order, whereas the API returns commits in chronological order. You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The API response includes details about the files that were changed between the two commits. This includes the status of the change (if a file was added, removed, modified, or renamed), and details of the change itself. 
For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * When calling this endpoint without any paging parameter (`per_page` or `page`), the returned list is limited to 250 commits, and the last commit in the list is the most recent of the entire comparison. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, use a query parameter (`per_page` or `page`) to paginate the results. When using pagination: + * + * - The list of changed files is only shown on the first page of results, but it includes all changed files for the entire comparison. + * - The results are returned in chronological order, but the last commit in the returned list may not be the most recent one in the entire set if there are more pages of results. + * + * For more information on working with pagination, see "[Using pagination in the REST API](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api)." + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The `verification` object includes the following fields: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["repos/compare-commits-with-basehead"]; + }; + "/repos/{owner}/{repo}/contents/{path}": { + /** + * Get repository content + * @description Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. 
If you omit + * `:path`, you will receive the contents of the repository's root directory. See the description below regarding what the API response includes for directories. + * + * Files and symlinks support [a custom media type](https://docs.github.com/rest/reference/repos#custom-media-types) for + * retrieving the raw content or rendered HTML (when supported). All content types support [a custom media + * type](https://docs.github.com/rest/reference/repos#custom-media-types) to ensure the content is returned in a consistent + * object format. + * + * **Notes**: + * * To get a repository's contents recursively, you can [recursively get the tree](https://docs.github.com/rest/reference/git#trees). + * * This API has an upper limit of 1,000 files for a directory. If you need to retrieve more files, use the [Git Trees + * API](https://docs.github.com/rest/reference/git#get-a-tree). + * * Download URLs expire and are meant to be used just once. To ensure the download URL does not expire, please use the contents API to obtain a fresh download URL for each download. + * #### Size limits + * If the requested file's size is: + * * 1 MB or smaller: All features of this endpoint are supported. + * * Between 1-100 MB: Only the `raw` or `object` [custom media types](https://docs.github.com/rest/repos/contents#custom-media-types-for-repository-contents) are supported. Both will work as normal, except that when using the `object` media type, the `content` field will be an empty string and the `encoding` field will be `"none"`. To get the contents of these larger files, use the `raw` media type. + * * Greater than 100 MB: This endpoint is not supported. + * + * #### If the content is a directory + * The response will be an array of objects, one object for each item in the directory. + * When listing the contents of a directory, submodules have their "type" specified as "file". Logically, the value + * _should_ be "submodule". This behavior exists in API v3 [for backwards compatibility purposes](https://git.io/v1YCW). + * In the next major version of the API, the type will be returned as "submodule". + * + * #### If the content is a symlink + * If the requested `:path` points to a symlink, and the symlink's target is a normal file in the repository, then the + * API responds with the content of the file (in the format shown in the example. Otherwise, the API responds with an object + * describing the symlink itself. + * + * #### If the content is a submodule + * The `submodule_git_url` identifies the location of the submodule repository, and the `sha` identifies a specific + * commit within the submodule repository. Git uses the given URL when cloning the submodule repository, and checks out + * the submodule at that specific commit. + * + * If the submodule repository is not hosted on github.com, the Git URLs (`git_url` and `_links["git"]`) and the + * github.com URLs (`html_url` and `_links["html"]`) will have null values. + */ + get: operations["repos/get-content"]; + /** + * Create or update file contents + * @description Creates a new file or replaces an existing file in a repository. You must authenticate using an access token with the `workflow` scope to use this endpoint. + * + * **Note:** If you use this endpoint and the "[Delete a file](https://docs.github.com/rest/reference/repos/#delete-file)" endpoint in parallel, the concurrent requests will conflict and you will receive errors. You must use these endpoints serially instead. 
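+ *
+ * A hedged sketch (not part of the upstream description) of creating or updating a file with `fetch` on Node 18+; the `message`, `content` (Base64-encoded), and `sha` body fields are assumptions about this operation's parameters, and `sha` is only needed when replacing an existing file:
+ *
+ * ```
+ * const owner = 'octocat' // replace with the repository owner
+ * const repo = 'hello-world' // replace with the repository name
+ *
+ * await fetch(`https://api.github.com/repos/${owner}/${repo}/contents/docs/hello.txt`, {
+ *   method: 'PUT',
+ *   headers: {
+ *     Accept: 'application/vnd.github+json',
+ *     Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *     'Content-Type': 'application/json',
+ *   },
+ *   body: JSON.stringify({
+ *     message: 'docs: add hello.txt',
+ *     content: Buffer.from('hello world\n').toString('base64'),
+ *     // sha: 'blob-sha-of-the-existing-file', // include when updating an existing file
+ *   }),
+ * })
+ * ```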
+ */ + put: operations["repos/create-or-update-file-contents"]; + /** + * Delete a file + * @description Deletes a file in a repository. + * + * You can provide an additional `committer` parameter, which is an object containing information about the committer. Or, you can provide an `author` parameter, which is an object containing information about the author. + * + * The `author` section is optional and is filled in with the `committer` information if omitted. If the `committer` information is omitted, the authenticated user's information is used. + * + * You must provide values for both `name` and `email`, whether you choose to use `author` or `committer`. Otherwise, you'll receive a `422` status code. + * + * **Note:** If you use this endpoint and the "[Create or update file contents](https://docs.github.com/rest/reference/repos/#create-or-update-file-contents)" endpoint in parallel, the concurrent requests will conflict and you will receive errors. You must use these endpoints serially instead. + */ + delete: operations["repos/delete-file"]; + }; + "/repos/{owner}/{repo}/contributors": { + /** + * List repository contributors + * @description Lists contributors to the specified repository and sorts them by the number of commits per contributor in descending order. This endpoint may return information that is a few hours old because the GitHub REST API caches contributor data to improve performance. + * + * GitHub identifies contributors by author email address. This endpoint groups contribution counts by GitHub user, which includes all associated email addresses. To improve performance, only the first 500 author email addresses in the repository link to GitHub users. The rest will appear as anonymous contributors without associated GitHub user information. + */ + get: operations["repos/list-contributors"]; + }; + "/repos/{owner}/{repo}/dependabot/alerts": { + /** + * List Dependabot alerts for a repository + * @description You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** read permission to use this endpoint. + */ + get: operations["dependabot/list-alerts-for-repo"]; + }; + "/repos/{owner}/{repo}/dependabot/alerts/{alert_number}": { + /** + * Get a Dependabot alert + * @description You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** read permission to use this endpoint. + */ + get: operations["dependabot/get-alert"]; + /** + * Update a Dependabot alert + * @description You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** write permission to use this endpoint. + * + * To use this endpoint, you must have access to security alerts for the repository. For more information, see "[Granting access to security alerts](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-security-and-analysis-settings-for-your-repository#granting-access-to-security-alerts)." 
+ */ + patch: operations["dependabot/update-alert"]; + }; + "/repos/{owner}/{repo}/dependabot/secrets": { + /** + * List repository secrets + * @description Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + get: operations["dependabot/list-repo-secrets"]; + }; + "/repos/{owner}/{repo}/dependabot/secrets/public-key": { + /** + * Get a repository public key + * @description Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + get: operations["dependabot/get-repo-public-key"]; + }; + "/repos/{owner}/{repo}/dependabot/secrets/{secret_name}": { + /** + * Get a repository secret + * @description Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + get: operations["dependabot/get-repo-secret"]; + /** + * Create or update a repository secret + * @description Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository + * permission to use this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. 
+ * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["dependabot/create-or-update-repo-secret"]; + /** + * Delete a repository secret + * @description Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + delete: operations["dependabot/delete-repo-secret"]; + }; + "/repos/{owner}/{repo}/dependency-graph/compare/{basehead}": { + /** + * Get a diff of the dependencies between commits + * @description Gets the diff of the dependency changes between two commits of a repository, based on the changes to the dependency manifests made in those commits. + */ + get: operations["dependency-graph/diff-range"]; + }; + "/repos/{owner}/{repo}/dependency-graph/sbom": { + /** + * Export a software bill of materials (SBOM) for a repository. + * @description Exports the software bill of materials (SBOM) for a repository in SPDX JSON format. + */ + get: operations["dependency-graph/export-sbom"]; + }; + "/repos/{owner}/{repo}/dependency-graph/snapshots": { + /** + * Create a snapshot of dependencies for a repository + * @description Create a new snapshot of a repository's dependencies. You must authenticate using an access token with the `repo` scope to use this endpoint for a repository that the requesting user has access to. + */ + post: operations["dependency-graph/create-repository-snapshot"]; + }; + "/repos/{owner}/{repo}/deployments": { + /** + * List deployments + * @description Simple filtering of deployments is available via query parameters: + */ + get: operations["repos/list-deployments"]; + /** + * Create a deployment + * @description Deployments offer a few configurable parameters with certain defaults. + * + * The `ref` parameter can be any named branch, tag, or SHA. At GitHub we often deploy branches and verify them + * before we merge a pull request. + * + * The `environment` parameter allows deployments to be issued to different runtime environments. 
Teams often have + * multiple environments for verifying their applications, such as `production`, `staging`, and `qa`. This parameter + * makes it easier to track which environments have requested deployments. The default environment is `production`. + * + * The `auto_merge` parameter is used to ensure that the requested ref is not behind the repository's default branch. If + * the ref _is_ behind the default branch for the repository, we will attempt to merge it for you. If the merge succeeds, + * the API will return a successful merge commit. If merge conflicts prevent the merge from succeeding, the API will + * return a failure response. + * + * By default, [commit statuses](https://docs.github.com/rest/commits/statuses) for every submitted context must be in a `success` + * state. The `required_contexts` parameter allows you to specify a subset of contexts that must be `success`, or to + * specify contexts that have not yet been submitted. You are not required to use commit statuses to deploy. If you do + * not require any contexts or create any commit statuses, the deployment will always succeed. + * + * The `payload` parameter is available for any extra information that a deployment system might need. It is a JSON text + * field that will be passed on when a deployment event is dispatched. + * + * The `task` parameter is used by the deployment system to allow different execution paths. In the web world this might + * be `deploy:migrations` to run schema changes on the system. In the compiled world this could be a flag to compile an + * application with debugging enabled. + * + * Users with `repo` or `repo_deployment` scopes can create a deployment for a given ref. + * + * #### Merged branch response + * You will see this response when GitHub automatically merges the base branch into the topic branch instead of creating + * a deployment. This auto-merge happens when: + * * Auto-merge option is enabled in the repository + * * Topic branch does not include the latest changes on the base branch, which is `master` in the response example + * * There are no merge conflicts + * + * If there are no new commits in the base branch, a new request to create a deployment should give a successful + * response. + * + * #### Merge conflict response + * This error happens when the `auto_merge` option is enabled and when the default branch (in this case `master`), can't + * be merged into the branch that's being deployed (in this case `topic-branch`), due to merge conflicts. + * + * #### Failed commit status checks + * This error happens when the `required_contexts` parameter indicates that one or more contexts need to have a `success` + * status for the commit to be deployed, but one or more of the required contexts do not have a state of `success`. + */ + post: operations["repos/create-deployment"]; + }; + "/repos/{owner}/{repo}/deployments/{deployment_id}": { + /** Get a deployment */ + get: operations["repos/get-deployment"]; + /** + * Delete a deployment + * @description If the repository only has one deployment, you can delete the deployment regardless of its status. If the repository has more than one deployment, you can only delete inactive deployments. This ensures that repositories with multiple deployments will always have an active deployment. Anyone with `repo` or `repo_deployment` scopes can delete a deployment. 
+ * + * To set a deployment as inactive, you must: + * + * * Create a new deployment that is active so that the system has a record of the current state, then delete the previously active deployment. + * * Mark the active deployment as inactive by adding any non-successful deployment status. + * + * For more information, see "[Create a deployment](https://docs.github.com/rest/deployments/deployments/#create-a-deployment)" and "[Create a deployment status](https://docs.github.com/rest/deployments/deployment-statuses#create-a-deployment-status)." + */ + delete: operations["repos/delete-deployment"]; + }; + "/repos/{owner}/{repo}/deployments/{deployment_id}/statuses": { + /** + * List deployment statuses + * @description Users with pull access can view deployment statuses for a deployment: + */ + get: operations["repos/list-deployment-statuses"]; + /** + * Create a deployment status + * @description Users with `push` access can create deployment statuses for a given deployment. + * + * GitHub Apps require `read & write` access to "Deployments" and `read-only` access to "Repo contents" (for private repos). OAuth Apps require the `repo_deployment` scope. + */ + post: operations["repos/create-deployment-status"]; + }; + "/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}": { + /** + * Get a deployment status + * @description Users with pull access can view a deployment status for a deployment: + */ + get: operations["repos/get-deployment-status"]; + }; + "/repos/{owner}/{repo}/dispatches": { + /** + * Create a repository dispatch event + * @description You can use this endpoint to trigger a webhook event called `repository_dispatch` when you want activity that happens outside of GitHub to trigger a GitHub Actions workflow or GitHub App webhook. You must configure your GitHub Actions workflow or GitHub App to run when the `repository_dispatch` event occurs. For an example `repository_dispatch` webhook payload, see "[RepositoryDispatchEvent](https://docs.github.com/webhooks/event-payloads/#repository_dispatch)." + * + * The `client_payload` parameter is available for any extra information that your workflow might need. This parameter is a JSON payload that will be passed on when the webhook event is dispatched. For example, the `client_payload` can include a message that a user would like to send using a GitHub Actions workflow. Or the `client_payload` can be used as a test to debug your workflow. + * + * This endpoint requires write access to the repository by providing either: + * + * - Personal access tokens with `repo` scope. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)" in the GitHub Help documentation. + * - GitHub Apps with both `metadata:read` and `contents:read&write` permissions. + * + * This input example shows how you can use the `client_payload` as a test to debug your workflow. + */ + post: operations["repos/create-dispatch-event"]; + }; + "/repos/{owner}/{repo}/environments": { + /** + * List environments + * @description Lists the environments for a repository. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
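+ *
+ * As an illustrative sketch only (not part of the upstream description), the call might look like this in TypeScript, assuming Node.js 18+ `fetch` and a `repo`-scoped token in the `GITHUB_TOKEN` environment variable:
+ *
+ * ```typescript
+ * // Hypothetical helper: list the environments of a repository.
+ * async function listEnvironments(owner: string, repo: string) {
+ *   const res = await fetch(`https://api.github.com/repos/${owner}/${repo}/environments`, {
+ *     headers: {
+ *       Accept: "application/vnd.github+json",
+ *       Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *     },
+ *   });
+ *   if (!res.ok) throw new Error(`GitHub API error: ${res.status}`);
+ *   // The JSON body contains `total_count` and an `environments` array.
+ *   return res.json();
+ * }
+ * ```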
+ */ + get: operations["repos/get-all-environments"]; + }; + "/repos/{owner}/{repo}/environments/{environment_name}": { + /** + * Get an environment + * @description **Note:** To get information about name patterns that branches must match in order to deploy to this environment, see "[Get a deployment branch policy](/rest/deployments/branch-policies#get-a-deployment-branch-policy)." + * + * Anyone with read access to the repository can use this endpoint. If the + * repository is private, you must use an access token with the `repo` scope. GitHub + * Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["repos/get-environment"]; + /** + * Create or update an environment + * @description Create or update an environment with protection rules, such as required reviewers. For more information about environment protection rules, see "[Environments](/actions/reference/environments#environment-protection-rules)." + * + * **Note:** To create or update name patterns that branches must match in order to deploy to this environment, see "[Deployment branch policies](/rest/deployments/branch-policies)." + * + * **Note:** To create or update secrets for an environment, see "[Secrets](/rest/reference/actions#secrets)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + put: operations["repos/create-or-update-environment"]; + /** + * Delete an environment + * @description You must authenticate using an access token with the repo scope to use this endpoint. + */ + delete: operations["repos/delete-an-environment"]; + }; + "/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies": { + /** + * List deployment branch policies + * @description Lists the deployment branch policies for an environment. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["repos/list-deployment-branch-policies"]; + /** + * Create a deployment branch policy + * @description Creates a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + post: operations["repos/create-deployment-branch-policy"]; + }; + "/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}": { + /** + * Get a deployment branch policy + * @description Gets a deployment branch policy for an environment. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["repos/get-deployment-branch-policy"]; + /** + * Update a deployment branch policy + * @description Updates a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. 
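+ *
+ * A minimal sketch (illustrative only, with placeholder identifiers) of updating the branch name pattern of an existing policy, assuming Node.js 18+ `fetch` and a `repo`-scoped token in `GITHUB_TOKEN`:
+ *
+ * ```typescript
+ * // OWNER, REPO, ENV_NAME, and BRANCH_POLICY_ID are placeholders.
+ * const res = await fetch(
+ *   "https://api.github.com/repos/OWNER/REPO/environments/ENV_NAME/deployment-branch-policies/BRANCH_POLICY_ID",
+ *   {
+ *     method: "PUT",
+ *     headers: {
+ *       Accept: "application/vnd.github+json",
+ *       Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *       "Content-Type": "application/json",
+ *     },
+ *     // `name` is the branch name pattern the policy should match.
+ *     body: JSON.stringify({ name: "release/*" }),
+ *   }
+ * );
+ * if (!res.ok) throw new Error(`GitHub API error: ${res.status}`);
+ * ```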
+ */ + put: operations["repos/update-deployment-branch-policy"]; + /** + * Delete a deployment branch policy + * @description Deletes a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + delete: operations["repos/delete-deployment-branch-policy"]; + }; + "/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules": { + /** + * Get all deployment protection rules for an environment + * @description Gets all custom deployment protection rules that are enabled for an environment. Anyone with read access to the repository can use this endpoint. If the repository is private and you want to use a personal access token (classic), you must use an access token with the `repo` scope. GitHub Apps and fine-grained personal access tokens must have the `actions:read` permission to use this endpoint. For more information about environments, see "[Using environments for deployment](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment)." + * + * For more information about the app that is providing this custom deployment rule, see the [documentation for the `GET /apps/{app_slug}` endpoint](https://docs.github.com/rest/apps/apps#get-an-app). + */ + get: operations["repos/get-all-deployment-protection-rules"]; + /** + * Create a custom deployment protection rule on an environment + * @description Enable a custom deployment protection rule for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. Enabling a custom protection rule requires admin or owner permissions to the repository. GitHub Apps must have the `actions:write` permission to use this endpoint. + * + * For more information about the app that is providing this custom deployment rule, see the [documentation for the `GET /apps/{app_slug}` endpoint](https://docs.github.com/rest/apps/apps#get-an-app). + */ + post: operations["repos/create-deployment-protection-rule"]; + }; + "/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps": { + /** + * List custom deployment rule integrations available for an environment + * @description Gets all custom deployment protection rule integrations that are available for an environment. Anyone with read access to the repository can use this endpoint. If the repository is private and you want to use a personal access token (classic), you must use an access token with the `repo` scope. GitHub Apps and fine-grained personal access tokens must have the `actions:read` permission to use this endpoint. + * + * For more information about environments, see "[Using environments for deployment](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment)." + * + * For more information about the app that is providing this custom deployment rule, see "[GET an app](https://docs.github.com/rest/apps/apps#get-an-app)". + */ + get: operations["repos/list-custom-deployment-rule-integrations"]; + }; + "/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}": { + /** + * Get a custom deployment protection rule + * @description Gets an enabled custom deployment protection rule for an environment. Anyone with read access to the repository can use this endpoint. 
If the repository is private and you want to use a personal access token (classic), you must use an access token with the `repo` scope. GitHub Apps and fine-grained personal access tokens must have the `actions:read` permission to use this endpoint. For more information about environments, see "[Using environments for deployment](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment)." + * + * For more information about the app that is providing this custom deployment rule, see [`GET /apps/{app_slug}`](https://docs.github.com/rest/apps/apps#get-an-app). + */ + get: operations["repos/get-custom-deployment-protection-rule"]; + /** + * Disable a custom protection rule for an environment + * @description Disables a custom deployment protection rule for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. Removing a custom protection rule requires admin or owner permissions to the repository. GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Get an app](https://docs.github.com/rest/apps/apps#get-an-app)". + */ + delete: operations["repos/disable-deployment-protection-rule"]; + }; + "/repos/{owner}/{repo}/events": { + /** + * List repository events + * @description **Note**: This API is not built to serve real-time use cases. Depending on the time of day, event latency can be anywhere from 30s to 6h. + */ + get: operations["activity/list-repo-events"]; + }; + "/repos/{owner}/{repo}/forks": { + /** List forks */ + get: operations["repos/list-forks"]; + /** + * Create a fork + * @description Create a fork for the authenticated user. + * + * **Note**: Forking a Repository happens asynchronously. You may have to wait a short period of time before you can access the git objects. If this takes longer than 5 minutes, be sure to contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + * + * **Note**: Although this endpoint works with GitHub Apps, the GitHub App must be installed on the destination account with access to all repositories and on the source account with access to the source repository. + */ + post: operations["repos/create-fork"]; + }; + "/repos/{owner}/{repo}/git/blobs": { + /** Create a blob */ + post: operations["git/create-blob"]; + }; + "/repos/{owner}/{repo}/git/blobs/{file_sha}": { + /** + * Get a blob + * @description The `content` in the response will always be Base64 encoded. + * + * _Note_: This API supports blobs up to 100 megabytes in size. + */ + get: operations["git/get-blob"]; + }; + "/repos/{owner}/{repo}/git/commits": { + /** + * Create a commit + * @description Creates a new Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. 
| + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + post: operations["git/create-commit"]; + }; + "/repos/{owner}/{repo}/git/commits/{commit_sha}": { + /** + * Get a commit + * @description Gets a Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. 
| + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["git/get-commit"]; + }; + "/repos/{owner}/{repo}/git/matching-refs/{ref}": { + /** + * List matching references + * @description Returns an array of references from your Git database that match the supplied name. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't exist in the repository, but existing refs start with `:ref`, they will be returned as an array. + * + * When you use this endpoint without providing a `:ref`, it will return an array of all the references from your Git database, including notes and stashes if they exist on the server. Anything in the namespace is returned, not just `heads` and `tags`. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * If you request matching references for a branch named `feature` but the branch `feature` doesn't exist, the response can still include other matching head refs that start with the word `feature`, such as `featureA` and `featureB`. + */ + get: operations["git/list-matching-refs"]; + }; + "/repos/{owner}/{repo}/git/ref/{ref}": { + /** + * Get a reference + * @description Returns a single reference from your Git database. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't match an existing ref, a `404` is returned. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + */ + get: operations["git/get-ref"]; + }; + "/repos/{owner}/{repo}/git/refs": { + /** + * Create a reference + * @description Creates a reference for your repository. You are unable to create new references for empty repositories, even if the commit SHA-1 hash used exists. Empty repositories are repositories without branches. + */ + post: operations["git/create-ref"]; + }; + "/repos/{owner}/{repo}/git/refs/{ref}": { + /** Delete a reference */ + delete: operations["git/delete-ref"]; + /** Update a reference */ + patch: operations["git/update-ref"]; + }; + "/repos/{owner}/{repo}/git/tags": { + /** + * Create a tag object + * @description Note that creating a tag object does not create the reference that makes a tag in Git. If you want to create an annotated tag in Git, you have to do this call to create the tag object, and then [create](https://docs.github.com/rest/reference/git#create-a-reference) the `refs/tags/[tag]` reference. If you want to create a lightweight tag, you only have to [create](https://docs.github.com/rest/reference/git#create-a-reference) the tag reference - this call would be unnecessary. 
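+ *
+ * As an illustrative sketch of that two-step flow (placeholder `OWNER`, `REPO`, and `COMMIT_SHA`; assumes Node.js 18+ `fetch` and a `repo`-scoped token in `GITHUB_TOKEN`):
+ *
+ * ```typescript
+ * const headers = {
+ *   Accept: "application/vnd.github+json",
+ *   Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *   "Content-Type": "application/json",
+ * };
+ *
+ * // 1. Create the annotated tag object.
+ * const tagRes = await fetch("https://api.github.com/repos/OWNER/REPO/git/tags", {
+ *   method: "POST",
+ *   headers,
+ *   body: JSON.stringify({
+ *     tag: "v1.0.0",
+ *     message: "Release v1.0.0",
+ *     object: "COMMIT_SHA",
+ *     type: "commit",
+ *   }),
+ * });
+ * const tag = await tagRes.json();
+ *
+ * // 2. Create the refs/tags/[tag] reference that points at the new tag object.
+ * await fetch("https://api.github.com/repos/OWNER/REPO/git/refs", {
+ *   method: "POST",
+ *   headers,
+ *   body: JSON.stringify({ ref: "refs/tags/v1.0.0", sha: tag.sha }),
+ * });
+ * ```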
+ *
+ * **Signature verification object**
+ *
+ * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object:
+ *
+ * | Name | Type | Description |
+ * | ---- | ---- | ----------- |
+ * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. |
+ * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. |
+ * | `signature` | `string` | The signature that was extracted from the commit. |
+ * | `payload` | `string` | The value that was signed. |
+ *
+ * These are the possible values for `reason` in the `verification` object:
+ *
+ * | Value | Description |
+ * | ----- | ----------- |
+ * | `expired_key` | The key that made the signature is expired. |
+ * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. |
+ * | `gpgverify_error` | There was an error communicating with the signature verification service. |
+ * | `gpgverify_unavailable` | The signature verification service is currently unavailable. |
+ * | `unsigned` | The object does not include a signature. |
+ * | `unknown_signature_type` | A non-PGP signature was found in the commit. |
+ * | `no_user` | No user was associated with the `committer` email address in the commit. |
+ * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. |
+ * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. |
+ * | `unknown_key` | The key that made the signature has not been registered with any user's account. |
+ * | `malformed_signature` | There was an error parsing the signature. |
+ * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. |
+ * | `valid` | None of the above errors applied, so the signature is considered to be verified. |
+ */
+ post: operations["git/create-tag"];
+ };
+ "/repos/{owner}/{repo}/git/tags/{tag_sha}": {
+ /**
+ * Get a tag
+ * @description **Signature verification object**
+ *
+ * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object:
+ *
+ * | Name | Type | Description |
+ * | ---- | ---- | ----------- |
+ * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. |
+ * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. |
+ * | `signature` | `string` | The signature that was extracted from the commit. |
+ * | `payload` | `string` | The value that was signed. |
+ *
+ * These are the possible values for `reason` in the `verification` object:
+ *
+ * | Value | Description |
+ * | ----- | ----------- |
+ * | `expired_key` | The key that made the signature is expired. |
+ * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. |
+ * | `gpgverify_error` | There was an error communicating with the signature verification service. |
+ * | `gpgverify_unavailable` | The signature verification service is currently unavailable. |
+ * | `unsigned` | The object does not include a signature. 
| + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["git/get-tag"]; + }; + "/repos/{owner}/{repo}/git/trees": { + /** + * Create a tree + * @description The tree creation API accepts nested entries. If you specify both a tree and a nested path modifying that tree, this endpoint will overwrite the contents of the tree with the new path contents, and create a new tree structure. + * + * If you use this endpoint to add, delete, or modify the file contents in a tree, you will need to commit the tree and then update a branch to point to the commit. For more information see "[Create a commit](https://docs.github.com/rest/reference/git#create-a-commit)" and "[Update a reference](https://docs.github.com/rest/reference/git#update-a-reference)." + * + * Returns an error if you try to delete a file that does not exist. + */ + post: operations["git/create-tree"]; + }; + "/repos/{owner}/{repo}/git/trees/{tree_sha}": { + /** + * Get a tree + * @description Returns a single tree using the SHA1 value for that tree. + * + * If `truncated` is `true` in the response then the number of items in the `tree` array exceeded our maximum limit. If you need to fetch more items, use the non-recursive method of fetching trees, and fetch one sub-tree at a time. + * + * + * **Note**: The limit for the `tree` array is 100,000 entries with a maximum size of 7 MB when using the `recursive` parameter. + */ + get: operations["git/get-tree"]; + }; + "/repos/{owner}/{repo}/hooks": { + /** + * List repository webhooks + * @description Lists webhooks for a repository. `last response` may return null if there have not been any deliveries within 30 days. + */ + get: operations["repos/list-webhooks"]; + /** + * Create a repository webhook + * @description Repositories can have multiple webhooks installed. Each webhook should have a unique `config`. Multiple webhooks can + * share the same `config` as long as those webhooks do not have any `events` that overlap. + */ + post: operations["repos/create-webhook"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}": { + /** + * Get a repository webhook + * @description Returns a webhook configured in a repository. To get only the webhook `config` properties, see "[Get a webhook configuration for a repository](/rest/reference/repos#get-a-webhook-configuration-for-a-repository)." + */ + get: operations["repos/get-webhook"]; + /** Delete a repository webhook */ + delete: operations["repos/delete-webhook"]; + /** + * Update a repository webhook + * @description Updates a webhook configured in a repository. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. 
If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for a repository](/rest/reference/repos#update-a-webhook-configuration-for-a-repository)." + */ + patch: operations["repos/update-webhook"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/config": { + /** + * Get a webhook configuration for a repository + * @description Returns the webhook configuration for a repository. To get more information about the webhook, including the `active` state and `events`, use "[Get a repository webhook](/rest/reference/orgs#get-a-repository-webhook)." + * + * Access tokens must have the `read:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:read` permission. + */ + get: operations["repos/get-webhook-config-for-repo"]; + /** + * Update a webhook configuration for a repository + * @description Updates the webhook configuration for a repository. To update more information about the webhook, including the `active` state and `events`, use "[Update a repository webhook](/rest/reference/orgs#update-a-repository-webhook)." + * + * Access tokens must have the `write:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:write` permission. + */ + patch: operations["repos/update-webhook-config-for-repo"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/deliveries": { + /** + * List deliveries for a repository webhook + * @description Returns a list of webhook deliveries for a webhook configured in a repository. + */ + get: operations["repos/list-webhook-deliveries"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}": { + /** + * Get a delivery for a repository webhook + * @description Returns a delivery for a webhook configured in a repository. + */ + get: operations["repos/get-webhook-delivery"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": { + /** + * Redeliver a delivery for a repository webhook + * @description Redeliver a webhook delivery for a webhook configured in a repository. + */ + post: operations["repos/redeliver-webhook-delivery"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/pings": { + /** + * Ping a repository webhook + * @description This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. + */ + post: operations["repos/ping-webhook"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/tests": { + /** + * Test the push repository webhook + * @description This will trigger the hook with the latest push to the current repository if the hook is subscribed to `push` events. If the hook is not subscribed to `push` events, the server will respond with 204 but no test POST will be generated. + * + * **Note**: Previously `/repos/:owner/:repo/hooks/:hook_id/test` + */ + post: operations["repos/test-push-webhook"]; + }; + "/repos/{owner}/{repo}/import": { + /** + * Get an import status + * @description View the progress of an import. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + * + * **Import status** + * + * This section includes details about the possible values of the `status` field of the Import Progress response. 
+ * + * An import that does not have errors will progress through these steps: + * + * * `detecting` - the "detection" step of the import is in progress because the request did not include a `vcs` parameter. The import is identifying the type of source control present at the URL. + * * `importing` - the "raw" step of the import is in progress. This is where commit data is fetched from the original repository. The import progress response will include `commit_count` (the total number of raw commits that will be imported) and `percent` (0 - 100, the current progress through the import). + * * `mapping` - the "rewrite" step of the import is in progress. This is where SVN branches are converted to Git branches, and where author updates are applied. The import progress response does not include progress information. + * * `pushing` - the "push" step of the import is in progress. This is where the importer updates the repository on GitHub. The import progress response will include `push_percent`, which is the percent value reported by `git push` when it is "Writing objects". + * * `complete` - the import is complete, and the repository is ready on GitHub. + * + * If there are problems, you will see one of these in the `status` field: + * + * * `auth_failed` - the import requires authentication in order to connect to the original repository. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/migrations/source-imports#update-an-import) section. + * * `error` - the import encountered an error. The import progress response will include the `failed_step` and an error message. Contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api) for more information. + * * `detection_needs_auth` - the importer requires authentication for the originating repository to continue detection. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/migrations/source-imports#update-an-import) section. + * * `detection_found_nothing` - the importer didn't recognize any source control at the URL. To resolve, [Cancel the import](https://docs.github.com/rest/migrations/source-imports#cancel-an-import) and [retry](https://docs.github.com/rest/migrations/source-imports#start-an-import) with the correct URL. + * * `detection_found_multiple` - the importer found several projects or repositories at the provided URL. When this is the case, the Import Progress response will also include a `project_choices` field with the possible project choices as values. To update project choice, please see the [Update an import](https://docs.github.com/rest/migrations/source-imports#update-an-import) section. + * + * **The project_choices field** + * + * When multiple projects are found at the provided URL, the response hash will include a `project_choices` field, the value of which is an array of hashes each representing a project choice. The exact key/value pairs of the project hashes will differ depending on the version control type. + * + * **Git LFS related fields** + * + * This section includes details about Git LFS related fields that may be present in the Import Progress response. + * + * * `use_lfs` - describes whether the import has been opted in or out of using Git LFS. The value can be `opt_in`, `opt_out`, or `undecided` if no action has been taken. + * * `has_large_files` - the boolean value describing whether files larger than 100MB were found during the `importing` step. 
+ * * `large_files_size` - the total size in gigabytes of files larger than 100MB found in the originating repository. + * * `large_files_count` - the total number of files larger than 100MB found in the originating repository. To see a list of these files, make a "Get Large Files" request. + */ + get: operations["migrations/get-import-status"]; + /** + * Start an import + * @description Start a source import to a GitHub repository using GitHub Importer. Importing into a GitHub repository with GitHub Actions enabled is not supported and will return a status `422 Unprocessable Entity` response. + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + put: operations["migrations/start-import"]; + /** + * Cancel an import + * @description Stop an import for a repository. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + delete: operations["migrations/cancel-import"]; + /** + * Update an import + * @description An import can be updated with credentials or a project choice by passing in the appropriate parameters in this API + * request. If no parameters are provided, the import will be restarted. + * + * Some servers (e.g. TFS servers) can have several projects at a single URL. In those cases the import progress will + * have the status `detection_found_multiple` and the Import Progress response will include a `project_choices` array. + * You can select the project to import by providing one of the objects in the `project_choices` array in the update request. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + patch: operations["migrations/update-import"]; + }; + "/repos/{owner}/{repo}/import/authors": { + /** + * Get commit authors + * @description Each type of source control system represents authors in a different way. For example, a Git commit author has a display name and an email address, but a Subversion commit author just has a username. The GitHub Importer will make the author information valid, but the author might not be correct. For example, it will change the bare Subversion username `hubot` into something like `hubot `. + * + * This endpoint and the [Map a commit author](https://docs.github.com/rest/migrations/source-imports#map-a-commit-author) endpoint allow you to provide correct Git author information. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). 
In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + get: operations["migrations/get-commit-authors"]; + }; + "/repos/{owner}/{repo}/import/authors/{author_id}": { + /** + * Map a commit author + * @description Update an author's identity for the import. Your application can continue updating authors any time before you push + * new commits to the repository. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + patch: operations["migrations/map-commit-author"]; + }; + "/repos/{owner}/{repo}/import/large_files": { + /** + * Get large files + * @description List files larger than 100MB found during the import + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + get: operations["migrations/get-large-files"]; + }; + "/repos/{owner}/{repo}/import/lfs": { + /** + * Update Git LFS preference + * @description You can import repositories from Subversion, Mercurial, and TFS that include files larger than 100MB. This ability + * is powered by [Git LFS](https://git-lfs.com). + * + * You can learn more about our LFS feature and working with large files [on our help + * site](https://docs.github.com/repositories/working-with-files/managing-large-files). + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + patch: operations["migrations/set-lfs-preference"]; + }; + "/repos/{owner}/{repo}/installation": { + /** + * Get a repository installation for the authenticated app + * @description Enables an authenticated GitHub App to find the repository's installation information. The installation's account type will be either an organization or a user account, depending which account the repository belongs to. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-repo-installation"]; + }; + "/repos/{owner}/{repo}/interaction-limits": { + /** + * Get interaction restrictions for a repository + * @description Shows which type of GitHub user can interact with this repository and when the restriction expires. If there are no restrictions, you will see an empty response. + */ + get: operations["interactions/get-restrictions-for-repo"]; + /** + * Set interaction restrictions for a repository + * @description Temporarily restricts interactions to a certain type of GitHub user within the given repository. You must have owner or admin access to set these restrictions. 
If an interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. + */ + put: operations["interactions/set-restrictions-for-repo"]; + /** + * Remove interaction restrictions for a repository + * @description Removes all interaction restrictions from the given repository. You must have owner or admin access to remove restrictions. If the interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. + */ + delete: operations["interactions/remove-restrictions-for-repo"]; + }; + "/repos/{owner}/{repo}/invitations": { + /** + * List repository invitations + * @description When authenticating as a user with admin rights to a repository, this endpoint will list all currently open repository invitations. + */ + get: operations["repos/list-invitations"]; + }; + "/repos/{owner}/{repo}/invitations/{invitation_id}": { + /** Delete a repository invitation */ + delete: operations["repos/delete-invitation"]; + /** Update a repository invitation */ + patch: operations["repos/update-invitation"]; + }; + "/repos/{owner}/{repo}/issues": { + /** + * List repository issues + * @description List issues in a repository. Only open issues will be listed. + * + * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: operations["issues/list-for-repo"]; + /** + * Create an issue + * @description Any user with pull access to a repository can create an issue. If [issues are disabled in the repository](https://docs.github.com/articles/disabling-issues/), the API returns a `410 Gone` status. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["issues/create"]; + }; + "/repos/{owner}/{repo}/issues/comments": { + /** + * List issue comments for a repository + * @description You can use the REST API to list comments on issues and pull requests for a repository. Every pull request is an issue, but not every issue is a pull request. + * + * By default, issue comments are ordered by ascending ID. + */ + get: operations["issues/list-comments-for-repo"]; + }; + "/repos/{owner}/{repo}/issues/comments/{comment_id}": { + /** + * Get an issue comment + * @description You can use the REST API to get comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request. 
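+ *
+ * For illustration only (placeholder identifiers; assumes Node.js 18+ `fetch` and a token in `GITHUB_TOKEN`):
+ *
+ * ```typescript
+ * // Fetch a single issue comment by its id; OWNER, REPO, and COMMENT_ID are placeholders.
+ * const res = await fetch("https://api.github.com/repos/OWNER/REPO/issues/comments/COMMENT_ID", {
+ *   headers: {
+ *     Accept: "application/vnd.github+json",
+ *     Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *   },
+ * });
+ * const comment = await res.json();
+ * console.log(comment.body);
+ * ```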
+ */
+ get: operations["issues/get-comment"];
+ /**
+ * Delete an issue comment
+ * @description You can use the REST API to delete comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request.
+ */
+ delete: operations["issues/delete-comment"];
+ /**
+ * Update an issue comment
+ * @description You can use the REST API to update comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request.
+ */
+ patch: operations["issues/update-comment"];
+ };
+ "/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": {
+ /**
+ * List reactions for an issue comment
+ * @description List the reactions to an [issue comment](https://docs.github.com/rest/reference/issues#comments).
+ */
+ get: operations["reactions/list-for-issue-comment"];
+ /**
+ * Create reaction for an issue comment
+ * @description Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). A response with an HTTP `200` status means that you already added the reaction type to this issue comment.
+ */
+ post: operations["reactions/create-for-issue-comment"];
+ };
+ "/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}": {
+ /**
+ * Delete an issue comment reaction
+ * @description **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/comments/:comment_id/reactions/:reaction_id`.
+ *
+ * Delete a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments).
+ */
+ delete: operations["reactions/delete-for-issue-comment"];
+ };
+ "/repos/{owner}/{repo}/issues/events": {
+ /**
+ * List issue events for a repository
+ * @description Lists events for a repository.
+ */
+ get: operations["issues/list-events-for-repo"];
+ };
+ "/repos/{owner}/{repo}/issues/events/{event_id}": {
+ /**
+ * Get an issue event
+ * @description Gets a single event by the event id.
+ */
+ get: operations["issues/get-event"];
+ };
+ "/repos/{owner}/{repo}/issues/{issue_number}": {
+ /**
+ * Get an issue
+ * @description The API returns a [`301 Moved Permanently` status](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-redirects-redirects) if the issue was
+ * [transferred](https://docs.github.com/articles/transferring-an-issue-to-another-repository/) to another repository. If
+ * the issue was transferred to or deleted from a repository where the authenticated user lacks read access, the API
+ * returns a `404 Not Found` status. If the issue was deleted from a repository where the authenticated user has read
+ * access, the API returns a `410 Gone` status. To receive webhook events for transferred and deleted issues, subscribe
+ * to the [`issues`](https://docs.github.com/webhooks/event-payloads/#issues) webhook.
+ *
+ * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. For this
+ * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by
+ * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull
+ * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint.
+ */
+ get: operations["issues/get"];
+ /**
+ * Update an issue
+ * @description Issue owners and users with push access can edit an issue. 
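+ *
+ * A minimal sketch (illustrative only; placeholder identifiers, Node.js 18+ `fetch`, token in `GITHUB_TOKEN`) that closes an issue by patching its `state`:
+ *
+ * ```typescript
+ * // OWNER, REPO, and ISSUE_NUMBER are placeholders.
+ * await fetch("https://api.github.com/repos/OWNER/REPO/issues/ISSUE_NUMBER", {
+ *   method: "PATCH",
+ *   headers: {
+ *     Accept: "application/vnd.github+json",
+ *     Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ *     "Content-Type": "application/json",
+ *   },
+ *   // Other editable fields (title, body, labels, ...) can be sent in the same way.
+ *   body: JSON.stringify({ state: "closed" }),
+ * });
+ * ```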
+ */
+ patch: operations["issues/update"];
+ };
+ "/repos/{owner}/{repo}/issues/{issue_number}/assignees": {
+ /**
+ * Add assignees to an issue
+ * @description Adds up to 10 assignees to an issue. Users already assigned to an issue are not replaced.
+ */
+ post: operations["issues/add-assignees"];
+ /**
+ * Remove assignees from an issue
+ * @description Removes one or more assignees from an issue.
+ */
+ delete: operations["issues/remove-assignees"];
+ };
+ "/repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}": {
+ /**
+ * Check if a user can be assigned to an issue
+ * @description Checks if a user has permission to be assigned to a specific issue.
+ *
+ * If the `assignee` can be assigned to this issue, a `204` status code with no content is returned.
+ *
+ * Otherwise a `404` status code is returned.
+ */
+ get: operations["issues/check-user-can-be-assigned-to-issue"];
+ };
+ "/repos/{owner}/{repo}/issues/{issue_number}/comments": {
+ /**
+ * List issue comments
+ * @description You can use the REST API to list comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request.
+ *
+ * Issue comments are ordered by ascending ID.
+ */
+ get: operations["issues/list-comments"];
+ /**
+ * Create an issue comment
+ * @description
+ * You can use the REST API to create comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request.
+ *
+ * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications).
+ * Creating content too quickly using this endpoint may result in secondary rate limiting.
+ * See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)"
+ * and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)"
+ * for details.
+ */
+ post: operations["issues/create-comment"];
+ };
+ "/repos/{owner}/{repo}/issues/{issue_number}/events": {
+ /**
+ * List issue events
+ * @description Lists all events for an issue.
+ */
+ get: operations["issues/list-events"];
+ };
+ "/repos/{owner}/{repo}/issues/{issue_number}/labels": {
+ /**
+ * List labels for an issue
+ * @description Lists all labels for an issue.
+ */
+ get: operations["issues/list-labels-on-issue"];
+ /**
+ * Set labels for an issue
+ * @description Removes any previous labels and sets the new labels for an issue.
+ */
+ put: operations["issues/set-labels"];
+ /**
+ * Add labels to an issue
+ * @description Adds labels to an issue. If you provide an empty array of labels, all labels are removed from the issue.
+ */
+ post: operations["issues/add-labels"];
+ /**
+ * Remove all labels from an issue
+ * @description Removes all labels from an issue.
+ */
+ delete: operations["issues/remove-all-labels"];
+ };
+ "/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}": {
+ /**
+ * Remove a label from an issue
+ * @description Removes the specified label from the issue, and returns the remaining labels on the issue. This endpoint returns a `404 Not Found` status if the label does not exist.
+ */
+ delete: operations["issues/remove-label"];
+ };
+ "/repos/{owner}/{repo}/issues/{issue_number}/lock": {
+ /**
+ * Lock an issue
+ * @description Users with push access can lock an issue or pull request's conversation. 
+ * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["issues/lock"]; + /** + * Unlock an issue + * @description Users with push access can unlock an issue's conversation. + */ + delete: operations["issues/unlock"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/reactions": { + /** + * List reactions for an issue + * @description List the reactions to an [issue](https://docs.github.com/rest/reference/issues). + */ + get: operations["reactions/list-for-issue"]; + /** + * Create reaction for an issue + * @description Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with an HTTP `200` status means that you already added the reaction type to this issue. + */ + post: operations["reactions/create-for-issue"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}": { + /** + * Delete an issue reaction + * @description **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/:issue_number/reactions/:reaction_id`. + * + * Delete a reaction to an [issue](https://docs.github.com/rest/reference/issues/). + */ + delete: operations["reactions/delete-for-issue"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/timeline": { + /** + * List timeline events for an issue + * @description List all timeline events for an issue. + */ + get: operations["issues/list-events-for-timeline"]; + }; + "/repos/{owner}/{repo}/keys": { + /** List deploy keys */ + get: operations["repos/list-deploy-keys"]; + /** + * Create a deploy key + * @description You can create a read-only deploy key. + */ + post: operations["repos/create-deploy-key"]; + }; + "/repos/{owner}/{repo}/keys/{key_id}": { + /** Get a deploy key */ + get: operations["repos/get-deploy-key"]; + /** + * Delete a deploy key + * @description Deploy keys are immutable. If you need to update a key, remove the key and create a new one instead. + */ + delete: operations["repos/delete-deploy-key"]; + }; + "/repos/{owner}/{repo}/labels": { + /** + * List labels for a repository + * @description Lists all labels for a repository. + */ + get: operations["issues/list-labels-for-repo"]; + /** + * Create a label + * @description Creates a label for the specified repository with the given name and color. The name and color parameters are required. The color must be a valid [hexadecimal color code](http://www.color-hex.com/). + */ + post: operations["issues/create-label"]; + }; + "/repos/{owner}/{repo}/labels/{name}": { + /** + * Get a label + * @description Gets a label using the given name. + */ + get: operations["issues/get-label"]; + /** + * Delete a label + * @description Deletes a label using the given label name. + */ + delete: operations["issues/delete-label"]; + /** + * Update a label + * @description Updates a label using the given label name. + */ + patch: operations["issues/update-label"]; + }; + "/repos/{owner}/{repo}/languages": { + /** + * List repository languages + * @description Lists languages for the specified repository. The value shown for each language is the number of bytes of code written in that language. + */ + get: operations["repos/list-languages"]; + }; + "/repos/{owner}/{repo}/lfs": { + /** + * Enable Git LFS for a repository + * @description Enables Git LFS for a repository. 
Access tokens must have the `admin:enterprise` scope. + */ + put: operations["repos/enable-lfs-for-repo"]; + /** + * Disable Git LFS for a repository + * @description Disables Git LFS for a repository. Access tokens must have the `admin:enterprise` scope. + */ + delete: operations["repos/disable-lfs-for-repo"]; + }; + "/repos/{owner}/{repo}/license": { + /** + * Get the license for a repository + * @description This method returns the contents of the repository's license file, if one is detected. + * + * Similar to [Get repository content](https://docs.github.com/rest/reference/repos#get-repository-content), this method also supports [custom media types](https://docs.github.com/rest/overview/media-types) for retrieving the raw license content or rendered license HTML. + */ + get: operations["licenses/get-for-repo"]; + }; + "/repos/{owner}/{repo}/merge-upstream": { + /** + * Sync a fork branch with the upstream repository + * @description Sync a branch of a forked repository to keep it up-to-date with the upstream repository. + */ + post: operations["repos/merge-upstream"]; + }; + "/repos/{owner}/{repo}/merges": { + /** Merge a branch */ + post: operations["repos/merge"]; + }; + "/repos/{owner}/{repo}/milestones": { + /** + * List milestones + * @description Lists milestones for a repository. + */ + get: operations["issues/list-milestones"]; + /** + * Create a milestone + * @description Creates a milestone. + */ + post: operations["issues/create-milestone"]; + }; + "/repos/{owner}/{repo}/milestones/{milestone_number}": { + /** + * Get a milestone + * @description Gets a milestone using the given milestone number. + */ + get: operations["issues/get-milestone"]; + /** + * Delete a milestone + * @description Deletes a milestone using the given milestone number. + */ + delete: operations["issues/delete-milestone"]; + /** Update a milestone */ + patch: operations["issues/update-milestone"]; + }; + "/repos/{owner}/{repo}/milestones/{milestone_number}/labels": { + /** + * List labels for issues in a milestone + * @description Lists labels for issues in a milestone. + */ + get: operations["issues/list-labels-for-milestone"]; + }; + "/repos/{owner}/{repo}/notifications": { + /** + * List repository notifications for the authenticated user + * @description Lists all notifications for the current user in the specified repository. + */ + get: operations["activity/list-repo-notifications-for-authenticated-user"]; + /** + * Mark repository notifications as read + * @description Marks all notifications in a repository as "read" for the current user. If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List repository notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. + */ + put: operations["activity/mark-repo-notifications-as-read"]; + }; + "/repos/{owner}/{repo}/pages": { + /** + * Get a GitHub Pages site + * @description Gets information about a GitHub Pages site. + * + * A token with the `repo` scope is required. GitHub Apps must have the `pages:read` permission. + */ + get: operations["repos/get-pages"]; + /** + * Update information about a GitHub Pages site + * @description Updates information for a GitHub Pages site. 
For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)." + * + * To use this endpoint, you must be a repository administrator, maintainer, or have the 'manage GitHub Pages settings' permission. A token with the `repo` scope or Pages write permission is required. GitHub Apps must have the `administration:write` and `pages:write` permissions. + */ + put: operations["repos/update-information-about-pages-site"]; + /** + * Create a GitHub Pages site + * @description Configures a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)." + * + * To use this endpoint, you must be a repository administrator, maintainer, or have the 'manage GitHub Pages settings' permission. A token with the `repo` scope or Pages write permission is required. GitHub Apps must have the `administration:write` and `pages:write` permissions. + */ + post: operations["repos/create-pages-site"]; + /** + * Delete a GitHub Pages site + * @description Deletes a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)." + * + * To use this endpoint, you must be a repository administrator, maintainer, or have the 'manage GitHub Pages settings' permission. A token with the `repo` scope or Pages write permission is required. GitHub Apps must have the `administration:write` and `pages:write` permissions. + */ + delete: operations["repos/delete-pages-site"]; + }; + "/repos/{owner}/{repo}/pages/builds": { + /** + * List GitHub Pages builds + * @description Lists builds of a GitHub Pages site. + * + * A token with the `repo` scope is required. GitHub Apps must have the `pages:read` permission. + */ + get: operations["repos/list-pages-builds"]; + /** + * Request a GitHub Pages build + * @description You can request that your site be built from the latest revision on the default branch. This has the same effect as pushing a commit to your default branch, but does not require an additional commit. Manually triggering page builds can be helpful when diagnosing build warnings and failures. + * + * Build requests are limited to one concurrent build per repository and one concurrent build per requester. If you request a build while another is still in progress, the second request will be queued until the first completes. + */ + post: operations["repos/request-pages-build"]; + }; + "/repos/{owner}/{repo}/pages/builds/latest": { + /** + * Get latest Pages build + * @description Gets information about the single most recent build of a GitHub Pages site. + * + * A token with the `repo` scope is required. GitHub Apps must have the `pages:read` permission. + */ + get: operations["repos/get-latest-pages-build"]; + }; + "/repos/{owner}/{repo}/pages/builds/{build_id}": { + /** + * Get GitHub Pages build + * @description Gets information about a GitHub Pages build. + * + * A token with the `repo` scope is required. GitHub Apps must have the `pages:read` permission. + */ + get: operations["repos/get-pages-build"]; + }; + "/repos/{owner}/{repo}/pages/deployment": { + /** + * Create a GitHub Pages deployment + * @description Create a GitHub Pages deployment for a repository. + * + * Users must have write permissions. GitHub Apps must have the `pages:write` permission to use this endpoint.
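+ *
+ * **Example (unofficial TypeScript sketch):** a minimal call to this endpoint with the global `fetch` API. `OWNER`, `REPO` and the `GITHUB_TOKEN` environment variable are placeholders, and the request-body field names are assumptions shown only for illustration; the authoritative shape is the request schema referenced by `operations["repos/create-pages-deployment"]`.
+ *
+ * ```typescript
+ * const token = process.env.GITHUB_TOKEN; // placeholder: token with the access described above
+ * const res = await fetch("https://api.github.com/repos/OWNER/REPO/pages/deployment", {
+ *   method: "POST",
+ *   headers: {
+ *     Accept: "application/vnd.github+json",
+ *     Authorization: `Bearer ${token}`,
+ *     "Content-Type": "application/json",
+ *   },
+ *   body: JSON.stringify({
+ *     artifact_url: "https://example.com/artifact.zip", // assumed field name
+ *     pages_build_version: "4fd754f7e59a9ecee36c71a5ecfbb7de68c1b93b", // assumed field name
+ *   }),
+ * });
+ * console.log(res.status, await res.json());
+ * ```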
+ */ + post: operations["repos/create-pages-deployment"]; + }; + "/repos/{owner}/{repo}/pages/health": { + /** + * Get a DNS health check for GitHub Pages + * @description Gets a health check of the DNS settings for the `CNAME` record configured for a repository's GitHub Pages. + * + * The first request to this endpoint returns a `202 Accepted` status and starts an asynchronous background task to get the results for the domain. After the background task completes, subsequent requests to this endpoint return a `200 OK` status with the health check results in the response. + * + * To use this endpoint, you must be a repository administrator, maintainer, or have the 'manage GitHub Pages settings' permission. A token with the `repo` scope or Pages write permission is required. GitHub Apps must have the `administrative:write` and `pages:write` permissions. + */ + get: operations["repos/get-pages-health-check"]; + }; + "/repos/{owner}/{repo}/projects": { + /** + * List repository projects + * @description Lists the projects in a repository. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + get: operations["projects/list-for-repo"]; + /** + * Create a repository project + * @description Creates a repository project board. Returns a `410 Gone` status if projects are disabled in the repository or if the repository does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + post: operations["projects/create-for-repo"]; + }; + "/repos/{owner}/{repo}/pulls": { + /** + * List pull requests + * @description Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + get: operations["pulls/list"]; + /** + * Create a pull request + * @description Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. 
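+ *
+ * **Example (unofficial TypeScript sketch):** a minimal call to this endpoint with the global `fetch` API. `OWNER`, `REPO`, the branch names and the `GITHUB_TOKEN` environment variable are placeholders; the body fields follow the request schema referenced by `operations["pulls/create"]`.
+ *
+ * ```typescript
+ * const token = process.env.GITHUB_TOKEN; // placeholder: token with write access as described above
+ * const res = await fetch("https://api.github.com/repos/OWNER/REPO/pulls", {
+ *   method: "POST",
+ *   headers: {
+ *     Accept: "application/vnd.github+json",
+ *     Authorization: `Bearer ${token}`,
+ *     "Content-Type": "application/json",
+ *   },
+ *   body: JSON.stringify({
+ *     title: "Add feature",
+ *     head: "feature-branch", // branch containing your changes
+ *     base: "main",           // branch you want the changes pulled into
+ *     body: "Short description of the change",
+ *   }),
+ * });
+ * console.log(res.status, await res.json());
+ * ```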
+ */ + post: operations["pulls/create"]; + }; + "/repos/{owner}/{repo}/pulls/comments": { + /** + * List review comments in a repository + * @description Lists review comments for all pull requests in a repository. By default, review comments are in ascending order by ID. + */ + get: operations["pulls/list-review-comments-for-repo"]; + }; + "/repos/{owner}/{repo}/pulls/comments/{comment_id}": { + /** + * Get a review comment for a pull request + * @description Provides details for a review comment. + */ + get: operations["pulls/get-review-comment"]; + /** + * Delete a review comment for a pull request + * @description Deletes a review comment. + */ + delete: operations["pulls/delete-review-comment"]; + /** + * Update a review comment for a pull request + * @description Enables you to edit a review comment. + */ + patch: operations["pulls/update-review-comment"]; + }; + "/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": { + /** + * List reactions for a pull request review comment + * @description List the reactions to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). + */ + get: operations["reactions/list-for-pull-request-review-comment"]; + /** + * Create reaction for a pull request review comment + * @description Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). A response with an HTTP `200` status means that you already added the reaction type to this pull request review comment. + */ + post: operations["reactions/create-for-pull-request-review-comment"]; + }; + "/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}": { + /** + * Delete a pull request comment reaction + * @description **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/pulls/comments/:comment_id/reactions/:reaction_id.` + * + * Delete a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). + */ + delete: operations["reactions/delete-for-pull-request-comment"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}": { + /** + * Get a pull request + * @description Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists details of a pull request by providing its number. + * + * When you get, [create](https://docs.github.com/rest/reference/pulls/#create-a-pull-request), or [edit](https://docs.github.com/rest/reference/pulls#update-a-pull-request) a pull request, GitHub creates a merge commit to test whether the pull request can be automatically merged into the base branch. This test commit is not added to the base branch or the head branch. You can review the status of the test commit using the `mergeable` key. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * The value of the `mergeable` attribute can be `true`, `false`, or `null`. If the value is `null`, then GitHub has started a background job to compute the mergeability. 
After giving the job time to complete, resubmit the request. When the job finishes, you will see a non-`null` value for the `mergeable` attribute in the response. If `mergeable` is `true`, then `merge_commit_sha` will be the SHA of the _test_ merge commit. + * + * The value of the `merge_commit_sha` attribute changes depending on the state of the pull request. Before merging a pull request, the `merge_commit_sha` attribute holds the SHA of the _test_ merge commit. After merging a pull request, the `merge_commit_sha` attribute changes depending on how you merged the pull request: + * + * * If merged as a [merge commit](https://docs.github.com/articles/about-merge-methods-on-github/), `merge_commit_sha` represents the SHA of the merge commit. + * * If merged via a [squash](https://docs.github.com/articles/about-merge-methods-on-github/#squashing-your-merge-commits), `merge_commit_sha` represents the SHA of the squashed commit on the base branch. + * * If [rebased](https://docs.github.com/articles/about-merge-methods-on-github/#rebasing-and-merging-your-commits), `merge_commit_sha` represents the commit that the base branch was updated to. + * + * Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + */ + get: operations["pulls/get"]; + /** + * Update a pull request + * @description Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + */ + patch: operations["pulls/update"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/codespaces": { + /** + * Create a codespace from a pull request + * @description Creates a codespace owned by the authenticated user for the specified pull request. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + post: operations["codespaces/create-with-pr-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/comments": { + /** + * List review comments on a pull request + * @description Lists all review comments for a pull request. By default, review comments are in ascending order by ID. + */ + get: operations["pulls/list-review-comments"]; + /** + * Create a review comment for a pull request + * @description + * Creates a review comment in the pull request diff. To add a regular comment to a pull request timeline, see "[Create an issue comment](https://docs.github.com/rest/reference/issues#create-an-issue-comment)." We recommend creating a review comment using `line`, `side`, and optionally `start_line` and `start_side` if your comment applies to more than one line in the pull request diff. + * + * The `position` parameter is deprecated. If you use `position`, the `line`, `side`, `start_line`, and `start_side` parameters are not required. 
+ * + * **Note:** The position value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["pulls/create-review-comment"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies": { + /** + * Create a reply for a review comment + * @description Creates a reply to a review comment for a pull request. For the `comment_id`, provide the ID of the review comment you are replying to. This must be the ID of a _top-level review comment_, not a reply to that comment. Replies to replies are not supported. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["pulls/create-reply-for-review-comment"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/commits": { + /** + * List commits on a pull request + * @description Lists a maximum of 250 commits for a pull request. To receive a complete commit list for pull requests with more than 250 commits, use the [List commits](https://docs.github.com/rest/reference/repos#list-commits) endpoint. + */ + get: operations["pulls/list-commits"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/files": { + /** + * List pull requests files + * @description **Note:** Responses include a maximum of 3000 files. The paginated response returns 30 files per page by default. + */ + get: operations["pulls/list-files"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/merge": { + /** + * Check if a pull request has been merged + * @description Checks if a pull request has been merged into the base branch. The HTTP status of the response indicates whether or not the pull request has been merged; the response body is empty. + */ + get: operations["pulls/check-if-merged"]; + /** + * Merge a pull request + * @description Merges a pull request into the base branch. + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. 
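+ *
+ * **Example (unofficial TypeScript sketch):** a minimal call to this endpoint with the global `fetch` API. `OWNER`, `REPO`, the pull request number and the `GITHUB_TOKEN` environment variable are placeholders; the optional body fields shown are assumptions taken from the request schema referenced by `operations["pulls/merge"]`.
+ *
+ * ```typescript
+ * const token = process.env.GITHUB_TOKEN; // placeholder: token with push access to the repository
+ * const res = await fetch("https://api.github.com/repos/OWNER/REPO/pulls/42/merge", {
+ *   method: "PUT",
+ *   headers: {
+ *     Accept: "application/vnd.github+json",
+ *     Authorization: `Bearer ${token}`,
+ *     "Content-Type": "application/json",
+ *   },
+ *   body: JSON.stringify({
+ *     commit_title: "Merge pull request #42", // assumed optional field
+ *     merge_method: "squash",                 // assumed optional field
+ *   }),
+ * });
+ * console.log(res.status); // a non-2xx response indicates the pull request could not be merged
+ * ```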
+ */ + put: operations["pulls/merge"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": { + /** + * Get all requested reviewers for a pull request + * @description Gets the users or teams whose review is requested for a pull request. Once a requested reviewer submits a review, they are no longer considered a requested reviewer. Their review will instead be returned by the [List reviews for a pull request](https://docs.github.com/rest/pulls/reviews#list-reviews-for-a-pull-request) operation. + */ + get: operations["pulls/list-requested-reviewers"]; + /** + * Request reviewers for a pull request + * @description This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["pulls/request-reviewers"]; + /** + * Remove requested reviewers from a pull request + * @description Removes review requests from a pull request for a given set of users and/or teams. + */ + delete: operations["pulls/remove-requested-reviewers"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews": { + /** + * List reviews for a pull request + * @description The list of reviews returns in chronological order. + */ + get: operations["pulls/list-reviews"]; + /** + * Create a review for a pull request + * @description This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Pull request reviews created in the `PENDING` state are not submitted and therefore do not include the `submitted_at` property in the response. To create a pending review for a pull request, leave the `event` parameter blank. For more information about submitting a `PENDING` review, see "[Submit a review for a pull request](https://docs.github.com/rest/pulls#submit-a-review-for-a-pull-request)." + * + * **Note:** To comment on a specific line in a file, you need to first determine the _position_ of that line in the diff. The GitHub REST API offers the `application/vnd.github.v3.diff` [media type](https://docs.github.com/rest/overview/media-types#commits-commit-comparison-and-pull-requests). To see a pull request diff, add this media type to the `Accept` header of a call to the [single pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) endpoint. + * + * The `position` value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. 
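+ *
+ * **Example (unofficial TypeScript sketch):** a minimal call to this endpoint with the global `fetch` API that leaves a `COMMENT` review with one inline comment addressed by diff `position`. `OWNER`, `REPO`, the pull request number, the file path and the `GITHUB_TOKEN` environment variable are placeholders; the body fields follow the request schema referenced by `operations["pulls/create-review"]`.
+ *
+ * ```typescript
+ * const token = process.env.GITHUB_TOKEN; // placeholder: authenticated user token
+ * const res = await fetch("https://api.github.com/repos/OWNER/REPO/pulls/42/reviews", {
+ *   method: "POST",
+ *   headers: {
+ *     Accept: "application/vnd.github+json",
+ *     Authorization: `Bearer ${token}`,
+ *     "Content-Type": "application/json",
+ *   },
+ *   body: JSON.stringify({
+ *     body: "A couple of small remarks.",
+ *     event: "COMMENT", // omit `event` to leave the review in the PENDING state, as described above
+ *     comments: [
+ *       { path: "src/index.ts", position: 1, body: "Consider renaming this." },
+ *     ],
+ *   }),
+ * });
+ * console.log(res.status, await res.json());
+ * ```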
+ */ + post: operations["pulls/create-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": { + /** + * Get a review for a pull request + * @description Retrieves a pull request review by its ID. + */ + get: operations["pulls/get-review"]; + /** + * Update a review for a pull request + * @description Update the review summary comment with new text. + */ + put: operations["pulls/update-review"]; + /** + * Delete a pending review for a pull request + * @description Deletes a pull request review that has not been submitted. Submitted reviews cannot be deleted. + */ + delete: operations["pulls/delete-pending-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments": { + /** + * List comments for a pull request review + * @description List comments for a specific pull request review. + */ + get: operations["pulls/list-comments-for-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals": { + /** + * Dismiss a review for a pull request + * @description **Note:** To dismiss a pull request review on a [protected branch](https://docs.github.com/rest/reference/repos#branches), you must be a repository administrator or be included in the list of people or teams who can dismiss pull request reviews. + */ + put: operations["pulls/dismiss-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events": { + /** + * Submit a review for a pull request + * @description Submits a pending review for a pull request. For more information about creating a pending review for a pull request, see "[Create a review for a pull request](https://docs.github.com/rest/pulls#create-a-review-for-a-pull-request)." + */ + post: operations["pulls/submit-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/update-branch": { + /** + * Update a pull request branch + * @description Updates the pull request branch with the latest upstream changes by merging HEAD from the base branch into the pull request branch. + */ + put: operations["pulls/update-branch"]; + }; + "/repos/{owner}/{repo}/readme": { + /** + * Get a repository README + * @description Gets the preferred README for a repository. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + get: operations["repos/get-readme"]; + }; + "/repos/{owner}/{repo}/readme/{dir}": { + /** + * Get a repository README for a directory + * @description Gets the README from a repository directory. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + get: operations["repos/get-readme-in-directory"]; + }; + "/repos/{owner}/{repo}/releases": { + /** + * List releases + * @description This returns a list of releases, which does not include regular Git tags that have not been associated with a release. To get a list of Git tags, use the [Repository Tags API](https://docs.github.com/rest/reference/repos#list-repository-tags). + * + * Information about published releases is available to everyone. Only users with push access will receive listings for draft releases. + */ + get: operations["repos/list-releases"]; + /** + * Create a release + * @description Users with push access to the repository can create a release.
+ * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["repos/create-release"]; + }; + "/repos/{owner}/{repo}/releases/assets/{asset_id}": { + /** + * Get a release asset + * @description To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response. + */ + get: operations["repos/get-release-asset"]; + /** Delete a release asset */ + delete: operations["repos/delete-release-asset"]; + /** + * Update a release asset + * @description Users with push access to the repository can edit a release asset. + */ + patch: operations["repos/update-release-asset"]; + }; + "/repos/{owner}/{repo}/releases/generate-notes": { + /** + * Generate release notes content for a release + * @description Generate a name and body describing a [release](https://docs.github.com/rest/reference/repos#releases). The body content will be markdown formatted and contain information like the changes since last release and users who contributed. The generated release notes are not saved anywhere. They are intended to be generated and used when creating a new release. + */ + post: operations["repos/generate-release-notes"]; + }; + "/repos/{owner}/{repo}/releases/latest": { + /** + * Get the latest release + * @description View the latest published full release for the repository. + * + * The latest release is the most recent non-prerelease, non-draft release, sorted by the `created_at` attribute. The `created_at` attribute is the date of the commit used for the release, and not the date when the release was drafted or published. + */ + get: operations["repos/get-latest-release"]; + }; + "/repos/{owner}/{repo}/releases/tags/{tag}": { + /** + * Get a release by tag name + * @description Get a published release with the specified tag. + */ + get: operations["repos/get-release-by-tag"]; + }; + "/repos/{owner}/{repo}/releases/{release_id}": { + /** + * Get a release + * @description **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). + */ + get: operations["repos/get-release"]; + /** + * Delete a release + * @description Users with push access to the repository can delete a release. + */ + delete: operations["repos/delete-release"]; + /** + * Update a release + * @description Users with push access to the repository can edit a release. + */ + patch: operations["repos/update-release"]; + }; + "/repos/{owner}/{repo}/releases/{release_id}/assets": { + /** List release assets */ + get: operations["repos/list-release-assets"]; + /** + * Upload a release asset + * @description This endpoint makes use of [a Hypermedia relation](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) to determine which URL to access. 
The endpoint you call to upload release assets is specific to your release. Use the `upload_url` returned in + * the response of the [Create a release endpoint](https://docs.github.com/rest/releases/releases#create-a-release) to upload a release asset. + * + * You need to use an HTTP client which supports [SNI](http://en.wikipedia.org/wiki/Server_Name_Indication) to make calls to this endpoint. + * + * Most libraries will set the required `Content-Length` header automatically. Use the required `Content-Type` header to provide the media type of the asset. For a list of media types, see [Media Types](https://www.iana.org/assignments/media-types/media-types.xhtml). For example: + * + * `application/zip` + * + * GitHub expects the asset data in its raw binary form, rather than JSON. You will send the raw binary content of the asset as the request body. Everything else about the endpoint is the same as the rest of the API. For example, + * you'll still need to pass your authentication to be able to upload an asset. + * + * When an upstream failure occurs, you will receive a `502 Bad Gateway` status. This may leave an empty asset with a state of `starter`. It can be safely deleted. + * + * **Notes:** + * * GitHub renames asset filenames that have special characters, non-alphanumeric characters, and leading or trailing periods. The "[List assets for a release](https://docs.github.com/rest/reference/repos#list-assets-for-a-release)" + * endpoint lists the renamed filenames. For more information and help, contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + * * To find the `release_id`, query the [`GET /repos/{owner}/{repo}/releases/latest` endpoint](https://docs.github.com/rest/releases/releases#get-the-latest-release). + * * If you upload an asset with the same filename as another uploaded asset, you'll receive an error and must delete the old file before you can re-upload the new asset. + */ + post: operations["repos/upload-release-asset"]; + }; + "/repos/{owner}/{repo}/releases/{release_id}/reactions": { + /** + * List reactions for a release + * @description List the reactions to a [release](https://docs.github.com/rest/reference/repos#releases). + */ + get: operations["reactions/list-for-release"]; + /** + * Create reaction for a release + * @description Create a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). A response with a `Status: 200 OK` means that you already added the reaction type to this release. + */ + post: operations["reactions/create-for-release"]; + }; + "/repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}": { + /** + * Delete a release reaction + * @description **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/releases/:release_id/reactions/:reaction_id`. + * + * Delete a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). + */ + delete: operations["reactions/delete-for-release"]; + }; + "/repos/{owner}/{repo}/rules/branches/{branch}": { + /** + * Get rules for a branch + * @description Returns all rules that apply to the specified branch. + */ + get: operations["repos/get-branch-rules"]; + }; + "/repos/{owner}/{repo}/rulesets": { + /** + * Get all repository rulesets + * @description Get all the rulesets for a repository. + */ + get: operations["repos/get-repo-rulesets"]; + /** + * Create a repository ruleset + * @description Create a ruleset for a repository.
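+ *
+ * **Example (unofficial TypeScript sketch):** a minimal call to this endpoint with the global `fetch` API that creates a branch ruleset blocking deletions. `OWNER`, `REPO` and the `GITHUB_TOKEN` environment variable are placeholders, and the body fields are assumptions shown only for illustration; the authoritative shape is the request schema referenced by `operations["repos/create-repo-ruleset"]`.
+ *
+ * ```typescript
+ * const token = process.env.GITHUB_TOKEN; // placeholder: token with repository administration access
+ * const res = await fetch("https://api.github.com/repos/OWNER/REPO/rulesets", {
+ *   method: "POST",
+ *   headers: {
+ *     Accept: "application/vnd.github+json",
+ *     Authorization: `Bearer ${token}`,
+ *     "Content-Type": "application/json",
+ *   },
+ *   body: JSON.stringify({
+ *     name: "protect-default-branch", // assumed field
+ *     target: "branch",               // assumed field
+ *     enforcement: "active",          // assumed field
+ *     rules: [{ type: "deletion" }],  // assumed field
+ *   }),
+ * });
+ * console.log(res.status, await res.json());
+ * ```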
+ */ + post: operations["repos/create-repo-ruleset"]; + }; + "/repos/{owner}/{repo}/rulesets/{ruleset_id}": { + /** + * Get a repository ruleset + * @description Get a ruleset for a repository. + */ + get: operations["repos/get-repo-ruleset"]; + /** + * Update a repository ruleset + * @description Update a ruleset for a repository. + */ + put: operations["repos/update-repo-ruleset"]; + /** + * Delete a repository ruleset + * @description Delete a ruleset for a repository. + */ + delete: operations["repos/delete-repo-ruleset"]; + }; + "/repos/{owner}/{repo}/secret-scanning/alerts": { + /** + * List secret scanning alerts for a repository + * @description Lists secret scanning alerts for an eligible repository, from newest to oldest. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + get: operations["secret-scanning/list-alerts-for-repo"]; + }; + "/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}": { + /** + * Get a secret scanning alert + * @description Gets a single secret scanning alert detected in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + get: operations["secret-scanning/get-alert"]; + /** + * Update a secret scanning alert + * @description Updates the status of a secret scanning alert in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` write permission to use this endpoint. + */ + patch: operations["secret-scanning/update-alert"]; + }; + "/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations": { + /** + * List locations for a secret scanning alert + * @description Lists all locations for a given secret scanning alert for an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + get: operations["secret-scanning/list-locations-for-alert"]; + }; + "/repos/{owner}/{repo}/security-advisories": { + /** + * List repository security advisories + * @description Lists security advisories in a repository. + * You must authenticate using an access token with the `repo` scope or `repository_advisories:read` permission + * in order to get published security advisories in a private repository, or any unpublished security advisories that you have access to. 
+ * + * You can access unpublished security advisories from a repository if you are a security manager or administrator of that repository, or if you are a collaborator on any security advisory. + */ + get: operations["security-advisories/list-repository-advisories"]; + /** + * Create a repository security advisory + * @description Creates a new repository security advisory. + * You must authenticate using an access token with the `repo` scope or `repository_advisories:write` permission to use this endpoint. + * + * In order to create a draft repository security advisory, you must be a security manager or administrator of that repository. + */ + post: operations["security-advisories/create-repository-advisory"]; + }; + "/repos/{owner}/{repo}/security-advisories/reports": { + /** + * Privately report a security vulnerability + * @description Report a security vulnerability to the maintainers of the repository. + * See "[Privately reporting a security vulnerability](https://docs.github.com/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability)" for more information about private vulnerability reporting. + */ + post: operations["security-advisories/create-private-vulnerability-report"]; + }; + "/repos/{owner}/{repo}/security-advisories/{ghsa_id}": { + /** + * Get a repository security advisory + * @description Get a repository security advisory using its GitHub Security Advisory (GHSA) identifier. + * You can access any published security advisory on a public repository. + * You must authenticate using an access token with the `repo` scope or `repository_advisories:read` permission + * in order to get a published security advisory in a private repository, or any unpublished security advisory that you have access to. + * + * You can access an unpublished security advisory from a repository if you are a security manager or administrator of that repository, or if you are a + * collaborator on the security advisory. + */ + get: operations["security-advisories/get-repository-advisory"]; + /** + * Update a repository security advisory + * @description Update a repository security advisory using its GitHub Security Advisory (GHSA) identifier. + * You must authenticate using an access token with the `repo` scope or `repository_advisories:write` permission to use this endpoint. + * + * In order to update any security advisory, you must be a security manager or administrator of that repository, + * or a collaborator on the repository security advisory. + */ + patch: operations["security-advisories/update-repository-advisory"]; + }; + "/repos/{owner}/{repo}/stargazers": { + /** + * List stargazers + * @description Lists the people that have starred the repository. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + get: operations["activity/list-stargazers-for-repo"]; + }; + "/repos/{owner}/{repo}/stats/code_frequency": { + /** + * Get the weekly commit activity + * @description Returns a weekly aggregate of the number of additions and deletions pushed to a repository. + */ + get: operations["repos/get-code-frequency-stats"]; + }; + "/repos/{owner}/{repo}/stats/commit_activity": { + /** + * Get the last year of commit activity + * @description Returns the last year of commit activity grouped by week. The `days` array is a group of commits per day, starting on `Sunday`. 
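+ *
+ * **Example (unofficial TypeScript sketch):** fetching the weekly activity with the global `fetch` API and totalling the Sunday commits. `OWNER`, `REPO` and the `GITHUB_TOKEN` environment variable are placeholders; only the `days` array is described above, so the `total` and `week` fields in the local interface are assumptions.
+ *
+ * ```typescript
+ * interface WeeklyCommitActivity {
+ *   days: number[]; // commits per day, starting on Sunday
+ *   total?: number; // assumed field
+ *   week?: number;  // assumed field (Unix timestamp for the start of the week)
+ * }
+ *
+ * const token = process.env.GITHUB_TOKEN; // placeholder
+ * const res = await fetch("https://api.github.com/repos/OWNER/REPO/stats/commit_activity", {
+ *   headers: { Accept: "application/vnd.github+json", Authorization: `Bearer ${token}` },
+ * });
+ * const weeks = (await res.json()) as WeeklyCommitActivity[];
+ * const sundayCommits = weeks.reduce((sum, w) => sum + w.days[0], 0);
+ * console.log(`Commits made on Sundays over the last year: ${sundayCommits}`);
+ * ```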
+ */ + get: operations["repos/get-commit-activity-stats"]; + }; + "/repos/{owner}/{repo}/stats/contributors": { + /** + * Get all contributor commit activity + * @description + * Returns the `total` number of commits authored by the contributor. In addition, the response includes a Weekly Hash (`weeks` array) with the following information: + * + * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). + * * `a` - Number of additions + * * `d` - Number of deletions + * * `c` - Number of commits + */ + get: operations["repos/get-contributors-stats"]; + }; + "/repos/{owner}/{repo}/stats/participation": { + /** + * Get the weekly commit count + * @description Returns the total commit counts for the `owner` and total commit counts in `all`. `all` is everyone combined, including the `owner` in the last 52 weeks. If you'd like to get the commit counts for non-owners, you can subtract `owner` from `all`. + * + * The array order is oldest week (index 0) to most recent week. + * + * The most recent week is seven days ago at UTC midnight to today at UTC midnight. + */ + get: operations["repos/get-participation-stats"]; + }; + "/repos/{owner}/{repo}/stats/punch_card": { + /** + * Get the hourly commit count for each day + * @description Each array contains the day number, hour number, and number of commits: + * + * * `0-6`: Sunday - Saturday + * * `0-23`: Hour of day + * * Number of commits + * + * For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. + */ + get: operations["repos/get-punch-card-stats"]; + }; + "/repos/{owner}/{repo}/statuses/{sha}": { + /** + * Create a commit status + * @description Users with push access in a repository can create commit statuses for a given SHA. + * + * Note: there is a limit of 1000 statuses per `sha` and `context` within a repository. Attempts to create more than 1000 statuses will result in a validation error. + */ + post: operations["repos/create-commit-status"]; + }; + "/repos/{owner}/{repo}/subscribers": { + /** + * List watchers + * @description Lists the people watching the specified repository. + */ + get: operations["activity/list-watchers-for-repo"]; + }; + "/repos/{owner}/{repo}/subscription": { + /** Get a repository subscription */ + get: operations["activity/get-repo-subscription"]; + /** + * Set a repository subscription + * @description If you would like to watch a repository, set `subscribed` to `true`. If you would like to ignore notifications made within a repository, set `ignored` to `true`. If you would like to stop watching a repository, [delete the repository's subscription](https://docs.github.com/rest/reference/activity#delete-a-repository-subscription) completely. + */ + put: operations["activity/set-repo-subscription"]; + /** + * Delete a repository subscription + * @description This endpoint should only be used to stop watching a repository. To control whether or not you wish to receive notifications from a repository, [set the repository's subscription manually](https://docs.github.com/rest/reference/activity#set-a-repository-subscription). + */ + delete: operations["activity/delete-repo-subscription"]; + }; + "/repos/{owner}/{repo}/tags": { + /** List repository tags */ + get: operations["repos/list-tags"]; + }; + "/repos/{owner}/{repo}/tags/protection": { + /** + * List tag protection states for a repository + * @description This returns the tag protection states of a repository. 
+ * + * This information is only available to repository administrators. + */ + get: operations["repos/list-tag-protection"]; + /** + * Create a tag protection state for a repository + * @description This creates a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + post: operations["repos/create-tag-protection"]; + }; + "/repos/{owner}/{repo}/tags/protection/{tag_protection_id}": { + /** + * Delete a tag protection state for a repository + * @description This deletes a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + delete: operations["repos/delete-tag-protection"]; + }; + "/repos/{owner}/{repo}/tarball/{ref}": { + /** + * Download a repository archive (tar) + * @description Gets a redirect URL to download a tar archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `main`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * **Note**: For private repositories, these links are temporary and expire after five minutes. + */ + get: operations["repos/download-tarball-archive"]; + }; + "/repos/{owner}/{repo}/teams": { + /** + * List repository teams + * @description Lists the teams that have access to the specified repository and that are also visible to the authenticated user. + * + * For a public repository, a team is listed only if that team added the public repository explicitly. + * + * Personal access tokens require the following scopes: + * * `public_repo` to call this endpoint on a public repository + * * `repo` to call this endpoint on a private repository (this scope also includes public repositories) + * + * This endpoint is not compatible with fine-grained personal access tokens. + */ + get: operations["repos/list-teams"]; + }; + "/repos/{owner}/{repo}/topics": { + /** Get all repository topics */ + get: operations["repos/get-all-topics"]; + /** Replace all repository topics */ + put: operations["repos/replace-all-topics"]; + }; + "/repos/{owner}/{repo}/traffic/clones": { + /** + * Get repository clones + * @description Get the total number of clones and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. + */ + get: operations["repos/get-clones"]; + }; + "/repos/{owner}/{repo}/traffic/popular/paths": { + /** + * Get top referral paths + * @description Get the top 10 popular contents over the last 14 days. + */ + get: operations["repos/get-top-paths"]; + }; + "/repos/{owner}/{repo}/traffic/popular/referrers": { + /** + * Get top referral sources + * @description Get the top 10 referrers over the last 14 days. + */ + get: operations["repos/get-top-referrers"]; + }; + "/repos/{owner}/{repo}/traffic/views": { + /** + * Get page views + * @description Get the total number of views and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. + */ + get: operations["repos/get-views"]; + }; + "/repos/{owner}/{repo}/transfer": { + /** + * Transfer a repository + * @description A transfer request will need to be accepted by the new owner when transferring a personal repository to another user. The response will contain the original `owner`, and the transfer will continue asynchronously. 
For more details on the requirements to transfer personal and organization-owned repositories, see [about repository transfers](https://docs.github.com/articles/about-repository-transfers/). + */ + post: operations["repos/transfer"]; + }; + "/repos/{owner}/{repo}/vulnerability-alerts": { + /** + * Check if vulnerability alerts are enabled for a repository + * @description Shows whether dependency alerts are enabled or disabled for a repository. The authenticated user must have admin read access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/articles/about-security-alerts-for-vulnerable-dependencies)". + */ + get: operations["repos/check-vulnerability-alerts"]; + /** + * Enable vulnerability alerts + * @description Enables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/articles/about-security-alerts-for-vulnerable-dependencies)". + */ + put: operations["repos/enable-vulnerability-alerts"]; + /** + * Disable vulnerability alerts + * @description Disables dependency alerts and the dependency graph for a repository. + * The authenticated user must have admin access to the repository. For more information, + * see "[About security alerts for vulnerable dependencies](https://docs.github.com/articles/about-security-alerts-for-vulnerable-dependencies)". + */ + delete: operations["repos/disable-vulnerability-alerts"]; + }; + "/repos/{owner}/{repo}/zipball/{ref}": { + /** + * Download a repository archive (zip) + * @description Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `main`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * + * **Note**: For private repositories, these links are temporary and expire after five minutes. If the repository is empty, you will receive a 404 when you follow the redirect. + */ + get: operations["repos/download-zipball-archive"]; + }; + "/repos/{template_owner}/{template_repo}/generate": { + /** + * Create a repository using a template + * @description Creates a new repository using a repository template. Use the `template_owner` and `template_repo` route parameters to specify the repository to use as the template. If the repository is not public, the authenticated user must own or be a member of an organization that owns the repository. To check if a repository is available to use as a template, get the repository's information using the [Get a repository](https://docs.github.com/rest/reference/repos#get-a-repository) endpoint and check that the `is_template` key is `true`. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository + */ + post: operations["repos/create-using-template"]; + }; + "/repositories": { + /** + * List public repositories + * @description Lists all public repositories in the order that they were created. 
+ * + * Note: + * - For GitHub Enterprise Server, this endpoint will only list repositories available to all users on the enterprise. + * - Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api#using-link-headers) to get the URL for the next page of repositories. + */ + get: operations["repos/list-public"]; + }; + "/repositories/{repository_id}/environments/{environment_name}/secrets": { + /** + * List environment secrets + * @description Lists all secrets available in an environment without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + get: operations["actions/list-environment-secrets"]; + }; + "/repositories/{repository_id}/environments/{environment_name}/secrets/public-key": { + /** + * Get an environment public key + * @description Get the public key for an environment, which you need to encrypt environment secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + get: operations["actions/get-environment-public-key"]; + }; + "/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": { + /** + * Get an environment secret + * @description Gets a single environment secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + get: operations["actions/get-environment-secret"]; + /** + * Create or update an environment secret + * @description Creates or updates an environment secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. 
+ * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["actions/create-or-update-environment-secret"]; + /** + * Delete an environment secret + * @description Deletes a secret in an environment using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + delete: operations["actions/delete-environment-secret"]; + }; + "/repositories/{repository_id}/environments/{environment_name}/variables": { + /** + * List environment variables + * @description Lists all environment variables. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `environments:read` repository permission to use this endpoint. + */ + get: operations["actions/list-environment-variables"]; + /** + * Create an environment variable + * @description Create an environment variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `environment:write` repository permission to use this endpoint. + */ + post: operations["actions/create-environment-variable"]; + }; + "/repositories/{repository_id}/environments/{environment_name}/variables/{name}": { + /** + * Get an environment variable + * @description Gets a specific variable in an environment. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `environments:read` repository permission to use this endpoint. + */ + get: operations["actions/get-environment-variable"]; + /** + * Delete an environment variable + * @description Deletes an environment variable using the variable name. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `environment:write` repository permission to use this endpoint. 
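+ *
+ * **Example (unofficial TypeScript sketch):** a minimal call to this endpoint with the global `fetch` API. `REPOSITORY_ID`, the environment name, the variable name and the `GITHUB_TOKEN` environment variable are placeholders.
+ *
+ * ```typescript
+ * const token = process.env.GITHUB_TOKEN; // placeholder: token with the `repo` scope described above
+ * const res = await fetch(
+ *   "https://api.github.com/repositories/REPOSITORY_ID/environments/production/variables/MY_VARIABLE",
+ *   {
+ *     method: "DELETE",
+ *     headers: { Accept: "application/vnd.github+json", Authorization: `Bearer ${token}` },
+ *   },
+ * );
+ * console.log(res.status); // check the operation's responses for the expected status code
+ * ```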
+ */ + delete: operations["actions/delete-environment-variable"]; + /** + * Update an environment variable + * @description Updates an environment variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `environment:write` repository permission to use this endpoint. + */ + patch: operations["actions/update-environment-variable"]; + }; + "/search/code": { + /** + * Search code + * @description Searches for query terms inside of a file. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for code, you can get text match metadata for the file **content** and file **path** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the definition of the `addClass` function inside [jQuery](https://github.com/jquery/jquery) repository, your query would look something like this: + * + * `q=addClass+in:file+language:js+repo:jquery/jquery` + * + * This query searches for the keyword `addClass` within a file's contents. The query limits the search to files where the language is JavaScript in the `jquery/jquery` repository. + * + * #### Considerations for code search + * + * Due to the complexity of searching code, there are a few restrictions on how searches are performed: + * + * * Only the _default branch_ is considered. In most cases, this will be the `master` branch. + * * Only files smaller than 384 KB are searchable. + * * You must always include at least one search term when searching source code. For example, searching for [`language:go`](https://github.com/search?utf8=%E2%9C%93&q=language%3Ago&type=Code) is not valid, while [`amazing + * language:go`](https://github.com/search?utf8=%E2%9C%93&q=amazing+language%3Ago&type=Code) is. + * + * This endpoint requires you to authenticate and limits you to 10 requests per minute. + */ + get: operations["search/code"]; + }; + "/search/commits": { + /** + * Search commits + * @description Find commits via various criteria on the default branch (usually `main`). This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for commits, you can get text match metadata for the **message** field when you provide the `text-match` media type. For more details about how to receive highlighted search results, see [Text match + * metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find commits related to CSS in the [octocat/Spoon-Knife](https://github.com/octocat/Spoon-Knife) repository. Your query would look something like this: + * + * `q=repo:octocat/Spoon-Knife+css` + */ + get: operations["search/commits"]; + }; + "/search/issues": { + /** + * Search issues and pull requests + * @description Find issues by state and keyword. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for issues, you can get text match metadata for the issue **title**, issue **body**, and issue **comment body** fields when you pass the `text-match` media type. 
For more details about how to receive highlighted + * search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the oldest unresolved Python bugs on Windows. Your query might look something like this. + * + * `q=windows+label:bug+language:python+state:open&sort=created&order=asc` + * + * This query searches for the keyword `windows`, within any open issue that is labeled as `bug`. The search runs across repositories whose primary language is Python. The results are sorted by creation date in ascending order, which means the oldest issues appear first in the search results. + * + * **Note:** For [user-to-server](https://docs.github.com/developers/apps/identifying-and-authorizing-users-for-github-apps#user-to-server-requests) GitHub App requests, you can't retrieve a combination of issues and pull requests in a single query. Requests that don't include the `is:issue` or `is:pull-request` qualifier will receive an HTTP `422 Unprocessable Entity` response. To get results for both issues and pull requests, you must send separate queries for issues and pull requests. For more information about the `is` qualifier, see "[Searching only issues or pull requests](https://docs.github.com/github/searching-for-information-on-github/searching-issues-and-pull-requests#search-only-issues-or-pull-requests)." + */ + get: operations["search/issues-and-pull-requests"]; + }; + "/search/labels": { + /** + * Search labels + * @description Find labels in a repository with names or descriptions that match search keywords. Returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for labels, you can get text match metadata for the label **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find labels in the `linguist` repository that match `bug`, `defect`, or `enhancement`. Your query might look like this: + * + * `q=bug+defect+enhancement&repository_id=64778136` + * + * The labels that best match the query appear first in the search results. + */ + get: operations["search/labels"]; + }; + "/search/repositories": { + /** + * Search repositories + * @description Find repositories via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for repositories, you can get text match metadata for the **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for popular Tetris repositories written in assembly code, your query might look like this: + * + * `q=tetris+language:assembly&sort=stars&order=desc` + * + * This query searches for repositories with the word `tetris` in the name, the description, or the README. The results are limited to repositories where the primary language is assembly. The results are sorted by stars in descending order, so that the most popular repositories appear first in the search results. 
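+ *
+ * **Example searching repositories using TypeScript**
+ *
+ * A minimal sketch of the query above using the global `fetch` available in Node.js 18+; search works without authentication, but sending a token raises the rate limit.
+ *
+ * ```
+ * // Build the query string; URLSearchParams handles the encoding.
+ * const params = new URLSearchParams({
+ *   q: "tetris language:assembly",
+ *   sort: "stars",
+ *   order: "desc",
+ * });
+ *
+ * const response = await fetch(`https://api.github.com/search/repositories?${params}`, {
+ *   headers: { Accept: "application/vnd.github+json" },
+ * });
+ *
+ * const { total_count, items } = await response.json();
+ * console.log(total_count, items[0]?.full_name);
+ * ```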
+ */
+ get: operations["search/repos"];
+ };
+ "/search/topics": {
+ /**
+ * Search topics
+ * @description Find topics via various criteria. Results are sorted by best match. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). See "[Searching topics](https://docs.github.com/articles/searching-topics/)" for a detailed list of qualifiers.
+ *
+ * When searching for topics, you can get text match metadata for the topic's **short\_description**, **description**, **name**, or **display\_name** field when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata).
+ *
+ * For example, if you want to search for topics related to Ruby that are featured on https://github.com/topics, your query might look like this:
+ *
+ * `q=ruby+is:featured`
+ *
+ * This query searches for topics with the keyword `ruby` and limits the results to find only topics that are featured. The topics that are the best match for the query appear first in the search results.
+ */
+ get: operations["search/topics"];
+ };
+ "/search/users": {
+ /**
+ * Search users
+ * @description Find users via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination).
+ *
+ * When searching for users, you can get text match metadata for the user **login**, public **email**, and **name** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata).
+ *
+ * For example, if you're looking for a list of popular users, you might try this query:
+ *
+ * `q=tom+repos:%3E42+followers:%3E1000`
+ *
+ * This query searches for users with the name `tom`. The results are restricted to users with more than 42 repositories and over 1,000 followers.
+ */
+ get: operations["search/users"];
+ };
+ "/teams/{team_id}": {
+ /**
+ * Get a team (Legacy)
+ * @deprecated
+ * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the [Get a team by name](https://docs.github.com/rest/reference/teams#get-a-team-by-name) endpoint.
+ */
+ get: operations["teams/get-legacy"];
+ /**
+ * Delete a team (Legacy)
+ * @deprecated
+ * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a team](https://docs.github.com/rest/reference/teams#delete-a-team) endpoint.
+ *
+ * To delete a team, the authenticated user must be an organization owner or team maintainer.
+ *
+ * If you are an organization owner, deleting a parent team will delete all of its child teams as well.
+ */
+ delete: operations["teams/delete-legacy"];
+ /**
+ * Update a team (Legacy)
+ * @deprecated
+ * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a team](https://docs.github.com/rest/reference/teams#update-a-team) endpoint.
+ * + * To edit a team, the authenticated user must either be an organization owner or a team maintainer. + * + * **Note:** With nested teams, the `privacy` for parent teams cannot be `secret`. + */ + patch: operations["teams/update-legacy"]; + }; + "/teams/{team_id}/discussions": { + /** + * List discussions (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List discussions`](https://docs.github.com/rest/reference/teams#list-discussions) endpoint. + * + * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["teams/list-discussions-legacy"]; + /** + * Create a discussion (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create a discussion`](https://docs.github.com/rest/reference/teams#create-a-discussion) endpoint. + * + * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["teams/create-discussion-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}": { + /** + * Get a discussion (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion](https://docs.github.com/rest/reference/teams#get-a-discussion) endpoint. + * + * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["teams/get-discussion-legacy"]; + /** + * Delete a discussion (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Delete a discussion`](https://docs.github.com/rest/reference/teams#delete-a-discussion) endpoint. + * + * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + delete: operations["teams/delete-discussion-legacy"]; + /** + * Update a discussion (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion](https://docs.github.com/rest/reference/teams#update-a-discussion) endpoint. + * + * Edits the title and body text of a discussion post. 
Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + patch: operations["teams/update-discussion-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}/comments": { + /** + * List discussion comments (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List discussion comments](https://docs.github.com/rest/reference/teams#list-discussion-comments) endpoint. + * + * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["teams/list-discussion-comments-legacy"]; + /** + * Create a discussion comment (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Create a discussion comment](https://docs.github.com/rest/reference/teams#create-a-discussion-comment) endpoint. + * + * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["teams/create-discussion-comment-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": { + /** + * Get a discussion comment (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion comment](https://docs.github.com/rest/reference/teams#get-a-discussion-comment) endpoint. + * + * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["teams/get-discussion-comment-legacy"]; + /** + * Delete a discussion comment (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a discussion comment](https://docs.github.com/rest/reference/teams#delete-a-discussion-comment) endpoint. + * + * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + delete: operations["teams/delete-discussion-comment-legacy"]; + /** + * Update a discussion comment (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
We recommend migrating your existing code to use the new [Update a discussion comment](https://docs.github.com/rest/reference/teams#update-a-discussion-comment) endpoint. + * + * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + patch: operations["teams/update-discussion-comment-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": { + /** + * List reactions for a team discussion comment (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion comment`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment) endpoint. + * + * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["reactions/list-for-team-discussion-comment-legacy"]; + /** + * Create reaction for a team discussion comment (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Create reaction for a team discussion comment](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion-comment)" endpoint. + * + * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + */ + post: operations["reactions/create-for-team-discussion-comment-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}/reactions": { + /** + * List reactions for a team discussion (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion) endpoint. + * + * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["reactions/list-for-team-discussion-legacy"]; + /** + * Create reaction for a team discussion (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create reaction for a team discussion`](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion) endpoint. + * + * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). 
OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. + */ + post: operations["reactions/create-for-team-discussion-legacy"]; + }; + "/teams/{team_id}/invitations": { + /** + * List pending team invitations (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List pending team invitations`](https://docs.github.com/rest/reference/teams#list-pending-team-invitations) endpoint. + * + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + */ + get: operations["teams/list-pending-invitations-legacy"]; + }; + "/teams/{team_id}/members": { + /** + * List team members (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team members`](https://docs.github.com/rest/reference/teams#list-team-members) endpoint. + * + * Team members will include the members of child teams. + */ + get: operations["teams/list-members-legacy"]; + }; + "/teams/{team_id}/members/{username}": { + /** + * Get team member (Legacy) + * @deprecated + * @description The "Get team member" endpoint (described below) is deprecated. + * + * We recommend using the [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint instead. It allows you to get both active and pending memberships. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + get: operations["teams/get-member-legacy"]; + /** + * Add team member (Legacy) + * @deprecated + * @description The "Add team member" endpoint (described below) is deprecated. + * + * We recommend using the [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint instead. It allows you to invite new organization members to your teams. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To add someone to a team, the authenticated user must be an organization owner or a team maintainer in the team they're changing. The person being added to the team must be a member of the team's organization. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. 
For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["teams/add-member-legacy"]; + /** + * Remove team member (Legacy) + * @deprecated + * @description The "Remove team member" endpoint (described below) is deprecated. + * + * We recommend using the [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint instead. It allows you to remove both active and pending memberships. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To remove a team member, the authenticated user must have 'admin' permissions to the team or be an owner of the org that the team is associated with. Removing a team member does not delete the user, it just removes them from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + */ + delete: operations["teams/remove-member-legacy"]; + }; + "/teams/{team_id}/memberships/{username}": { + /** + * Get team membership for a user (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint. + * + * Team members will include the members of child teams. + * + * To get a user's membership with a team, the team must be visible to the authenticated user. + * + * **Note:** + * The response contains the `state` of the membership and the member's `role`. + * + * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). + */ + get: operations["teams/get-membership-for-user-legacy"]; + /** + * Add or update team membership for a user (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ *
+ * If the user is already a member of the team's organization, this endpoint will add the user to the team. To add a membership between an organization member and a team, the authenticated user must be an organization owner or a team maintainer.
+ *
+ * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)."
+ *
+ * If the user is unaffiliated with the team's organization, this endpoint will send an invitation to the user via email. This newly-created membership will be in the "pending" state until the user accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. To add a membership between an unaffiliated user and a team, the authenticated user must be an organization owner.
+ *
+ * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer.
+ */
+ put: operations["teams/add-or-update-membership-for-user-legacy"];
+ /**
+ * Remove team membership for a user (Legacy)
+ * @deprecated
+ * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint.
+ *
+ * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
+ *
+ * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team.
+ *
+ * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)."
+ */
+ delete: operations["teams/remove-membership-for-user-legacy"];
+ };
+ "/teams/{team_id}/projects": {
+ /**
+ * List team projects (Legacy)
+ * @deprecated
+ * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API.
We recommend migrating your existing code to use the new [`List team projects`](https://docs.github.com/rest/reference/teams#list-team-projects) endpoint. + * + * Lists the organization projects for a team. + */ + get: operations["teams/list-projects-legacy"]; + }; + "/teams/{team_id}/projects/{project_id}": { + /** + * Check team permissions for a project (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a project](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-project) endpoint. + * + * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. + */ + get: operations["teams/check-permissions-for-project-legacy"]; + /** + * Add or update team project permissions (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team project permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-project-permissions) endpoint. + * + * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. + */ + put: operations["teams/add-or-update-project-permissions-legacy"]; + /** + * Remove a project from a team (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a project from a team](https://docs.github.com/rest/reference/teams#remove-a-project-from-a-team) endpoint. + * + * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. **Note:** This endpoint removes the project from the team, but does not delete it. + */ + delete: operations["teams/remove-project-legacy"]; + }; + "/teams/{team_id}/repos": { + /** + * List team repositories (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List team repositories](https://docs.github.com/rest/reference/teams#list-team-repositories) endpoint. + */ + get: operations["teams/list-repos-legacy"]; + }; + "/teams/{team_id}/repos/{owner}/{repo}": { + /** + * Check team permissions for a repository (Legacy) + * @deprecated + * @description **Note**: Repositories inherited through a parent team will also be checked. + * + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a repository](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-repository) endpoint. 
+ * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + get: operations["teams/check-permissions-for-repo-legacy"]; + /** + * Add or update team repository permissions (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Add or update team repository permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-repository-permissions)" endpoint. + * + * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["teams/add-or-update-repo-permissions-legacy"]; + /** + * Remove a repository from a team (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a repository from a team](https://docs.github.com/rest/reference/teams#remove-a-repository-from-a-team) endpoint. + * + * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. NOTE: This does not delete the repository, it just removes it from the team. + */ + delete: operations["teams/remove-repo-legacy"]; + }; + "/teams/{team_id}/teams": { + /** + * List child teams (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List child teams`](https://docs.github.com/rest/reference/teams#list-child-teams) endpoint. + */ + get: operations["teams/list-child-legacy"]; + }; + "/user": { + /** + * Get the authenticated user + * @description If the authenticated user is authenticated with an OAuth token with the `user` scope, then the response lists public and private profile information. + * + * If the authenticated user is authenticated through OAuth without the `user` scope, then the response lists only public profile information. + */ + get: operations["users/get-authenticated"]; + /** + * Update the authenticated user + * @description **Note:** If your email is set to private and you send an `email` parameter as part of this request to update your profile, your privacy settings are still enforced: the email address will not be displayed on your public profile or via the API. 
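+ *
+ * **Example updating your profile using TypeScript**
+ *
+ * A minimal sketch using the global `fetch` available in Node.js 18+; the token and profile values below are placeholders.
+ *
+ * ```
+ * const token = "<personal-access-token>"; // placeholder: token with the `user` scope
+ *
+ * const response = await fetch("https://api.github.com/user", {
+ *   method: "PATCH",
+ *   headers: {
+ *     Accept: "application/vnd.github+json",
+ *     Authorization: `Bearer ${token}`,
+ *     "Content-Type": "application/json",
+ *   },
+ *   body: JSON.stringify({
+ *     blog: "https://github.blog",          // placeholder value
+ *     bio: "Keeping it logically awesome.", // placeholder value
+ *   }),
+ * });
+ *
+ * console.log((await response.json()).login);
+ * ```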
+ */ + patch: operations["users/update-authenticated"]; + }; + "/user/blocks": { + /** + * List users blocked by the authenticated user + * @description List the users you've blocked on your personal account. + */ + get: operations["users/list-blocked-by-authenticated-user"]; + }; + "/user/blocks/{username}": { + /** Check if a user is blocked by the authenticated user */ + get: operations["users/check-blocked"]; + /** Block a user */ + put: operations["users/block"]; + /** Unblock a user */ + delete: operations["users/unblock"]; + }; + "/user/codespaces": { + /** + * List codespaces for the authenticated user + * @description Lists the authenticated user's codespaces. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + get: operations["codespaces/list-for-authenticated-user"]; + /** + * Create a codespace for the authenticated user + * @description Creates a new codespace, owned by the authenticated user. + * + * This endpoint requires either a `repository_id` OR a `pull_request` but not both. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + post: operations["codespaces/create-for-authenticated-user"]; + }; + "/user/codespaces/secrets": { + /** + * List secrets for the authenticated user + * @description Lists all secrets available for a user's Codespaces without revealing their + * encrypted values. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + get: operations["codespaces/list-secrets-for-authenticated-user"]; + }; + "/user/codespaces/secrets/public-key": { + /** + * Get public key for the authenticated user + * @description Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + get: operations["codespaces/get-public-key-for-authenticated-user"]; + }; + "/user/codespaces/secrets/{secret_name}": { + /** + * Get a secret for the authenticated user + * @description Gets a secret available to a user's codespaces without revealing its encrypted value. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + get: operations["codespaces/get-secret-for-authenticated-user"]; + /** + * Create or update a secret for the authenticated user + * @description Creates or updates a secret for a user's codespace with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). 
+ * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must also have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["codespaces/create-or-update-secret-for-authenticated-user"]; + /** + * Delete a secret for the authenticated user + * @description Deletes a secret from a user's codespaces using the secret name. Deleting the secret will remove access from all codespaces that were allowed to access the secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. 
+ * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + delete: operations["codespaces/delete-secret-for-authenticated-user"]; + }; + "/user/codespaces/secrets/{secret_name}/repositories": { + /** + * List selected repositories for a user secret + * @description List the repositories that have been granted the ability to use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + get: operations["codespaces/list-repositories-for-secret-for-authenticated-user"]; + /** + * Set selected repositories for a user secret + * @description Select the repositories that will use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + put: operations["codespaces/set-repositories-for-secret-for-authenticated-user"]; + }; + "/user/codespaces/secrets/{secret_name}/repositories/{repository_id}": { + /** + * Add a selected repository to a user secret + * @description Adds a repository to the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on the referenced repository to use this endpoint. + */ + put: operations["codespaces/add-repository-for-secret-for-authenticated-user"]; + /** + * Remove a selected repository from a user secret + * @description Removes a repository from the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + delete: operations["codespaces/remove-repository-for-secret-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}": { + /** + * Get a codespace for the authenticated user + * @description Gets information about a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + get: operations["codespaces/get-for-authenticated-user"]; + /** + * Delete a codespace for the authenticated user + * @description Deletes a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. 
+ */ + delete: operations["codespaces/delete-for-authenticated-user"]; + /** + * Update a codespace for the authenticated user + * @description Updates a codespace owned by the authenticated user. Currently only the codespace's machine type and recent folders can be modified using this endpoint. + * + * If you specify a new machine type it will be applied the next time your codespace is started. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + patch: operations["codespaces/update-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/exports": { + /** + * Export a codespace for the authenticated user + * @description Triggers an export of the specified codespace and returns a URL and ID where the status of the export can be monitored. + * + * If changes cannot be pushed to the codespace's repository, they will be pushed to a new or previously-existing fork instead. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + post: operations["codespaces/export-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/exports/{export_id}": { + /** + * Get details about a codespace export + * @description Gets information about an export of a codespace. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + get: operations["codespaces/get-export-details-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/machines": { + /** + * List machine types for a codespace + * @description List the machine types a codespace can transition to use. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + get: operations["codespaces/codespace-machines-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/publish": { + /** + * Create a repository from an unpublished codespace + * @description Publishes an unpublished codespace, creating a new repository and assigning it to the codespace. + * + * The codespace's token is granted write permissions to the repository, allowing the user to push their changes. + * + * This will fail for a codespace that is already published, meaning it has an associated repository. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + post: operations["codespaces/publish-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/start": { + /** + * Start a codespace for the authenticated user + * @description Starts a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. 
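+ *
+ * **Example starting a codespace using TypeScript**
+ *
+ * A minimal sketch using the global `fetch` available in Node.js 18+; the token and codespace name below are placeholders.
+ *
+ * ```
+ * const token = "<personal-access-token>";  // placeholder: token with the `codespace` scope
+ * const codespaceName = "<codespace-name>"; // placeholder, e.g. taken from "List codespaces for the authenticated user"
+ *
+ * const response = await fetch(
+ *   `https://api.github.com/user/codespaces/${codespaceName}/start`,
+ *   {
+ *     method: "POST",
+ *     headers: {
+ *       Accept: "application/vnd.github+json",
+ *       Authorization: `Bearer ${token}`,
+ *     },
+ *   }
+ * );
+ *
+ * // The response body is the codespace, including its current state.
+ * console.log((await response.json()).state);
+ * ```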
+ */ + post: operations["codespaces/start-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/stop": { + /** + * Stop a codespace for the authenticated user + * @description Stops a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + post: operations["codespaces/stop-for-authenticated-user"]; + }; + "/user/docker/conflicts": { + /** + * Get list of conflicting packages during Docker migration for authenticated-user + * @description Lists all packages that are owned by the authenticated user within the user's namespace, and that encountered a conflict during a Docker migration. + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. + */ + get: operations["packages/list-docker-migration-conflicting-packages-for-authenticated-user"]; + }; + "/user/email/visibility": { + /** + * Set primary email visibility for the authenticated user + * @description Sets the visibility for your primary email addresses. + */ + patch: operations["users/set-primary-email-visibility-for-authenticated-user"]; + }; + "/user/emails": { + /** + * List email addresses for the authenticated user + * @description Lists all of your email addresses, and specifies which one is visible to the public. This endpoint is accessible with the `user:email` scope. + */ + get: operations["users/list-emails-for-authenticated-user"]; + /** + * Add an email address for the authenticated user + * @description This endpoint is accessible with the `user` scope. + */ + post: operations["users/add-email-for-authenticated-user"]; + /** + * Delete an email address for the authenticated user + * @description This endpoint is accessible with the `user` scope. + */ + delete: operations["users/delete-email-for-authenticated-user"]; + }; + "/user/followers": { + /** + * List followers of the authenticated user + * @description Lists the people following the authenticated user. + */ + get: operations["users/list-followers-for-authenticated-user"]; + }; + "/user/following": { + /** + * List the people the authenticated user follows + * @description Lists the people who the authenticated user follows. + */ + get: operations["users/list-followed-by-authenticated-user"]; + }; + "/user/following/{username}": { + /** Check if a person is followed by the authenticated user */ + get: operations["users/check-person-is-followed-by-authenticated"]; + /** + * Follow a user + * @description Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * Following a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. + */ + put: operations["users/follow"]; + /** + * Unfollow a user + * @description Unfollowing a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. + */ + delete: operations["users/unfollow"]; + }; + "/user/gpg_keys": { + /** + * List GPG keys for the authenticated user + * @description Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
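+ *
+ * **Example listing your GPG keys using TypeScript**
+ *
+ * A minimal sketch using the global `fetch` available in Node.js 18+; the token below is a placeholder.
+ *
+ * ```
+ * const token = "<personal-access-token>"; // placeholder: token with the `read:gpg_key` scope
+ *
+ * const response = await fetch("https://api.github.com/user/gpg_keys", {
+ *   headers: {
+ *     Accept: "application/vnd.github+json",
+ *     Authorization: `Bearer ${token}`,
+ *   },
+ * });
+ *
+ * for (const key of await response.json()) {
+ *   console.log(key.key_id);
+ * }
+ * ```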
+ */ + get: operations["users/list-gpg-keys-for-authenticated-user"]; + /** + * Create a GPG key for the authenticated user + * @description Adds a GPG key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + post: operations["users/create-gpg-key-for-authenticated-user"]; + }; + "/user/gpg_keys/{gpg_key_id}": { + /** + * Get a GPG key for the authenticated user + * @description View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["users/get-gpg-key-for-authenticated-user"]; + /** + * Delete a GPG key for the authenticated user + * @description Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + delete: operations["users/delete-gpg-key-for-authenticated-user"]; + }; + "/user/installations": { + /** + * List app installations accessible to the user access token + * @description Lists installations of your GitHub App that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You can find the permissions for the installation under the `permissions` key. + */ + get: operations["apps/list-installations-for-authenticated-user"]; + }; + "/user/installations/{installation_id}/repositories": { + /** + * List repositories accessible to the user access token + * @description List repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access for an installation. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The access the user has to each repository is included in the hash under the `permissions` key. + */ + get: operations["apps/list-installation-repos-for-authenticated-user"]; + }; + "/user/installations/{installation_id}/repositories/{repository_id}": { + /** + * Add a repository to an app installation + * @description Add a single repository to an installation. The authenticated user must have admin access to the repository. 
+ * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + put: operations["apps/add-repo-to-installation-for-authenticated-user"]; + /** + * Remove a repository from an app installation + * @description Remove a single repository from an installation. The authenticated user must have admin access to the repository. The installation must have the `repository_selection` of `selected`. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + delete: operations["apps/remove-repo-from-installation-for-authenticated-user"]; + }; + "/user/interaction-limits": { + /** + * Get interaction restrictions for your public repositories + * @description Shows which type of GitHub user can interact with your public repositories and when the restriction expires. + */ + get: operations["interactions/get-restrictions-for-authenticated-user"]; + /** + * Set interaction restrictions for your public repositories + * @description Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user. + */ + put: operations["interactions/set-restrictions-for-authenticated-user"]; + /** + * Remove interaction restrictions from your public repositories + * @description Removes any interaction restrictions from your public repositories. + */ + delete: operations["interactions/remove-restrictions-for-authenticated-user"]; + }; + "/user/issues": { + /** + * List user account issues assigned to the authenticated user + * @description List issues across owned and member repositories assigned to the authenticated user. + * + * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: operations["issues/list-for-authenticated-user"]; + }; + "/user/keys": { + /** + * List public SSH keys for the authenticated user + * @description Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["users/list-public-ssh-keys-for-authenticated-user"]; + /** + * Create a public SSH key for the authenticated user + * @description Adds a public SSH key to the authenticated user's GitHub account. 
Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + post: operations["users/create-public-ssh-key-for-authenticated-user"]; + }; + "/user/keys/{key_id}": { + /** + * Get a public SSH key for the authenticated user + * @description View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["users/get-public-ssh-key-for-authenticated-user"]; + /** + * Delete a public SSH key for the authenticated user + * @description Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + delete: operations["users/delete-public-ssh-key-for-authenticated-user"]; + }; + "/user/marketplace_purchases": { + /** + * List subscriptions for the authenticated user + * @description Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). + */ + get: operations["apps/list-subscriptions-for-authenticated-user"]; + }; + "/user/marketplace_purchases/stubbed": { + /** + * List subscriptions for the authenticated user (stubbed) + * @description Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). + */ + get: operations["apps/list-subscriptions-for-authenticated-user-stubbed"]; + }; + "/user/memberships/orgs": { + /** + * List organization memberships for the authenticated user + * @description Lists all of the authenticated user's organization memberships. + */ + get: operations["orgs/list-memberships-for-authenticated-user"]; + }; + "/user/memberships/orgs/{org}": { + /** + * Get an organization membership for the authenticated user + * @description If the authenticated user is an active or pending member of the organization, this endpoint will return the user's membership. If the authenticated user is not affiliated with the organization, a `404` is returned. This endpoint will return a `403` if the request is made by a GitHub App that is blocked by the organization. + */ + get: operations["orgs/get-membership-for-authenticated-user"]; + /** + * Update an organization membership for the authenticated user + * @description Converts the authenticated user to an active member of the organization, if that user has a pending invitation from the organization.
+ */ + patch: operations["orgs/update-membership-for-authenticated-user"]; + }; + "/user/migrations": { + /** + * List user migrations + * @description Lists all migrations a user has started. + */ + get: operations["migrations/list-for-authenticated-user"]; + /** + * Start a user migration + * @description Initiates the generation of a user migration archive. + */ + post: operations["migrations/start-for-authenticated-user"]; + }; + "/user/migrations/{migration_id}": { + /** + * Get a user migration status + * @description Fetches a single user migration. The response includes the `state` of the migration, which can be one of the following values: + * + * * `pending` - the migration hasn't started yet. + * * `exporting` - the migration is in progress. + * * `exported` - the migration finished successfully. + * * `failed` - the migration failed. + * + * Once the migration has been `exported` you can [download the migration archive](https://docs.github.com/rest/migrations/users#download-a-user-migration-archive). + */ + get: operations["migrations/get-status-for-authenticated-user"]; + }; + "/user/migrations/{migration_id}/archive": { + /** + * Download a user migration archive + * @description Fetches the URL to download the migration archive as a `tar.gz` file. Depending on the resources your repository uses, the migration archive can contain JSON files with data for these objects: + * + * * attachments + * * bases + * * commit\_comments + * * issue\_comments + * * issue\_events + * * issues + * * milestones + * * organizations + * * projects + * * protected\_branches + * * pull\_request\_reviews + * * pull\_requests + * * releases + * * repositories + * * review\_comments + * * schema + * * users + * + * The archive will also contain an `attachments` directory that includes all attachment files uploaded to GitHub.com and a `repositories` directory that contains the repository's Git data. + */ + get: operations["migrations/get-archive-for-authenticated-user"]; + /** + * Delete a user migration archive + * @description Deletes a previous migration archive. Downloadable migration archives are automatically deleted after seven days. Migration metadata, which is returned in the [List user migrations](https://docs.github.com/rest/migrations/users#list-user-migrations) and [Get a user migration status](https://docs.github.com/rest/migrations/users#get-a-user-migration-status) endpoints, will continue to be available even after an archive is deleted. + */ + delete: operations["migrations/delete-archive-for-authenticated-user"]; + }; + "/user/migrations/{migration_id}/repos/{repo_name}/lock": { + /** + * Unlock a user repository + * @description Unlocks a repository. You can lock repositories when you [start a user migration](https://docs.github.com/rest/migrations/users#start-a-user-migration). Once the migration is complete you can unlock each repository to begin using it again or [delete the repository](https://docs.github.com/rest/repos/repos#delete-a-repository) if you no longer need the source data. Returns a status of `404 Not Found` if the repository is not locked. + */ + delete: operations["migrations/unlock-repo-for-authenticated-user"]; + }; + "/user/migrations/{migration_id}/repositories": { + /** + * List repositories for a user migration + * @description Lists all the repositories for this user migration. 
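+ *
+ * A minimal sketch of the migration flow described in the endpoints above, assuming the `octokit` client package and a token with the `user` scope; the `repositories` request field and the `id` response field are assumptions for illustration and are not defined in this excerpt.
+ *
+ * ```ts
+ * import { Octokit } from "octokit";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Start a user migration (the repository list is an assumed request field).
+ * const { data: migration } = await octokit.request("POST /user/migrations", {
+ *   repositories: ["octocat/Hello-World"],
+ * });
+ *
+ * // Check the migration status; `state` can be pending, exporting, exported, or failed.
+ * const { data: status } = await octokit.request("GET /user/migrations/{migration_id}", {
+ *   migration_id: migration.id,
+ * });
+ *
+ * // Once the state is `exported`, fetch the URL of the `tar.gz` archive.
+ * if (status.state === "exported") {
+ *   await octokit.request("GET /user/migrations/{migration_id}/archive", {
+ *     migration_id: migration.id,
+ *   });
+ * }
+ * ```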
+ */ + get: operations["migrations/list-repos-for-authenticated-user"]; + }; + "/user/orgs": { + /** + * List organizations for the authenticated user + * @description List organizations for the authenticated user. + * + * **OAuth scope requirements** + * + * This only lists organizations that your authorization allows you to operate on in some way (e.g., you can list teams with `read:org` scope, you can publicize your organization membership with `user` scope, etc.). Therefore, this API requires at least `user` or `read:org` scope. OAuth requests with insufficient scope receive a `403 Forbidden` response. + */ + get: operations["orgs/list-for-authenticated-user"]; + }; + "/user/packages": { + /** + * List packages for the authenticated user's namespace + * @description Lists packages owned by the authenticated user within the user's namespace. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + get: operations["packages/list-packages-for-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}": { + /** + * Get a package for the authenticated user + * @description Gets a specific package for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + get: operations["packages/get-package-for-authenticated-user"]; + /** + * Delete a package for the authenticated user + * @description Deletes a package owned by the authenticated user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `delete:packages` scopes. + * If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + delete: operations["packages/delete-package-for-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}/restore": { + /** + * Restore a package for the authenticated user + * @description Restores a package owned by the authenticated user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. 
+ * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `write:packages` scopes. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + post: operations["packages/restore-package-for-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}/versions": { + /** + * List package versions for a package owned by the authenticated user + * @description Lists package versions for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + get: operations["packages/get-all-package-versions-for-package-owned-by-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}/versions/{package_version_id}": { + /** + * Get a package version for the authenticated user + * @description Gets a specific package version for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + get: operations["packages/get-package-version-for-authenticated-user"]; + /** + * Delete a package version for the authenticated user + * @description Deletes a specific package version for a package owned by the authenticated user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `delete:packages` scopes. + * If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. 
For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + delete: operations["packages/delete-package-version-for-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": { + /** + * Restore a package version for the authenticated user + * @description Restores a package version owned by the authenticated user. + * + * You can restore a deleted package version under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `write:packages` scopes. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + post: operations["packages/restore-package-version-for-authenticated-user"]; + }; + "/user/projects": { + /** + * Create a user project + * @description Creates a user project board. Returns a `410 Gone` status if the user does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + post: operations["projects/create-for-authenticated-user"]; + }; + "/user/public_emails": { + /** + * List public email addresses for the authenticated user + * @description Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. This endpoint is accessible with the `user:email` scope. + */ + get: operations["users/list-public-emails-for-authenticated-user"]; + }; + "/user/repos": { + /** + * List repositories for the authenticated user + * @description Lists repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + */ + get: operations["repos/list-for-authenticated-user"]; + /** + * Create a repository for the authenticated user + * @description Creates a new repository for the authenticated user. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository. 
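+ *
+ * A minimal usage sketch, assuming the `octokit` client package and a token carrying the scopes listed above; the request and response fields shown are assumptions for illustration and are not defined in this excerpt.
+ *
+ * ```ts
+ * import { Octokit } from "octokit";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Create a private repository for the authenticated user; `name` and `private`
+ * // are assumed request-body fields.
+ * const { data: repo } = await octokit.request("POST /user/repos", {
+ *   name: "hello-world",
+ *   private: true,
+ * });
+ *
+ * console.log(repo.full_name);
+ * ```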
+ */ + post: operations["repos/create-for-authenticated-user"]; + }; + "/user/repository_invitations": { + /** + * List repository invitations for the authenticated user + * @description When authenticating as a user, this endpoint will list all currently open repository invitations for that user. + */ + get: operations["repos/list-invitations-for-authenticated-user"]; + }; + "/user/repository_invitations/{invitation_id}": { + /** Decline a repository invitation */ + delete: operations["repos/decline-invitation-for-authenticated-user"]; + /** Accept a repository invitation */ + patch: operations["repos/accept-invitation-for-authenticated-user"]; + }; + "/user/social_accounts": { + /** + * List social accounts for the authenticated user + * @description Lists all of your social accounts. + */ + get: operations["users/list-social-accounts-for-authenticated-user"]; + /** + * Add social accounts for the authenticated user + * @description Add one or more social accounts to the authenticated user's profile. This endpoint is accessible with the `user` scope. + */ + post: operations["users/add-social-account-for-authenticated-user"]; + /** + * Delete social accounts for the authenticated user + * @description Deletes one or more social accounts from the authenticated user's profile. This endpoint is accessible with the `user` scope. + */ + delete: operations["users/delete-social-account-for-authenticated-user"]; + }; + "/user/ssh_signing_keys": { + /** + * List SSH signing keys for the authenticated user + * @description Lists the SSH signing keys for the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `read:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." + */ + get: operations["users/list-ssh-signing-keys-for-authenticated-user"]; + /** + * Create a SSH signing key for the authenticated user + * @description Creates an SSH signing key for the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `write:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." + */ + post: operations["users/create-ssh-signing-key-for-authenticated-user"]; + }; + "/user/ssh_signing_keys/{ssh_signing_key_id}": { + /** + * Get an SSH signing key for the authenticated user + * @description Gets extended details for an SSH signing key. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `read:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." + */ + get: operations["users/get-ssh-signing-key-for-authenticated-user"]; + /** + * Delete an SSH signing key for the authenticated user + * @description Deletes an SSH signing key from the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `admin:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." 
+ */ + delete: operations["users/delete-ssh-signing-key-for-authenticated-user"]; + }; + "/user/starred": { + /** + * List repositories starred by the authenticated user + * @description Lists repositories the authenticated user has starred. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + get: operations["activity/list-repos-starred-by-authenticated-user"]; + }; + "/user/starred/{owner}/{repo}": { + /** Check if a repository is starred by the authenticated user */ + get: operations["activity/check-repo-is-starred-by-authenticated-user"]; + /** + * Star a repository for the authenticated user + * @description Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["activity/star-repo-for-authenticated-user"]; + /** Unstar a repository for the authenticated user */ + delete: operations["activity/unstar-repo-for-authenticated-user"]; + }; + "/user/subscriptions": { + /** + * List repositories watched by the authenticated user + * @description Lists repositories the authenticated user is watching. + */ + get: operations["activity/list-watched-repos-for-authenticated-user"]; + }; + "/user/teams": { + /** + * List teams for the authenticated user + * @description List all of the teams across all of the organizations to which the authenticated user belongs. This method requires `user`, `repo`, or `read:org` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/) when authenticating via [OAuth](https://docs.github.com/apps/building-oauth-apps/). When using a fine-grained personal access token, the resource owner of the token [must be a single organization](https://docs.github.com/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#fine-grained-personal-access-tokens), and have at least read-only member organization permissions. The response payload only contains the teams from a single organization when using a fine-grained personal access token. + */ + get: operations["teams/list-for-authenticated-user"]; + }; + "/users": { + /** + * List users + * @description Lists all users, in the order that they signed up on GitHub. This list includes personal user accounts and organization accounts. + * + * Note: Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api#using-link-headers) to get the URL for the next page of users. + */ + get: operations["users/list"]; + }; + "/users/{username}": { + /** + * Get a user + * @description Provides publicly available information about someone with a GitHub account. + * + * GitHub Apps with the `Plan` user permission can use this endpoint to retrieve information about a user's GitHub plan. The GitHub App must be authenticated as a user. See "[Identifying and authorizing users for GitHub Apps](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/)" for details about authentication. For an example response, see 'Response with GitHub plan information' below. + * + * The `email` key in the following response is the publicly visible email address from your GitHub [profile page](https://github.com/settings/profile).
When setting up your profile, you can select a primary email address to be “public” which provides an email entry for this endpoint. If you do not set a public email address for `email`, then it will have a value of `null`. You only see publicly visible email addresses when authenticated with GitHub. For more information, see [Authentication](https://docs.github.com/rest/overview/resources-in-the-rest-api#authentication). + * + * The Emails API enables you to list all of your email addresses, and toggle a primary email to be visible publicly. For more information, see "[Emails API](https://docs.github.com/rest/reference/users#emails)". + */ + get: operations["users/get-by-username"]; + }; + "/users/{username}/docker/conflicts": { + /** + * Get list of conflicting packages during Docker migration for user + * @description Lists all packages that are in a specific user's namespace, that the requesting user has access to, and that encountered a conflict during Docker migration. + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. + */ + get: operations["packages/list-docker-migration-conflicting-packages-for-user"]; + }; + "/users/{username}/events": { + /** + * List events for the authenticated user + * @description If you are authenticated as the given user, you will see your private events. Otherwise, you'll only see public events. + */ + get: operations["activity/list-events-for-authenticated-user"]; + }; + "/users/{username}/events/orgs/{org}": { + /** + * List organization events for the authenticated user + * @description This is the user's organization dashboard. You must be authenticated as the user to view this. + */ + get: operations["activity/list-org-events-for-authenticated-user"]; + }; + "/users/{username}/events/public": { + /** List public events for a user */ + get: operations["activity/list-public-events-for-user"]; + }; + "/users/{username}/followers": { + /** + * List followers of a user + * @description Lists the people following the specified user. + */ + get: operations["users/list-followers-for-user"]; + }; + "/users/{username}/following": { + /** + * List the people a user follows + * @description Lists the people who the specified user follows. + */ + get: operations["users/list-following-for-user"]; + }; + "/users/{username}/following/{target_user}": { + /** Check if a user follows another user */ + get: operations["users/check-following-for-user"]; + }; + "/users/{username}/gists": { + /** + * List gists for a user + * @description Lists public gists for the specified user: + */ + get: operations["gists/list-for-user"]; + }; + "/users/{username}/gpg_keys": { + /** + * List GPG keys for a user + * @description Lists the GPG keys for a user. This information is accessible by anyone. + */ + get: operations["users/list-gpg-keys-for-user"]; + }; + "/users/{username}/hovercard": { + /** + * Get contextual information for a user + * @description Provides hovercard information when authenticated through basic auth or OAuth with the `repo` scope. You can find out more about someone in relation to their pull requests, issues, repositories, and organizations. + * + * The `subject_type` and `subject_id` parameters provide context for the person's hovercard, which returns more information than without the parameters. 
For example, if you wanted to find out more about `octocat` who owns the `Spoon-Knife` repository via cURL, it would look like this: + * + * ```shell + * curl -u username:token + * https://api.github.com/users/octocat/hovercard?subject_type=repository&subject_id=1300192 + * ``` + */ + get: operations["users/get-context-for-user"]; + }; + "/users/{username}/installation": { + /** + * Get a user installation for the authenticated app + * @description Enables an authenticated GitHub App to find the user’s installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-user-installation"]; + }; + "/users/{username}/keys": { + /** + * List public keys for a user + * @description Lists the _verified_ public SSH keys for a user. This is accessible by anyone. + */ + get: operations["users/list-public-keys-for-user"]; + }; + "/users/{username}/orgs": { + /** + * List organizations for a user + * @description List [public organization memberships](https://docs.github.com/articles/publicizing-or-concealing-organization-membership) for the specified user. + * + * This method only lists _public_ memberships, regardless of authentication. If you need to fetch all of the organization memberships (public and private) for the authenticated user, use the [List organizations for the authenticated user](https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user) API instead. + */ + get: operations["orgs/list-for-user"]; + }; + "/users/{username}/packages": { + /** + * List packages for a user + * @description Lists all packages in a user's namespace for which the requesting user has access. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + get: operations["packages/list-packages-for-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}": { + /** + * Get a package for a user + * @description Gets a specific package metadata for a public package owned by a user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + get: operations["packages/get-package-for-user"]; + /** + * Delete a package for a user + * @description Deletes an entire package for a user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. 
+ * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `delete:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package you want to delete. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + delete: operations["packages/delete-package-for-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}/restore": { + /** + * Restore a package for a user + * @description Restores an entire package for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `write:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package you want to restore. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + post: operations["packages/restore-package-for-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}/versions": { + /** + * List package versions for a package owned by a user + * @description Lists package versions for a public package owned by a specified user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." 
+ */ + get: operations["packages/get-all-package-versions-for-package-owned-by-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}": { + /** + * Get a package version for a user + * @description Gets a specific package version for a public package owned by a specified user. + * + * At this time, to use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + get: operations["packages/get-package-version-for-user"]; + /** + * Delete package version for a user + * @description Deletes a specific package version for a user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `delete:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package whose version you want to delete. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + delete: operations["packages/delete-package-version-for-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": { + /** + * Restore package version for a user + * @description Restores a specific package version for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `write:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." 
+ * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package whose version you want to restore. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + post: operations["packages/restore-package-version-for-user"]; + }; + "/users/{username}/projects": { + /** + * List user projects + * @description Lists projects for a user. + */ + get: operations["projects/list-for-user"]; + }; + "/users/{username}/received_events": { + /** + * List events received by the authenticated user + * @description These are events that you've received by watching repos and following users. If you are authenticated as the given user, you will see private events. Otherwise, you'll only see public events. + */ + get: operations["activity/list-received-events-for-user"]; + }; + "/users/{username}/received_events/public": { + /** List public events received by a user */ + get: operations["activity/list-received-public-events-for-user"]; + }; + "/users/{username}/repos": { + /** + * List repositories for a user + * @description Lists public repositories for the specified user. Note: For GitHub AE, this endpoint will list internal repositories for the specified user. + */ + get: operations["repos/list-for-user"]; + }; + "/users/{username}/settings/billing/actions": { + /** + * Get GitHub Actions billing for a user + * @description Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `user` scope. + */ + get: operations["billing/get-github-actions-billing-user"]; + }; + "/users/{username}/settings/billing/packages": { + /** + * Get GitHub Packages billing for a user + * @description Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. + */ + get: operations["billing/get-github-packages-billing-user"]; + }; + "/users/{username}/settings/billing/shared-storage": { + /** + * Get shared storage billing for a user + * @description Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. 
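+ *
+ * A minimal usage sketch for the billing endpoints above, assuming the `octokit` client package and a token with the `user` scope; the response shapes are not defined in this excerpt.
+ *
+ * ```ts
+ * import { Octokit } from "octokit";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Each call returns the corresponding billing summary for the given user.
+ * const actions = await octokit.request("GET /users/{username}/settings/billing/actions", {
+ *   username: "octocat",
+ * });
+ * const packages = await octokit.request("GET /users/{username}/settings/billing/packages", {
+ *   username: "octocat",
+ * });
+ * const storage = await octokit.request("GET /users/{username}/settings/billing/shared-storage", {
+ *   username: "octocat",
+ * });
+ * ```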
+ */ + get: operations["billing/get-shared-storage-billing-user"]; + }; + "/users/{username}/social_accounts": { + /** + * List social accounts for a user + * @description Lists social media accounts for a user. This endpoint is accessible by anyone. + */ + get: operations["users/list-social-accounts-for-user"]; + }; + "/users/{username}/ssh_signing_keys": { + /** + * List SSH signing keys for a user + * @description Lists the SSH signing keys for a user. This operation is accessible by anyone. + */ + get: operations["users/list-ssh-signing-keys-for-user"]; + }; + "/users/{username}/starred": { + /** + * List repositories starred by a user + * @description Lists repositories a user has starred. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + get: operations["activity/list-repos-starred-by-user"]; + }; + "/users/{username}/subscriptions": { + /** + * List repositories watched by a user + * @description Lists repositories a user is watching. + */ + get: operations["activity/list-repos-watched-by-user"]; + }; + "/versions": { + /** + * Get all API versions + * @description Get all supported GitHub API versions. + */ + get: operations["meta/get-all-versions"]; + }; + "/zen": { + /** + * Get the Zen of GitHub + * @description Get a random sentence from the Zen of GitHub + */ + get: operations["meta/get-zen"]; + }; + "/repos/{owner}/{repo}/compare/{base}...{head}": { + /** + * Compare two commits + * @description **Deprecated**: Use `repos.compareCommitsWithBasehead()` (`GET /repos/{owner}/{repo}/compare/{basehead}`) instead. Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. 
The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for the `verified` value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified.
| + */ + get: operations["repos/compare-commits"]; + }; +} + +export type webhooks = Record<string, never>; + +export interface components { + schemas: { + root: { + /** Format: uri-template */ + current_user_url: string; + /** Format: uri-template */ + current_user_authorizations_html_url: string; + /** Format: uri-template */ + authorizations_url: string; + /** Format: uri-template */ + code_search_url: string; + /** Format: uri-template */ + commit_search_url: string; + /** Format: uri-template */ + emails_url: string; + /** Format: uri-template */ + emojis_url: string; + /** Format: uri-template */ + events_url: string; + /** Format: uri-template */ + feeds_url: string; + /** Format: uri-template */ + followers_url: string; + /** Format: uri-template */ + following_url: string; + /** Format: uri-template */ + gists_url: string; + /** Format: uri-template */ + hub_url: string; + /** Format: uri-template */ + issue_search_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + label_search_url: string; + /** Format: uri-template */ + notifications_url: string; + /** Format: uri-template */ + organization_url: string; + /** Format: uri-template */ + organization_repositories_url: string; + /** Format: uri-template */ + organization_teams_url: string; + /** Format: uri-template */ + public_gists_url: string; + /** Format: uri-template */ + rate_limit_url: string; + /** Format: uri-template */ + repository_url: string; + /** Format: uri-template */ + repository_search_url: string; + /** Format: uri-template */ + current_user_repositories_url: string; + /** Format: uri-template */ + starred_url: string; + /** Format: uri-template */ + starred_gists_url: string; + /** Format: uri-template */ + topic_search_url?: string; + /** Format: uri-template */ + user_url: string; + /** Format: uri-template */ + user_organizations_url: string; + /** Format: uri-template */ + user_repositories_url: string; + /** Format: uri-template */ + user_search_url: string; + }; + /** + * Simple User + * @description A GitHub user.
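+ *
+ * These generated interfaces are meant to be consumed as lookup types. A minimal sketch; the `@octokit/openapi-types` module name and its export names are assumptions based on the shape of this file.
+ *
+ * ```ts
+ * import type { components, operations, paths } from "@octokit/openapi-types";
+ *
+ * // Resolve a schema by name, for example the nullable "Simple User" object below.
+ * type MaybeUser = components["schemas"]["nullable-simple-user"];
+ *
+ * // Resolve an endpoint either by route or by operation id.
+ * type ListKeys = paths["/user/keys"]["get"];
+ * type CompareCommits = operations["repos/compare-commits"];
+ * ```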
+ */ + "nullable-simple-user": { + name?: string | null; + email?: string | null; + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + /** @example "2020-07-09T00:17:55Z" */ + starred_at?: string; + } | null; + /** + * GitHub app + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. + */ + integration: { + /** + * @description Unique identifier of the GitHub app + * @example 37 + */ + id: number; + /** + * @description The slug name of the GitHub app + * @example probot-owners + */ + slug?: string; + /** @example MDExOkludGVncmF0aW9uMQ== */ + node_id: string; + owner: components["schemas"]["nullable-simple-user"]; + /** + * @description The name of the GitHub app + * @example Probot Owners + */ + name: string; + /** @example The description of the app. 
*/ + description: string | null; + /** + * Format: uri + * @example https://example.com + */ + external_url: string; + /** + * Format: uri + * @example https://github.com/apps/super-ci + */ + html_url: string; + /** + * Format: date-time + * @example 2017-07-08T16:18:44-04:00 + */ + created_at: string; + /** + * Format: date-time + * @example 2017-07-08T16:18:44-04:00 + */ + updated_at: string; + /** + * @description The set of permissions for the GitHub app + * @example { + * "issues": "read", + * "deployments": "write" + * } + */ + permissions: { + issues?: string; + checks?: string; + metadata?: string; + contents?: string; + deployments?: string; + [key: string]: string | undefined; + }; + /** + * @description The list of events for the GitHub app + * @example [ + * "label", + * "deployment" + * ] + */ + events: string[]; + /** + * @description The number of installations associated with the GitHub app + * @example 5 + */ + installations_count?: number; + /** @example "Iv1.25b5d1e65ffc4022" */ + client_id?: string; + /** @example "1d4b2097ac622ba702d19de498f005747a8b21d3" */ + client_secret?: string; + /** @example "6fba8f2fc8a7e8f2cca5577eddd82ca7586b3b6b" */ + webhook_secret?: string | null; + /** @example "-----BEGIN RSA PRIVATE KEY-----\nMIIEogIBAAKCAQEArYxrNYD/iT5CZVpRJu4rBKmmze3PVmT/gCo2ATUvDvZTPTey\nxcGJ3vvrJXazKk06pN05TN29o98jrYz4cengG3YGsXPNEpKsIrEl8NhbnxapEnM9\nJCMRe0P5JcPsfZlX6hmiT7136GRWiGOUba2X9+HKh8QJVLG5rM007TBER9/z9mWm\nrJuNh+m5l320oBQY/Qq3A7wzdEfZw8qm/mIN0FCeoXH1L6B8xXWaAYBwhTEh6SSn\nZHlO1Xu1JWDmAvBCi0RO5aRSKM8q9QEkvvHP4yweAtK3N8+aAbZ7ovaDhyGz8r6r\nzhU1b8Uo0Z2ysf503WqzQgIajr7Fry7/kUwpgQIDAQABAoIBADwJp80Ko1xHPZDy\nfcCKBDfIuPvkmSW6KumbsLMaQv1aGdHDwwTGv3t0ixSay8CGlxMRtRDyZPib6SvQ\n6OH/lpfpbMdW2ErkksgtoIKBVrDilfrcAvrNZu7NxRNbhCSvN8q0s4ICecjbbVQh\nnueSdlA6vGXbW58BHMq68uRbHkP+k+mM9U0mDJ1HMch67wlg5GbayVRt63H7R2+r\nVxcna7B80J/lCEjIYZznawgiTvp3MSanTglqAYi+m1EcSsP14bJIB9vgaxS79kTu\noiSo93leJbBvuGo8QEiUqTwMw4tDksmkLsoqNKQ1q9P7LZ9DGcujtPy4EZsamSJT\ny8OJt0ECgYEA2lxOxJsQk2kI325JgKFjo92mQeUObIvPfSNWUIZQDTjniOI6Gv63\nGLWVFrZcvQBWjMEQraJA9xjPbblV8PtfO87MiJGLWCHFxmPz2dzoedN+2Coxom8m\nV95CLz8QUShuao6u/RYcvUaZEoYs5bHcTmy5sBK80JyEmafJPtCQVxMCgYEAy3ar\nZr3yv4xRPEPMat4rseswmuMooSaK3SKub19WFI5IAtB/e7qR1Rj9JhOGcZz+OQrl\nT78O2OFYlgOIkJPvRMrPpK5V9lslc7tz1FSh3BZMRGq5jSyD7ETSOQ0c8T2O/s7v\nbeEPbVbDe4mwvM24XByH0GnWveVxaDl51ABD65sCgYB3ZAspUkOA5egVCh8kNpnd\nSd6SnuQBE3ySRlT2WEnCwP9Ph6oPgn+oAfiPX4xbRqkL8q/k0BdHQ4h+zNwhk7+h\nWtPYRAP1Xxnc/F+jGjb+DVaIaKGU18MWPg7f+FI6nampl3Q0KvfxwX0GdNhtio8T\nTj1E+SnFwh56SRQuxSh2gwKBgHKjlIO5NtNSflsUYFM+hyQiPiqnHzddfhSG+/3o\nm5nNaSmczJesUYreH5San7/YEy2UxAugvP7aSY2MxB+iGsiJ9WD2kZzTUlDZJ7RV\nUzWsoqBR+eZfVJ2FUWWvy8TpSG6trh4dFxImNtKejCR1TREpSiTV3Zb1dmahK9GV\nrK9NAoGAbBxRLoC01xfxCTgt5BDiBcFVh4fp5yYKwavJPLzHSpuDOrrI9jDn1oKN\nonq5sDU1i391zfQvdrbX4Ova48BN+B7p63FocP/MK5tyyBoT8zQEk2+vWDOw7H/Z\nu5dTCPxTIsoIwUw1I+7yIxqJzLPFgR2gVBwY1ra/8iAqCj+zeBw=\n-----END RSA PRIVATE KEY-----\n" */ + pem?: string; + }; + /** + * Basic Error + * @description Basic Error + */ + "basic-error": { + message?: string; + documentation_url?: string; + url?: string; + status?: string; + }; + /** + * Validation Error Simple + * @description Validation Error Simple + */ + "validation-error-simple": { + message: string; + documentation_url: string; + errors?: string[]; + }; + /** + * Format: uri + * @description The URL to which the payloads will be delivered. + * @example https://example.com/webhook + */ + "webhook-config-url": string; + /** + * @description The media type used to serialize the payloads. 
Supported values include `json` and `form`. The default is `form`. + * @example "json" + */ + "webhook-config-content-type": string; + /** + * @description If provided, the `secret` will be used as the `key` to generate the HMAC hex digest value for [delivery signature headers](https://docs.github.com/webhooks/event-payloads/#delivery-headers). + * @example "********" + */ + "webhook-config-secret": string; + "webhook-config-insecure-ssl": string | number; + /** + * Webhook Configuration + * @description Configuration object of the webhook + */ + "webhook-config": { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + /** + * Simple webhook delivery + * @description Delivery made by a webhook, without request and response information. + */ + "hook-delivery-item": { + /** + * @description Unique identifier of the webhook delivery. + * @example 42 + */ + id: number; + /** + * @description Unique identifier for the event (shared with all deliveries for all webhooks that subscribe to this event). + * @example 58474f00-b361-11eb-836d-0e4f3503ccbe + */ + guid: string; + /** + * Format: date-time + * @description Time when the webhook delivery occurred. + * @example 2021-05-12T20:33:44Z + */ + delivered_at: string; + /** + * @description Whether the webhook delivery is a redelivery. + * @example false + */ + redelivery: boolean; + /** + * @description Time spent delivering. + * @example 0.03 + */ + duration: number; + /** + * @description Describes the response returned after attempting the delivery. + * @example failed to connect + */ + status: string; + /** + * @description Status code received when delivery was made. + * @example 502 + */ + status_code: number; + /** + * @description The event that triggered the delivery. + * @example issues + */ + event: string; + /** + * @description The type of activity for the event that triggered the delivery. + * @example opened + */ + action: string | null; + /** + * @description The id of the GitHub App installation associated with this event. + * @example 123 + */ + installation_id: number | null; + /** + * @description The id of the repository associated with this event. + * @example 123 + */ + repository_id: number | null; + }; + /** + * Scim Error + * @description Scim Error + */ + "scim-error": { + message?: string | null; + documentation_url?: string | null; + detail?: string | null; + status?: number; + scimType?: string | null; + schemas?: string[]; + }; + /** + * Validation Error + * @description Validation Error + */ + "validation-error": { + message: string; + documentation_url: string; + errors?: { + resource?: string; + field?: string; + message?: string; + code: string; + index?: number; + value?: (string | null) | (number | null) | (string[] | null); + }[]; + }; + /** + * Webhook delivery + * @description Delivery made by a webhook. + */ + "hook-delivery": { + /** + * @description Unique identifier of the delivery. + * @example 42 + */ + id: number; + /** + * @description Unique identifier for the event (shared with all deliveries for all webhooks that subscribe to this event). + * @example 58474f00-b361-11eb-836d-0e4f3503ccbe + */ + guid: string; + /** + * Format: date-time + * @description Time when the delivery was delivered. 
+ * @example 2021-05-12T20:33:44Z + */ + delivered_at: string; + /** + * @description Whether the delivery is a redelivery. + * @example false + */ + redelivery: boolean; + /** + * @description Time spent delivering. + * @example 0.03 + */ + duration: number; + /** + * @description Description of the status of the attempted delivery + * @example failed to connect + */ + status: string; + /** + * @description Status code received when delivery was made. + * @example 502 + */ + status_code: number; + /** + * @description The event that triggered the delivery. + * @example issues + */ + event: string; + /** + * @description The type of activity for the event that triggered the delivery. + * @example opened + */ + action: string | null; + /** + * @description The id of the GitHub App installation associated with this event. + * @example 123 + */ + installation_id: number | null; + /** + * @description The id of the repository associated with this event. + * @example 123 + */ + repository_id: number | null; + /** + * @description The URL target of the delivery. + * @example https://www.example.com + */ + url?: string; + request: { + /** @description The request headers sent with the webhook delivery. */ + headers: { + [key: string]: unknown; + } | null; + /** @description The webhook payload. */ + payload: { + [key: string]: unknown; + } | null; + }; + response: { + /** @description The response headers received when the delivery was made. */ + headers: { + [key: string]: unknown; + } | null; + /** @description The response payload received. */ + payload: string | null; + }; + }; + /** + * Simple User + * @description A GitHub user. + */ + "simple-user": { + name?: string | null; + email?: string | null; + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + /** @example "2020-07-09T00:17:55Z" */ + starred_at?: string; + }; + /** + * Enterprise + * @description An enterprise on GitHub. + */ + enterprise: { + /** @description A short description of the enterprise. 
*/ + description?: string | null; + /** + * Format: uri + * @example https://github.com/enterprises/octo-business + */ + html_url: string; + /** + * Format: uri + * @description The enterprise's website URL. + */ + website_url?: string | null; + /** + * @description Unique identifier of the enterprise + * @example 42 + */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** + * @description The name of the enterprise. + * @example Octo Business + */ + name: string; + /** + * @description The slug url identifier for the enterprise. + * @example octo-business + */ + slug: string; + /** + * Format: date-time + * @example 2019-01-26T19:01:12Z + */ + created_at: string | null; + /** + * Format: date-time + * @example 2019-01-26T19:14:43Z + */ + updated_at: string | null; + /** Format: uri */ + avatar_url: string; + }; + /** + * Integration Installation Request + * @description Request to install an integration on a target + */ + "integration-installation-request": { + /** + * @description Unique identifier of the request installation. + * @example 42 + */ + id: number; + /** @example MDExOkludGVncmF0aW9uMQ== */ + node_id?: string; + account: + | components["schemas"]["simple-user"] + | components["schemas"]["enterprise"]; + requester: components["schemas"]["simple-user"]; + /** + * Format: date-time + * @example 2022-07-08T16:18:44-04:00 + */ + created_at: string; + }; + /** + * App Permissions + * @description The permissions granted to the user-to-server access token. + * @example { + * "contents": "read", + * "issues": "read", + * "deployments": "write", + * "single_file": "read" + * } + */ + "app-permissions": { + /** + * @description The level of permission to grant the access token for GitHub Actions workflows, workflow runs, and artifacts. + * @enum {string} + */ + actions?: "read" | "write"; + /** + * @description The level of permission to grant the access token for repository creation, deletion, settings, teams, and collaborators creation. + * @enum {string} + */ + administration?: "read" | "write"; + /** + * @description The level of permission to grant the access token for checks on code. + * @enum {string} + */ + checks?: "read" | "write"; + /** + * @description The level of permission to grant the access token for repository contents, commits, branches, downloads, releases, and merges. + * @enum {string} + */ + contents?: "read" | "write"; + /** + * @description The level of permission to grant the access token for deployments and deployment statuses. + * @enum {string} + */ + deployments?: "read" | "write"; + /** + * @description The level of permission to grant the access token for managing repository environments. + * @enum {string} + */ + environments?: "read" | "write"; + /** + * @description The level of permission to grant the access token for issues and related comments, assignees, labels, and milestones. + * @enum {string} + */ + issues?: "read" | "write"; + /** + * @description The level of permission to grant the access token to search repositories, list collaborators, and access repository metadata. + * @enum {string} + */ + metadata?: "read" | "write"; + /** + * @description The level of permission to grant the access token for packages published to GitHub Packages. + * @enum {string} + */ + packages?: "read" | "write"; + /** + * @description The level of permission to grant the access token to retrieve Pages statuses, configuration, and builds, as well as create new builds. 
+ * @enum {string} + */ + pages?: "read" | "write"; + /** + * @description The level of permission to grant the access token for pull requests and related comments, assignees, labels, milestones, and merges. + * @enum {string} + */ + pull_requests?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage the post-receive hooks for a repository. + * @enum {string} + */ + repository_hooks?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage repository projects, columns, and cards. + * @enum {string} + */ + repository_projects?: "read" | "write" | "admin"; + /** + * @description The level of permission to grant the access token to view and manage secret scanning alerts. + * @enum {string} + */ + secret_scanning_alerts?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage repository secrets. + * @enum {string} + */ + secrets?: "read" | "write"; + /** + * @description The level of permission to grant the access token to view and manage security events like code scanning alerts. + * @enum {string} + */ + security_events?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage just a single file. + * @enum {string} + */ + single_file?: "read" | "write"; + /** + * @description The level of permission to grant the access token for commit statuses. + * @enum {string} + */ + statuses?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage Dependabot alerts. + * @enum {string} + */ + vulnerability_alerts?: "read" | "write"; + /** + * @description The level of permission to grant the access token to update GitHub Actions workflow files. + * @enum {string} + */ + workflows?: "write"; + /** + * @description The level of permission to grant the access token for organization teams and members. + * @enum {string} + */ + members?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage access to an organization. + * @enum {string} + */ + organization_administration?: "read" | "write"; + /** + * @description The level of permission to grant the access token for custom repository roles management. This property is in beta and is subject to change. + * @enum {string} + */ + organization_custom_roles?: "read" | "write"; + /** + * @description The level of permission to grant the access token to view and manage announcement banners for an organization. + * @enum {string} + */ + organization_announcement_banners?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage the post-receive hooks for an organization. + * @enum {string} + */ + organization_hooks?: "read" | "write"; + /** + * @description The level of permission to grant the access token for viewing and managing fine-grained personal access token requests to an organization. + * @enum {string} + */ + organization_personal_access_tokens?: "read" | "write"; + /** + * @description The level of permission to grant the access token for viewing and managing fine-grained personal access tokens that have been approved by an organization. + * @enum {string} + */ + organization_personal_access_token_requests?: "read" | "write"; + /** + * @description The level of permission to grant the access token for viewing an organization's plan. 
+ * @enum {string} + */ + organization_plan?: "read"; + /** + * @description The level of permission to grant the access token to manage organization projects and projects beta (where available). + * @enum {string} + */ + organization_projects?: "read" | "write" | "admin"; + /** + * @description The level of permission to grant the access token for organization packages published to GitHub Packages. + * @enum {string} + */ + organization_packages?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage organization secrets. + * @enum {string} + */ + organization_secrets?: "read" | "write"; + /** + * @description The level of permission to grant the access token to view and manage GitHub Actions self-hosted runners available to an organization. + * @enum {string} + */ + organization_self_hosted_runners?: "read" | "write"; + /** + * @description The level of permission to grant the access token to view and manage users blocked by the organization. + * @enum {string} + */ + organization_user_blocking?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage team discussions and related comments. + * @enum {string} + */ + team_discussions?: "read" | "write"; + }; + /** + * Installation + * @description Installation + */ + installation: { + /** + * @description The ID of the installation. + * @example 1 + */ + id: number; + account: + | ( + | components["schemas"]["simple-user"] + | components["schemas"]["enterprise"] + ) + | null; + /** + * @description Describe whether all repositories have been selected or there's a selection involved + * @enum {string} + */ + repository_selection: "all" | "selected"; + /** + * Format: uri + * @example https://api.github.com/installations/1/access_tokens + */ + access_tokens_url: string; + /** + * Format: uri + * @example https://api.github.com/installation/repositories + */ + repositories_url: string; + /** + * Format: uri + * @example https://github.com/organizations/github/settings/installations/1 + */ + html_url: string; + /** @example 1 */ + app_id: number; + /** @description The ID of the user or organization this token is being scoped to. */ + target_id: number; + /** @example Organization */ + target_type: string; + permissions: components["schemas"]["app-permissions"]; + events: string[]; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** @example config.yaml */ + single_file_name: string | null; + /** @example true */ + has_multiple_single_files?: boolean; + /** + * @example [ + * "config.yml", + * ".github/issue_TEMPLATE.md" + * ] + */ + single_file_paths?: string[]; + /** @example github-actions */ + app_slug: string; + suspended_by: components["schemas"]["nullable-simple-user"]; + /** Format: date-time */ + suspended_at: string | null; + /** @example "test_13f1e99741e3e004@d7e1eb0bc0a1ba12.com" */ + contact_email?: string | null; + }; + /** + * License Simple + * @description License Simple + */ + "nullable-license-simple": { + /** @example mit */ + key: string; + /** @example MIT License */ + name: string; + /** + * Format: uri + * @example https://api.github.com/licenses/mit + */ + url: string | null; + /** @example MIT */ + spdx_id: string | null; + /** @example MDc6TGljZW5zZW1pdA== */ + node_id: string; + /** Format: uri */ + html_url?: string; + } | null; + /** + * Repository + * @description A repository on GitHub. 
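// Sketch: narrowing the `installation.account` union declared above, which is
// `simple-user | enterprise | null`. `simple-user` carries `login` while
// `enterprise` carries `slug`, so an `in` check discriminates the two.
// Module specifier is assumed, as in the earlier sketch.
import type { components } from "@octokit/openapi-types";

type Installation = components["schemas"]["installation"];

function describeAccount(installation: Installation): string {
  const account = installation.account;
  if (account === null) return "no account";
  // `login` only exists on `simple-user`, so this narrows the union.
  if ("login" in account) return `user or organization: ${account.login}`;
  return `enterprise: ${account.slug}`;
}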
+ */ + repository: { + /** + * @description Unique identifier of the repository + * @example 42 + */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** + * @description The name of the repository. + * @example Team Environment + */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + license: components["schemas"]["nullable-license-simple"]; + organization?: components["schemas"]["nullable-simple-user"]; + forks: number; + permissions?: { + admin: boolean; + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + }; + owner: components["schemas"]["simple-user"]; + /** + * @description Whether the repository is private or public. + * @default false + */ + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! */ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + /** @example git:github.com/octocat/Hello-World.git */ + git_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example 
http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + /** @example git@github.com:octocat/Hello-World.git */ + ssh_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + /** @example https://github.com/octocat/Hello-World.git */ + clone_url: string; + /** + * Format: uri + * @example git:git.example.com/octocat/Hello-World + */ + mirror_url: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + /** + * Format: uri + * @example https://svn.github.com/octocat/Hello-World + */ + svn_url: string; + /** + * Format: uri + * @example https://github.com + */ + homepage: string | null; + language: string | null; + /** @example 9 */ + forks_count: number; + /** @example 80 */ + stargazers_count: number; + /** @example 80 */ + watchers_count: number; + /** + * @description The size of the repository. Size is calculated hourly. When a repository is initially created, the size is 0. + * @example 108 + */ + size: number; + /** + * @description The default branch of the repository. + * @example master + */ + default_branch: string; + /** @example 0 */ + open_issues_count: number; + /** + * @description Whether this repository acts as a template that can be used to generate new repositories. + * @default false + * @example true + */ + is_template?: boolean; + topics?: string[]; + /** + * @description Whether issues are enabled. + * @default true + * @example true + */ + has_issues: boolean; + /** + * @description Whether projects are enabled. + * @default true + * @example true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + * @example true + */ + has_wiki: boolean; + has_pages: boolean; + /** + * @description Whether downloads are enabled. + * @default true + * @example true + */ + has_downloads: boolean; + /** + * @description Whether discussions are enabled. + * @default false + * @example true + */ + has_discussions?: boolean; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** @description Returns whether or not this repository disabled. */ + disabled: boolean; + /** + * @description The repository visibility: public, private, or internal. + * @default public + */ + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at: string | null; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + * @example true + */ + allow_rebase_merge?: boolean; + template_repository?: { + id?: number; + node_id?: string; + name?: string; + full_name?: string; + owner?: { + login?: string; + id?: number; + node_id?: string; + avatar_url?: string; + gravatar_id?: string; + url?: string; + html_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + starred_url?: string; + subscriptions_url?: string; + organizations_url?: string; + repos_url?: string; + events_url?: string; + received_events_url?: string; + type?: string; + site_admin?: boolean; + }; + private?: boolean; + html_url?: string; + description?: string; + fork?: boolean; + url?: string; + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + contents_url?: string; + contributors_url?: string; + deployments_url?: string; + downloads_url?: string; + events_url?: string; + forks_url?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + git_url?: string; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + notifications_url?: string; + pulls_url?: string; + releases_url?: string; + ssh_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; + subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + clone_url?: string; + mirror_url?: string; + hooks_url?: string; + svn_url?: string; + homepage?: string; + language?: string; + forks_count?: number; + stargazers_count?: number; + watchers_count?: number; + size?: number; + default_branch?: string; + open_issues_count?: number; + is_template?: boolean; + topics?: string[]; + has_issues?: boolean; + has_projects?: boolean; + has_wiki?: boolean; + has_pages?: boolean; + has_downloads?: boolean; + archived?: boolean; + disabled?: boolean; + visibility?: string; + pushed_at?: string; + created_at?: string; + updated_at?: string; + permissions?: { + admin?: boolean; + maintain?: boolean; + push?: boolean; + triage?: boolean; + pull?: boolean; + }; + allow_rebase_merge?: boolean; + temp_clone_token?: string; + allow_squash_merge?: boolean; + allow_auto_merge?: boolean; + delete_branch_on_merge?: boolean; + allow_update_branch?: boolean; + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). 
+ * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + allow_merge_commit?: boolean; + subscribers_count?: number; + network_count?: number; + } | null; + temp_clone_token?: string; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + * @example true + */ + allow_squash_merge?: boolean; + /** + * @description Whether to allow Auto-merge to be used on pull requests. + * @default false + * @example false + */ + allow_auto_merge?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + * @example false + */ + delete_branch_on_merge?: boolean; + /** + * @description Whether or not a pull request head branch that is behind its base branch can always be updated even if it is not required to be up to date before merging. + * @default false + * @example false + */ + allow_update_branch?: boolean; + /** + * @deprecated + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. 
+ * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + * @example true + */ + allow_merge_commit?: boolean; + /** @description Whether to allow forking this repo */ + allow_forking?: boolean; + /** + * @description Whether to require contributors to sign off on web-based commits + * @default false + */ + web_commit_signoff_required?: boolean; + subscribers_count?: number; + network_count?: number; + open_issues: number; + watchers: number; + master_branch?: string; + /** @example "2020-07-09T00:17:42Z" */ + starred_at?: string; + /** @description Whether anonymous git access is enabled for this repository */ + anonymous_access_enabled?: boolean; + }; + /** + * Installation Token + * @description Authentication token for a GitHub App installed on a user or org. + */ + "installation-token": { + token: string; + expires_at: string; + permissions?: components["schemas"]["app-permissions"]; + /** @enum {string} */ + repository_selection?: "all" | "selected"; + repositories?: components["schemas"]["repository"][]; + /** @example README.md */ + single_file?: string; + /** @example true */ + has_multiple_single_files?: boolean; + /** + * @example [ + * "config.yml", + * ".github/issue_TEMPLATE.md" + * ] + */ + single_file_paths?: string[]; + }; + /** Scoped Installation */ + "nullable-scoped-installation": { + permissions: components["schemas"]["app-permissions"]; + /** + * @description Describe whether all repositories have been selected or there's a selection involved + * @enum {string} + */ + repository_selection: "all" | "selected"; + /** @example config.yaml */ + single_file_name: string | null; + /** @example true */ + has_multiple_single_files?: boolean; + /** + * @example [ + * "config.yml", + * ".github/issue_TEMPLATE.md" + * ] + */ + single_file_paths?: string[]; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repositories_url: string; + account: components["schemas"]["simple-user"]; + } | null; + /** + * Authorization + * @description The authorization for an OAuth app, GitHub App, or a Personal Access Token. + */ + authorization: { + id: number; + /** Format: uri */ + url: string; + /** @description A list of scopes that this authorization is in. 
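// Sketch: the merge-default fields on the `repository` schema above are plain
// string-literal unions, so a settings payload can be typed with `Pick`.
// Module specifier is assumed, as before; the values shown are just one legal
// combination of the documented enum members.
import type { components } from "@octokit/openapi-types";

type Repository = components["schemas"]["repository"];
type MergeDefaults = Pick<
  Repository,
  | "squash_merge_commit_title"
  | "squash_merge_commit_message"
  | "merge_commit_title"
  | "merge_commit_message"
>;

const mergeDefaults: MergeDefaults = {
  squash_merge_commit_title: "COMMIT_OR_PR_TITLE",
  squash_merge_commit_message: "COMMIT_MESSAGES",
  merge_commit_title: "MERGE_MESSAGE",
  merge_commit_message: "PR_TITLE",
};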
*/ + scopes: string[] | null; + token: string; + token_last_eight: string | null; + hashed_token: string | null; + app: { + client_id: string; + name: string; + /** Format: uri */ + url: string; + }; + note: string | null; + /** Format: uri */ + note_url: string | null; + /** Format: date-time */ + updated_at: string; + /** Format: date-time */ + created_at: string; + fingerprint: string | null; + user?: components["schemas"]["nullable-simple-user"]; + installation?: components["schemas"]["nullable-scoped-installation"]; + /** Format: date-time */ + expires_at: string | null; + }; + /** + * Code Of Conduct + * @description Code Of Conduct + */ + "code-of-conduct": { + /** @example contributor_covenant */ + key: string; + /** @example Contributor Covenant */ + name: string; + /** + * Format: uri + * @example https://api.github.com/codes_of_conduct/contributor_covenant + */ + url: string; + /** + * @example # Contributor Covenant Code of Conduct + * + * ## Our Pledge + * + * In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. + * + * ## Our Standards + * + * Examples of behavior that contributes to creating a positive environment include: + * + * * Using welcoming and inclusive language + * * Being respectful of differing viewpoints and experiences + * * Gracefully accepting constructive criticism + * * Focusing on what is best for the community + * * Showing empathy towards other community members + * + * Examples of unacceptable behavior by participants include: + * + * * The use of sexualized language or imagery and unwelcome sexual attention or advances + * * Trolling, insulting/derogatory comments, and personal or political attacks + * * Public or private harassment + * * Publishing others' private information, such as a physical or electronic address, without explicit permission + * * Other conduct which could reasonably be considered inappropriate in a professional setting + * + * ## Our Responsibilities + * + * Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response + * to any instances of unacceptable behavior. + * + * Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. + * + * ## Scope + * + * This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, + * posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. + * + * ## Enforcement + * + * Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [EMAIL]. 
The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. + * + * Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. + * + * ## Attribution + * + * This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] + * + * [homepage]: http://contributor-covenant.org + * [version]: http://contributor-covenant.org/version/1/4/ + */ + body?: string; + /** Format: uri */ + html_url: string | null; + }; + /** @description The security alert number. */ + readonly "alert-number": number; + /** @description Details for the vulnerable package. */ + readonly "dependabot-alert-package": { + /** @description The package's language or package management ecosystem. */ + readonly ecosystem: string; + /** @description The unique package name within its ecosystem. */ + readonly name: string; + }; + /** @description Details pertaining to one vulnerable version range for the advisory. */ + readonly "dependabot-alert-security-vulnerability": { + readonly package: components["schemas"]["dependabot-alert-package"]; + /** + * @description The severity of the vulnerability. + * @enum {string} + */ + readonly severity: "low" | "medium" | "high" | "critical"; + /** @description Conditions that identify vulnerable versions of this vulnerability's package. */ + readonly vulnerable_version_range: string; + /** @description Details pertaining to the package version that patches this vulnerability. */ + readonly first_patched_version: { + /** @description The package version that patches this vulnerability. */ + readonly identifier: string; + } | null; + }; + /** @description Details for the GitHub Security Advisory. */ + readonly "dependabot-alert-security-advisory": { + /** @description The unique GitHub Security Advisory ID assigned to the advisory. */ + readonly ghsa_id: string; + /** @description The unique CVE ID assigned to the advisory. */ + readonly cve_id: string | null; + /** @description A short, plain text summary of the advisory. */ + readonly summary: string; + /** @description A long-form Markdown-supported description of the advisory. */ + readonly description: string; + /** @description Vulnerable version range information for the advisory. */ + readonly vulnerabilities: readonly components["schemas"]["dependabot-alert-security-vulnerability"][]; + /** + * @description The severity of the advisory. + * @enum {string} + */ + readonly severity: "low" | "medium" | "high" | "critical"; + /** @description Details for the advisory pertaining to the Common Vulnerability Scoring System. */ + readonly cvss: { + /** @description The overall CVSS score of the advisory. */ + readonly score: number; + /** @description The full CVSS vector string for the advisory. */ + readonly vector_string: string | null; + }; + /** @description Details for the advisory pertaining to Common Weakness Enumeration. */ + readonly cwes: readonly { + /** @description The unique CWE ID. */ + readonly cwe_id: string; + /** @description The short, plain text name of the CWE. 
*/ + readonly name: string; + }[]; + /** @description Values that identify this advisory among security information sources. */ + readonly identifiers: readonly { + /** + * @description The type of advisory identifier. + * @enum {string} + */ + readonly type: "CVE" | "GHSA"; + /** @description The value of the advisory identifer. */ + readonly value: string; + }[]; + /** @description Links to additional advisory information. */ + readonly references: readonly { + /** + * Format: uri + * @description The URL of the reference. + */ + readonly url: string; + }[]; + /** + * Format: date-time + * @description The time that the advisory was published in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + readonly published_at: string; + /** + * Format: date-time + * @description The time that the advisory was last modified in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + readonly updated_at: string; + /** + * Format: date-time + * @description The time that the advisory was withdrawn in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + readonly withdrawn_at: string | null; + }; + /** + * Format: uri + * @description The REST API URL of the alert resource. + */ + readonly "alert-url": string; + /** + * Format: uri + * @description The GitHub URL of the alert resource. + */ + readonly "alert-html-url": string; + /** + * Format: date-time + * @description The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + readonly "alert-created-at": string; + /** + * Format: date-time + * @description The time that the alert was last updated in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + readonly "alert-updated-at": string; + /** + * Format: date-time + * @description The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + readonly "alert-dismissed-at": string | null; + /** + * Format: date-time + * @description The time that the alert was no longer detected and was considered fixed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + readonly "alert-fixed-at": string | null; + /** + * Format: date-time + * @description The time that the alert was auto-dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + readonly "alert-auto-dismissed-at": string | null; + /** + * Simple Repository + * @description A GitHub repository. + */ + "simple-repository": { + /** + * @description A unique identifier of the repository. + * @example 1296269 + */ + id: number; + /** + * @description The GraphQL identifier of the repository. + * @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 + */ + node_id: string; + /** + * @description The name of the repository. + * @example Hello-World + */ + name: string; + /** + * @description The full, globally unique, name of the repository. + * @example octocat/Hello-World + */ + full_name: string; + owner: components["schemas"]["simple-user"]; + /** @description Whether the repository is private. */ + private: boolean; + /** + * Format: uri + * @description The URL to view the repository on GitHub.com. + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** + * @description The repository description. + * @example This your first repo! + */ + description: string | null; + /** @description Whether the repository is a fork. */ + fork: boolean; + /** + * Format: uri + * @description The URL to get more information about the repository from the GitHub API. 
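// Sketch: reading the readonly `dependabot-alert-security-advisory` shape
// declared above to find the patched version for a given package. Uses only
// fields from the schema; module specifier is assumed, as before.
import type { components } from "@octokit/openapi-types";

type Advisory = components["schemas"]["dependabot-alert-security-advisory"];

function firstPatchedVersion(
  advisory: Advisory,
  packageName: string,
): string | undefined {
  const vulnerability = advisory.vulnerabilities.find(
    (v) => v.package.name === packageName,
  );
  // `first_patched_version` is null when no patched release exists yet.
  return vulnerability?.first_patched_version?.identifier;
}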
+ * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** + * @description A template for the API URL to download the repository as an archive. + * @example https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} + */ + archive_url: string; + /** + * @description A template for the API URL to list the available assignees for issues in the repository. + * @example https://api.github.com/repos/octocat/Hello-World/assignees{/user} + */ + assignees_url: string; + /** + * @description A template for the API URL to create or retrieve a raw Git blob in the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} + */ + blobs_url: string; + /** + * @description A template for the API URL to get information about branches in the repository. + * @example https://api.github.com/repos/octocat/Hello-World/branches{/branch} + */ + branches_url: string; + /** + * @description A template for the API URL to get information about collaborators of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} + */ + collaborators_url: string; + /** + * @description A template for the API URL to get information about comments on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/comments{/number} + */ + comments_url: string; + /** + * @description A template for the API URL to get information about commits on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/commits{/sha} + */ + commits_url: string; + /** + * @description A template for the API URL to compare two commits or refs. + * @example https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} + */ + compare_url: string; + /** + * @description A template for the API URL to get the contents of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/contents/{+path} + */ + contents_url: string; + /** + * Format: uri + * @description A template for the API URL to list the contributors to the repository. + * @example https://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @description The API URL to list the deployments of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @description The API URL to list the downloads on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @description The API URL to list the events of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @description The API URL to list the forks of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** + * @description A template for the API URL to get information about Git commits of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/commits{/sha} + */ + git_commits_url: string; + /** + * @description A template for the API URL to get information about Git refs of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/refs{/sha} + */ + git_refs_url: string; + /** + * @description A template for the API URL to get information about Git tags of the repository. 
+ * @example https://api.github.com/repos/octocat/Hello-World/git/tags{/sha} + */ + git_tags_url: string; + /** + * @description A template for the API URL to get information about issue comments on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/issues/comments{/number} + */ + issue_comment_url: string; + /** + * @description A template for the API URL to get information about issue events on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/issues/events{/number} + */ + issue_events_url: string; + /** + * @description A template for the API URL to get information about issues on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/issues{/number} + */ + issues_url: string; + /** + * @description A template for the API URL to get information about deploy keys on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/keys{/key_id} + */ + keys_url: string; + /** + * @description A template for the API URL to get information about labels of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/labels{/name} + */ + labels_url: string; + /** + * Format: uri + * @description The API URL to get information about the languages of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @description The API URL to merge branches in the repository. + * @example https://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** + * @description A template for the API URL to get information about milestones of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/milestones{/number} + */ + milestones_url: string; + /** + * @description A template for the API URL to get information about notifications on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} + */ + notifications_url: string; + /** + * @description A template for the API URL to get information about pull requests on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/pulls{/number} + */ + pulls_url: string; + /** + * @description A template for the API URL to get information about releases on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/releases{/id} + */ + releases_url: string; + /** + * Format: uri + * @description The API URL to list the stargazers on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** + * @description A template for the API URL to get information about statuses of a commit. + * @example https://api.github.com/repos/octocat/Hello-World/statuses/{sha} + */ + statuses_url: string; + /** + * Format: uri + * @description The API URL to list the subscribers on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @description The API URL to subscribe to notifications for this repository. + * @example https://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @description The API URL to get information about tags on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @description The API URL to list the teams on the repository. 
+ * @example https://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** + * @description A template for the API URL to create or retrieve a raw Git tree of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/trees{/sha} + */ + trees_url: string; + /** + * Format: uri + * @description The API URL to list the hooks on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + }; + /** @description A Dependabot alert. */ + "dependabot-alert-with-repository": { + number: components["schemas"]["alert-number"]; + /** + * @description The state of the Dependabot alert. + * @enum {string} + */ + state: "auto_dismissed" | "dismissed" | "fixed" | "open"; + /** @description Details for the vulnerable dependency. */ + dependency: { + readonly package?: components["schemas"]["dependabot-alert-package"]; + /** @description The full path to the dependency manifest file, relative to the root of the repository. */ + readonly manifest_path?: string; + /** + * @description The execution scope of the vulnerable dependency. + * @enum {string|null} + */ + readonly scope?: "development" | "runtime" | null; + }; + security_advisory: components["schemas"]["dependabot-alert-security-advisory"]; + security_vulnerability: components["schemas"]["dependabot-alert-security-vulnerability"]; + url: components["schemas"]["alert-url"]; + html_url: components["schemas"]["alert-html-url"]; + created_at: components["schemas"]["alert-created-at"]; + updated_at: components["schemas"]["alert-updated-at"]; + dismissed_at: components["schemas"]["alert-dismissed-at"]; + dismissed_by: components["schemas"]["nullable-simple-user"]; + /** + * @description The reason that the alert was dismissed. + * @enum {string|null} + */ + dismissed_reason: + | "fix_started" + | "inaccurate" + | "no_bandwidth" + | "not_used" + | "tolerable_risk" + | null; + /** @description An optional comment associated with the alert's dismissal. */ + dismissed_comment: string | null; + fixed_at: components["schemas"]["alert-fixed-at"]; + auto_dismissed_at?: components["schemas"]["alert-auto-dismissed-at"]; + repository: components["schemas"]["simple-repository"]; + }; + /** + * Format: date-time + * @description The time that the alert was last updated in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + readonly "nullable-alert-updated-at": string | null; + /** + * @description Sets the state of the secret scanning alert. You must provide `resolution` when you set the state to `resolved`. + * @enum {string} + */ + "secret-scanning-alert-state": "open" | "resolved"; + /** + * @description **Required when the `state` is `resolved`.** The reason for resolving the alert. + * @enum {string|null} + */ + "secret-scanning-alert-resolution": + | "false_positive" + | "wont_fix" + | "revoked" + | "used_in_tests" + | null; + "organization-secret-scanning-alert": { + number?: components["schemas"]["alert-number"]; + created_at?: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["nullable-alert-updated-at"]; + url?: components["schemas"]["alert-url"]; + html_url?: components["schemas"]["alert-html-url"]; + /** + * Format: uri + * @description The REST API URL of the code locations for this alert. 
+ */ + locations_url?: string; + state?: components["schemas"]["secret-scanning-alert-state"]; + resolution?: components["schemas"]["secret-scanning-alert-resolution"]; + /** + * Format: date-time + * @description The time that the alert was resolved in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + resolved_at?: string | null; + resolved_by?: components["schemas"]["nullable-simple-user"]; + /** @description The type of secret that secret scanning detected. */ + secret_type?: string; + /** + * @description User-friendly name for the detected secret, matching the `secret_type`. + * For a list of built-in patterns, see "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)." + */ + secret_type_display_name?: string; + /** @description The secret that was detected. */ + secret?: string; + repository?: components["schemas"]["simple-repository"]; + /** @description Whether push protection was bypassed for the detected secret. */ + push_protection_bypassed?: boolean | null; + push_protection_bypassed_by?: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @description The time that push protection was bypassed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + push_protection_bypassed_at?: string | null; + /** @description The comment that was optionally added when this alert was closed */ + resolution_comment?: string | null; + }; + /** + * Actor + * @description Actor + */ + actor: { + id: number; + login: string; + display_login?: string; + gravatar_id: string | null; + /** Format: uri */ + url: string; + /** Format: uri */ + avatar_url: string; + }; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + "nullable-milestone": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/milestones/1 + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/milestones/v1.0 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/milestones/1/labels + */ + labels_url: string; + /** @example 1002604 */ + id: number; + /** @example MDk6TWlsZXN0b25lMTAwMjYwNA== */ + node_id: string; + /** + * @description The number of the milestone. + * @example 42 + */ + number: number; + /** + * @description The state of the milestone. + * @default open + * @example open + * @enum {string} + */ + state: "open" | "closed"; + /** + * @description The title of the milestone. + * @example v1.0 + */ + title: string; + /** @example Tracking milestone for version 1.0 */ + description: string | null; + creator: components["schemas"]["nullable-simple-user"]; + /** @example 4 */ + open_issues: number; + /** @example 8 */ + closed_issues: number; + /** + * Format: date-time + * @example 2011-04-10T20:09:31Z + */ + created_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2013-02-12T13:22:01Z + */ + closed_at: string | null; + /** + * Format: date-time + * @example 2012-10-09T23:39:01Z + */ + due_on: string | null; + } | null; + /** + * GitHub app + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
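// Sketch: the descriptions above note that `resolution` is required whenever a
// secret scanning alert's `state` is set to "resolved"; that pairing can be
// expressed as a small discriminated union. Module specifier is assumed, as before.
import type { components } from "@octokit/openapi-types";

type AlertState = components["schemas"]["secret-scanning-alert-state"];
type AlertResolution =
  components["schemas"]["secret-scanning-alert-resolution"];

type SecretScanningStateUpdate =
  | { state: Extract<AlertState, "open"> }
  | {
      state: Extract<AlertState, "resolved">;
      resolution: NonNullable<AlertResolution>;
    };

const update: SecretScanningStateUpdate = {
  state: "resolved",
  resolution: "false_positive",
};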
+ */ + "nullable-integration": { + /** + * @description Unique identifier of the GitHub app + * @example 37 + */ + id: number; + /** + * @description The slug name of the GitHub app + * @example probot-owners + */ + slug?: string; + /** @example MDExOkludGVncmF0aW9uMQ== */ + node_id: string; + owner: components["schemas"]["nullable-simple-user"]; + /** + * @description The name of the GitHub app + * @example Probot Owners + */ + name: string; + /** @example The description of the app. */ + description: string | null; + /** + * Format: uri + * @example https://example.com + */ + external_url: string; + /** + * Format: uri + * @example https://github.com/apps/super-ci + */ + html_url: string; + /** + * Format: date-time + * @example 2017-07-08T16:18:44-04:00 + */ + created_at: string; + /** + * Format: date-time + * @example 2017-07-08T16:18:44-04:00 + */ + updated_at: string; + /** + * @description The set of permissions for the GitHub app + * @example { + * "issues": "read", + * "deployments": "write" + * } + */ + permissions: { + issues?: string; + checks?: string; + metadata?: string; + contents?: string; + deployments?: string; + [key: string]: string | undefined; + }; + /** + * @description The list of events for the GitHub app + * @example [ + * "label", + * "deployment" + * ] + */ + events: string[]; + /** + * @description The number of installations associated with the GitHub app + * @example 5 + */ + installations_count?: number; + /** @example "Iv1.25b5d1e65ffc4022" */ + client_id?: string; + /** @example "1d4b2097ac622ba702d19de498f005747a8b21d3" */ + client_secret?: string; + /** @example "6fba8f2fc8a7e8f2cca5577eddd82ca7586b3b6b" */ + webhook_secret?: string | null; + /** @example "-----BEGIN RSA PRIVATE KEY-----\nMIIEogIBAAKCAQEArYxrNYD/iT5CZVpRJu4rBKmmze3PVmT/gCo2ATUvDvZTPTey\nxcGJ3vvrJXazKk06pN05TN29o98jrYz4cengG3YGsXPNEpKsIrEl8NhbnxapEnM9\nJCMRe0P5JcPsfZlX6hmiT7136GRWiGOUba2X9+HKh8QJVLG5rM007TBER9/z9mWm\nrJuNh+m5l320oBQY/Qq3A7wzdEfZw8qm/mIN0FCeoXH1L6B8xXWaAYBwhTEh6SSn\nZHlO1Xu1JWDmAvBCi0RO5aRSKM8q9QEkvvHP4yweAtK3N8+aAbZ7ovaDhyGz8r6r\nzhU1b8Uo0Z2ysf503WqzQgIajr7Fry7/kUwpgQIDAQABAoIBADwJp80Ko1xHPZDy\nfcCKBDfIuPvkmSW6KumbsLMaQv1aGdHDwwTGv3t0ixSay8CGlxMRtRDyZPib6SvQ\n6OH/lpfpbMdW2ErkksgtoIKBVrDilfrcAvrNZu7NxRNbhCSvN8q0s4ICecjbbVQh\nnueSdlA6vGXbW58BHMq68uRbHkP+k+mM9U0mDJ1HMch67wlg5GbayVRt63H7R2+r\nVxcna7B80J/lCEjIYZznawgiTvp3MSanTglqAYi+m1EcSsP14bJIB9vgaxS79kTu\noiSo93leJbBvuGo8QEiUqTwMw4tDksmkLsoqNKQ1q9P7LZ9DGcujtPy4EZsamSJT\ny8OJt0ECgYEA2lxOxJsQk2kI325JgKFjo92mQeUObIvPfSNWUIZQDTjniOI6Gv63\nGLWVFrZcvQBWjMEQraJA9xjPbblV8PtfO87MiJGLWCHFxmPz2dzoedN+2Coxom8m\nV95CLz8QUShuao6u/RYcvUaZEoYs5bHcTmy5sBK80JyEmafJPtCQVxMCgYEAy3ar\nZr3yv4xRPEPMat4rseswmuMooSaK3SKub19WFI5IAtB/e7qR1Rj9JhOGcZz+OQrl\nT78O2OFYlgOIkJPvRMrPpK5V9lslc7tz1FSh3BZMRGq5jSyD7ETSOQ0c8T2O/s7v\nbeEPbVbDe4mwvM24XByH0GnWveVxaDl51ABD65sCgYB3ZAspUkOA5egVCh8kNpnd\nSd6SnuQBE3ySRlT2WEnCwP9Ph6oPgn+oAfiPX4xbRqkL8q/k0BdHQ4h+zNwhk7+h\nWtPYRAP1Xxnc/F+jGjb+DVaIaKGU18MWPg7f+FI6nampl3Q0KvfxwX0GdNhtio8T\nTj1E+SnFwh56SRQuxSh2gwKBgHKjlIO5NtNSflsUYFM+hyQiPiqnHzddfhSG+/3o\nm5nNaSmczJesUYreH5San7/YEy2UxAugvP7aSY2MxB+iGsiJ9WD2kZzTUlDZJ7RV\nUzWsoqBR+eZfVJ2FUWWvy8TpSG6trh4dFxImNtKejCR1TREpSiTV3Zb1dmahK9GV\nrK9NAoGAbBxRLoC01xfxCTgt5BDiBcFVh4fp5yYKwavJPLzHSpuDOrrI9jDn1oKN\nonq5sDU1i391zfQvdrbX4Ova48BN+B7p63FocP/MK5tyyBoT8zQEk2+vWDOw7H/Z\nu5dTCPxTIsoIwUw1I+7yIxqJzLPFgR2gVBwY1ra/8iAqCj+zeBw=\n-----END RSA PRIVATE KEY-----\n" */ + pem?: string; + } | null; + /** + * author_association + * @description How the author is associated with the repository. 
+ * @example OWNER + * @enum {string} + */ + "author-association": + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** Reaction Rollup */ + "reaction-rollup": { + /** Format: uri */ + url: string; + total_count: number; + "+1": number; + "-1": number; + laugh: number; + confused: number; + heart: number; + hooray: number; + eyes: number; + rocket: number; + }; + /** + * Issue + * @description Issues are a great way to keep track of tasks, enhancements, and bugs for your projects. + */ + issue: { + /** Format: int64 */ + id: number; + node_id: string; + /** + * Format: uri + * @description URL for the issue + * @example https://api.github.com/repositories/42/issues/1 + */ + url: string; + /** Format: uri */ + repository_url: string; + labels_url: string; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** + * @description Number uniquely identifying the issue within its repository + * @example 42 + */ + number: number; + /** + * @description State of the issue; either 'open' or 'closed' + * @example open + */ + state: string; + /** + * @description The reason for the current state + * @example not_planned + * @enum {string|null} + */ + state_reason?: "completed" | "reopened" | "not_planned" | null; + /** + * @description Title of the issue + * @example Widget creation fails in Safari on OS X 10.8 + */ + title: string; + /** + * @description Contents of the issue + * @example It looks like the new widget form is broken on Safari. When I try and create the widget, Safari crashes. This is reproducible on 10.8, but not 10.9. Maybe a browser bug? + */ + body?: string | null; + user: components["schemas"]["nullable-simple-user"]; + /** + * @description Labels to associate with this issue; pass one or more label names to replace the set of labels on this issue; send an empty array to clear all labels from the issue; note that the labels are silently dropped for users without push access to the repository + * @example [ + * "bug", + * "registration" + * ] + */ + labels: OneOf< + [ + string, + { + /** Format: int64 */ + id?: number; + node_id?: string; + /** Format: uri */ + url?: string; + name?: string; + description?: string | null; + color?: string | null; + default?: boolean; + }, + ] + >[]; + assignee: components["schemas"]["nullable-simple-user"]; + assignees?: components["schemas"]["simple-user"][] | null; + milestone: components["schemas"]["nullable-milestone"]; + locked: boolean; + active_lock_reason?: string | null; + comments: number; + pull_request?: { + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + diff_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + patch_url: string | null; + /** Format: uri */ + url: string | null; + }; + /** Format: date-time */ + closed_at: string | null; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + draft?: boolean; + closed_by?: components["schemas"]["nullable-simple-user"]; + body_html?: string; + body_text?: string; + /** Format: uri */ + timeline_url?: string; + repository?: components["schemas"]["repository"]; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + author_association: components["schemas"]["author-association"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Issue Comment + * @description Comments 
provide a way for people to collaborate on an issue. + */ + "issue-comment": { + /** + * Format: int64 + * @description Unique identifier of the issue comment + * @example 42 + */ + id: number; + node_id: string; + /** + * Format: uri + * @description URL for the issue comment + * @example https://api.github.com/repositories/42/issues/comments/1 + */ + url: string; + /** + * @description Contents of the issue comment + * @example What version of Safari were you using when you observed this bug? + */ + body?: string; + body_text?: string; + body_html?: string; + /** Format: uri */ + html_url: string; + user: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + updated_at: string; + /** Format: uri */ + issue_url: string; + author_association: components["schemas"]["author-association"]; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Event + * @description Event + */ + event: { + id: string; + type: string | null; + actor: components["schemas"]["actor"]; + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + org?: components["schemas"]["actor"]; + payload: { + action?: string; + issue?: components["schemas"]["issue"]; + comment?: components["schemas"]["issue-comment"]; + pages?: { + page_name?: string; + title?: string; + summary?: string | null; + action?: string; + sha?: string; + html_url?: string; + }[]; + }; + public: boolean; + /** Format: date-time */ + created_at: string | null; + }; + /** + * Link With Type + * @description Hypermedia Link with Type + */ + "link-with-type": { + href: string; + type: string; + }; + /** + * Feed + * @description Feed + */ + feed: { + /** @example https://github.com/timeline */ + timeline_url: string; + /** @example https://github.com/{user} */ + user_url: string; + /** @example https://github.com/octocat */ + current_user_public_url?: string; + /** @example https://github.com/octocat.private?token=abc123 */ + current_user_url?: string; + /** @example https://github.com/octocat.private.actor?token=abc123 */ + current_user_actor_url?: string; + /** @example https://github.com/octocat-org */ + current_user_organization_url?: string; + /** + * @example [ + * "https://github.com/organizations/github/octocat.private.atom?token=abc123" + * ] + */ + current_user_organization_urls?: string[]; + /** @example https://github.com/security-advisories */ + security_advisories_url?: string; + /** + * @description A feed of discussions for a given repository. + * @example https://github.com/{user}/{repo}/discussions + */ + repository_discussions_url?: string; + /** + * @description A feed of discussions for a given repository and category. 
+ * @example https://github.com/{user}/{repo}/discussions/categories/{category} + */ + repository_discussions_category_url?: string; + _links: { + timeline: components["schemas"]["link-with-type"]; + user: components["schemas"]["link-with-type"]; + security_advisories?: components["schemas"]["link-with-type"]; + current_user?: components["schemas"]["link-with-type"]; + current_user_public?: components["schemas"]["link-with-type"]; + current_user_actor?: components["schemas"]["link-with-type"]; + current_user_organization?: components["schemas"]["link-with-type"]; + current_user_organizations?: components["schemas"]["link-with-type"][]; + repository_discussions?: components["schemas"]["link-with-type"]; + repository_discussions_category?: components["schemas"]["link-with-type"]; + }; + }; + /** + * Base Gist + * @description Base Gist + */ + "base-gist": { + /** Format: uri */ + url: string; + /** Format: uri */ + forks_url: string; + /** Format: uri */ + commits_url: string; + id: string; + node_id: string; + /** Format: uri */ + git_pull_url: string; + /** Format: uri */ + git_push_url: string; + /** Format: uri */ + html_url: string; + files: { + [key: string]: + | { + filename?: string; + type?: string; + language?: string; + raw_url?: string; + size?: number; + } + | undefined; + }; + public: boolean; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + description: string | null; + comments: number; + user: components["schemas"]["nullable-simple-user"]; + /** Format: uri */ + comments_url: string; + owner?: components["schemas"]["simple-user"]; + truncated?: boolean; + forks?: unknown[]; + history?: unknown[]; + }; + /** + * Public User + * @description Public User + */ + "public-user": { + login: string; + id: number; + node_id: string; + /** Format: uri */ + avatar_url: string; + gravatar_id: string | null; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + repos_url: string; + events_url: string; + /** Format: uri */ + received_events_url: string; + type: string; + site_admin: boolean; + name: string | null; + company: string | null; + blog: string | null; + location: string | null; + /** Format: email */ + email: string | null; + hireable: boolean | null; + bio: string | null; + twitter_username?: string | null; + public_repos: number; + public_gists: number; + followers: number; + following: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + plan?: { + collaborators: number; + name: string; + space: number; + private_repos: number; + }; + /** Format: date-time */ + suspended_at?: string | null; + /** @example 1 */ + private_gists?: number; + /** @example 2 */ + total_private_repos?: number; + /** @example 2 */ + owned_private_repos?: number; + /** @example 1 */ + disk_usage?: number; + /** @example 3 */ + collaborators?: number; + }; + /** + * Gist History + * @description Gist History + */ + "gist-history": { + user?: components["schemas"]["nullable-simple-user"]; + version?: string; + /** Format: date-time */ + committed_at?: string; + change_status?: { + total?: number; + additions?: number; + deletions?: number; + }; + /** Format: uri */ + url?: string; + }; + /** + * Gist Simple + * @description Gist Simple + */ + 
"gist-simple": { + /** @deprecated */ + forks?: + | { + id?: string; + /** Format: uri */ + url?: string; + user?: components["schemas"]["public-user"]; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + }[] + | null; + /** @deprecated */ + history?: components["schemas"]["gist-history"][] | null; + /** + * Gist + * @description Gist + */ + fork_of?: { + /** Format: uri */ + url: string; + /** Format: uri */ + forks_url: string; + /** Format: uri */ + commits_url: string; + id: string; + node_id: string; + /** Format: uri */ + git_pull_url: string; + /** Format: uri */ + git_push_url: string; + /** Format: uri */ + html_url: string; + files: { + [key: string]: + | { + filename?: string; + type?: string; + language?: string; + raw_url?: string; + size?: number; + } + | undefined; + }; + public: boolean; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + description: string | null; + comments: number; + user: components["schemas"]["nullable-simple-user"]; + /** Format: uri */ + comments_url: string; + owner?: components["schemas"]["nullable-simple-user"]; + truncated?: boolean; + forks?: unknown[]; + history?: unknown[]; + } | null; + url?: string; + forks_url?: string; + commits_url?: string; + id?: string; + node_id?: string; + git_pull_url?: string; + git_push_url?: string; + html_url?: string; + files?: { + [key: string]: + | ({ + filename?: string; + type?: string; + language?: string; + raw_url?: string; + size?: number; + truncated?: boolean; + content?: string; + } | null) + | undefined; + }; + public?: boolean; + created_at?: string; + updated_at?: string; + description?: string | null; + comments?: number; + user?: string | null; + comments_url?: string; + owner?: components["schemas"]["simple-user"]; + truncated?: boolean; + }; + /** + * Gist Comment + * @description A comment made to a gist. + */ + "gist-comment": { + /** @example 1 */ + id: number; + /** @example MDExOkdpc3RDb21tZW50MQ== */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/gists/a6db0bec360bb87e9418/comments/1 + */ + url: string; + /** + * @description The comment text. + * @example Body of the attachment + */ + body: string; + user: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2011-04-18T23:23:56Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-18T23:23:56Z + */ + updated_at: string; + author_association: components["schemas"]["author-association"]; + }; + /** + * Gist Commit + * @description Gist Commit + */ + "gist-commit": { + /** + * Format: uri + * @example https://api.github.com/gists/aa5a315d61ae9438b18d/57a7f021a713b1c5a6a199b54cc514735d2d462f + */ + url: string; + /** @example 57a7f021a713b1c5a6a199b54cc514735d2d462f */ + version: string; + user: components["schemas"]["nullable-simple-user"]; + change_status: { + total?: number; + additions?: number; + deletions?: number; + }; + /** + * Format: date-time + * @example 2010-04-14T02:15:15Z + */ + committed_at: string; + }; + /** + * Gitignore Template + * @description Gitignore Template + */ + "gitignore-template": { + /** @example C */ + name: string; + /** + * @example # Object files + * *.o + * + * # Libraries + * *.lib + * *.a + * + * # Shared objects (inc. 
Windows DLLs) + * *.dll + * *.so + * *.so.* + * *.dylib + * + * # Executables + * *.exe + * *.out + * *.app + */ + source: string; + }; + /** + * License Simple + * @description License Simple + */ + "license-simple": { + /** @example mit */ + key: string; + /** @example MIT License */ + name: string; + /** + * Format: uri + * @example https://api.github.com/licenses/mit + */ + url: string | null; + /** @example MIT */ + spdx_id: string | null; + /** @example MDc6TGljZW5zZW1pdA== */ + node_id: string; + /** Format: uri */ + html_url?: string; + }; + /** + * License + * @description License + */ + license: { + /** @example mit */ + key: string; + /** @example MIT License */ + name: string; + /** @example MIT */ + spdx_id: string | null; + /** + * Format: uri + * @example https://api.github.com/licenses/mit + */ + url: string | null; + /** @example MDc6TGljZW5zZW1pdA== */ + node_id: string; + /** + * Format: uri + * @example http://choosealicense.com/licenses/mit/ + */ + html_url: string; + /** @example A permissive license that is short and to the point. It lets people do anything with your code with proper attribution and without warranty. */ + description: string; + /** @example Create a text file (typically named LICENSE or LICENSE.txt) in the root of your source code and copy the text of the license into the file. Replace [year] with the current year and [fullname] with the name (or names) of the copyright holders. */ + implementation: string; + /** + * @example [ + * "commercial-use", + * "modifications", + * "distribution", + * "sublicense", + * "private-use" + * ] + */ + permissions: string[]; + /** + * @example [ + * "include-copyright" + * ] + */ + conditions: string[]; + /** + * @example [ + * "no-liability" + * ] + */ + limitations: string[]; + /** + * @example + * + * The MIT License (MIT) + * + * Copyright (c) [year] [fullname] + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + body: string; + /** @example true */ + featured: boolean; + }; + /** + * Marketplace Listing Plan + * @description Marketplace Listing Plan + */ + "marketplace-listing-plan": { + /** + * Format: uri + * @example https://api.github.com/marketplace_listing/plans/1313 + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/marketplace_listing/plans/1313/accounts + */ + accounts_url: string; + /** @example 1313 */ + id: number; + /** @example 3 */ + number: number; + /** @example Pro */ + name: string; + /** @example A professional-grade CI solution */ + description: string; + /** @example 1099 */ + monthly_price_in_cents: number; + /** @example 11870 */ + yearly_price_in_cents: number; + /** + * @example FLAT_RATE + * @enum {string} + */ + price_model: "FREE" | "FLAT_RATE" | "PER_UNIT"; + /** @example true */ + has_free_trial: boolean; + unit_name: string | null; + /** @example published */ + state: string; + /** + * @example [ + * "Up to 25 private repositories", + * "11 concurrent builds" + * ] + */ + bullets: string[]; + }; + /** + * Marketplace Purchase + * @description Marketplace Purchase + */ + "marketplace-purchase": { + url: string; + type: string; + id: number; + login: string; + organization_billing_email?: string; + email?: string | null; + marketplace_pending_change?: { + is_installed?: boolean; + effective_date?: string; + unit_count?: number | null; + id?: number; + plan?: components["schemas"]["marketplace-listing-plan"]; + } | null; + marketplace_purchase: { + billing_cycle?: string; + next_billing_date?: string | null; + is_installed?: boolean; + unit_count?: number | null; + on_free_trial?: boolean; + free_trial_ends_on?: string | null; + updated_at?: string; + plan?: components["schemas"]["marketplace-listing-plan"]; + }; + }; + /** + * Api Overview + * @description Api Overview + */ + "api-overview": { + /** @example true */ + verifiable_password_authentication: boolean; + ssh_key_fingerprints?: { + SHA256_RSA?: string; + SHA256_DSA?: string; + SHA256_ECDSA?: string; + SHA256_ED25519?: string; + }; + /** + * @example [ + * "ssh-ed25519 ABCDEFGHIJKLMNOPQRSTUVWXYZ" + * ] + */ + ssh_keys?: string[]; + /** + * @example [ + * "192.0.2.1" + * ] + */ + hooks?: string[]; + /** + * @example [ + * "192.0.2.1" + * ] + */ + web?: string[]; + /** + * @example [ + * "192.0.2.1" + * ] + */ + api?: string[]; + /** + * @example [ + * "192.0.2.1" + * ] + */ + git?: string[]; + /** + * @example [ + * "192.0.2.1" + * ] + */ + packages?: string[]; + /** + * @example [ + * "192.0.2.1" + * ] + */ + pages?: string[]; + /** + * @example [ + * "192.0.2.1" + * ] + */ + importer?: string[]; + /** + * @example [ + * "192.0.2.1" + * ] + */ + actions?: string[]; + /** + * @example [ + * "192.0.2.1" + * ] + */ + dependabot?: string[]; + domains?: { + website?: string[]; + codespaces?: string[]; + copilot?: string[]; + packages?: string[]; + }; + }; + "security-and-analysis": { + advanced_security?: { + /** @enum {string} */ + status?: "enabled" | "disabled"; + }; + secret_scanning?: { + /** @enum {string} */ + status?: "enabled" | "disabled"; + }; + secret_scanning_push_protection?: { + /** @enum {string} */ + status?: "enabled" | "disabled"; + }; + } | null; + /** + * Minimal Repository + * @description Minimal Repository + */ + "minimal-repository": { + /** @example 1296269 */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** @example Hello-World */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + owner: 
components["schemas"]["simple-user"]; + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! */ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + git_url?: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + ssh_url?: string; + /** + * 
Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + clone_url?: string; + mirror_url?: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + svn_url?: string; + homepage?: string | null; + language?: string | null; + forks_count?: number; + stargazers_count?: number; + watchers_count?: number; + /** @description The size of the repository. Size is calculated hourly. When a repository is initially created, the size is 0. */ + size?: number; + default_branch?: string; + open_issues_count?: number; + is_template?: boolean; + topics?: string[]; + has_issues?: boolean; + has_projects?: boolean; + has_wiki?: boolean; + has_pages?: boolean; + has_downloads?: boolean; + has_discussions?: boolean; + archived?: boolean; + disabled?: boolean; + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at?: string | null; + permissions?: { + admin?: boolean; + maintain?: boolean; + push?: boolean; + triage?: boolean; + pull?: boolean; + }; + /** @example admin */ + role_name?: string; + temp_clone_token?: string; + delete_branch_on_merge?: boolean; + subscribers_count?: number; + network_count?: number; + code_of_conduct?: components["schemas"]["code-of-conduct"]; + license?: { + key?: string; + name?: string; + spdx_id?: string; + url?: string; + node_id?: string; + } | null; + /** @example 0 */ + forks?: number; + /** @example 0 */ + open_issues?: number; + /** @example 0 */ + watchers?: number; + allow_forking?: boolean; + /** @example false */ + web_commit_signoff_required?: boolean; + security_and_analysis?: components["schemas"]["security-and-analysis"]; + }; + /** + * Thread + * @description Thread + */ + thread: { + id: string; + repository: components["schemas"]["minimal-repository"]; + subject: { + title: string; + url: string; + latest_comment_url: string; + type: string; + }; + reason: string; + unread: boolean; + updated_at: string; + last_read_at: string | null; + url: string; + /** @example https://api.github.com/notifications/threads/2/subscription */ + subscription_url: string; + }; + /** + * Thread Subscription + * @description Thread Subscription + */ + "thread-subscription": { + /** @example true */ + subscribed: boolean; + ignored: boolean; + reason: string | null; + /** + * Format: date-time + * @example 2012-10-06T21:34:12Z + */ + created_at: string | null; + /** + * Format: uri + * @example https://api.github.com/notifications/threads/1/subscription + */ + url: string; + /** + * Format: uri + * @example 
https://api.github.com/notifications/threads/1 + */ + thread_url?: string; + /** + * Format: uri + * @example https://api.github.com/repos/1 + */ + repository_url?: string; + }; + /** + * Organization Simple + * @description A GitHub organization. + */ + "organization-simple": { + /** @example github */ + login: string; + /** @example 1 */ + id: number; + /** @example MDEyOk9yZ2FuaXphdGlvbjE= */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/repos + */ + repos_url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/events + */ + events_url: string; + /** @example https://api.github.com/orgs/github/hooks */ + hooks_url: string; + /** @example https://api.github.com/orgs/github/issues */ + issues_url: string; + /** @example https://api.github.com/orgs/github/members{/member} */ + members_url: string; + /** @example https://api.github.com/orgs/github/public_members{/member} */ + public_members_url: string; + /** @example https://github.com/images/error/octocat_happy.gif */ + avatar_url: string; + /** @example A great organization */ + description: string | null; + }; + /** + * Simple Organization Programmatic Access Grant Request + * @description Minimal representation of an organization programmatic access grant request for enumerations + */ + "organization-programmatic-access-grant-request": { + /** @description Unique identifier of the request for access via fine-grained personal access token. The `pat_request_id` used to review PAT requests. */ + id: number; + /** @description Reason for requesting access. */ + reason: string | null; + owner: components["schemas"]["simple-user"]; + /** + * @description Type of repository selection requested. + * @enum {string} + */ + repository_selection: "none" | "all" | "subset"; + /** @description URL to the list of repositories requested to be accessed via fine-grained personal access token. Should only be followed when `repository_selection` is `subset`. */ + repositories_url: string; + /** @description Permissions requested, categorized by type of permission. */ + permissions: { + organization?: { + [key: string]: string | undefined; + }; + repository?: { + [key: string]: string | undefined; + }; + other?: { + [key: string]: string | undefined; + }; + }; + /** @description Date and time when the request for access was created. */ + created_at: string; + /** @description Whether the associated fine-grained personal access token has expired. */ + token_expired: boolean; + /** @description Date and time when the associated fine-grained personal access token expires. */ + token_expires_at: string | null; + /** @description Date and time when the associated fine-grained personal access token was last used for authentication. */ + token_last_used_at: string | null; + }; + /** + * Organization Programmatic Access Grant + * @description Minimal representation of an organization programmatic access grant for enumerations + */ + "organization-programmatic-access-grant": { + /** @description Unique identifier of the fine-grained personal access token. The `pat_id` used to get details about an approved fine-grained personal access token. */ + id: number; + owner: components["schemas"]["simple-user"]; + /** + * @description Type of repository selection requested. 
+ * @enum {string} + */ + repository_selection: "none" | "all" | "subset"; + /** @description URL to the list of repositories the fine-grained personal access token can access. Only follow when `repository_selection` is `subset`. */ + repositories_url: string; + /** @description Permissions requested, categorized by type of permission. */ + permissions: { + organization?: { + [key: string]: string | undefined; + }; + repository?: { + [key: string]: string | undefined; + }; + other?: { + [key: string]: string | undefined; + }; + }; + /** @description Date and time when the fine-grained personal access token was approved to access the organization. */ + access_granted_at: string; + /** @description Whether the associated fine-grained personal access token has expired. */ + token_expired: boolean; + /** @description Date and time when the associated fine-grained personal access token expires. */ + token_expires_at: string | null; + /** @description Date and time when the associated fine-grained personal access token was last used for authentication. */ + token_last_used_at: string | null; + }; + /** + * Organization Full + * @description Organization Full + */ + "organization-full": { + /** @example github */ + login: string; + /** @example 1 */ + id: number; + /** @example MDEyOk9yZ2FuaXphdGlvbjE= */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/repos + */ + repos_url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/events + */ + events_url: string; + /** @example https://api.github.com/orgs/github/hooks */ + hooks_url: string; + /** @example https://api.github.com/orgs/github/issues */ + issues_url: string; + /** @example https://api.github.com/orgs/github/members{/member} */ + members_url: string; + /** @example https://api.github.com/orgs/github/public_members{/member} */ + public_members_url: string; + /** @example https://github.com/images/error/octocat_happy.gif */ + avatar_url: string; + /** @example A great organization */ + description: string | null; + /** @example github */ + name?: string; + /** @example GitHub */ + company?: string; + /** + * Format: uri + * @example https://github.com/blog + */ + blog?: string; + /** @example San Francisco */ + location?: string; + /** + * Format: email + * @example octocat@github.com + */ + email?: string; + /** @example github */ + twitter_username?: string | null; + /** @example true */ + is_verified?: boolean; + /** @example true */ + has_organization_projects: boolean; + /** @example true */ + has_repository_projects: boolean; + /** @example 2 */ + public_repos: number; + /** @example 1 */ + public_gists: number; + /** @example 20 */ + followers: number; + /** @example 0 */ + following: number; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: date-time + * @example 2008-01-14T04:33:35Z + */ + created_at: string; + /** @example Organization */ + type: string; + /** @example 100 */ + total_private_repos?: number; + /** @example 100 */ + owned_private_repos?: number; + /** @example 81 */ + private_gists?: number | null; + /** @example 10000 */ + disk_usage?: number | null; + /** @example 8 */ + collaborators?: number | null; + /** + * Format: email + * @example org@example.com + */ + billing_email?: string | null; + plan?: { + name: string; + space: number; + private_repos: number; + filled_seats?: number; + seats?: number; + 
}; + default_repository_permission?: string | null; + /** @example true */ + members_can_create_repositories?: boolean | null; + /** @example true */ + two_factor_requirement_enabled?: boolean | null; + /** @example all */ + members_allowed_repository_creation_type?: string; + /** @example true */ + members_can_create_public_repositories?: boolean; + /** @example true */ + members_can_create_private_repositories?: boolean; + /** @example true */ + members_can_create_internal_repositories?: boolean; + /** @example true */ + members_can_create_pages?: boolean; + /** @example true */ + members_can_create_public_pages?: boolean; + /** @example true */ + members_can_create_private_pages?: boolean; + /** @example false */ + members_can_fork_private_repositories?: boolean | null; + /** @example false */ + web_commit_signoff_required?: boolean; + /** Format: date-time */ + updated_at: string; + /** + * @description Whether GitHub Advanced Security is enabled for new repositories and repositories transferred to this organization. + * + * This field is only visible to organization owners or members of a team with the security manager role. + * @example false + */ + advanced_security_enabled_for_new_repositories?: boolean; + /** + * @description Whether GitHub Advanced Security is automatically enabled for new repositories and repositories transferred to + * this organization. + * + * This field is only visible to organization owners or members of a team with the security manager role. + * @example false + */ + dependabot_alerts_enabled_for_new_repositories?: boolean; + /** + * @description Whether dependabot security updates are automatically enabled for new repositories and repositories transferred + * to this organization. + * + * This field is only visible to organization owners or members of a team with the security manager role. + * @example false + */ + dependabot_security_updates_enabled_for_new_repositories?: boolean; + /** + * @description Whether dependency graph is automatically enabled for new repositories and repositories transferred to this + * organization. + * + * This field is only visible to organization owners or members of a team with the security manager role. + * @example false + */ + dependency_graph_enabled_for_new_repositories?: boolean; + /** + * @description Whether secret scanning is automatically enabled for new repositories and repositories transferred to this + * organization. + * + * This field is only visible to organization owners or members of a team with the security manager role. + * @example false + */ + secret_scanning_enabled_for_new_repositories?: boolean; + /** + * @description Whether secret scanning push protection is automatically enabled for new repositories and repositories + * transferred to this organization. + * + * This field is only visible to organization owners or members of a team with the security manager role. + * @example false + */ + secret_scanning_push_protection_enabled_for_new_repositories?: boolean; + /** + * @description Whether a custom link is shown to contributors who are blocked from pushing a secret by push protection. + * @example false + */ + secret_scanning_push_protection_custom_link_enabled?: boolean; + /** + * @description An optional URL string to display to contributors who are blocked from pushing a secret. 
+ * @example https://github.com/test-org/test-repo/blob/main/README.md + */ + secret_scanning_push_protection_custom_link?: string | null; + }; + "actions-cache-usage-org-enterprise": { + /** @description The count of active caches across all repositories of an enterprise or an organization. */ + total_active_caches_count: number; + /** @description The total size in bytes of all active cache items across all repositories of an enterprise or an organization. */ + total_active_caches_size_in_bytes: number; + }; + /** + * Actions Cache Usage by repository + * @description GitHub Actions Cache Usage by repository. + */ + "actions-cache-usage-by-repository": { + /** + * @description The repository owner and name for the cache usage being shown. + * @example octo-org/Hello-World + */ + full_name: string; + /** + * @description The sum of the size in bytes of all the active cache items in the repository. + * @example 2322142 + */ + active_caches_size_in_bytes: number; + /** + * @description The number of active caches in the repository. + * @example 3 + */ + active_caches_count: number; + }; + /** + * Actions OIDC Subject customization + * @description Actions OIDC Subject customization + */ + "oidc-custom-sub": { + /** @description Array of unique strings. Each claim key can only contain alphanumeric characters and underscores. */ + include_claim_keys: string[]; + }; + /** + * Empty Object + * @description An object without any properties. + */ + "empty-object": Record<string, never>; + /** + * @description The policy that controls the repositories in the organization that are allowed to run GitHub Actions. + * @enum {string} + */ + "enabled-repositories": "all" | "none" | "selected"; + /** + * @description The permissions policy that controls the actions and reusable workflows that are allowed to run. + * @enum {string} + */ + "allowed-actions": "all" | "local_only" | "selected"; + /** @description The API URL to use to get or set the actions and reusable workflows that are allowed to run, when `allowed_actions` is set to `selected`. */ + "selected-actions-url": string; + "actions-organization-permissions": { + enabled_repositories: components["schemas"]["enabled-repositories"]; + /** @description The API URL to use to get or set the selected repositories that are allowed to run GitHub Actions, when `enabled_repositories` is set to `selected`. */ + selected_repositories_url?: string; + allowed_actions?: components["schemas"]["allowed-actions"]; + selected_actions_url?: components["schemas"]["selected-actions-url"]; + }; + "selected-actions": { + /** @description Whether GitHub-owned actions are allowed. For example, this includes the actions in the `actions` organization. */ + github_owned_allowed?: boolean; + /** @description Whether actions from GitHub Marketplace verified creators are allowed. Set to `true` to allow all actions by GitHub Marketplace verified creators. */ + verified_allowed?: boolean; + /** + * @description Specifies a list of string-matching patterns to allow specific action(s) and reusable workflow(s). Wildcards, tags, and SHAs are allowed. For example, `monalisa/octocat@*`, `monalisa/octocat@v2`, `monalisa/*`. + * + * **Note**: The `patterns_allowed` setting only applies to public repositories. + */ + patterns_allowed?: string[]; + }; + /** + * @description The default workflow permissions granted to the GITHUB_TOKEN when running workflows.
+ * @enum {string} + */ + "actions-default-workflow-permissions": "read" | "write"; + /** @description Whether GitHub Actions can approve pull requests. Enabling this can be a security risk. */ + "actions-can-approve-pull-request-reviews": boolean; + "actions-get-default-workflow-permissions": { + default_workflow_permissions: components["schemas"]["actions-default-workflow-permissions"]; + can_approve_pull_request_reviews: components["schemas"]["actions-can-approve-pull-request-reviews"]; + }; + "actions-set-default-workflow-permissions": { + default_workflow_permissions?: components["schemas"]["actions-default-workflow-permissions"]; + can_approve_pull_request_reviews?: components["schemas"]["actions-can-approve-pull-request-reviews"]; + }; + "required-workflow": { + /** @description Unique identifier for a required workflow */ + id: number; + /** @description Name present in the workflow file */ + name: string; + /** @description Path of the workflow file */ + path: string; + /** + * @description Scope of the required workflow + * @enum {string} + */ + scope: "all" | "selected"; + /** @description Ref at which the workflow file will be selected */ + ref: string; + /** + * @description State of the required workflow + * @enum {string} + */ + state: "active" | "deleted"; + /** + * Format: uri + * @example https://api.github.com/organizations/org/actions/required_workflows/1/repositories + */ + selected_repositories_url?: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + repository: components["schemas"]["minimal-repository"]; + }; + /** + * Self hosted runner label + * @description A label for a self hosted runner + */ + "runner-label": { + /** @description Unique identifier of the label. */ + id?: number; + /** @description Name of the label. */ + name: string; + /** + * @description The type of label. Read-only labels are applied automatically when the runner is configured. + * @enum {string} + */ + type?: "read-only" | "custom"; + }; + /** + * Self hosted runners + * @description A self hosted runner + */ + runner: { + /** + * @description The id of the runner. + * @example 5 + */ + id: number; + /** + * @description The id of the runner group. + * @example 1 + */ + runner_group_id?: number; + /** + * @description The name of the runner. + * @example iMac + */ + name: string; + /** + * @description The Operating System of the runner. + * @example macos + */ + os: string; + /** + * @description The status of the runner. + * @example online + */ + status: string; + busy: boolean; + labels: components["schemas"]["runner-label"][]; + }; + /** + * Runner Application + * @description Runner Application + */ + "runner-application": { + os: string; + architecture: string; + download_url: string; + filename: string; + /** @description A short lived bearer token used to download the runner, if needed. 
*/ + temp_download_token?: string; + sha256_checksum?: string; + }; + /** + * Authentication Token + * @description Authentication Token + */ + "authentication-token": { + /** + * @description The token used for authentication + * @example v1.1f699f1069f60xxx + */ + token: string; + /** + * Format: date-time + * @description The time this token expires + * @example 2016-07-11T22:14:10Z + */ + expires_at: string; + /** + * @example { + * "issues": "read", + * "deployments": "write" + * } + */ + permissions?: Record<string, never>; + /** @description The repositories this token has access to */ + repositories?: components["schemas"]["repository"][]; + /** @example config.yaml */ + single_file?: string | null; + /** + * @description Describe whether all repositories have been selected or there's a selection involved + * @enum {string} + */ + repository_selection?: "all" | "selected"; + }; + /** + * Actions Secret for an Organization + * @description Secrets for GitHub Actions for an organization. + */ + "organization-actions-secret": { + /** + * @description The name of the secret. + * @example SECRET_TOKEN + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** + * @description Visibility of a secret + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** + * Format: uri + * @example https://api.github.com/organizations/org/secrets/my_secret/repositories + */ + selected_repositories_url?: string; + }; + /** + * ActionsPublicKey + * @description The public key used for setting Actions Secrets. + */ + "actions-public-key": { + /** + * @description The identifier for the key. + * @example 1234567 + */ + key_id: string; + /** + * @description The Base64 encoded public key. + * @example hBT5WZEj8ZoOv6TYJsfWq7MxTEQopZO5/IT3ZCVQPzs= + */ + key: string; + /** @example 2 */ + id?: number; + /** @example https://api.github.com/user/keys/2 */ + url?: string; + /** @example ssh-rsa AAAAB3NzaC1yc2EAAA */ + title?: string; + /** @example 2011-01-26T19:01:12Z */ + created_at?: string; + }; + /** + * Actions Variable for an Organization + * @description Organization variable for GitHub Actions. + */ + "organization-actions-variable": { + /** + * @description The name of the variable. + * @example USERNAME + */ + name: string; + /** + * @description The value of the variable. + * @example octocat + */ + value: string; + /** + * Format: date-time + * @description The date and time at which the variable was created, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + * @example 2019-01-24T22:45:36.000Z + */ + created_at: string; + /** + * Format: date-time + * @description The date and time at which the variable was last updated, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + * @example 2019-01-24T22:45:36.000Z + */ + updated_at: string; + /** + * @description Visibility of a variable + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** + * Format: uri + * @example https://api.github.com/organizations/org/variables/USERNAME/repositories + */ + selected_repositories_url?: string; + }; + /** @description The name of the tool used to generate the code scanning analysis. */ + "code-scanning-analysis-tool-name": string; + /** @description The GUID of the tool used to generate the code scanning analysis, if provided in the uploaded SARIF data. */ + "code-scanning-analysis-tool-guid": string | null; + /** + * @description State of a code scanning alert.
+ * @enum {string} + */ + "code-scanning-alert-state": "open" | "closed" | "dismissed" | "fixed"; + /** + * @description Severity of a code scanning alert. + * @enum {string} + */ + "code-scanning-alert-severity": + | "critical" + | "high" + | "medium" + | "low" + | "warning" + | "note" + | "error"; + /** + * Format: uri + * @description The REST API URL for fetching the list of instances for an alert. + */ + readonly "alert-instances-url": string; + /** + * @description **Required when the state is dismissed.** The reason for dismissing or closing the alert. + * @enum {string|null} + */ + "code-scanning-alert-dismissed-reason": + | "" + | "false positive" + | "won't fix" + | "used in tests" + | null; + /** @description The dismissal comment associated with the dismissal of the alert. */ + "code-scanning-alert-dismissed-comment": string | null; + "code-scanning-alert-rule": { + /** @description A unique identifier for the rule used to detect the alert. */ + id?: string | null; + /** @description The name of the rule used to detect the alert. */ + name?: string; + /** + * @description The severity of the alert. + * @enum {string|null} + */ + severity?: "none" | "note" | "warning" | "error" | null; + /** + * @description The security severity of the alert. + * @enum {string|null} + */ + security_severity_level?: "low" | "medium" | "high" | "critical" | null; + /** @description A short description of the rule used to detect the alert. */ + description?: string; + /** @description A description of the rule used to detect the alert. */ + full_description?: string; + /** @description A set of tags applicable for the rule. */ + tags?: string[] | null; + /** @description Detailed documentation for the rule as GitHub Flavored Markdown. */ + help?: string | null; + /** @description A link to the documentation for the rule used to detect the alert. */ + help_uri?: string | null; + }; + /** @description The version of the tool used to generate the code scanning analysis. */ + "code-scanning-analysis-tool-version": string | null; + "code-scanning-analysis-tool": { + name?: components["schemas"]["code-scanning-analysis-tool-name"]; + version?: components["schemas"]["code-scanning-analysis-tool-version"]; + guid?: components["schemas"]["code-scanning-analysis-tool-guid"]; + }; + /** + * @description The full Git reference, formatted as `refs/heads/<branch name>`, + * `refs/pull/<number>/merge`, or `refs/pull/<number>/head`. + */ + "code-scanning-ref": string; + /** @description Identifies the configuration under which the analysis was executed. For example, in GitHub Actions this includes the workflow filename and job name. */ + "code-scanning-analysis-analysis-key": string; + /** @description Identifies the variable values associated with the environment in which the analysis that generated this alert instance was performed, such as the language that was analyzed. */ + "code-scanning-alert-environment": string; + /** @description Identifies the configuration under which the analysis was executed. Used to distinguish between multiple analyses for the same tool and commit, but performed on different languages or different parts of the code. */ + "code-scanning-analysis-category": string; + /** @description Describe a region within a file for the alert. */ + "code-scanning-alert-location": { + path?: string; + start_line?: number; + end_line?: number; + start_column?: number; + end_column?: number; + }; + /** + * @description A classification of the file. For example to identify it as generated.
+ * @enum {string|null} + */ + "code-scanning-alert-classification": + | "source" + | "generated" + | "test" + | "library" + | null; + "code-scanning-alert-instance": { + ref?: components["schemas"]["code-scanning-ref"]; + analysis_key?: components["schemas"]["code-scanning-analysis-analysis-key"]; + environment?: components["schemas"]["code-scanning-alert-environment"]; + category?: components["schemas"]["code-scanning-analysis-category"]; + state?: components["schemas"]["code-scanning-alert-state"]; + commit_sha?: string; + message?: { + text?: string; + }; + location?: components["schemas"]["code-scanning-alert-location"]; + html_url?: string; + /** + * @description Classifications that have been applied to the file that triggered the alert. + * For example identifying it as documentation, or a generated file. + */ + classifications?: components["schemas"]["code-scanning-alert-classification"][]; + }; + "code-scanning-organization-alert-items": { + number: components["schemas"]["alert-number"]; + created_at: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["alert-updated-at"]; + url: components["schemas"]["alert-url"]; + html_url: components["schemas"]["alert-html-url"]; + instances_url: components["schemas"]["alert-instances-url"]; + state: components["schemas"]["code-scanning-alert-state"]; + fixed_at?: components["schemas"]["alert-fixed-at"]; + dismissed_by: components["schemas"]["nullable-simple-user"]; + dismissed_at: components["schemas"]["alert-dismissed-at"]; + dismissed_reason: components["schemas"]["code-scanning-alert-dismissed-reason"]; + dismissed_comment?: components["schemas"]["code-scanning-alert-dismissed-comment"]; + rule: components["schemas"]["code-scanning-alert-rule"]; + tool: components["schemas"]["code-scanning-analysis-tool"]; + most_recent_instance: components["schemas"]["code-scanning-alert-instance"]; + repository: components["schemas"]["simple-repository"]; + }; + /** + * Codespace machine + * @description A description of the machine powering a codespace. + */ + "nullable-codespace-machine": { + /** + * @description The name of the machine. + * @example standardLinux + */ + name: string; + /** + * @description The display name of the machine includes cores, memory, and storage. + * @example 4 cores, 8 GB RAM, 64 GB storage + */ + display_name: string; + /** + * @description The operating system of the machine. + * @example linux + */ + operating_system: string; + /** + * @description How much storage is available to the codespace. + * @example 68719476736 + */ + storage_in_bytes: number; + /** + * @description How much memory is available to the codespace. + * @example 8589934592 + */ + memory_in_bytes: number; + /** + * @description How many cores are available to the codespace. + * @example 4 + */ + cpus: number; + /** + * @description Whether a prebuild is currently available when creating a codespace for this machine and repository. If a branch was not specified as a ref, the default branch will be assumed. Value will be "null" if prebuilds are not supported or prebuild availability could not be determined. Value will be "none" if no prebuild is available. Latest values "ready" and "in_progress" indicate the prebuild availability status. + * @example ready + * @enum {string|null} + */ + prebuild_availability: "none" | "ready" | "in_progress" | null; + } | null; + /** + * Codespace + * @description A codespace. 
+ */ + codespace: { + /** @example 1 */ + id: number; + /** + * @description Automatically generated name of this codespace. + * @example monalisa-octocat-hello-world-g4wpq6h95q + */ + name: string; + /** + * @description Display name for this codespace. + * @example bookish space pancake + */ + display_name?: string | null; + /** + * @description UUID identifying this codespace's environment. + * @example 26a7c758-7299-4a73-b978-5a92a7ae98a0 + */ + environment_id: string | null; + owner: components["schemas"]["simple-user"]; + billable_owner: components["schemas"]["simple-user"]; + repository: components["schemas"]["minimal-repository"]; + machine: components["schemas"]["nullable-codespace-machine"]; + /** + * @description Path to devcontainer.json from repo root used to create Codespace. + * @example .devcontainer/example/devcontainer.json + */ + devcontainer_path?: string | null; + /** + * @description Whether the codespace was created from a prebuild. + * @example false + */ + prebuild: boolean | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + updated_at: string; + /** + * Format: date-time + * @description Last known time this codespace was started. + * @example 2011-01-26T19:01:12Z + */ + last_used_at: string; + /** + * @description State of this codespace. + * @example Available + * @enum {string} + */ + state: + | "Unknown" + | "Created" + | "Queued" + | "Provisioning" + | "Available" + | "Awaiting" + | "Unavailable" + | "Deleted" + | "Moved" + | "Shutdown" + | "Archived" + | "Starting" + | "ShuttingDown" + | "Failed" + | "Exporting" + | "Updating" + | "Rebuilding"; + /** + * Format: uri + * @description API URL for this codespace. + */ + url: string; + /** @description Details about the codespace's git repository. */ + git_status: { + /** + * @description The number of commits the local repository is ahead of the remote. + * @example 0 + */ + ahead?: number; + /** + * @description The number of commits the local repository is behind the remote. + * @example 0 + */ + behind?: number; + /** @description Whether the local repository has unpushed changes. */ + has_unpushed_changes?: boolean; + /** @description Whether the local repository has uncommitted changes. */ + has_uncommitted_changes?: boolean; + /** + * @description The current branch (or SHA if in detached HEAD state) of the local repository. + * @example main + */ + ref?: string; + }; + /** + * @description The initially assigned location of a new codespace. + * @example WestUs2 + * @enum {string} + */ + location: "EastUs" | "SouthEastAsia" | "WestEurope" | "WestUs2"; + /** + * @description The number of minutes of inactivity after which this codespace will be automatically stopped. + * @example 60 + */ + idle_timeout_minutes: number | null; + /** + * Format: uri + * @description URL to access this codespace on the web. + */ + web_url: string; + /** + * Format: uri + * @description API URL to access available alternate machine types for this codespace. + */ + machines_url: string; + /** + * Format: uri + * @description API URL to start this codespace. + */ + start_url: string; + /** + * Format: uri + * @description API URL to stop this codespace. + */ + stop_url: string; + /** + * Format: uri + * @description API URL to publish this codespace to a new repository. + */ + publish_url?: string | null; + /** + * Format: uri + * @description API URL for the Pull Request associated with this codespace, if any.
+ */ + pulls_url: string | null; + recent_folders: string[]; + runtime_constraints?: { + /** @description The privacy settings a user can select from when forwarding a port. */ + allowed_port_privacy_settings?: string[] | null; + }; + /** @description Whether or not a codespace has a pending async operation. This would mean that the codespace is temporarily unavailable. The only thing that you can do with a codespace in this state is delete it. */ + pending_operation?: boolean | null; + /** @description Text to show user when codespace is disabled by a pending operation */ + pending_operation_disabled_reason?: string | null; + /** @description Text to show user when codespace idle timeout minutes has been overriden by an organization policy */ + idle_timeout_notice?: string | null; + /** + * @description Duration in minutes after codespace has gone idle in which it will be deleted. Must be integer minutes between 0 and 43200 (30 days). + * @example 60 + */ + retention_period_minutes?: number | null; + /** + * Format: date-time + * @description When a codespace will be auto-deleted based on the "retention_period_minutes" and "last_used_at" + * @example 2011-01-26T20:01:12Z + */ + retention_expires_at?: string | null; + /** + * @description The text to display to a user when a codespace has been stopped for a potentially actionable reason. + * @example you've used 100% of your spending limit for Codespaces + */ + last_known_stop_notice?: string | null; + }; + /** + * Codespaces Secret + * @description Secrets for a GitHub Codespace. + */ + "codespaces-org-secret": { + /** + * @description The name of the secret + * @example SECRET_NAME + */ + name: string; + /** + * Format: date-time + * @description The date and time at which the secret was created, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + */ + created_at: string; + /** + * Format: date-time + * @description The date and time at which the secret was created, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + */ + updated_at: string; + /** + * @description The type of repositories in the organization that the secret is visible to + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** + * Format: uri + * @description The API URL at which the list of repositories this secret is visible to can be retrieved + * @example https://api.github.com/orgs/ORGANIZATION/codespaces/secrets/SECRET_NAME/repositories + */ + selected_repositories_url?: string; + }; + /** + * CodespacesPublicKey + * @description The public key used for setting Codespaces secrets. + */ + "codespaces-public-key": { + /** + * @description The identifier for the key. + * @example 1234567 + */ + key_id: string; + /** + * @description The Base64 encoded public key. + * @example hBT5WZEj8ZoOv6TYJsfWq7MxTEQopZO5/IT3ZCVQPzs= + */ + key: string; + /** @example 2 */ + id?: number; + /** @example https://api.github.com/user/keys/2 */ + url?: string; + /** @example ssh-rsa AAAAB3NzaC1yc2EAAA */ + title?: string; + /** @example 2011-01-26T19:01:12Z */ + created_at?: string; + }; + /** + * Dependabot Secret for an Organization + * @description Secrets for GitHub Dependabot for an organization. + */ + "organization-dependabot-secret": { + /** + * @description The name of the secret. 
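// --- Illustrative usage sketch (editor addition, not part of the generated
// file): the "codespace" schema above can be consumed through the
// `components` interface declared in this file. `describeGitStatus` is a
// hypothetical helper, shown only to make the optional git_status fields
// concrete.
type Codespace = components["schemas"]["codespace"];

function describeGitStatus(codespace: Codespace): string {
  // ahead/behind/ref are all optional in the schema, so default them here.
  const { ahead = 0, behind = 0, ref } = codespace.git_status;
  return `${ref ?? "(detached)"}: ${ahead} ahead / ${behind} behind ${codespace.repository.full_name}`;
}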
+ * @example SECRET_TOKEN + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** + * @description Visibility of a secret + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** + * Format: uri + * @example https://api.github.com/organizations/org/dependabot/secrets/my_secret/repositories + */ + selected_repositories_url?: string; + }; + /** + * DependabotPublicKey + * @description The public key used for setting Dependabot Secrets. + */ + "dependabot-public-key": { + /** + * @description The identifier for the key. + * @example 1234567 + */ + key_id: string; + /** + * @description The Base64 encoded public key. + * @example hBT5WZEj8ZoOv6TYJsfWq7MxTEQopZO5/IT3ZCVQPzs= + */ + key: string; + }; + /** + * Minimal Repository + * @description Minimal Repository + */ + "nullable-minimal-repository": { + /** @example 1296269 */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** @example Hello-World */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + owner: components["schemas"]["simple-user"]; + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! */ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + git_url?: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example 
http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + ssh_url?: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + clone_url?: string; + mirror_url?: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + svn_url?: string; + homepage?: string | null; + language?: string | null; + forks_count?: number; + stargazers_count?: number; + watchers_count?: number; + /** @description The size of the repository. Size is calculated hourly. When a repository is initially created, the size is 0. 
*/ + size?: number; + default_branch?: string; + open_issues_count?: number; + is_template?: boolean; + topics?: string[]; + has_issues?: boolean; + has_projects?: boolean; + has_wiki?: boolean; + has_pages?: boolean; + has_downloads?: boolean; + has_discussions?: boolean; + archived?: boolean; + disabled?: boolean; + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at?: string | null; + permissions?: { + admin?: boolean; + maintain?: boolean; + push?: boolean; + triage?: boolean; + pull?: boolean; + }; + /** @example admin */ + role_name?: string; + temp_clone_token?: string; + delete_branch_on_merge?: boolean; + subscribers_count?: number; + network_count?: number; + code_of_conduct?: components["schemas"]["code-of-conduct"]; + license?: { + key?: string; + name?: string; + spdx_id?: string; + url?: string; + node_id?: string; + } | null; + /** @example 0 */ + forks?: number; + /** @example 0 */ + open_issues?: number; + /** @example 0 */ + watchers?: number; + allow_forking?: boolean; + /** @example false */ + web_commit_signoff_required?: boolean; + security_and_analysis?: components["schemas"]["security-and-analysis"]; + } | null; + /** + * Package + * @description A software package + */ + package: { + /** + * @description Unique identifier of the package. + * @example 1 + */ + id: number; + /** + * @description The name of the package. + * @example super-linter + */ + name: string; + /** + * @example docker + * @enum {string} + */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** @example https://api.github.com/orgs/github/packages/container/super-linter */ + url: string; + /** @example https://github.com/orgs/github/packages/container/package/super-linter */ + html_url: string; + /** + * @description The number of versions of the package. 
+ * @example 1 + */ + version_count: number; + /** + * @example private + * @enum {string} + */ + visibility: "private" | "public"; + owner?: components["schemas"]["nullable-simple-user"]; + repository?: components["schemas"]["nullable-minimal-repository"]; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Organization Invitation + * @description Organization Invitation + */ + "organization-invitation": { + id: number; + login: string | null; + email: string | null; + role: string; + created_at: string; + failed_at?: string | null; + failed_reason?: string | null; + inviter: components["schemas"]["simple-user"]; + team_count: number; + /** @example "MDIyOk9yZ2FuaXphdGlvbkludml0YXRpb24x" */ + node_id: string; + /** @example "https://api.github.com/organizations/16/invitations/1/teams" */ + invitation_teams_url: string; + /** @example "member" */ + invitation_source?: string; + }; + /** + * Org Hook + * @description Org Hook + */ + "org-hook": { + /** @example 1 */ + id: number; + /** + * Format: uri + * @example https://api.github.com/orgs/octocat/hooks/1 + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/octocat/hooks/1/pings + */ + ping_url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/octocat/hooks/1/deliveries + */ + deliveries_url?: string; + /** @example web */ + name: string; + /** + * @example [ + * "push", + * "pull_request" + * ] + */ + events: string[]; + /** @example true */ + active: boolean; + config: { + /** @example "http://example.com/2" */ + url?: string; + /** @example "0" */ + insecure_ssl?: string; + /** @example "form" */ + content_type?: string; + /** @example "********" */ + secret?: string; + }; + /** + * Format: date-time + * @example 2011-09-06T20:39:23Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2011-09-06T17:26:27Z + */ + created_at: string; + type: string; + }; + /** + * @description The type of GitHub user that can comment, open issues, or create pull requests while the interaction limit is in effect. + * @example collaborators_only + * @enum {string} + */ + "interaction-group": + | "existing_users" + | "contributors_only" + | "collaborators_only"; + /** + * Interaction Limits + * @description Interaction limit settings. + */ + "interaction-limit-response": { + limit: components["schemas"]["interaction-group"]; + /** @example repository */ + origin: string; + /** + * Format: date-time + * @example 2018-08-17T04:18:39Z + */ + expires_at: string; + }; + /** + * @description The duration of the interaction restriction. Default: `one_day`. + * @example one_month + * @enum {string} + */ + "interaction-expiry": + | "one_day" + | "three_days" + | "one_week" + | "one_month" + | "six_months"; + /** + * Interaction Restrictions + * @description Limit interactions to a specific type of user for a specified duration + */ + "interaction-limit": { + limit: components["schemas"]["interaction-group"]; + expiry?: components["schemas"]["interaction-expiry"]; + }; + /** + * Team Simple + * @description Groups of organization members that gives permissions on specified repositories. 
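// --- Illustrative usage sketch (editor addition, not part of the generated
// file): a request body matching the "interaction-limit" schema above.
// `limit` must be one of the "interaction-group" values and the optional
// `expiry` one of the "interaction-expiry" values; anything else fails to
// type-check against the generated types.
type InteractionLimit = components["schemas"]["interaction-limit"];

// Restrict interactions to existing collaborators for one week.
const collaboratorsOnlyForAWeek: InteractionLimit = {
  limit: "collaborators_only",
  expiry: "one_week",
};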
+ */ + "nullable-team-simple": { + /** + * @description Unique identifier of the team + * @example 1 + */ + id: number; + /** @example MDQ6VGVhbTE= */ + node_id: string; + /** + * Format: uri + * @description URL for the team + * @example https://api.github.com/organizations/1/team/1 + */ + url: string; + /** @example https://api.github.com/organizations/1/team/1/members{/member} */ + members_url: string; + /** + * @description Name of the team + * @example Justice League + */ + name: string; + /** + * @description Description of the team + * @example A great team. + */ + description: string | null; + /** + * @description Permission that the team will have for its repositories + * @example admin + */ + permission: string; + /** + * @description The level of privacy this team should have + * @example closed + */ + privacy?: string; + /** + * @description The notification setting the team has set + * @example notifications_enabled + */ + notification_setting?: string; + /** + * Format: uri + * @example https://github.com/orgs/rails/teams/core + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/1/repos + */ + repositories_url: string; + /** @example justice-league */ + slug: string; + /** + * @description Distinguished Name (DN) that team maps to within LDAP environment + * @example uid=example,ou=users,dc=github,dc=com + */ + ldap_dn?: string; + } | null; + /** + * Team + * @description Groups of organization members that gives permissions on specified repositories. + */ + team: { + id: number; + node_id: string; + name: string; + slug: string; + description: string | null; + privacy?: string; + notification_setting?: string; + permission: string; + permissions?: { + pull: boolean; + triage: boolean; + push: boolean; + maintain: boolean; + admin: boolean; + }; + /** Format: uri */ + url: string; + /** + * Format: uri + * @example https://github.com/orgs/rails/teams/core + */ + html_url: string; + members_url: string; + /** Format: uri */ + repositories_url: string; + parent: components["schemas"]["nullable-team-simple"]; + }; + /** + * Org Membership + * @description Org Membership + */ + "org-membership": { + /** + * Format: uri + * @example https://api.github.com/orgs/octocat/memberships/defunkt + */ + url: string; + /** + * @description The state of the member in the organization. The `pending` state indicates the user has not yet accepted an invitation. + * @example active + * @enum {string} + */ + state: "active" | "pending"; + /** + * @description The user's membership type in the organization. + * @example admin + * @enum {string} + */ + role: "admin" | "member" | "billing_manager"; + /** + * Format: uri + * @example https://api.github.com/orgs/octocat + */ + organization_url: string; + organization: components["schemas"]["organization-simple"]; + user: components["schemas"]["nullable-simple-user"]; + permissions?: { + can_create_repository: boolean; + }; + }; + /** + * Migration + * @description A migration. + */ + migration: { + /** @example 79 */ + id: number; + owner: components["schemas"]["nullable-simple-user"]; + /** @example 0b989ba4-242f-11e5-81e1-c7b6966d2516 */ + guid: string; + /** @example pending */ + state: string; + /** @example true */ + lock_repositories: boolean; + exclude_metadata: boolean; + exclude_git_data: boolean; + exclude_attachments: boolean; + exclude_releases: boolean; + exclude_owner_projects: boolean; + org_metadata_only: boolean; + /** @description The repositories included in the migration. 
Only returned for export migrations. */ + repositories: components["schemas"]["repository"][]; + /** + * Format: uri + * @example https://api.github.com/orgs/octo-org/migrations/79 + */ + url: string; + /** + * Format: date-time + * @example 2015-07-06T15:33:38-07:00 + */ + created_at: string; + /** + * Format: date-time + * @example 2015-07-06T15:33:38-07:00 + */ + updated_at: string; + node_id: string; + /** Format: uri */ + archive_url?: string; + /** @description Exclude related items from being returned in the response in order to improve performance of the request. The array can include any of: `"repositories"`. */ + exclude?: string[]; + }; + /** + * Package Version + * @description A version of a software package + */ + "package-version": { + /** + * @description Unique identifier of the package version. + * @example 1 + */ + id: number; + /** + * @description The name of the package version. + * @example latest + */ + name: string; + /** @example https://api.github.com/orgs/github/packages/container/super-linter/versions/786068 */ + url: string; + /** @example https://github.com/orgs/github/packages/container/package/super-linter */ + package_html_url: string; + /** @example https://github.com/orgs/github/packages/container/super-linter/786068 */ + html_url?: string; + /** @example MIT */ + license?: string; + description?: string; + /** + * Format: date-time + * @example 2011-04-10T20:09:31Z + */ + created_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + deleted_at?: string; + /** Package Version Metadata */ + metadata?: { + /** + * @example docker + * @enum {string} + */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** Container Metadata */ + container?: { + tags: string[]; + }; + /** Docker Metadata */ + docker?: { + tag?: string[]; + }; + }; + }; + /** + * Project + * @description Projects are a way to organize columns and cards of work. + */ + project: { + /** + * Format: uri + * @example https://api.github.com/repos/api-playground/projects-test + */ + owner_url: string; + /** + * Format: uri + * @example https://api.github.com/projects/1002604 + */ + url: string; + /** + * Format: uri + * @example https://github.com/api-playground/projects-test/projects/12 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/projects/1002604/columns + */ + columns_url: string; + /** @example 1002604 */ + id: number; + /** @example MDc6UHJvamVjdDEwMDI2MDQ= */ + node_id: string; + /** + * @description Name of the project + * @example Week One Sprint + */ + name: string; + /** + * @description Body of the project + * @example This project represents the sprint of the first week in January + */ + body: string | null; + /** @example 1 */ + number: number; + /** + * @description State of the project; either 'open' or 'closed' + * @example open + */ + state: string; + creator: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2011-04-10T20:09:31Z + */ + created_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + updated_at: string; + /** + * @description The baseline permission that all organization members have on this project. Only present if owner is an organization. + * @enum {string} + */ + organization_permission?: "read" | "write" | "admin" | "none"; + /** @description Whether or not this project can be seen by everyone. 
Only present if owner is an organization. */ + private?: boolean; + }; + /** + * @description The enforcement level of the ruleset. `evaluate` allows admins to test rules before enforcing them. Admins can view insights on the Rule Insights page (`evaluate` is only available with GitHub Enterprise). + * @enum {string} + */ + "repository-rule-enforcement": "disabled" | "active" | "evaluate"; + /** + * Repository Ruleset Bypass Actor + * @description An actor that can bypass rules in a ruleset + */ + "repository-ruleset-bypass-actor": { + /** @description The ID of the actor that can bypass a ruleset */ + actor_id?: number; + /** + * @description The type of actor that can bypass a ruleset + * @enum {string} + */ + actor_type?: "Team" | "Integration"; + }; + /** + * Repository ruleset conditions for ref names + * @description Parameters for a repository ruleset ref name condition + */ + "repository-ruleset-conditions": { + ref_name?: { + /** @description Array of ref names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~DEFAULT_BRANCH` to include the default branch or `~ALL` to include all branches. */ + include?: string[]; + /** @description Array of ref names or patterns to exclude. The condition will not pass if any of these patterns match. */ + exclude?: string[]; + }; + }; + /** + * Repository ruleset conditions for repository names + * @description Parameters for a repository name condition + */ + "repository-ruleset-conditions-repository-name-target": { + repository_name?: { + /** @description Array of repository names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~ALL` to include all repositories. */ + include?: string[]; + /** @description Array of repository names or patterns to exclude. The condition will not pass if any of these patterns match. */ + exclude?: string[]; + /** @description Whether renaming of target repositories is prevented. */ + protected?: boolean; + }; + }; + /** + * Organization ruleset conditions + * @description Conditions for a organization ruleset + */ + "org-ruleset-conditions": components["schemas"]["repository-ruleset-conditions"] & + components["schemas"]["repository-ruleset-conditions-repository-name-target"]; + /** + * creation + * @description Only allow users with bypass permission to create matching refs. + */ + "repository-rule-creation": { + /** @enum {string} */ + type: "creation"; + }; + /** + * update + * @description Only allow users with bypass permission to update matching refs. + */ + "repository-rule-update": { + /** @enum {string} */ + type: "update"; + parameters?: { + /** @description Branch can pull changes from its upstream repository */ + update_allows_fetch_and_merge: boolean; + }; + }; + /** + * deletion + * @description Only allow users with bypass permissions to delete matching refs. + */ + "repository-rule-deletion": { + /** @enum {string} */ + type: "deletion"; + }; + /** + * required_linear_history + * @description Prevent merge commits from being pushed to matching branches. + */ + "repository-rule-required-linear-history": { + /** @enum {string} */ + type: "required_linear_history"; + }; + /** + * required_deployments + * @description Choose which environments must be successfully deployed to before branches can be merged into a branch that matches this rule. 
+ */ + "repository-rule-required-deployments": { + /** @enum {string} */ + type: "required_deployments"; + parameters?: { + /** @description The environments that must be successfully deployed to before branches can be merged. */ + required_deployment_environments: string[]; + }; + }; + /** + * required_signatures + * @description Commits pushed to matching branches must have verified signatures. + */ + "repository-rule-required-signatures": { + /** @enum {string} */ + type: "required_signatures"; + }; + /** + * pull_request + * @description Require all commits be made to a non-target branch and submitted via a pull request before they can be merged. + */ + "repository-rule-pull-request": { + /** @enum {string} */ + type: "pull_request"; + parameters?: { + /** @description New, reviewable commits pushed will dismiss previous pull request review approvals. */ + dismiss_stale_reviews_on_push: boolean; + /** @description Require an approving review in pull requests that modify files that have a designated code owner. */ + require_code_owner_review: boolean; + /** @description Whether the most recent reviewable push must be approved by someone other than the person who pushed it. */ + require_last_push_approval: boolean; + /** @description The number of approving reviews that are required before a pull request can be merged. */ + required_approving_review_count: number; + /** @description All conversations on code must be resolved before a pull request can be merged. */ + required_review_thread_resolution: boolean; + }; + }; + /** + * StatusCheckConfiguration + * @description Required status check + */ + "repository-rule-params-status-check-configuration": { + /** @description The status check context name that must be present on the commit. */ + context: string; + /** @description The optional integration ID that this status check must originate from. */ + integration_id?: number; + }; + /** + * required_status_checks + * @description Choose which status checks must pass before branches can be merged into a branch that matches this rule. When enabled, commits must first be pushed to another branch, then merged or pushed directly to a branch that matches this rule after status checks have passed. + */ + "repository-rule-required-status-checks": { + /** @enum {string} */ + type: "required_status_checks"; + parameters?: { + /** @description Status checks that are required. */ + required_status_checks: components["schemas"]["repository-rule-params-status-check-configuration"][]; + /** @description Whether pull requests targeting a matching branch must be tested with the latest code. This setting will not take effect unless at least one status check is enabled. */ + strict_required_status_checks_policy: boolean; + }; + }; + /** + * non_fast_forward + * @description Prevent users with push access from force pushing to branches. + */ + "repository-rule-non-fast-forward": { + /** @enum {string} */ + type: "non_fast_forward"; + }; + /** + * commit_message_pattern + * @description Parameters to be used for the commit_message_pattern rule + */ + "repository-rule-commit-message-pattern": { + /** @enum {string} */ + type: "commit_message_pattern"; + parameters?: { + /** @description How this rule will appear to users. */ + name?: string; + /** @description If true, the rule will fail if the pattern matches. */ + negate?: boolean; + /** + * @description The operator to use for matching. 
+ * @enum {string} + */ + operator: "starts_with" | "ends_with" | "contains" | "regex"; + /** @description The pattern to match with. */ + pattern: string; + }; + }; + /** + * commit_author_email_pattern + * @description Parameters to be used for the commit_author_email_pattern rule + */ + "repository-rule-commit-author-email-pattern": { + /** @enum {string} */ + type: "commit_author_email_pattern"; + parameters?: { + /** @description How this rule will appear to users. */ + name?: string; + /** @description If true, the rule will fail if the pattern matches. */ + negate?: boolean; + /** + * @description The operator to use for matching. + * @enum {string} + */ + operator: "starts_with" | "ends_with" | "contains" | "regex"; + /** @description The pattern to match with. */ + pattern: string; + }; + }; + /** + * committer_email_pattern + * @description Parameters to be used for the committer_email_pattern rule + */ + "repository-rule-committer-email-pattern": { + /** @enum {string} */ + type: "committer_email_pattern"; + parameters?: { + /** @description How this rule will appear to users. */ + name?: string; + /** @description If true, the rule will fail if the pattern matches. */ + negate?: boolean; + /** + * @description The operator to use for matching. + * @enum {string} + */ + operator: "starts_with" | "ends_with" | "contains" | "regex"; + /** @description The pattern to match with. */ + pattern: string; + }; + }; + /** + * branch_name_pattern + * @description Parameters to be used for the branch_name_pattern rule + */ + "repository-rule-branch-name-pattern": { + /** @enum {string} */ + type: "branch_name_pattern"; + parameters?: { + /** @description How this rule will appear to users. */ + name?: string; + /** @description If true, the rule will fail if the pattern matches. */ + negate?: boolean; + /** + * @description The operator to use for matching. + * @enum {string} + */ + operator: "starts_with" | "ends_with" | "contains" | "regex"; + /** @description The pattern to match with. */ + pattern: string; + }; + }; + /** + * tag_name_pattern + * @description Parameters to be used for the tag_name_pattern rule + */ + "repository-rule-tag-name-pattern": { + /** @enum {string} */ + type: "tag_name_pattern"; + parameters?: { + /** @description How this rule will appear to users. */ + name?: string; + /** @description If true, the rule will fail if the pattern matches. */ + negate?: boolean; + /** + * @description The operator to use for matching. + * @enum {string} + */ + operator: "starts_with" | "ends_with" | "contains" | "regex"; + /** @description The pattern to match with. */ + pattern: string; + }; + }; + /** + * Repository Rule + * @description A repository rule. 
+ */ + "repository-rule": + | components["schemas"]["repository-rule-creation"] + | components["schemas"]["repository-rule-update"] + | components["schemas"]["repository-rule-deletion"] + | components["schemas"]["repository-rule-required-linear-history"] + | components["schemas"]["repository-rule-required-deployments"] + | components["schemas"]["repository-rule-required-signatures"] + | components["schemas"]["repository-rule-pull-request"] + | components["schemas"]["repository-rule-required-status-checks"] + | components["schemas"]["repository-rule-non-fast-forward"] + | components["schemas"]["repository-rule-commit-message-pattern"] + | components["schemas"]["repository-rule-commit-author-email-pattern"] + | components["schemas"]["repository-rule-committer-email-pattern"] + | components["schemas"]["repository-rule-branch-name-pattern"] + | components["schemas"]["repository-rule-tag-name-pattern"]; + /** + * Repository ruleset + * @description A set of rules to apply when specified conditions are met. + */ + "repository-ruleset": { + /** @description The ID of the ruleset */ + id: number; + /** @description The name of the ruleset */ + name: string; + /** + * @description The target of the ruleset + * @enum {string} + */ + target?: "branch" | "tag"; + /** + * @description The type of the source of the ruleset + * @enum {string} + */ + source_type?: "Repository" | "Organization"; + /** @description The name of the source */ + source: string; + enforcement: components["schemas"]["repository-rule-enforcement"]; + /** + * @description The permission level required to bypass this ruleset. "repository" allows those with bypass permission at the repository level to bypass. "organization" allows those with bypass permission at the organization level to bypass. "none" prevents anyone from bypassing. + * @enum {string} + */ + bypass_mode?: "none" | "repository" | "organization"; + /** @description The actors that can bypass the rules in this ruleset */ + bypass_actors?: components["schemas"]["repository-ruleset-bypass-actor"][]; + node_id?: string; + _links?: { + self?: { + /** @description The URL of the ruleset */ + href?: string; + }; + html?: { + /** @description The html URL of the ruleset */ + href?: string; + }; + }; + conditions?: + | components["schemas"]["repository-ruleset-conditions"] + | components["schemas"]["org-ruleset-conditions"]; + rules?: components["schemas"]["repository-rule"][]; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + }; + /** + * Team Simple + * @description Groups of organization members that gives permissions on specified repositories. + */ + "team-simple": { + /** + * @description Unique identifier of the team + * @example 1 + */ + id: number; + /** @example MDQ6VGVhbTE= */ + node_id: string; + /** + * Format: uri + * @description URL for the team + * @example https://api.github.com/organizations/1/team/1 + */ + url: string; + /** @example https://api.github.com/organizations/1/team/1/members{/member} */ + members_url: string; + /** + * @description Name of the team + * @example Justice League + */ + name: string; + /** + * @description Description of the team + * @example A great team. 
+ */ + description: string | null; + /** + * @description Permission that the team will have for its repositories + * @example admin + */ + permission: string; + /** + * @description The level of privacy this team should have + * @example closed + */ + privacy?: string; + /** + * @description The notification setting the team has set + * @example notifications_enabled + */ + notification_setting?: string; + /** + * Format: uri + * @example https://github.com/orgs/rails/teams/core + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/1/repos + */ + repositories_url: string; + /** @example justice-league */ + slug: string; + /** + * @description Distinguished Name (DN) that team maps to within LDAP environment + * @example uid=example,ou=users,dc=github,dc=com + */ + ldap_dn?: string; + }; + "actions-billing-usage": { + /** @description The sum of the free and paid GitHub Actions minutes used. */ + total_minutes_used: number; + /** @description The total paid GitHub Actions minutes used. */ + total_paid_minutes_used: number; + /** @description The amount of free GitHub Actions minutes available. */ + included_minutes: number; + minutes_used_breakdown: { + /** @description Total minutes used on Ubuntu runner machines. */ + UBUNTU?: number; + /** @description Total minutes used on macOS runner machines. */ + MACOS?: number; + /** @description Total minutes used on Windows runner machines. */ + WINDOWS?: number; + /** @description Total minutes used on Ubuntu 4 core runner machines. */ + ubuntu_4_core?: number; + /** @description Total minutes used on Ubuntu 8 core runner machines. */ + ubuntu_8_core?: number; + /** @description Total minutes used on Ubuntu 16 core runner machines. */ + ubuntu_16_core?: number; + /** @description Total minutes used on Ubuntu 32 core runner machines. */ + ubuntu_32_core?: number; + /** @description Total minutes used on Ubuntu 64 core runner machines. */ + ubuntu_64_core?: number; + /** @description Total minutes used on Windows 4 core runner machines. */ + windows_4_core?: number; + /** @description Total minutes used on Windows 8 core runner machines. */ + windows_8_core?: number; + /** @description Total minutes used on Windows 16 core runner machines. */ + windows_16_core?: number; + /** @description Total minutes used on Windows 32 core runner machines. */ + windows_32_core?: number; + /** @description Total minutes used on Windows 64 core runner machines. */ + windows_64_core?: number; + /** @description Total minutes used on macOS 12 core runner machines. */ + macos_12_core?: number; + /** @description Total minutes used on all runner machines. */ + total?: number; + }; + }; + "packages-billing-usage": { + /** @description Sum of the free and paid storage space (GB) for GitHuub Packages. */ + total_gigabytes_bandwidth_used: number; + /** @description Total paid storage space (GB) for GitHuub Packages. */ + total_paid_gigabytes_bandwidth_used: number; + /** @description Free storage space (GB) for GitHub Packages. */ + included_gigabytes_bandwidth: number; + }; + "combined-billing-usage": { + /** @description Numbers of days left in billing cycle. */ + days_left_in_billing_cycle: number; + /** @description Estimated storage space (GB) used in billing cycle. */ + estimated_paid_storage_for_month: number; + /** @description Estimated sum of free and paid storage space (GB) used in billing cycle. 
*/ + estimated_storage_for_month: number; + }; + /** + * Team Organization + * @description Team Organization + */ + "team-organization": { + /** @example github */ + login: string; + /** @example 1 */ + id: number; + /** @example MDEyOk9yZ2FuaXphdGlvbjE= */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/repos + */ + repos_url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/events + */ + events_url: string; + /** @example https://api.github.com/orgs/github/hooks */ + hooks_url: string; + /** @example https://api.github.com/orgs/github/issues */ + issues_url: string; + /** @example https://api.github.com/orgs/github/members{/member} */ + members_url: string; + /** @example https://api.github.com/orgs/github/public_members{/member} */ + public_members_url: string; + /** @example https://github.com/images/error/octocat_happy.gif */ + avatar_url: string; + /** @example A great organization */ + description: string | null; + /** @example github */ + name?: string; + /** @example GitHub */ + company?: string; + /** + * Format: uri + * @example https://github.com/blog + */ + blog?: string; + /** @example San Francisco */ + location?: string; + /** + * Format: email + * @example octocat@github.com + */ + email?: string; + /** @example github */ + twitter_username?: string | null; + /** @example true */ + is_verified?: boolean; + /** @example true */ + has_organization_projects: boolean; + /** @example true */ + has_repository_projects: boolean; + /** @example 2 */ + public_repos: number; + /** @example 1 */ + public_gists: number; + /** @example 20 */ + followers: number; + /** @example 0 */ + following: number; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: date-time + * @example 2008-01-14T04:33:35Z + */ + created_at: string; + /** @example Organization */ + type: string; + /** @example 100 */ + total_private_repos?: number; + /** @example 100 */ + owned_private_repos?: number; + /** @example 81 */ + private_gists?: number | null; + /** @example 10000 */ + disk_usage?: number | null; + /** @example 8 */ + collaborators?: number | null; + /** + * Format: email + * @example org@example.com + */ + billing_email?: string | null; + plan?: { + name: string; + space: number; + private_repos: number; + filled_seats?: number; + seats?: number; + }; + default_repository_permission?: string | null; + /** @example true */ + members_can_create_repositories?: boolean | null; + /** @example true */ + two_factor_requirement_enabled?: boolean | null; + /** @example all */ + members_allowed_repository_creation_type?: string; + /** @example true */ + members_can_create_public_repositories?: boolean; + /** @example true */ + members_can_create_private_repositories?: boolean; + /** @example true */ + members_can_create_internal_repositories?: boolean; + /** @example true */ + members_can_create_pages?: boolean; + /** @example true */ + members_can_create_public_pages?: boolean; + /** @example true */ + members_can_create_private_pages?: boolean; + /** @example false */ + members_can_fork_private_repositories?: boolean | null; + /** @example false */ + web_commit_signoff_required?: boolean; + /** Format: date-time */ + updated_at: string; + }; + /** + * Full Team + * @description Groups of organization members that gives permissions on specified repositories. 
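// --- Illustrative usage sketch (editor addition, not part of the generated
// file): the per-runner fields in "actions-billing-usage" above are all
// optional, so summing them takes a little care. `sumBreakdownMinutes` is a
// hypothetical helper; comparing its result with `total_minutes_used` is one
// way to sanity-check a billing response.
type ActionsBillingUsage = components["schemas"]["actions-billing-usage"];

function sumBreakdownMinutes(usage: ActionsBillingUsage): number {
  // Exclude the aggregate `total` field and add up the per-runner minutes.
  const { total, ...perRunner } = usage.minutes_used_breakdown;
  return Object.values(perRunner).reduce(
    (acc, minutes) => acc + (minutes ?? 0),
    0
  );
}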
+ */ + "team-full": { + /** + * @description Unique identifier of the team + * @example 42 + */ + id: number; + /** @example MDQ6VGVhbTE= */ + node_id: string; + /** + * Format: uri + * @description URL for the team + * @example https://api.github.com/organizations/1/team/1 + */ + url: string; + /** + * Format: uri + * @example https://github.com/orgs/rails/teams/core + */ + html_url: string; + /** + * @description Name of the team + * @example Developers + */ + name: string; + /** @example justice-league */ + slug: string; + /** @example A great team. */ + description: string | null; + /** + * @description The level of privacy this team should have + * @example closed + * @enum {string} + */ + privacy?: "closed" | "secret"; + /** + * @description The notification setting the team has set + * @example notifications_enabled + * @enum {string} + */ + notification_setting?: "notifications_enabled" | "notifications_disabled"; + /** + * @description Permission that the team will have for its repositories + * @example push + */ + permission: string; + /** @example https://api.github.com/organizations/1/team/1/members{/member} */ + members_url: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/1/repos + */ + repositories_url: string; + parent?: components["schemas"]["nullable-team-simple"]; + /** @example 3 */ + members_count: number; + /** @example 10 */ + repos_count: number; + /** + * Format: date-time + * @example 2017-07-14T16:53:42Z + */ + created_at: string; + /** + * Format: date-time + * @example 2017-08-17T12:37:15Z + */ + updated_at: string; + organization: components["schemas"]["team-organization"]; + /** + * @description Distinguished Name (DN) that team maps to within LDAP environment + * @example uid=example,ou=users,dc=github,dc=com + */ + ldap_dn?: string; + }; + /** + * Team Discussion + * @description A team discussion is a persistent record of a free-form conversation within a team. + */ + "team-discussion": { + author: components["schemas"]["nullable-simple-user"]; + /** + * @description The main text of the discussion. + * @example Please suggest improvements to our workflow in comments. + */ + body: string; + /** @example
Hi! This is an area for us to collaborate as a team
*/ + body_html: string; + /** + * @description The current version of the body content. If provided, this update operation will be rejected if the given version does not match the latest version on the server. + * @example 0307116bbf7ced493b8d8a346c650b71 + */ + body_version: string; + /** @example 0 */ + comments_count: number; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2343027/discussions/1/comments + */ + comments_url: string; + /** + * Format: date-time + * @example 2018-01-25T18:56:31Z + */ + created_at: string; + /** Format: date-time */ + last_edited_at: string | null; + /** + * Format: uri + * @example https://github.com/orgs/github/teams/justice-league/discussions/1 + */ + html_url: string; + /** @example MDE0OlRlYW1EaXNjdXNzaW9uMQ== */ + node_id: string; + /** + * @description The unique sequence number of a team discussion. + * @example 42 + */ + number: number; + /** + * @description Whether or not this discussion should be pinned for easy retrieval. + * @example true + */ + pinned: boolean; + /** + * @description Whether or not this discussion should be restricted to team members and organization administrators. + * @example true + */ + private: boolean; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2343027 + */ + team_url: string; + /** + * @description The title of the discussion. + * @example How can we improve our workflow? + */ + title: string; + /** + * Format: date-time + * @example 2018-01-25T18:56:31Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2343027/discussions/1 + */ + url: string; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Team Discussion Comment + * @description A reply to a discussion within a team. + */ + "team-discussion-comment": { + author: components["schemas"]["nullable-simple-user"]; + /** + * @description The main text of the comment. + * @example I agree with this suggestion. + */ + body: string; + /** @example
Do you like apples?
*/ + body_html: string; + /** + * @description The current version of the body content. If provided, this update operation will be rejected if the given version does not match the latest version on the server. + * @example 0307116bbf7ced493b8d8a346c650b71 + */ + body_version: string; + /** + * Format: date-time + * @example 2018-01-15T23:53:58Z + */ + created_at: string; + /** Format: date-time */ + last_edited_at: string | null; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2403582/discussions/1 + */ + discussion_url: string; + /** + * Format: uri + * @example https://github.com/orgs/github/teams/justice-league/discussions/1/comments/1 + */ + html_url: string; + /** @example MDIxOlRlYW1EaXNjdXNzaW9uQ29tbWVudDE= */ + node_id: string; + /** + * @description The unique sequence number of a team discussion comment. + * @example 42 + */ + number: number; + /** + * Format: date-time + * @example 2018-01-15T23:53:58Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2403582/discussions/1/comments/1 + */ + url: string; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Reaction + * @description Reactions to conversations provide a way to help people express their feelings more simply and effectively. + */ + reaction: { + /** @example 1 */ + id: number; + /** @example MDg6UmVhY3Rpb24x */ + node_id: string; + user: components["schemas"]["nullable-simple-user"]; + /** + * @description The reaction to use + * @example heart + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** + * Format: date-time + * @example 2016-05-20T20:09:31Z + */ + created_at: string; + }; + /** + * Team Membership + * @description Team Membership + */ + "team-membership": { + /** Format: uri */ + url: string; + /** + * @description The role of the user in the team. + * @default member + * @example member + * @enum {string} + */ + role: "member" | "maintainer"; + /** + * @description The state of the user's membership in the team. + * @enum {string} + */ + state: "active" | "pending"; + }; + /** + * Team Project + * @description A team's access to a project. + */ + "team-project": { + owner_url: string; + url: string; + html_url: string; + columns_url: string; + id: number; + node_id: string; + name: string; + body: string | null; + number: number; + state: string; + creator: components["schemas"]["simple-user"]; + created_at: string; + updated_at: string; + /** @description The organization permission for this project. Only present when owner is an organization. */ + organization_permission?: string; + /** @description Whether the project is private or not. Only present when owner is an organization. */ + private?: boolean; + permissions: { + read: boolean; + write: boolean; + admin: boolean; + }; + }; + /** + * Repository + * @description A repository on GitHub. + */ + "nullable-repository": { + /** + * @description Unique identifier of the repository + * @example 42 + */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** + * @description The name of the repository. 
+ * @example Team Environment + */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + license: components["schemas"]["nullable-license-simple"]; + organization?: components["schemas"]["nullable-simple-user"]; + forks: number; + permissions?: { + admin: boolean; + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + }; + owner: components["schemas"]["simple-user"]; + /** + * @description Whether the repository is private or public. + * @default false + */ + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! */ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + /** @example git:github.com/octocat/Hello-World.git */ + git_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example 
http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + /** @example git@github.com:octocat/Hello-World.git */ + ssh_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + /** @example https://github.com/octocat/Hello-World.git */ + clone_url: string; + /** + * Format: uri + * @example git:git.example.com/octocat/Hello-World + */ + mirror_url: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + /** + * Format: uri + * @example https://svn.github.com/octocat/Hello-World + */ + svn_url: string; + /** + * Format: uri + * @example https://github.com + */ + homepage: string | null; + language: string | null; + /** @example 9 */ + forks_count: number; + /** @example 80 */ + stargazers_count: number; + /** @example 80 */ + watchers_count: number; + /** + * @description The size of the repository. Size is calculated hourly. When a repository is initially created, the size is 0. + * @example 108 + */ + size: number; + /** + * @description The default branch of the repository. + * @example master + */ + default_branch: string; + /** @example 0 */ + open_issues_count: number; + /** + * @description Whether this repository acts as a template that can be used to generate new repositories. + * @default false + * @example true + */ + is_template?: boolean; + topics?: string[]; + /** + * @description Whether issues are enabled. + * @default true + * @example true + */ + has_issues: boolean; + /** + * @description Whether projects are enabled. + * @default true + * @example true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + * @example true + */ + has_wiki: boolean; + has_pages: boolean; + /** + * @description Whether downloads are enabled. + * @default true + * @example true + */ + has_downloads: boolean; + /** + * @description Whether discussions are enabled. + * @default false + * @example true + */ + has_discussions?: boolean; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** @description Returns whether or not this repository disabled. */ + disabled: boolean; + /** + * @description The repository visibility: public, private, or internal. 
+ * @default public + */ + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at: string | null; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + * @example true + */ + allow_rebase_merge?: boolean; + template_repository?: { + id?: number; + node_id?: string; + name?: string; + full_name?: string; + owner?: { + login?: string; + id?: number; + node_id?: string; + avatar_url?: string; + gravatar_id?: string; + url?: string; + html_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + starred_url?: string; + subscriptions_url?: string; + organizations_url?: string; + repos_url?: string; + events_url?: string; + received_events_url?: string; + type?: string; + site_admin?: boolean; + }; + private?: boolean; + html_url?: string; + description?: string; + fork?: boolean; + url?: string; + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + contents_url?: string; + contributors_url?: string; + deployments_url?: string; + downloads_url?: string; + events_url?: string; + forks_url?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + git_url?: string; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + notifications_url?: string; + pulls_url?: string; + releases_url?: string; + ssh_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; + subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + clone_url?: string; + mirror_url?: string; + hooks_url?: string; + svn_url?: string; + homepage?: string; + language?: string; + forks_count?: number; + stargazers_count?: number; + watchers_count?: number; + size?: number; + default_branch?: string; + open_issues_count?: number; + is_template?: boolean; + topics?: string[]; + has_issues?: boolean; + has_projects?: boolean; + has_wiki?: boolean; + has_pages?: boolean; + has_downloads?: boolean; + archived?: boolean; + disabled?: boolean; + visibility?: string; + pushed_at?: string; + created_at?: string; + updated_at?: string; + permissions?: { + admin?: boolean; + maintain?: boolean; + push?: boolean; + triage?: boolean; + pull?: boolean; + }; + allow_rebase_merge?: boolean; + temp_clone_token?: string; + allow_squash_merge?: boolean; + allow_auto_merge?: boolean; + delete_branch_on_merge?: boolean; + allow_update_branch?: boolean; + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. 
+ * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + allow_merge_commit?: boolean; + subscribers_count?: number; + network_count?: number; + } | null; + temp_clone_token?: string; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + * @example true + */ + allow_squash_merge?: boolean; + /** + * @description Whether to allow Auto-merge to be used on pull requests. + * @default false + * @example false + */ + allow_auto_merge?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + * @example false + */ + delete_branch_on_merge?: boolean; + /** + * @description Whether or not a pull request head branch that is behind its base branch can always be updated even if it is not required to be up to date before merging. + * @default false + * @example false + */ + allow_update_branch?: boolean; + /** + * @deprecated + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description Whether to allow merge commits for pull requests. 
+ * @default true + * @example true + */ + allow_merge_commit?: boolean; + /** @description Whether to allow forking this repo */ + allow_forking?: boolean; + /** + * @description Whether to require contributors to sign off on web-based commits + * @default false + */ + web_commit_signoff_required?: boolean; + subscribers_count?: number; + network_count?: number; + open_issues: number; + watchers: number; + master_branch?: string; + /** @example "2020-07-09T00:17:42Z" */ + starred_at?: string; + /** @description Whether anonymous git access is enabled for this repository */ + anonymous_access_enabled?: boolean; + } | null; + /** + * Team Repository + * @description A team's access to a repository. + */ + "team-repository": { + /** + * @description Unique identifier of the repository + * @example 42 + */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** + * @description The name of the repository. + * @example Team Environment + */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + license: components["schemas"]["nullable-license-simple"]; + forks: number; + permissions?: { + admin: boolean; + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + }; + /** @example admin */ + role_name?: string; + owner: components["schemas"]["nullable-simple-user"]; + /** + * @description Whether the repository is private or public. + * @default false + */ + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! */ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + 
git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + /** @example git:github.com/octocat/Hello-World.git */ + git_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + /** @example git@github.com:octocat/Hello-World.git */ + ssh_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + /** @example https://github.com/octocat/Hello-World.git */ + clone_url: string; + /** + * Format: uri + * @example git:git.example.com/octocat/Hello-World + */ + mirror_url: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + /** + * Format: uri + * @example https://svn.github.com/octocat/Hello-World + */ + svn_url: string; + /** + * Format: uri + * @example https://github.com + */ + homepage: string | null; + language: string | null; + /** @example 9 */ + forks_count: number; + /** @example 80 */ + stargazers_count: number; + /** @example 80 */ + watchers_count: number; + /** @example 108 */ + size: number; + /** + * @description The default branch of the repository. + * @example master + */ + default_branch: string; + /** @example 0 */ + open_issues_count: number; + /** + * @description Whether this repository acts as a template that can be used to generate new repositories. + * @default false + * @example true + */ + is_template?: boolean; + topics?: string[]; + /** + * @description Whether issues are enabled. 
+ * @default true + * @example true + */ + has_issues: boolean; + /** + * @description Whether projects are enabled. + * @default true + * @example true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + * @example true + */ + has_wiki: boolean; + has_pages: boolean; + /** + * @description Whether downloads are enabled. + * @default true + * @example true + */ + has_downloads: boolean; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** @description Returns whether or not this repository disabled. */ + disabled: boolean; + /** + * @description The repository visibility: public, private, or internal. + * @default public + */ + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at: string | null; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + * @example true + */ + allow_rebase_merge?: boolean; + template_repository?: components["schemas"]["nullable-repository"]; + temp_clone_token?: string; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + * @example true + */ + allow_squash_merge?: boolean; + /** + * @description Whether to allow Auto-merge to be used on pull requests. + * @default false + * @example false + */ + allow_auto_merge?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + * @example false + */ + delete_branch_on_merge?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + * @example true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow forking this repo + * @default false + * @example false + */ + allow_forking?: boolean; + /** + * @description Whether to require contributors to sign off on web-based commits + * @default false + * @example false + */ + web_commit_signoff_required?: boolean; + subscribers_count?: number; + network_count?: number; + open_issues: number; + watchers: number; + master_branch?: string; + }; + /** + * Project Card + * @description Project cards represent a scope of work. 
+ */ + "project-card": { + /** + * Format: uri + * @example https://api.github.com/projects/columns/cards/1478 + */ + url: string; + /** + * @description The project card's ID + * @example 42 + */ + id: number; + /** @example MDExOlByb2plY3RDYXJkMTQ3OA== */ + node_id: string; + /** @example Add payload for delete Project column */ + note: string | null; + creator: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2016-09-05T14:21:06Z + */ + created_at: string; + /** + * Format: date-time + * @example 2016-09-05T14:20:22Z + */ + updated_at: string; + /** + * @description Whether or not the card is archived + * @example false + */ + archived?: boolean; + column_name?: string; + project_id?: string; + /** + * Format: uri + * @example https://api.github.com/projects/columns/367 + */ + column_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/api-playground/projects-test/issues/3 + */ + content_url?: string; + /** + * Format: uri + * @example https://api.github.com/projects/120 + */ + project_url: string; + }; + /** + * Project Column + * @description Project columns contain cards of work. + */ + "project-column": { + /** + * Format: uri + * @example https://api.github.com/projects/columns/367 + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/projects/120 + */ + project_url: string; + /** + * Format: uri + * @example https://api.github.com/projects/columns/367/cards + */ + cards_url: string; + /** + * @description The unique identifier of the project column + * @example 42 + */ + id: number; + /** @example MDEzOlByb2plY3RDb2x1bW4zNjc= */ + node_id: string; + /** + * @description Name of the project column + * @example Remaining tasks + */ + name: string; + /** + * Format: date-time + * @example 2016-09-05T14:18:44Z + */ + created_at: string; + /** + * Format: date-time + * @example 2016-09-05T14:22:28Z + */ + updated_at: string; + }; + /** + * Project Collaborator Permission + * @description Project Collaborator Permission + */ + "project-collaborator-permission": { + permission: string; + user: components["schemas"]["nullable-simple-user"]; + }; + /** Rate Limit */ + "rate-limit": { + limit: number; + remaining: number; + reset: number; + used: number; + }; + /** + * Rate Limit Overview + * @description Rate Limit Overview + */ + "rate-limit-overview": { + resources: { + core: components["schemas"]["rate-limit"]; + graphql?: components["schemas"]["rate-limit"]; + search: components["schemas"]["rate-limit"]; + source_import?: components["schemas"]["rate-limit"]; + integration_manifest?: components["schemas"]["rate-limit"]; + code_scanning_upload?: components["schemas"]["rate-limit"]; + actions_runner_registration?: components["schemas"]["rate-limit"]; + scim?: components["schemas"]["rate-limit"]; + dependency_snapshots?: components["schemas"]["rate-limit"]; + }; + rate: components["schemas"]["rate-limit"]; + }; + /** + * Required workflow + * @description A GitHub Actions required workflow + */ + "repo-required-workflow": { + /** @example 5 */ + id: number; + /** @example MDg6V29ya2Zsb3cxMg== */ + node_id: string; + /** @example Required CI */ + name: string; + /** @example .github/workflows/required_ci.yaml */ + path: string; + /** + * @example active + * @enum {string} + */ + state: "active" | "deleted"; + source_repository: components["schemas"]["minimal-repository"]; + /** + * Format: date-time + * @example 2019-12-06T14:20:20.000Z + */ + created_at: string; + /** + * Format: date-time + * @example 
2019-12-06T14:20:20.000Z + */ + updated_at: string; + /** @example https://api.github.com/repos/sample-org/sample-repo/actions/required_workflows/5 */ + url: string; + /** @example https://github.com/sample-org/source-repo/blob/main/.github/workflows/required_ci.yaml */ + html_url: string; + /** @example https://github.com/sample-org/sample-repo/workflows/required/sample-org/source-repo/.github/workflows/required_ci.yaml/badge.svg */ + badge_url: string; + }; + /** + * Workflow Usage + * @description Workflow Usage + */ + "workflow-usage": { + billable: { + UBUNTU?: { + total_ms?: number; + }; + MACOS?: { + total_ms?: number; + }; + WINDOWS?: { + total_ms?: number; + }; + }; + }; + /** + * Code Of Conduct Simple + * @description Code of Conduct Simple + */ + "code-of-conduct-simple": { + /** + * Format: uri + * @example https://api.github.com/repos/github/docs/community/code_of_conduct + */ + url: string; + /** @example citizen_code_of_conduct */ + key: string; + /** @example Citizen Code of Conduct */ + name: string; + /** + * Format: uri + * @example https://github.com/github/docs/blob/main/CODE_OF_CONDUCT.md + */ + html_url: string | null; + }; + /** + * Full Repository + * @description Full Repository + */ + "full-repository": { + /** @example 1296269 */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** @example Hello-World */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + owner: components["schemas"]["simple-user"]; + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! */ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example 
http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + /** @example git:github.com/octocat/Hello-World.git */ + git_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + /** @example git@github.com:octocat/Hello-World.git */ + ssh_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + /** @example https://github.com/octocat/Hello-World.git */ + clone_url: string; + /** + * Format: uri + * @example git:git.example.com/octocat/Hello-World + */ + mirror_url: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + /** + * Format: uri + * @example https://svn.github.com/octocat/Hello-World + */ + svn_url: string; + /** + * Format: uri + * @example https://github.com + */ + homepage: string | null; + language: string | null; + /** @example 9 */ + forks_count: number; + /** @example 80 */ + stargazers_count: number; + /** @example 80 */ + watchers_count: number; + /** + * @description The size of the repository. Size is calculated hourly. When a repository is initially created, the size is 0. 
+ * @example 108 + */ + size: number; + /** @example master */ + default_branch: string; + /** @example 0 */ + open_issues_count: number; + /** @example true */ + is_template?: boolean; + /** + * @example [ + * "octocat", + * "atom", + * "electron", + * "API" + * ] + */ + topics?: string[]; + /** @example true */ + has_issues: boolean; + /** @example true */ + has_projects: boolean; + /** @example true */ + has_wiki: boolean; + has_pages: boolean; + /** @example true */ + has_downloads: boolean; + /** @example true */ + has_discussions: boolean; + archived: boolean; + /** @description Returns whether or not this repository disabled. */ + disabled: boolean; + /** + * @description The repository visibility: public, private, or internal. + * @example public + */ + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at: string; + permissions?: { + admin: boolean; + maintain?: boolean; + push: boolean; + triage?: boolean; + pull: boolean; + }; + /** @example true */ + allow_rebase_merge?: boolean; + template_repository?: components["schemas"]["nullable-repository"]; + temp_clone_token?: string | null; + /** @example true */ + allow_squash_merge?: boolean; + /** @example false */ + allow_auto_merge?: boolean; + /** @example false */ + delete_branch_on_merge?: boolean; + /** @example true */ + allow_merge_commit?: boolean; + /** @example true */ + allow_update_branch?: boolean; + /** @example false */ + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @example PR_TITLE + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @example PR_BODY + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @example PR_TITLE + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. 
+ * @example PR_BODY + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** @example true */ + allow_forking?: boolean; + /** @example false */ + web_commit_signoff_required?: boolean; + /** @example 42 */ + subscribers_count: number; + /** @example 0 */ + network_count: number; + license: components["schemas"]["nullable-license-simple"]; + organization?: components["schemas"]["nullable-simple-user"]; + parent?: components["schemas"]["repository"]; + source?: components["schemas"]["repository"]; + forks: number; + master_branch?: string; + open_issues: number; + watchers: number; + /** + * @description Whether anonymous git access is allowed. + * @default true + */ + anonymous_access_enabled?: boolean; + code_of_conduct?: components["schemas"]["code-of-conduct-simple"]; + security_and_analysis?: components["schemas"]["security-and-analysis"]; + }; + /** + * Artifact + * @description An artifact + */ + artifact: { + /** @example 5 */ + id: number; + /** @example MDEwOkNoZWNrU3VpdGU1 */ + node_id: string; + /** + * @description The name of the artifact. + * @example AdventureWorks.Framework + */ + name: string; + /** + * @description The size in bytes of the artifact. + * @example 12345 + */ + size_in_bytes: number; + /** @example https://api.github.com/repos/github/hello-world/actions/artifacts/5 */ + url: string; + /** @example https://api.github.com/repos/github/hello-world/actions/artifacts/5/zip */ + archive_download_url: string; + /** @description Whether or not the artifact has expired. */ + expired: boolean; + /** Format: date-time */ + created_at: string | null; + /** Format: date-time */ + expires_at: string | null; + /** Format: date-time */ + updated_at: string | null; + workflow_run?: { + /** @example 10 */ + id?: number; + /** @example 42 */ + repository_id?: number; + /** @example 42 */ + head_repository_id?: number; + /** @example main */ + head_branch?: string; + /** @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d */ + head_sha?: string; + } | null; + }; + /** + * Repository actions caches + * @description Repository actions caches + */ + "actions-cache-list": { + /** + * @description Total number of caches + * @example 2 + */ + total_count: number; + /** @description Array of caches */ + actions_caches: { + /** @example 2 */ + id?: number; + /** @example refs/heads/main */ + ref?: string; + /** @example Linux-node-958aff96db2d75d67787d1e634ae70b659de937b */ + key?: string; + /** @example 73885106f58cc52a7df9ec4d4a5622a5614813162cb516c759a30af6bf56e6f0 */ + version?: string; + /** + * Format: date-time + * @example 2019-01-24T22:45:36.000Z + */ + last_accessed_at?: string; + /** + * Format: date-time + * @example 2019-01-24T22:45:36.000Z + */ + created_at?: string; + /** @example 1024 */ + size_in_bytes?: number; + }[]; + }; + /** + * Job + * @description Information of a job execution in a workflow run + */ + job: { + /** + * @description The id of the job. + * @example 21 + */ + id: number; + /** + * @description The id of the associated workflow run. + * @example 5 + */ + run_id: number; + /** @example https://api.github.com/repos/github/hello-world/actions/runs/5 */ + run_url: string; + /** + * @description Attempt number of the associated workflow run, 1 for first attempt and higher if the workflow was re-run. + * @example 1 + */ + run_attempt?: number; + /** @example MDg6Q2hlY2tSdW40 */ + node_id: string; + /** + * @description The SHA of the commit that is being run. 
+ * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha: string; + /** @example https://api.github.com/repos/github/hello-world/actions/jobs/21 */ + url: string; + /** @example https://github.com/github/hello-world/runs/4 */ + html_url: string | null; + /** + * @description The phase of the lifecycle that the job is currently in. + * @example queued + * @enum {string} + */ + status: "queued" | "in_progress" | "completed"; + /** + * @description The outcome of the job. + * @example success + * @enum {string|null} + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "skipped" + | "timed_out" + | "action_required" + | null; + /** + * Format: date-time + * @description The time that the job created, in ISO 8601 format. + * @example 2019-08-08T08:00:00-07:00 + */ + created_at: string; + /** + * Format: date-time + * @description The time that the job started, in ISO 8601 format. + * @example 2019-08-08T08:00:00-07:00 + */ + started_at: string; + /** + * Format: date-time + * @description The time that the job finished, in ISO 8601 format. + * @example 2019-08-08T08:00:00-07:00 + */ + completed_at: string | null; + /** + * @description The name of the job. + * @example test-coverage + */ + name: string; + /** @description Steps in this job. */ + steps?: { + /** + * @description The phase of the lifecycle that the job is currently in. + * @example queued + * @enum {string} + */ + status: "queued" | "in_progress" | "completed"; + /** + * @description The outcome of the job. + * @example success + */ + conclusion: string | null; + /** + * @description The name of the job. + * @example test-coverage + */ + name: string; + /** @example 1 */ + number: number; + /** + * Format: date-time + * @description The time that the step started, in ISO 8601 format. + * @example 2019-08-08T08:00:00-07:00 + */ + started_at?: string | null; + /** + * Format: date-time + * @description The time that the job finished, in ISO 8601 format. + * @example 2019-08-08T08:00:00-07:00 + */ + completed_at?: string | null; + }[]; + /** @example https://api.github.com/repos/github/hello-world/check-runs/4 */ + check_run_url: string; + /** + * @description Labels for the workflow job. Specified by the "runs_on" attribute in the action's workflow file. + * @example [ + * "self-hosted", + * "foo", + * "bar" + * ] + */ + labels: string[]; + /** + * @description The ID of the runner to which this job has been assigned. (If a runner hasn't yet been assigned, this will be null.) + * @example 1 + */ + runner_id: number | null; + /** + * @description The name of the runner to which this job has been assigned. (If a runner hasn't yet been assigned, this will be null.) + * @example my runner + */ + runner_name: string | null; + /** + * @description The ID of the runner group to which this job has been assigned. (If a runner hasn't yet been assigned, this will be null.) + * @example 2 + */ + runner_group_id: number | null; + /** + * @description The name of the runner group to which this job has been assigned. (If a runner hasn't yet been assigned, this will be null.) + * @example my runner group + */ + runner_group_name: string | null; + /** + * @description The name of the workflow. + * @example Build + */ + workflow_name: string | null; + /** + * @description The name of the current branch. 
+ * @example main + */ + head_branch: string | null; + }; + /** + * Actions OIDC subject customization for a repository + * @description Actions OIDC subject customization for a repository + */ + "oidc-custom-sub-repo": { + /** @description Whether to use the default template or not. If `true`, the `include_claim_keys` field is ignored. */ + use_default: boolean; + /** @description Array of unique strings. Each claim key can only contain alphanumeric characters and underscores. */ + include_claim_keys?: string[]; + }; + /** + * Actions Secret + * @description Set secrets for GitHub Actions. + */ + "actions-secret": { + /** + * @description The name of the secret. + * @example SECRET_TOKEN + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** Actions Variable */ + "actions-variable": { + /** + * @description The name of the variable. + * @example USERNAME + */ + name: string; + /** + * @description The value of the variable. + * @example octocat + */ + value: string; + /** + * Format: date-time + * @description The date and time at which the variable was created, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + * @example 2019-01-24T22:45:36.000Z + */ + created_at: string; + /** + * Format: date-time + * @description The date and time at which the variable was last updated, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + * @example 2019-01-24T22:45:36.000Z + */ + updated_at: string; + }; + /** @description Whether GitHub Actions is enabled on the repository. */ + "actions-enabled": boolean; + "actions-repository-permissions": { + enabled: components["schemas"]["actions-enabled"]; + allowed_actions?: components["schemas"]["allowed-actions"]; + selected_actions_url?: components["schemas"]["selected-actions-url"]; + }; + "actions-workflow-access-to-repository": { + /** + * @description Defines the level of access that workflows outside of the repository have to actions and reusable workflows within the + * repository. + * + * `none` means the access is only possible from workflows in this repository. `user` level access allows sharing across user owned private repos only. `organization` level access allows sharing across the organization. + * @enum {string} + */ + access_level: "none" | "user" | "organization"; + }; + /** + * Referenced workflow + * @description A workflow referenced/reused by the initial caller workflow + */ + "referenced-workflow": { + path: string; + sha: string; + ref?: string; + }; + /** Pull Request Minimal */ + "pull-request-minimal": { + id: number; + number: number; + url: string; + head: { + ref: string; + sha: string; + repo: { + id: number; + url: string; + name: string; + }; + }; + base: { + ref: string; + sha: string; + repo: { + id: number; + url: string; + name: string; + }; + }; + }; + /** + * Simple Commit + * @description A commit. 
+ */ + "nullable-simple-commit": { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + id: string; + /** @description SHA for the commit's tree */ + tree_id: string; + /** + * @description Message describing the purpose of the commit + * @example Fix #42 + */ + message: string; + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + timestamp: string; + /** @description Information about the Git author */ + author: { + /** + * @description Name of the commit's author + * @example Monalisa Octocat + */ + name: string; + /** + * Format: email + * @description Git email address of the commit's author + * @example monalisa.octocat@example.com + */ + email: string; + } | null; + /** @description Information about the Git committer */ + committer: { + /** + * @description Name of the commit's committer + * @example Monalisa Octocat + */ + name: string; + /** + * Format: email + * @description Git email address of the commit's committer + * @example monalisa.octocat@example.com + */ + email: string; + } | null; + } | null; + /** + * Workflow Run + * @description An invocation of a workflow + */ + "workflow-run": { + /** + * @description The ID of the workflow run. + * @example 5 + */ + id: number; + /** + * @description The name of the workflow run. + * @example Build + */ + name?: string | null; + /** @example MDEwOkNoZWNrU3VpdGU1 */ + node_id: string; + /** + * @description The ID of the associated check suite. + * @example 42 + */ + check_suite_id?: number; + /** + * @description The node ID of the associated check suite. + * @example MDEwOkNoZWNrU3VpdGU0Mg== + */ + check_suite_node_id?: string; + /** @example master */ + head_branch: string | null; + /** + * @description The SHA of the head commit that points to the version of the workflow being run. + * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha: string; + /** + * @description The full path of the workflow + * @example octocat/octo-repo/.github/workflows/ci.yml@main + */ + path: string; + /** + * @description The auto incrementing run number for the workflow run. + * @example 106 + */ + run_number: number; + /** + * @description Attempt number of the run, 1 for first attempt and higher if the workflow was re-run. + * @example 1 + */ + run_attempt?: number; + referenced_workflows?: + | components["schemas"]["referenced-workflow"][] + | null; + /** @example push */ + event: string; + /** @example completed */ + status: string | null; + /** @example neutral */ + conclusion: string | null; + /** + * @description The ID of the parent workflow. + * @example 5 + */ + workflow_id: number; + /** + * @description The URL to the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5 + */ + url: string; + /** @example https://github.com/github/hello-world/suites/4 */ + html_url: string; + pull_requests: components["schemas"]["pull-request-minimal"][] | null; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + actor?: components["schemas"]["simple-user"]; + triggering_actor?: components["schemas"]["simple-user"]; + /** + * Format: date-time + * @description The start time of the latest run. Resets on re-run. + */ + run_started_at?: string; + /** + * @description The URL to the jobs for the workflow run. 
+ * @example https://api.github.com/repos/github/hello-world/actions/runs/5/jobs + */ + jobs_url: string; + /** + * @description The URL to download the logs for the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/logs + */ + logs_url: string; + /** + * @description The URL to the associated check suite. + * @example https://api.github.com/repos/github/hello-world/check-suites/12 + */ + check_suite_url: string; + /** + * @description The URL to the artifacts for the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/rerun/artifacts + */ + artifacts_url: string; + /** + * @description The URL to cancel the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/cancel + */ + cancel_url: string; + /** + * @description The URL to rerun the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/rerun + */ + rerun_url: string; + /** + * @description The URL to the previous attempted run of this workflow, if one exists. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/attempts/3 + */ + previous_attempt_url?: string | null; + /** + * @description The URL to the workflow. + * @example https://api.github.com/repos/github/hello-world/actions/workflows/main.yaml + */ + workflow_url: string; + head_commit: components["schemas"]["nullable-simple-commit"]; + repository: components["schemas"]["minimal-repository"]; + head_repository: components["schemas"]["minimal-repository"]; + /** @example 5 */ + head_repository_id?: number; + /** + * @description The event-specific title associated with the run or the run-name if set, or the value of `run-name` if it is set in the workflow. + * @example Simple Workflow + */ + display_title: string; + }; + /** + * Environment Approval + * @description An entry in the reviews log for environment deployments + */ + "environment-approvals": { + /** @description The list of environments that were approved or rejected */ + environments: { + /** + * @description The id of the environment. + * @example 56780428 + */ + id?: number; + /** @example MDExOkVudmlyb25tZW50NTY3ODA0Mjg= */ + node_id?: string; + /** + * @description The name of the environment. + * @example staging + */ + name?: string; + /** @example https://api.github.com/repos/github/hello-world/environments/staging */ + url?: string; + /** @example https://github.com/github/hello-world/deployments/activity_log?environments_filter=staging */ + html_url?: string; + /** + * Format: date-time + * @description The time that the environment was created, in ISO 8601 format. + * @example 2020-11-23T22:00:40Z + */ + created_at?: string; + /** + * Format: date-time + * @description The time that the environment was last updated, in ISO 8601 format. + * @example 2020-11-23T22:00:40Z + */ + updated_at?: string; + }[]; + /** + * @description Whether deployment to the environment(s) was approved or rejected or pending (with comments) + * @example approved + * @enum {string} + */ + state: "approved" | "rejected" | "pending"; + user: components["schemas"]["simple-user"]; + /** + * @description The comment submitted with the deployment review + * @example Ship it! + */ + comment: string; + }; + "review-custom-gates-comment-required": { + /** @description The name of the environment to approve or reject. */ + environment_name: string; + /** @description Comment associated with the pending deployment protection rule. 
**Required when state is not provided.** */ + comment: string; + }; + "review-custom-gates-state-required": { + /** @description The name of the environment to approve or reject. */ + environment_name: string; + /** + * @description Whether to approve or reject deployment to the specified environments. + * @enum {string} + */ + state: "approved" | "rejected"; + /** @description Optional comment to include with the review. */ + comment?: string; + }; + /** + * @description The type of reviewer. + * @example User + * @enum {string} + */ + "deployment-reviewer-type": "User" | "Team"; + /** + * Pending Deployment + * @description Details of a deployment that is waiting for protection rules to pass + */ + "pending-deployment": { + environment: { + /** + * @description The id of the environment. + * @example 56780428 + */ + id?: number; + /** @example MDExOkVudmlyb25tZW50NTY3ODA0Mjg= */ + node_id?: string; + /** + * @description The name of the environment. + * @example staging + */ + name?: string; + /** @example https://api.github.com/repos/github/hello-world/environments/staging */ + url?: string; + /** @example https://github.com/github/hello-world/deployments/activity_log?environments_filter=staging */ + html_url?: string; + }; + /** + * @description The set duration of the wait timer + * @example 30 + */ + wait_timer: number; + /** + * Format: date-time + * @description The time that the wait timer began. + * @example 2020-11-23T22:00:40Z + */ + wait_timer_started_at: string | null; + /** + * @description Whether the currently authenticated user can approve the deployment + * @example true + */ + current_user_can_approve: boolean; + /** @description The people or teams that may approve jobs that reference the environment. You can list up to six users or teams as reviewers. The reviewers must have at least read access to the repository. Only one of the required reviewers needs to approve the job for it to proceed. */ + reviewers: { + type?: components["schemas"]["deployment-reviewer-type"]; + reviewer?: + | components["schemas"]["simple-user"] + | components["schemas"]["team"]; + }[]; + }; + /** + * Deployment + * @description A request for a specific ref(branch,sha,tag) to be deployed + */ + deployment: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/1 + */ + url: string; + /** + * @description Unique identifier of the deployment + * @example 42 + */ + id: number; + /** @example MDEwOkRlcGxveW1lbnQx */ + node_id: string; + /** @example a84d88e7554fc1fa21bcbc4efae3c782a70d2b9d */ + sha: string; + /** + * @description The ref to deploy. This can be a branch, tag, or sha. + * @example topic-branch + */ + ref: string; + /** + * @description Parameter to specify a task to execute + * @example deploy + */ + task: string; + payload: OneOf< + [ + { + [key: string]: unknown; + }, + string, + ] + >; + /** @example staging */ + original_environment?: string; + /** + * @description Name for the target deployment environment. 
+ * @example production + */ + environment: string; + /** @example Deploy request from hubot */ + description: string | null; + creator: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + created_at: string; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/1/statuses + */ + statuses_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example + */ + repository_url: string; + /** + * @description Specifies if the given environment is will no longer exist at some point in the future. Default: false. + * @example true + */ + transient_environment?: boolean; + /** + * @description Specifies if the given environment is one that end-users directly interact with. Default: false. + * @example true + */ + production_environment?: boolean; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + }; + /** + * Workflow Run Usage + * @description Workflow Run Usage + */ + "workflow-run-usage": { + billable: { + UBUNTU?: { + total_ms: number; + jobs: number; + job_runs?: { + job_id: number; + duration_ms: number; + }[]; + }; + MACOS?: { + total_ms: number; + jobs: number; + job_runs?: { + job_id: number; + duration_ms: number; + }[]; + }; + WINDOWS?: { + total_ms: number; + jobs: number; + job_runs?: { + job_id: number; + duration_ms: number; + }[]; + }; + }; + run_duration_ms?: number; + }; + /** + * Workflow + * @description A GitHub Actions workflow + */ + workflow: { + /** @example 5 */ + id: number; + /** @example MDg6V29ya2Zsb3cxMg== */ + node_id: string; + /** @example CI */ + name: string; + /** @example ruby.yaml */ + path: string; + /** + * @example active + * @enum {string} + */ + state: + | "active" + | "deleted" + | "disabled_fork" + | "disabled_inactivity" + | "disabled_manually"; + /** + * Format: date-time + * @example 2019-12-06T14:20:20.000Z + */ + created_at: string; + /** + * Format: date-time + * @example 2019-12-06T14:20:20.000Z + */ + updated_at: string; + /** @example https://api.github.com/repos/actions/setup-ruby/workflows/5 */ + url: string; + /** @example https://github.com/actions/setup-ruby/blob/master/.github/workflows/ruby.yaml */ + html_url: string; + /** @example https://github.com/actions/setup-ruby/workflows/CI/badge.svg */ + badge_url: string; + /** + * Format: date-time + * @example 2019-12-06T14:20:20.000Z + */ + deleted_at?: string; + }; + /** + * Autolink reference + * @description An autolink reference. + */ + autolink: { + /** @example 3 */ + id: number; + /** + * @description The prefix of a key that is linkified. + * @example TICKET- + */ + key_prefix: string; + /** + * @description A template for the target URL that is generated if a key was found. + * @example https://example.com/TICKET?query= + */ + url_template: string; + /** + * @description Whether this autolink reference matches alphanumeric characters. If false, this autolink reference only matches numeric characters. 
+ * @example true + */ + is_alphanumeric: boolean; + }; + /** + * Protected Branch Required Status Check + * @description Protected Branch Required Status Check + */ + "protected-branch-required-status-check": { + url?: string; + enforcement_level?: string; + contexts: string[]; + checks: { + context: string; + app_id: number | null; + }[]; + contexts_url?: string; + strict?: boolean; + }; + /** + * Protected Branch Admin Enforced + * @description Protected Branch Admin Enforced + */ + "protected-branch-admin-enforced": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/enforce_admins + */ + url: string; + /** @example true */ + enabled: boolean; + }; + /** + * Protected Branch Pull Request Review + * @description Protected Branch Pull Request Review + */ + "protected-branch-pull-request-review": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/dismissal_restrictions + */ + url?: string; + dismissal_restrictions?: { + /** @description The list of users with review dismissal access. */ + users?: components["schemas"]["simple-user"][]; + /** @description The list of teams with review dismissal access. */ + teams?: components["schemas"]["team"][]; + /** @description The list of apps with review dismissal access. */ + apps?: components["schemas"]["integration"][]; + /** @example "https://api.github.com/repos/the-org/an-org-repo/branches/master/protection/dismissal_restrictions" */ + url?: string; + /** @example "https://api.github.com/repos/the-org/an-org-repo/branches/master/protection/dismissal_restrictions/users" */ + users_url?: string; + /** @example "https://api.github.com/repos/the-org/an-org-repo/branches/master/protection/dismissal_restrictions/teams" */ + teams_url?: string; + }; + /** @description Allow specific users, teams, or apps to bypass pull request requirements. */ + bypass_pull_request_allowances?: { + /** @description The list of users allowed to bypass pull request requirements. */ + users?: components["schemas"]["simple-user"][]; + /** @description The list of teams allowed to bypass pull request requirements. */ + teams?: components["schemas"]["team"][]; + /** @description The list of apps allowed to bypass pull request requirements. */ + apps?: components["schemas"]["integration"][]; + }; + /** @example true */ + dismiss_stale_reviews: boolean; + /** @example true */ + require_code_owner_reviews: boolean; + /** @example 2 */ + required_approving_review_count?: number; + /** + * @description Whether the most recent push must be approved by someone other than the person who pushed it. 
+ * @default false + * @example true + */ + require_last_push_approval?: boolean; + }; + /** + * Branch Restriction Policy + * @description Branch Restriction Policy + */ + "branch-restriction-policy": { + /** Format: uri */ + url: string; + /** Format: uri */ + users_url: string; + /** Format: uri */ + teams_url: string; + /** Format: uri */ + apps_url: string; + users: { + login?: string; + id?: number; + node_id?: string; + avatar_url?: string; + gravatar_id?: string; + url?: string; + html_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + starred_url?: string; + subscriptions_url?: string; + organizations_url?: string; + repos_url?: string; + events_url?: string; + received_events_url?: string; + type?: string; + site_admin?: boolean; + }[]; + teams: { + id?: number; + node_id?: string; + url?: string; + html_url?: string; + name?: string; + slug?: string; + description?: string | null; + privacy?: string; + notification_setting?: string; + permission?: string; + members_url?: string; + repositories_url?: string; + parent?: string | null; + }[]; + apps: { + id?: number; + slug?: string; + node_id?: string; + owner?: { + login?: string; + id?: number; + node_id?: string; + url?: string; + repos_url?: string; + events_url?: string; + hooks_url?: string; + issues_url?: string; + members_url?: string; + public_members_url?: string; + avatar_url?: string; + description?: string; + /** @example "" */ + gravatar_id?: string; + /** @example "https://github.com/testorg-ea8ec76d71c3af4b" */ + html_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/followers" */ + followers_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/following{/other_user}" */ + following_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/gists{/gist_id}" */ + gists_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/starred{/owner}{/repo}" */ + starred_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/subscriptions" */ + subscriptions_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/orgs" */ + organizations_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/received_events" */ + received_events_url?: string; + /** @example "Organization" */ + type?: string; + /** @example false */ + site_admin?: boolean; + }; + name?: string; + description?: string; + external_url?: string; + html_url?: string; + created_at?: string; + updated_at?: string; + permissions?: { + metadata?: string; + contents?: string; + issues?: string; + single_file?: string; + }; + events?: string[]; + }[]; + }; + /** + * Branch Protection + * @description Branch Protection + */ + "branch-protection": { + url?: string; + enabled?: boolean; + required_status_checks?: components["schemas"]["protected-branch-required-status-check"]; + enforce_admins?: components["schemas"]["protected-branch-admin-enforced"]; + required_pull_request_reviews?: components["schemas"]["protected-branch-pull-request-review"]; + restrictions?: components["schemas"]["branch-restriction-policy"]; + required_linear_history?: { + enabled?: boolean; + }; + allow_force_pushes?: { + enabled?: boolean; + }; + allow_deletions?: { + enabled?: boolean; + }; + block_creations?: { + enabled?: boolean; + }; + required_conversation_resolution?: { + enabled?: boolean; + }; + /** @example "branch/with/protection" */ + name?: 
string; + /** @example "https://api.github.com/repos/owner-79e94e2d36b3fd06a32bb213/AAA_Public_Repo/branches/branch/with/protection/protection" */ + protection_url?: string; + required_signatures?: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_signatures + */ + url: string; + /** @example true */ + enabled: boolean; + }; + /** @description Whether to set the branch as read-only. If this is true, users will not be able to push to the branch. */ + lock_branch?: { + /** @default false */ + enabled?: boolean; + }; + /** @description Whether users can pull changes from upstream when the branch is locked. Set to `true` to allow fork syncing. Set to `false` to prevent fork syncing. */ + allow_fork_syncing?: { + /** @default false */ + enabled?: boolean; + }; + }; + /** + * Short Branch + * @description Short Branch + */ + "short-branch": { + name: string; + commit: { + sha: string; + /** Format: uri */ + url: string; + }; + protected: boolean; + protection?: components["schemas"]["branch-protection"]; + /** Format: uri */ + protection_url?: string; + }; + /** + * Git User + * @description Metaproperties for Git author/committer information. + */ + "nullable-git-user": { + /** @example "Chris Wanstrath" */ + name?: string; + /** @example "chris@ozmm.org" */ + email?: string; + /** @example "2007-10-29T02:42:39.000-07:00" */ + date?: string; + } | null; + /** Verification */ + verification: { + verified: boolean; + reason: string; + payload: string | null; + signature: string | null; + }; + /** + * Diff Entry + * @description Diff Entry + */ + "diff-entry": { + /** @example bbcd538c8e72b8c175046e27cc8f907076331401 */ + sha: string; + /** @example file1.txt */ + filename: string; + /** + * @example added + * @enum {string} + */ + status: + | "added" + | "removed" + | "modified" + | "renamed" + | "copied" + | "changed" + | "unchanged"; + /** @example 103 */ + additions: number; + /** @example 21 */ + deletions: number; + /** @example 124 */ + changes: number; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/blob/6dcb09b5b57875f334f61aebed695e2e4193db5e/file1.txt + */ + blob_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/raw/6dcb09b5b57875f334f61aebed695e2e4193db5e/file1.txt + */ + raw_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/contents/file1.txt?ref=6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + contents_url: string; + /** @example @@ -132,7 +132,7 @@ module Test @@ -1000,7 +1000,7 @@ module Test */ + patch?: string; + /** @example file.txt */ + previous_filename?: string; + }; + /** + * Commit + * @description Commit + */ + commit: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + url: string; + /** @example 6dcb09b5b57875f334f61aebed695e2e4193db5e */ + sha: string; + /** @example MDY6Q29tbWl0NmRjYjA5YjViNTc4NzVmMzM0ZjYxYWViZWQ2OTVlMmU0MTkzZGI1ZQ== */ + node_id: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/commit/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e/comments + */ + comments_url: string; + commit: { + /** + * Format: uri + * @example 
https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + url: string; + author: components["schemas"]["nullable-git-user"]; + committer: components["schemas"]["nullable-git-user"]; + /** @example Fix all the bugs */ + message: string; + /** @example 0 */ + comment_count: number; + tree: { + /** @example 827efc6d56897b048c772eb4087f854f46256132 */ + sha: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/tree/827efc6d56897b048c772eb4087f854f46256132 + */ + url: string; + }; + verification?: components["schemas"]["verification"]; + }; + author: components["schemas"]["nullable-simple-user"]; + committer: components["schemas"]["nullable-simple-user"]; + parents: { + /** @example 7638417db6d59f3c431d3e1f261cc637155684cd */ + sha: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/commits/7638417db6d59f3c431d3e1f261cc637155684cd + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/commit/7638417db6d59f3c431d3e1f261cc637155684cd + */ + html_url?: string; + }[]; + stats?: { + additions?: number; + deletions?: number; + total?: number; + }; + files?: components["schemas"]["diff-entry"][]; + }; + /** + * Branch With Protection + * @description Branch With Protection + */ + "branch-with-protection": { + name: string; + commit: components["schemas"]["commit"]; + _links: { + html: string; + /** Format: uri */ + self: string; + }; + protected: boolean; + protection: components["schemas"]["branch-protection"]; + /** Format: uri */ + protection_url: string; + /** @example "mas*" */ + pattern?: string; + /** @example 1 */ + required_approving_review_count?: number; + }; + /** + * Status Check Policy + * @description Status Check Policy + */ + "status-check-policy": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_status_checks + */ + url: string; + /** @example true */ + strict: boolean; + /** + * @example [ + * "continuous-integration/travis-ci" + * ] + */ + contexts: string[]; + checks: { + /** @example continuous-integration/travis-ci */ + context: string; + app_id: number | null; + }[]; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_status_checks/contexts + */ + contexts_url: string; + }; + /** + * Protected Branch + * @description Branch protections protect branches + */ + "protected-branch": { + /** Format: uri */ + url: string; + required_status_checks?: components["schemas"]["status-check-policy"]; + required_pull_request_reviews?: { + /** Format: uri */ + url: string; + dismiss_stale_reviews?: boolean; + require_code_owner_reviews?: boolean; + required_approving_review_count?: number; + /** + * @description Whether the most recent push must be approved by someone other than the person who pushed it. 
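The `status-check-policy` and `branch-with-protection` schemas above are plain data shapes; a consumer would typically alias them and read fields off a decoded API response. A minimal TypeScript sketch follows; the `./generated-types` import path and the helper names are illustrative assumptions, and only the schema names and fields come from the definitions in this file.

// Hypothetical import path for the generated definitions in this diff.
import type { components } from "./generated-types";

type StatusCheckPolicy = components["schemas"]["status-check-policy"];
type BranchWithProtection = components["schemas"]["branch-with-protection"];

// Summarize which checks a protected branch requires before merging.
function describeStatusChecks(policy: StatusCheckPolicy): string {
  const names = policy.checks.map((check) => check.context);
  return `strict=${policy.strict}, checks=[${names.join(", ")}]`;
}

// `name`, `commit.sha` and `protection_url` are all required on this schema.
function branchSummary(branch: BranchWithProtection): string {
  return `${branch.name}@${branch.commit.sha} (${branch.protection_url})`;
}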
+ * @default false + */ + require_last_push_approval?: boolean; + dismissal_restrictions?: { + /** Format: uri */ + url: string; + /** Format: uri */ + users_url: string; + /** Format: uri */ + teams_url: string; + users: components["schemas"]["simple-user"][]; + teams: components["schemas"]["team"][]; + apps?: components["schemas"]["integration"][]; + }; + bypass_pull_request_allowances?: { + users: components["schemas"]["simple-user"][]; + teams: components["schemas"]["team"][]; + apps?: components["schemas"]["integration"][]; + }; + }; + required_signatures?: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_signatures + */ + url: string; + /** @example true */ + enabled: boolean; + }; + enforce_admins?: { + /** Format: uri */ + url: string; + enabled: boolean; + }; + required_linear_history?: { + enabled: boolean; + }; + allow_force_pushes?: { + enabled: boolean; + }; + allow_deletions?: { + enabled: boolean; + }; + restrictions?: components["schemas"]["branch-restriction-policy"]; + required_conversation_resolution?: { + enabled?: boolean; + }; + block_creations?: { + enabled: boolean; + }; + /** @description Whether to set the branch as read-only. If this is true, users will not be able to push to the branch. */ + lock_branch?: { + /** @default false */ + enabled?: boolean; + }; + /** @description Whether users can pull changes from upstream when the branch is locked. Set to `true` to allow fork syncing. Set to `false` to prevent fork syncing. */ + allow_fork_syncing?: { + /** @default false */ + enabled?: boolean; + }; + }; + /** + * Deployment + * @description A deployment created as the result of an Actions check run from a workflow that references an environment + */ + "deployment-simple": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/1 + */ + url: string; + /** + * @description Unique identifier of the deployment + * @example 42 + */ + id: number; + /** @example MDEwOkRlcGxveW1lbnQx */ + node_id: string; + /** + * @description Parameter to specify a task to execute + * @example deploy + */ + task: string; + /** @example staging */ + original_environment?: string; + /** + * @description Name for the target deployment environment. + * @example production + */ + environment: string; + /** @example Deploy request from hubot */ + description: string | null; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + created_at: string; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/1/statuses + */ + statuses_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example + */ + repository_url: string; + /** + * @description Specifies if the given environment is will no longer exist at some point in the future. Default: false. + * @example true + */ + transient_environment?: boolean; + /** + * @description Specifies if the given environment is one that end-users directly interact with. Default: false. + * @example true + */ + production_environment?: boolean; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + }; + /** + * CheckRun + * @description A check performed on the code of a given code change + */ + "check-run": { + /** + * @description The id of the check. + * @example 21 + */ + id: number; + /** + * @description The SHA of the commit that is being checked. 
+ * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha: string; + /** @example MDg6Q2hlY2tSdW40 */ + node_id: string; + /** @example 42 */ + external_id: string | null; + /** @example https://api.github.com/repos/github/hello-world/check-runs/4 */ + url: string; + /** @example https://github.com/github/hello-world/runs/4 */ + html_url: string | null; + /** @example https://example.com */ + details_url: string | null; + /** + * @description The phase of the lifecycle that the check is currently in. + * @example queued + * @enum {string} + */ + status: "queued" | "in_progress" | "completed"; + /** + * @example neutral + * @enum {string|null} + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "skipped" + | "timed_out" + | "action_required" + | null; + /** + * Format: date-time + * @example 2018-05-04T01:14:52Z + */ + started_at: string | null; + /** + * Format: date-time + * @example 2018-05-04T01:14:52Z + */ + completed_at: string | null; + output: { + title: string | null; + summary: string | null; + text: string | null; + annotations_count: number; + /** Format: uri */ + annotations_url: string; + }; + /** + * @description The name of the check. + * @example test-coverage + */ + name: string; + check_suite: { + id: number; + } | null; + app: components["schemas"]["nullable-integration"]; + pull_requests: components["schemas"]["pull-request-minimal"][]; + deployment?: components["schemas"]["deployment-simple"]; + }; + /** + * Check Annotation + * @description Check Annotation + */ + "check-annotation": { + /** @example README.md */ + path: string; + /** @example 2 */ + start_line: number; + /** @example 2 */ + end_line: number; + /** @example 5 */ + start_column: number | null; + /** @example 10 */ + end_column: number | null; + /** @example warning */ + annotation_level: string | null; + /** @example Spell Checker */ + title: string | null; + /** @example Check your spelling for 'banaas'. */ + message: string | null; + /** @example Do you mean 'bananas' or 'banana'? */ + raw_details: string | null; + blob_href: string; + }; + /** + * Simple Commit + * @description A commit. 
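Because `check-run.conclusion` is only meaningful once `status` is `"completed"`, consumers usually branch on both fields. A small hedged sketch, assuming the generated types are importable from a local `./generated-types` module and that the helper names are placeholders:

import type { components } from "./generated-types"; // hypothetical path

type CheckRun = components["schemas"]["check-run"];

// A run counts as successful only when it has completed with a "success" conclusion.
function isSuccessful(run: CheckRun): boolean {
  return run.status === "completed" && run.conclusion === "success";
}

// Group runs by conclusion, treating a null conclusion (not yet concluded) as "pending".
function byConclusion(runs: CheckRun[]): Map<string, CheckRun[]> {
  const groups = new Map<string, CheckRun[]>();
  for (const run of runs) {
    const key = run.conclusion ?? "pending";
    const bucket = groups.get(key) ?? [];
    bucket.push(run);
    groups.set(key, bucket);
  }
  return groups;
}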
+ */ + "simple-commit": { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + id: string; + /** @description SHA for the commit's tree */ + tree_id: string; + /** + * @description Message describing the purpose of the commit + * @example Fix #42 + */ + message: string; + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + timestamp: string; + /** @description Information about the Git author */ + author: { + /** + * @description Name of the commit's author + * @example Monalisa Octocat + */ + name: string; + /** + * Format: email + * @description Git email address of the commit's author + * @example monalisa.octocat@example.com + */ + email: string; + } | null; + /** @description Information about the Git committer */ + committer: { + /** + * @description Name of the commit's committer + * @example Monalisa Octocat + */ + name: string; + /** + * Format: email + * @description Git email address of the commit's committer + * @example monalisa.octocat@example.com + */ + email: string; + } | null; + }; + /** + * CheckSuite + * @description A suite of checks performed on the code of a given code change + */ + "check-suite": { + /** @example 5 */ + id: number; + /** @example MDEwOkNoZWNrU3VpdGU1 */ + node_id: string; + /** @example master */ + head_branch: string | null; + /** + * @description The SHA of the head commit that is being checked. + * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha: string; + /** + * @example completed + * @enum {string|null} + */ + status: "queued" | "in_progress" | "completed" | null; + /** + * @example neutral + * @enum {string|null} + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "skipped" + | "timed_out" + | "action_required" + | "startup_failure" + | "stale" + | "" + | null; + /** @example https://api.github.com/repos/github/hello-world/check-suites/5 */ + url: string | null; + /** @example 146e867f55c26428e5f9fade55a9bbf5e95a7912 */ + before: string | null; + /** @example d6fde92930d4715a2b49857d24b940956b26d2d3 */ + after: string | null; + pull_requests: components["schemas"]["pull-request-minimal"][] | null; + app: components["schemas"]["nullable-integration"]; + repository: components["schemas"]["minimal-repository"]; + /** Format: date-time */ + created_at: string | null; + /** Format: date-time */ + updated_at: string | null; + head_commit: components["schemas"]["simple-commit"]; + latest_check_runs_count: number; + check_runs_url: string; + rerequestable?: boolean; + runs_rerequestable?: boolean; + }; + /** + * Check Suite Preference + * @description Check suite configuration preferences for a repository. + */ + "check-suite-preference": { + preferences: { + auto_trigger_checks?: { + app_id: number; + setting: boolean; + }[]; + }; + repository: components["schemas"]["minimal-repository"]; + }; + "code-scanning-alert-rule-summary": { + /** @description A unique identifier for the rule used to detect the alert. */ + id?: string | null; + /** @description The name of the rule used to detect the alert. */ + name?: string; + /** @description A set of tags applicable for the rule. */ + tags?: string[] | null; + /** + * @description The severity of the alert. + * @enum {string|null} + */ + severity?: "none" | "note" | "warning" | "error" | null; + /** @description A short description of the rule used to detect the alert. 
*/ + description?: string; + }; + "code-scanning-alert-items": { + number: components["schemas"]["alert-number"]; + created_at: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["alert-updated-at"]; + url: components["schemas"]["alert-url"]; + html_url: components["schemas"]["alert-html-url"]; + instances_url: components["schemas"]["alert-instances-url"]; + state: components["schemas"]["code-scanning-alert-state"]; + fixed_at?: components["schemas"]["alert-fixed-at"]; + dismissed_by: components["schemas"]["nullable-simple-user"]; + dismissed_at: components["schemas"]["alert-dismissed-at"]; + dismissed_reason: components["schemas"]["code-scanning-alert-dismissed-reason"]; + dismissed_comment?: components["schemas"]["code-scanning-alert-dismissed-comment"]; + rule: components["schemas"]["code-scanning-alert-rule-summary"]; + tool: components["schemas"]["code-scanning-analysis-tool"]; + most_recent_instance: components["schemas"]["code-scanning-alert-instance"]; + }; + "code-scanning-alert": { + number: components["schemas"]["alert-number"]; + created_at: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["alert-updated-at"]; + url: components["schemas"]["alert-url"]; + html_url: components["schemas"]["alert-html-url"]; + instances_url: components["schemas"]["alert-instances-url"]; + state: components["schemas"]["code-scanning-alert-state"]; + fixed_at?: components["schemas"]["alert-fixed-at"]; + dismissed_by: components["schemas"]["nullable-simple-user"]; + dismissed_at: components["schemas"]["alert-dismissed-at"]; + dismissed_reason: components["schemas"]["code-scanning-alert-dismissed-reason"]; + dismissed_comment?: components["schemas"]["code-scanning-alert-dismissed-comment"]; + rule: components["schemas"]["code-scanning-alert-rule"]; + tool: components["schemas"]["code-scanning-analysis-tool"]; + most_recent_instance: components["schemas"]["code-scanning-alert-instance"]; + }; + /** + * @description Sets the state of the code scanning alert. You must provide `dismissed_reason` when you set the state to `dismissed`. + * @enum {string} + */ + "code-scanning-alert-set-state": "open" | "dismissed"; + /** + * @description An identifier for the upload. + * @example 6c81cd8e-b078-4ac3-a3be-1dad7dbd0b53 + */ + "code-scanning-analysis-sarif-id": string; + /** @description The SHA of the commit to which the analysis you are uploading relates. */ + "code-scanning-analysis-commit-sha": string; + /** @description Identifies the variable values associated with the environment in which this analysis was performed. */ + "code-scanning-analysis-environment": string; + /** + * Format: date-time + * @description The time that the analysis was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + readonly "code-scanning-analysis-created-at": string; + /** + * Format: uri + * @description The REST API URL of the analysis resource. 
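The `code-scanning-alert-items` schema pairs each alert with a `code-scanning-alert-rule-summary`, so severity filtering goes through the nested `rule` object. A sketch under the same assumptions (hypothetical `./generated-types` path, illustrative helper names):

import type { components } from "./generated-types"; // hypothetical path

type CodeScanningAlert = components["schemas"]["code-scanning-alert-items"];

// Keep alerts whose rule summary is classified as an error; `severity` may also be
// "none", "note", "warning", null, or absent.
function errorAlerts(alerts: CodeScanningAlert[]): CodeScanningAlert[] {
  return alerts.filter((alert) => alert.rule.severity === "error");
}

// Build a short label from the optional rule name/id and the required alert number.
function label(alert: CodeScanningAlert): string {
  return `${alert.rule.name ?? alert.rule.id ?? "unknown rule"} (#${alert.number})`;
}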
+ */ + readonly "code-scanning-analysis-url": string; + "code-scanning-analysis": { + ref: components["schemas"]["code-scanning-ref"]; + commit_sha: components["schemas"]["code-scanning-analysis-commit-sha"]; + analysis_key: components["schemas"]["code-scanning-analysis-analysis-key"]; + environment: components["schemas"]["code-scanning-analysis-environment"]; + category?: components["schemas"]["code-scanning-analysis-category"]; + /** @example error reading field xyz */ + error: string; + created_at: components["schemas"]["code-scanning-analysis-created-at"]; + /** @description The total number of results in the analysis. */ + results_count: number; + /** @description The total number of rules used in the analysis. */ + rules_count: number; + /** @description Unique identifier for this analysis. */ + id: number; + url: components["schemas"]["code-scanning-analysis-url"]; + sarif_id: components["schemas"]["code-scanning-analysis-sarif-id"]; + tool: components["schemas"]["code-scanning-analysis-tool"]; + deletable: boolean; + /** + * @description Warning generated when processing the analysis + * @example 123 results were ignored + */ + warning: string; + }; + /** + * Analysis deletion + * @description Successful deletion of a code scanning analysis + */ + "code-scanning-analysis-deletion": { + /** + * Format: uri + * @description Next deletable analysis in chain, without last analysis deletion confirmation + */ + next_analysis_url: string | null; + /** + * Format: uri + * @description Next deletable analysis in chain, with last analysis deletion confirmation + */ + confirm_delete_url: string | null; + }; + /** + * CodeQL Database + * @description A CodeQL database. + */ + "code-scanning-codeql-database": { + /** @description The ID of the CodeQL database. */ + id: number; + /** @description The name of the CodeQL database. */ + name: string; + /** @description The language of the CodeQL database. */ + language: string; + uploader: components["schemas"]["simple-user"]; + /** @description The MIME type of the CodeQL database file. */ + content_type: string; + /** @description The size of the CodeQL database file in bytes. */ + size: number; + /** + * Format: date-time + * @description The date and time at which the CodeQL database was created, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + */ + created_at: string; + /** + * Format: date-time + * @description The date and time at which the CodeQL database was last updated, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + */ + updated_at: string; + /** + * Format: uri + * @description The URL at which to download the CodeQL database. The `Accept` header must be set to the value of the `content_type` property. + */ + url: string; + }; + /** @description Configuration for code scanning default setup. */ + "code-scanning-default-setup": { + /** + * @description Code scanning default setup has been configured or not. + * @enum {string} + */ + state?: "configured" | "not-configured"; + /** @description Languages to be analysed. */ + languages?: ( + | "c-cpp" + | "csharp" + | "go" + | "java-kotlin" + | "javascript-typescript" + | "javascript" + | "python" + | "ruby" + | "typescript" + )[]; + /** + * @description CodeQL query suite to be used. + * @enum {string} + */ + query_suite?: "default" | "extended"; + /** + * Format: date-time + * @description Timestamp of latest configuration update. + * @example 2023-12-06T14:20:20.000Z + */ + updated_at?: string | null; + }; + /** @description Configuration for code scanning default setup. 
*/ + "code-scanning-default-setup-update": { + /** + * @description Whether code scanning default setup has been configured or not. + * @enum {string} + */ + state: "configured" | "not-configured"; + /** + * @description CodeQL query suite to be used. + * @enum {string} + */ + query_suite?: "default" | "extended"; + }; + /** + * @description You can use `run_url` to track the status of the run. This includes a property status and conclusion. + * You should not rely on this always being an actions workflow run object. + */ + "code-scanning-default-setup-update-response": { + /** @description ID of the corresponding run. */ + run_id?: number; + /** @description URL of the corresponding run. */ + run_url?: string; + }; + /** @description A Base64 string representing the SARIF file to upload. You must first compress your SARIF file using [`gzip`](http://www.gnu.org/software/gzip/manual/gzip.html) and then translate the contents of the file into a Base64 encoding string. For more information, see "[SARIF support for code scanning](https://docs.github.com/code-security/secure-coding/sarif-support-for-code-scanning)." */ + "code-scanning-analysis-sarif-file": string; + "code-scanning-sarifs-receipt": { + id?: components["schemas"]["code-scanning-analysis-sarif-id"]; + /** + * Format: uri + * @description The REST API URL for checking the status of the upload. + */ + url?: string; + }; + "code-scanning-sarifs-status": { + /** + * @description `pending` files have not yet been processed, while `complete` means results from the SARIF have been stored. `failed` files have either not been processed at all, or could only be partially processed. + * @enum {string} + */ + processing_status?: "pending" | "complete" | "failed"; + /** + * Format: uri + * @description The REST API URL for getting the analyses associated with the upload. + */ + analyses_url?: string | null; + /** @description Any errors that ocurred during processing of the delivery. */ + errors?: readonly string[] | null; + }; + /** + * CODEOWNERS errors + * @description A list of errors found in a repo's CODEOWNERS file + */ + "codeowners-errors": { + errors: { + /** + * @description The line number where this errors occurs. + * @example 7 + */ + line: number; + /** + * @description The column number where this errors occurs. + * @example 3 + */ + column: number; + /** + * @description The contents of the line where the error occurs. + * @example * user + */ + source?: string; + /** + * @description The type of error. + * @example Invalid owner + */ + kind: string; + /** + * @description Suggested action to fix the error. This will usually be `null`, but is provided for some common errors. + * @example The pattern `/` will never match anything, did you mean `*` instead? + */ + suggestion?: string | null; + /** + * @description A human-readable description of the error, combining information from multiple fields, laid out for display in a monospaced typeface (for example, a command-line setting). + * @example Invalid owner on line 7: + * + * * user + * ^ + */ + message: string; + /** + * @description The path of the file where the error occured. + * @example .github/CODEOWNERS + */ + path: string; + }[]; + }; + /** + * Codespace machine + * @description A description of the machine powering a codespace. + */ + "codespace-machine": { + /** + * @description The name of the machine. + * @example standardLinux + */ + name: string; + /** + * @description The display name of the machine includes cores, memory, and storage. 
+ * @example 4 cores, 8 GB RAM, 64 GB storage + */ + display_name: string; + /** + * @description The operating system of the machine. + * @example linux + */ + operating_system: string; + /** + * @description How much storage is available to the codespace. + * @example 68719476736 + */ + storage_in_bytes: number; + /** + * @description How much memory is available to the codespace. + * @example 8589934592 + */ + memory_in_bytes: number; + /** + * @description How many cores are available to the codespace. + * @example 4 + */ + cpus: number; + /** + * @description Whether a prebuild is currently available when creating a codespace for this machine and repository. If a branch was not specified as a ref, the default branch will be assumed. Value will be "null" if prebuilds are not supported or prebuild availability could not be determined. Value will be "none" if no prebuild is available. Latest values "ready" and "in_progress" indicate the prebuild availability status. + * @example ready + * @enum {string|null} + */ + prebuild_availability: "none" | "ready" | "in_progress" | null; + }; + /** + * Codespaces Secret + * @description Set repository secrets for GitHub Codespaces. + */ + "repo-codespaces-secret": { + /** + * @description The name of the secret. + * @example SECRET_TOKEN + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Collaborator + * @description Collaborator + */ + collaborator: { + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + email?: string | null; + name?: string | null; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + permissions?: { + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + admin: boolean; + }; + /** @example admin */ + role_name: string; + }; + /** + * Repository Invitation + * @description Repository invitations let you manage who you collaborate with. + */ + "repository-invitation": { + /** + * @description Unique identifier of the repository invitation. 
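On the `collaborator` schema, the fine-grained `permissions` object is optional while `role_name` is always present, so permission checks usually try one and fall back to the other. A hedged sketch (hypothetical import path, illustrative role-name mapping):

import type { components } from "./generated-types"; // hypothetical path

type Collaborator = components["schemas"]["collaborator"];

// Prefer the explicit permissions object; otherwise infer push access from the
// role name (the write, maintain and admin roles can push).
function canPush(user: Collaborator): boolean {
  if (user.permissions) {
    return user.permissions.push || user.permissions.maintain === true || user.permissions.admin;
  }
  return ["write", "maintain", "admin"].includes(user.role_name);
}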
+ * @example 42 + */ + id: number; + repository: components["schemas"]["minimal-repository"]; + invitee: components["schemas"]["nullable-simple-user"]; + inviter: components["schemas"]["nullable-simple-user"]; + /** + * @description The permission associated with the invitation. + * @example read + * @enum {string} + */ + permissions: "read" | "write" | "admin" | "triage" | "maintain"; + /** + * Format: date-time + * @example 2016-06-13T14:52:50-05:00 + */ + created_at: string; + /** @description Whether or not the invitation has expired */ + expired?: boolean; + /** + * @description URL for the repository invitation + * @example https://api.github.com/user/repository-invitations/1 + */ + url: string; + /** @example https://github.com/octocat/Hello-World/invitations */ + html_url: string; + node_id: string; + }; + /** + * Collaborator + * @description Collaborator + */ + "nullable-collaborator": { + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + email?: string | null; + name?: string | null; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + permissions?: { + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + admin: boolean; + }; + /** @example admin */ + role_name: string; + } | null; + /** + * Repository Collaborator Permission + * @description Repository Collaborator Permission + */ + "repository-collaborator-permission": { + permission: string; + /** @example admin */ + role_name: string; + user: components["schemas"]["nullable-collaborator"]; + }; + /** + * Commit Comment + * @description Commit Comment + */ + "commit-comment": { + /** Format: uri */ + html_url: string; + /** Format: uri */ + url: string; + id: number; + node_id: string; + body: string; + path: string | null; + position: number | null; + line: number | null; + commit_id: string; + user: components["schemas"]["nullable-simple-user"]; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + author_association: components["schemas"]["author-association"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Branch Short + * @description 
Branch Short + */ + "branch-short": { + name: string; + commit: { + sha: string; + url: string; + }; + protected: boolean; + }; + /** + * Link + * @description Hypermedia Link + */ + link: { + href: string; + }; + /** + * Auto merge + * @description The status of auto merging a pull request. + */ + "auto-merge": { + enabled_by: components["schemas"]["simple-user"]; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + /** @description Title for the merge commit message. */ + commit_title: string; + /** @description Commit message for the merge commit. */ + commit_message: string; + } | null; + /** + * Pull Request Simple + * @description Pull Request Simple + */ + "pull-request-simple": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347 + */ + url: string; + /** @example 1 */ + id: number; + /** @example MDExOlB1bGxSZXF1ZXN0MQ== */ + node_id: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347 + */ + html_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347.diff + */ + diff_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347.patch + */ + patch_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347 + */ + issue_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits + */ + commits_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments + */ + review_comments_url: string; + /** @example https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number} */ + review_comment_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347/comments + */ + comments_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + statuses_url: string; + /** @example 1347 */ + number: number; + /** @example open */ + state: string; + /** @example true */ + locked: boolean; + /** @example new-feature */ + title: string; + user: components["schemas"]["nullable-simple-user"]; + /** @example Please pull these awesome changes */ + body: string | null; + labels: { + /** Format: int64 */ + id: number; + node_id: string; + url: string; + name: string; + description: string; + color: string; + default: boolean; + }[]; + milestone: components["schemas"]["nullable-milestone"]; + /** @example too heated */ + active_lock_reason?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + closed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + merged_at: string | null; + /** @example e5bd3914e2e596debea16f433f57875b5b90bcd6 */ + merge_commit_sha: string | null; + assignee: components["schemas"]["nullable-simple-user"]; + assignees?: components["schemas"]["simple-user"][] | null; + requested_reviewers?: components["schemas"]["simple-user"][] | null; + requested_teams?: components["schemas"]["team"][] | null; + head: { + label: string; + ref: string; + repo: components["schemas"]["repository"]; + sha: string; + user: 
components["schemas"]["nullable-simple-user"]; + }; + base: { + label: string; + ref: string; + repo: components["schemas"]["repository"]; + sha: string; + user: components["schemas"]["nullable-simple-user"]; + }; + _links: { + comments: components["schemas"]["link"]; + commits: components["schemas"]["link"]; + statuses: components["schemas"]["link"]; + html: components["schemas"]["link"]; + issue: components["schemas"]["link"]; + review_comments: components["schemas"]["link"]; + review_comment: components["schemas"]["link"]; + self: components["schemas"]["link"]; + }; + author_association: components["schemas"]["author-association"]; + auto_merge: components["schemas"]["auto-merge"]; + /** + * @description Indicates whether or not the pull request is a draft. + * @example false + */ + draft?: boolean; + }; + /** Simple Commit Status */ + "simple-commit-status": { + description: string | null; + id: number; + node_id: string; + state: string; + context: string; + /** Format: uri */ + target_url: string | null; + required?: boolean | null; + /** Format: uri */ + avatar_url: string | null; + /** Format: uri */ + url: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Combined Commit Status + * @description Combined Commit Status + */ + "combined-commit-status": { + state: string; + statuses: components["schemas"]["simple-commit-status"][]; + sha: string; + total_count: number; + repository: components["schemas"]["minimal-repository"]; + /** Format: uri */ + commit_url: string; + /** Format: uri */ + url: string; + }; + /** + * Status + * @description The status of a commit. + */ + status: { + url: string; + avatar_url: string | null; + id: number; + node_id: string; + state: string; + description: string | null; + target_url: string | null; + context: string; + created_at: string; + updated_at: string; + creator: components["schemas"]["nullable-simple-user"]; + }; + /** + * Code Of Conduct Simple + * @description Code of Conduct Simple + */ + "nullable-code-of-conduct-simple": { + /** + * Format: uri + * @example https://api.github.com/repos/github/docs/community/code_of_conduct + */ + url: string; + /** @example citizen_code_of_conduct */ + key: string; + /** @example Citizen Code of Conduct */ + name: string; + /** + * Format: uri + * @example https://github.com/github/docs/blob/main/CODE_OF_CONDUCT.md + */ + html_url: string | null; + } | null; + /** Community Health File */ + "nullable-community-health-file": { + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + } | null; + /** + * Community Profile + * @description Community Profile + */ + "community-profile": { + /** @example 100 */ + health_percentage: number; + /** @example My first repository on GitHub! 
*/ + description: string | null; + /** @example example.com */ + documentation: string | null; + files: { + code_of_conduct: components["schemas"]["nullable-code-of-conduct-simple"]; + code_of_conduct_file: components["schemas"]["nullable-community-health-file"]; + license: components["schemas"]["nullable-license-simple"]; + contributing: components["schemas"]["nullable-community-health-file"]; + readme: components["schemas"]["nullable-community-health-file"]; + issue_template: components["schemas"]["nullable-community-health-file"]; + pull_request_template: components["schemas"]["nullable-community-health-file"]; + }; + /** + * Format: date-time + * @example 2017-02-28T19:09:29Z + */ + updated_at: string | null; + /** @example true */ + content_reports_enabled?: boolean; + }; + /** + * Commit Comparison + * @description Commit Comparison + */ + "commit-comparison": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/compare/master...topic + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/compare/master...topic + */ + html_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/compare/octocat:bbcd538c8e72b8c175046e27cc8f907076331401...octocat:0328041d1152db8ae77652d1618a02e57f745f17 + */ + permalink_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/compare/master...topic.diff + */ + diff_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/compare/master...topic.patch + */ + patch_url: string; + base_commit: components["schemas"]["commit"]; + merge_base_commit: components["schemas"]["commit"]; + /** + * @example ahead + * @enum {string} + */ + status: "diverged" | "ahead" | "behind" | "identical"; + /** @example 4 */ + ahead_by: number; + /** @example 5 */ + behind_by: number; + /** @example 6 */ + total_commits: number; + commits: components["schemas"]["commit"][]; + files?: components["schemas"]["diff-entry"][]; + }; + /** + * Content Tree + * @description Content Tree + */ + "content-tree": { + type: string; + size: number; + name: string; + path: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + entries?: { + type: string; + size: number; + name: string; + path: string; + content?: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + }[]; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + }; + /** + * Content Directory + * @description A list of directory items + */ + "content-directory": { + /** @enum {string} */ + type: "dir" | "file" | "submodule" | "symlink"; + size: number; + name: string; + path: string; + content?: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + }[]; + 
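The `commit-comparison` schema constrains `status` to four literal values, which makes an exhaustive switch the natural way to render it. An illustrative sketch (hypothetical import path):

import type { components } from "./generated-types"; // hypothetical path

type CommitComparison = components["schemas"]["commit-comparison"];

// Render the relationship between base and head using the required counters.
function describeComparison(cmp: CommitComparison): string {
  switch (cmp.status) {
    case "identical":
      return "identical";
    case "ahead":
      return `ahead by ${cmp.ahead_by}`;
    case "behind":
      return `behind by ${cmp.behind_by}`;
    case "diverged":
      return `ahead by ${cmp.ahead_by}, behind by ${cmp.behind_by}`;
  }
}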
/** + * Content File + * @description Content File + */ + "content-file": { + /** @enum {string} */ + type: "file"; + encoding: string; + size: number; + name: string; + path: string; + content: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + /** @example "actual/actual.md" */ + target?: string; + /** @example "git://example.com/defunkt/dotjs.git" */ + submodule_git_url?: string; + }; + /** + * Symlink Content + * @description An object describing a symlink + */ + "content-symlink": { + /** @enum {string} */ + type: "symlink"; + target: string; + size: number; + name: string; + path: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + }; + /** + * Submodule Content + * @description An object describing a submodule + */ + "content-submodule": { + /** @enum {string} */ + type: "submodule"; + /** Format: uri */ + submodule_git_url: string; + size: number; + name: string; + path: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + }; + /** + * File Commit + * @description File Commit + */ + "file-commit": { + content: { + name?: string; + path?: string; + sha?: string; + size?: number; + url?: string; + html_url?: string; + git_url?: string; + download_url?: string; + type?: string; + _links?: { + self?: string; + git?: string; + html?: string; + }; + } | null; + commit: { + sha?: string; + node_id?: string; + url?: string; + html_url?: string; + author?: { + date?: string; + name?: string; + email?: string; + }; + committer?: { + date?: string; + name?: string; + email?: string; + }; + message?: string; + tree?: { + url?: string; + sha?: string; + }; + parents?: { + url?: string; + html_url?: string; + sha?: string; + }[]; + verification?: { + verified?: boolean; + reason?: string; + signature?: string | null; + payload?: string | null; + }; + }; + }; + /** + * Contributor + * @description Contributor + */ + contributor: { + login?: string; + id?: number; + node_id?: string; + /** Format: uri */ + avatar_url?: string; + gravatar_id?: string | null; + /** Format: uri */ + url?: string; + /** Format: uri */ + html_url?: string; + /** Format: uri */ + followers_url?: string; + following_url?: string; + gists_url?: string; + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + repos_url?: string; + events_url?: string; + /** Format: uri */ + received_events_url?: string; + type: string; + site_admin?: boolean; + contributions: number; + email?: string; + name?: string; + }; + /** @description A Dependabot alert. 
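`content-file.content` carries the file body in the encoding named by `encoding` (typically Base64), so consumers decode before use. A small sketch, assuming a Node runtime and the same hypothetical import path:

import { Buffer } from "node:buffer";
import type { components } from "./generated-types"; // hypothetical path

type ContentFile = components["schemas"]["content-file"];

// Decode Base64 bodies to UTF-8 text; pass other encodings through unchanged.
function decodeContent(file: ContentFile): string {
  return file.encoding === "base64"
    ? Buffer.from(file.content, "base64").toString("utf8")
    : file.content;
}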
*/ + "dependabot-alert": { + number: components["schemas"]["alert-number"]; + /** + * @description The state of the Dependabot alert. + * @enum {string} + */ + state: "auto_dismissed" | "dismissed" | "fixed" | "open"; + /** @description Details for the vulnerable dependency. */ + dependency: { + readonly package?: components["schemas"]["dependabot-alert-package"]; + /** @description The full path to the dependency manifest file, relative to the root of the repository. */ + readonly manifest_path?: string; + /** + * @description The execution scope of the vulnerable dependency. + * @enum {string|null} + */ + readonly scope?: "development" | "runtime" | null; + }; + security_advisory: components["schemas"]["dependabot-alert-security-advisory"]; + security_vulnerability: components["schemas"]["dependabot-alert-security-vulnerability"]; + url: components["schemas"]["alert-url"]; + html_url: components["schemas"]["alert-html-url"]; + created_at: components["schemas"]["alert-created-at"]; + updated_at: components["schemas"]["alert-updated-at"]; + dismissed_at: components["schemas"]["alert-dismissed-at"]; + dismissed_by: components["schemas"]["nullable-simple-user"]; + /** + * @description The reason that the alert was dismissed. + * @enum {string|null} + */ + dismissed_reason: + | "fix_started" + | "inaccurate" + | "no_bandwidth" + | "not_used" + | "tolerable_risk" + | null; + /** @description An optional comment associated with the alert's dismissal. */ + dismissed_comment: string | null; + fixed_at: components["schemas"]["alert-fixed-at"]; + auto_dismissed_at?: components["schemas"]["alert-auto-dismissed-at"]; + }; + /** + * Dependabot Secret + * @description Set secrets for Dependabot. + */ + "dependabot-secret": { + /** + * @description The name of the secret. + * @example MY_ARTIFACTORY_PASSWORD + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Dependency Graph Diff + * @description A diff of the dependencies between two commits. + */ + "dependency-graph-diff": { + /** @enum {string} */ + change_type: "added" | "removed"; + /** @example path/to/package-lock.json */ + manifest: string; + /** @example npm */ + ecosystem: string; + /** @example @actions/core */ + name: string; + /** @example 1.0.0 */ + version: string; + /** @example pkg:/npm/%40actions/core@1.1.0 */ + package_url: string | null; + /** @example MIT */ + license: string | null; + /** @example https://github.com/github/actions */ + source_repository_url: string | null; + vulnerabilities: { + /** @example critical */ + severity: string; + /** @example GHSA-rf4j-j272-fj86 */ + advisory_ghsa_id: string; + /** @example A summary of the advisory. */ + advisory_summary: string; + /** @example https://github.com/advisories/GHSA-rf4j-j272-fj86 */ + advisory_url: string; + }[]; + /** + * @description Where the dependency is utilized. `development` means that the dependency is only utilized in the development environment. `runtime` means that the dependency is utilized at runtime and in the development environment. + * @enum {string} + */ + scope: "unknown" | "runtime" | "development"; + }[]; + /** + * Dependency Graph SPDX SBOM + * @description A schema for the SPDX JSON format returned by the Dependency Graph. + */ + "dependency-graph-spdx-sbom": { + sbom: { + /** + * @description The SPDX identifier for the SPDX document. 
+ * @example SPDXRef-DOCUMENT + */ + SPDXID: string; + /** + * @description The version of the SPDX specification that this document conforms to. + * @example SPDX-2.3 + */ + spdxVersion: string; + creationInfo: { + /** + * @description The date and time the SPDX document was created. + * @example 2021-11-03T00:00:00Z + */ + created: string; + /** @description The tools that were used to generate the SPDX document. */ + creators: string[]; + }; + /** + * @description The name of the SPDX document. + * @example github/github + */ + name: string; + /** + * @description The license under which the SPDX document is licensed. + * @example CC0-1.0 + */ + dataLicense: string; + /** @description The name of the repository that the SPDX document describes. */ + documentDescribes: string[]; + /** + * @description The namespace for the SPDX document. + * @example https://github.com/example/dependency_graph/sbom-123 + */ + documentNamespace: string; + packages: { + /** + * @description A unique SPDX identifier for the package. + * @example SPDXRef-Package + */ + SPDXID?: string; + /** + * @description The name of the package. + * @example rubygems:github/github + */ + name?: string; + /** + * @description The version of the package. If the package does not have an exact version specified, + * a version range is given. + * @example 1.0.0 + */ + versionInfo?: string; + /** + * @description The location where the package can be downloaded, + * or NOASSERTION if this has not been determined. + * @example NOASSERTION + */ + downloadLocation?: string; + /** + * @description Whether the package's file content has been subjected to + * analysis during the creation of the SPDX document. + * @example false + */ + filesAnalyzed?: boolean; + /** + * @description The license of the package as determined while creating the SPDX document. + * @example MIT + */ + licenseConcluded?: string; + /** + * @description The license of the package as declared by its author, or NOASSERTION if this information + * was not available when the SPDX document was created. + * @example NOASSERTION + */ + licenseDeclared?: string; + /** + * @description The distribution source of this package, or NOASSERTION if this was not determined. + * @example NOASSERTION + */ + supplier?: string; + externalRefs?: { + /** + * @description The category of reference to an external resource this reference refers to. + * @example PACKAGE-MANAGER + */ + referenceCategory: string; + /** + * @description A locator for the particular external resource this reference refers to. + * @example pkg:gem/rails@6.0.1 + */ + referenceLocator: string; + /** + * @description The category of reference to an external resource this reference refers to. + * @example purl + */ + referenceType: string; + }[]; + }[]; + }; + }; + /** + * metadata + * @description User-defined metadata to store domain-specific information limited to 8 keys with scalar values. + */ + metadata: { + [key: string]: ((string | number | boolean) | null) | undefined; + }; + dependency: { + /** + * @description Package-url (PURL) of dependency. See https://github.com/package-url/purl-spec for more details. + * @example pkg:/npm/%40actions/http-client@1.0.11 + */ + package_url?: string; + metadata?: components["schemas"]["metadata"]; + /** + * @description A notation of whether a dependency is requested directly by this manifest or is a dependency of another dependency. 
+ * @example direct + * @enum {string} + */ + relationship?: "direct" | "indirect"; + /** + * @description A notation of whether the dependency is required for the primary build artifact (runtime) or is only used for development. Future versions of this specification may allow for more granular scopes. + * @example runtime + * @enum {string} + */ + scope?: "runtime" | "development"; + /** + * @description Array of package-url (PURLs) of direct child dependencies. + * @example @actions/http-client + */ + dependencies?: string[]; + }; + manifest: { + /** + * @description The name of the manifest. + * @example package-lock.json + */ + name: string; + file?: { + /** + * @description The path of the manifest file relative to the root of the Git repository. + * @example /src/build/package-lock.json + */ + source_location?: string; + }; + metadata?: components["schemas"]["metadata"]; + /** @description A collection of resolved package dependencies. */ + resolved?: { + [key: string]: components["schemas"]["dependency"] | undefined; + }; + }; + /** + * snapshot + * @description Create a new snapshot of a repository's dependencies. + */ + snapshot: { + /** @description The version of the repository snapshot submission. */ + version: number; + job: { + /** + * @description The external ID of the job. + * @example 5622a2b0-63f6-4732-8c34-a1ab27e102a11 + */ + id: string; + /** + * @description Correlator provides a key that is used to group snapshots submitted over time. Only the "latest" submitted snapshot for a given combination of `job.correlator` and `detector.name` will be considered when calculating a repository's current dependencies. Correlator should be as unique as it takes to distinguish all detection runs for a given "wave" of CI workflow you run. If you're using GitHub Actions, a good default value for this could be the environment variables GITHUB_WORKFLOW and GITHUB_JOB concatenated together. If you're using a build matrix, then you'll also need to add additional key(s) to distinguish between each submission inside a matrix variation. + * @example yourworkflowname_yourjobname + */ + correlator: string; + /** + * @description The url for the job. + * @example http://example.com/build + */ + html_url?: string; + }; + /** + * @description The commit SHA associated with this dependency snapshot. Maximum length: 40 characters. + * @example ddc951f4b1293222421f2c8df679786153acf689 + */ + sha: string; + /** + * @description The repository branch that triggered this snapshot. + * @example refs/heads/main + */ + ref: string; + /** @description A description of the detector used. */ + detector: { + /** + * @description The name of the detector used. + * @example docker buildtime detector + */ + name: string; + /** + * @description The version of the detector used. + * @example 1.0.0 + */ + version: string; + /** + * @description The url of the detector used. + * @example http://example.com/docker-buildtimer-detector + */ + url: string; + }; + metadata?: components["schemas"]["metadata"]; + /** @description A collection of package manifests, which are a collection of related dependencies declared in a file or representing a logical group of dependencies. */ + manifests?: { + [key: string]: components["schemas"]["manifest"] | undefined; + }; + /** + * Format: date-time + * @description The time at which the snapshot was scanned. + * @example 2020-06-13T14:52:50-05:00 + */ + scanned: string; + }; + /** + * Deployment Status + * @description The status of a deployment. 
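The `snapshot` schema above lists exactly which fields a dependency submission must carry: `version`, `job.id`, `job.correlator`, `sha`, `ref`, `detector`, and `scanned`. A minimal object literal that satisfies the type, with placeholder values except where the schema's own examples are reused (import path hypothetical):

import type { components } from "./generated-types"; // hypothetical path

type Snapshot = components["schemas"]["snapshot"];

const snapshot: Snapshot = {
  version: 0, // submission schema version
  job: {
    id: "run-1234",                      // placeholder external job id
    correlator: "ci-workflow_build-job", // stable key grouping submissions over time
  },
  sha: "ddc951f4b1293222421f2c8df679786153acf689", // example SHA from the schema
  ref: "refs/heads/main",
  detector: { name: "example-detector", version: "1.0.0", url: "https://example.com" },
  scanned: new Date().toISOString(),
};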
+ */ + "deployment-status": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/42/statuses/1 + */ + url: string; + /** @example 1 */ + id: number; + /** @example MDE2OkRlcGxveW1lbnRTdGF0dXMx */ + node_id: string; + /** + * @description The state of the status. + * @example success + * @enum {string} + */ + state: + | "error" + | "failure" + | "inactive" + | "pending" + | "success" + | "queued" + | "in_progress"; + creator: components["schemas"]["nullable-simple-user"]; + /** + * @description A short description of the status. + * @default + * @example Deployment finished successfully. + */ + description: string; + /** + * @description The environment of the deployment that the status is for. + * @default + * @example production + */ + environment?: string; + /** + * Format: uri + * @description Deprecated: the URL to associate with this status. + * @default + * @example https://example.com/deployment/42/output + */ + target_url: string; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + created_at: string; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/42 + */ + deployment_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example + */ + repository_url: string; + /** + * Format: uri + * @description The URL for accessing your environment. + * @default + * @example https://staging.example.com/ + */ + environment_url?: string; + /** + * Format: uri + * @description The URL to associate with this status. + * @default + * @example https://example.com/deployment/42/output + */ + log_url?: string; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + }; + /** + * @description The amount of time to delay a job after the job is initially triggered. The time (in minutes) must be an integer between 0 and 43,200 (30 days). + * @example 30 + */ + "wait-timer": number; + /** @description The type of deployment branch policy for this environment. To allow all branches to deploy, set to `null`. */ + "deployment-branch-policy-settings": { + /** @description Whether only branches with branch protection rules can deploy to this environment. If `protected_branches` is `true`, `custom_branch_policies` must be `false`; if `protected_branches` is `false`, `custom_branch_policies` must be `true`. */ + protected_branches: boolean; + /** @description Whether only branches that match the specified name patterns can deploy to this environment. If `custom_branch_policies` is `true`, `protected_branches` must be `false`; if `custom_branch_policies` is `false`, `protected_branches` must be `true`. */ + custom_branch_policies: boolean; + } | null; + /** + * Environment + * @description Details of a deployment environment + */ + environment: { + /** + * @description The id of the environment. + * @example 56780428 + */ + id: number; + /** @example MDExOkVudmlyb25tZW50NTY3ODA0Mjg= */ + node_id: string; + /** + * @description The name of the environment. + * @example staging + */ + name: string; + /** @example https://api.github.com/repos/github/hello-world/environments/staging */ + url: string; + /** @example https://github.com/github/hello-world/deployments/activity_log?environments_filter=staging */ + html_url: string; + /** + * Format: date-time + * @description The time that the environment was created, in ISO 8601 format. 
+ * @example 2020-11-23T22:00:40Z + */ + created_at: string; + /** + * Format: date-time + * @description The time that the environment was last updated, in ISO 8601 format. + * @example 2020-11-23T22:00:40Z + */ + updated_at: string; + /** @description Built-in deployment protection rules for the environment. */ + protection_rules?: ( + | { + /** @example 3515 */ + id: number; + /** @example MDQ6R2F0ZTM1MTU= */ + node_id: string; + /** @example wait_timer */ + type: string; + wait_timer?: components["schemas"]["wait-timer"]; + } + | { + /** @example 3755 */ + id: number; + /** @example MDQ6R2F0ZTM3NTU= */ + node_id: string; + /** @example required_reviewers */ + type: string; + /** @description The people or teams that may approve jobs that reference the environment. You can list up to six users or teams as reviewers. The reviewers must have at least read access to the repository. Only one of the required reviewers needs to approve the job for it to proceed. */ + reviewers?: { + type?: components["schemas"]["deployment-reviewer-type"]; + reviewer?: + | components["schemas"]["simple-user"] + | components["schemas"]["team"]; + }[]; + } + | { + /** @example 3515 */ + id: number; + /** @example MDQ6R2F0ZTM1MTU= */ + node_id: string; + /** @example branch_policy */ + type: string; + } + )[]; + deployment_branch_policy?: components["schemas"]["deployment-branch-policy-settings"]; + }; + /** + * Deployment branch policy + * @description Details of a deployment branch policy. + */ + "deployment-branch-policy": { + /** + * @description The unique identifier of the branch policy. + * @example 361471 + */ + id?: number; + /** @example MDE2OkdhdGVCcmFuY2hQb2xpY3kzNjE0NzE= */ + node_id?: string; + /** + * @description The name pattern that branches must match in order to deploy to the environment. + * @example release/* + */ + name?: string; + }; + /** Deployment branch policy name pattern */ + "deployment-branch-policy-name-pattern": { + /** + * @description The name pattern that branches must match in order to deploy to the environment. + * + * Wildcard characters will not match `/`. For example, to match branches that begin with `release/` and contain an additional single slash, use `release/*\/*`. + * For more information about pattern matching syntax, see the [Ruby File.fnmatch documentation](https://ruby-doc.org/core-2.5.1/File.html#method-c-fnmatch). + * @example release/* + */ + name: string; + }; + /** + * Custom deployment protection rule app + * @description A GitHub App that is providing a custom deployment protection rule. + */ + "custom-deployment-rule-app": { + /** + * @description The unique identifier of the deployment protection rule integration. + * @example 3515 + */ + id: number; + /** + * @description The slugified name of the deployment protection rule integration. + * @example my-custom-app + */ + slug: string; + /** + * @description The URL for the endpoint to get details about the app. + * @example https://api.github.com/apps/custom-app-slug + */ + integration_url: string; + /** + * @description The node ID for the deployment protection rule integration. + * @example MDQ6R2F0ZTM1MTU= + */ + node_id: string; + }; + /** + * Deployment protection rule + * @description Deployment protection rule + */ + "deployment-protection-rule": { + /** + * @description The unique identifier for the deployment protection rule. + * @example 3515 + */ + id: number; + /** + * @description The node ID for the deployment protection rule. 
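`environment.protection_rules` is a union of three rule shapes that each carry a plain `type` string, so narrowing relies on the `in` operator to recover variant-specific fields such as the wait timer. Sketch (hypothetical import path):

import type { components } from "./generated-types"; // hypothetical path

type Environment = components["schemas"]["environment"];

// Return the configured wait timer in minutes, if this environment has one.
function waitTimerMinutes(env: Environment): number | undefined {
  for (const rule of env.protection_rules ?? []) {
    if (rule.type === "wait_timer" && "wait_timer" in rule) {
      return rule.wait_timer;
    }
  }
  return undefined;
}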
+ * @example MDQ6R2F0ZTM1MTU= + */ + node_id: string; + /** + * @description Whether the deployment protection rule is enabled for the environment. + * @example true + */ + enabled: boolean; + app: components["schemas"]["custom-deployment-rule-app"]; + }; + /** + * Short Blob + * @description Short Blob + */ + "short-blob": { + url: string; + sha: string; + }; + /** + * Blob + * @description Blob + */ + blob: { + content: string; + encoding: string; + /** Format: uri */ + url: string; + sha: string; + size: number | null; + node_id: string; + highlighted_content?: string; + }; + /** + * Git Commit + * @description Low-level Git commit operations within a repository + */ + "git-commit": { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + node_id: string; + /** Format: uri */ + url: string; + /** @description Identifying information for the git-user */ + author: { + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + date: string; + /** + * @description Git email address of the user + * @example monalisa.octocat@example.com + */ + email: string; + /** + * @description Name of the git user + * @example Monalisa Octocat + */ + name: string; + }; + /** @description Identifying information for the git-user */ + committer: { + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + date: string; + /** + * @description Git email address of the user + * @example monalisa.octocat@example.com + */ + email: string; + /** + * @description Name of the git user + * @example Monalisa Octocat + */ + name: string; + }; + /** + * @description Message describing the purpose of the commit + * @example Fix #42 + */ + message: string; + tree: { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + }; + parents: { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + }[]; + verification: { + verified: boolean; + reason: string; + signature: string | null; + payload: string | null; + }; + /** Format: uri */ + html_url: string; + }; + /** + * Git Reference + * @description Git references within a repository + */ + "git-ref": { + ref: string; + node_id: string; + /** Format: uri */ + url: string; + object: { + type: string; + /** + * @description SHA for the reference + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + }; + }; + /** + * Git Tag + * @description Metadata for a Git tag + */ + "git-tag": { + /** @example MDM6VGFnOTQwYmQzMzYyNDhlZmFlMGY5ZWU1YmM3YjJkNWM5ODU4ODdiMTZhYw== */ + node_id: string; + /** + * @description Name of the tag + * @example v0.0.1 + */ + tag: string; + /** @example 940bd336248efae0f9ee5bc7b2d5c985887b16ac */ + sha: string; + /** + * Format: uri + * @description URL for the tag + * @example https://api.github.com/repositories/42/git/tags/940bd336248efae0f9ee5bc7b2d5c985887b16ac + */ + url: string; + /** + * @description Message describing the purpose of the tag + * @example Initial public release + */ + message: string; + tagger: { + date: string; + email: string; + name: string; + }; + object: { + sha: string; + type: string; + /** Format: uri */ + url: string; + }; + verification?: 
components["schemas"]["verification"]; + }; + /** + * Git Tree + * @description The hierarchy between files in a Git repository. + */ + "git-tree": { + sha: string; + /** Format: uri */ + url: string; + truncated: boolean; + /** + * @description Objects specifying a tree structure + * @example [ + * { + * "path": "file.rb", + * "mode": "100644", + * "type": "blob", + * "size": 30, + * "sha": "44b4fc6d56897b048c772eb4087f854f46256132", + * "url": "https://api.github.com/repos/octocat/Hello-World/git/blobs/44b4fc6d56897b048c772eb4087f854f46256132", + * "properties": { + * "path": { + * "type": "string" + * }, + * "mode": { + * "type": "string" + * }, + * "type": { + * "type": "string" + * }, + * "size": { + * "type": "integer" + * }, + * "sha": { + * "type": "string" + * }, + * "url": { + * "type": "string" + * } + * }, + * "required": [ + * "path", + * "mode", + * "type", + * "sha", + * "url", + * "size" + * ] + * } + * ] + */ + tree: { + /** @example test/file.rb */ + path?: string; + /** @example 040000 */ + mode?: string; + /** @example tree */ + type?: string; + /** @example 23f6827669e43831def8a7ad935069c8bd418261 */ + sha?: string; + /** @example 12 */ + size?: number; + /** @example https://api.github.com/repos/owner-482f3203ecf01f67e9deb18e/BBB_Private_Repo/git/blobs/23f6827669e43831def8a7ad935069c8bd418261 */ + url?: string; + }[]; + }; + /** Hook Response */ + "hook-response": { + code: number | null; + status: string | null; + message: string | null; + }; + /** + * Webhook + * @description Webhooks for repositories. + */ + hook: { + type: string; + /** + * @description Unique identifier of the webhook. + * @example 42 + */ + id: number; + /** + * @description The name of a valid service, use 'web' for a webhook. + * @example web + */ + name: string; + /** + * @description Determines whether the hook is actually triggered on pushes. + * @example true + */ + active: boolean; + /** + * @description Determines what events the hook is triggered for. Default: ['push']. + * @example [ + * "push", + * "pull_request" + * ] + */ + events: string[]; + config: { + /** @example "foo@bar.com" */ + email?: string; + /** @example "foo" */ + password?: string; + /** @example "roomer" */ + room?: string; + /** @example "foo" */ + subdomain?: string; + url?: components["schemas"]["webhook-config-url"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + /** @example "sha256" */ + digest?: string; + secret?: components["schemas"]["webhook-config-secret"]; + /** @example "abc" */ + token?: string; + }; + /** + * Format: date-time + * @example 2011-09-06T20:39:23Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2011-09-06T17:26:27Z + */ + created_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/hooks/1 + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/hooks/1/test + */ + test_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/hooks/1/pings + */ + ping_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/hooks/1/deliveries + */ + deliveries_url?: string; + last_response: components["schemas"]["hook-response"]; + }; + /** + * Import + * @description A repository import from an external source. 
+ */ + import: { + vcs: string | null; + use_lfs?: boolean; + /** @description The URL of the originating repository. */ + vcs_url: string; + svc_root?: string; + tfvc_project?: string; + /** @enum {string} */ + status: + | "auth" + | "error" + | "none" + | "detecting" + | "choose" + | "auth_failed" + | "importing" + | "mapping" + | "waiting_to_push" + | "pushing" + | "complete" + | "setup" + | "unknown" + | "detection_found_multiple" + | "detection_found_nothing" + | "detection_needs_auth"; + status_text?: string | null; + failed_step?: string | null; + error_message?: string | null; + import_percent?: number | null; + commit_count?: number | null; + push_percent?: number | null; + has_large_files?: boolean; + large_files_size?: number; + large_files_count?: number; + project_choices?: { + vcs?: string; + tfvc_project?: string; + human_name?: string; + }[]; + message?: string; + authors_count?: number | null; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + authors_url: string; + /** Format: uri */ + repository_url: string; + svn_root?: string; + }; + /** + * Porter Author + * @description Porter Author + */ + "porter-author": { + id: number; + remote_id: string; + remote_name: string; + email: string; + name: string; + /** Format: uri */ + url: string; + /** Format: uri */ + import_url: string; + }; + /** + * Porter Large File + * @description Porter Large File + */ + "porter-large-file": { + ref_name: string; + path: string; + oid: string; + size: number; + }; + /** + * Issue + * @description Issues are a great way to keep track of tasks, enhancements, and bugs for your projects. + */ + "nullable-issue": { + /** Format: int64 */ + id: number; + node_id: string; + /** + * Format: uri + * @description URL for the issue + * @example https://api.github.com/repositories/42/issues/1 + */ + url: string; + /** Format: uri */ + repository_url: string; + labels_url: string; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** + * @description Number uniquely identifying the issue within its repository + * @example 42 + */ + number: number; + /** + * @description State of the issue; either 'open' or 'closed' + * @example open + */ + state: string; + /** + * @description The reason for the current state + * @example not_planned + * @enum {string|null} + */ + state_reason?: "completed" | "reopened" | "not_planned" | null; + /** + * @description Title of the issue + * @example Widget creation fails in Safari on OS X 10.8 + */ + title: string; + /** + * @description Contents of the issue + * @example It looks like the new widget form is broken on Safari. When I try and create the widget, Safari crashes. This is reproducible on 10.8, but not 10.9. Maybe a browser bug? 
+ */ + body?: string | null; + user: components["schemas"]["nullable-simple-user"]; + /** + * @description Labels to associate with this issue; pass one or more label names to replace the set of labels on this issue; send an empty array to clear all labels from the issue; note that the labels are silently dropped for users without push access to the repository + * @example [ + * "bug", + * "registration" + * ] + */ + labels: OneOf< + [ + string, + { + /** Format: int64 */ + id?: number; + node_id?: string; + /** Format: uri */ + url?: string; + name?: string; + description?: string | null; + color?: string | null; + default?: boolean; + }, + ] + >[]; + assignee: components["schemas"]["nullable-simple-user"]; + assignees?: components["schemas"]["simple-user"][] | null; + milestone: components["schemas"]["nullable-milestone"]; + locked: boolean; + active_lock_reason?: string | null; + comments: number; + pull_request?: { + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + diff_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + patch_url: string | null; + /** Format: uri */ + url: string | null; + }; + /** Format: date-time */ + closed_at: string | null; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + draft?: boolean; + closed_by?: components["schemas"]["nullable-simple-user"]; + body_html?: string; + body_text?: string; + /** Format: uri */ + timeline_url?: string; + repository?: components["schemas"]["repository"]; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + author_association: components["schemas"]["author-association"]; + reactions?: components["schemas"]["reaction-rollup"]; + } | null; + /** + * Issue Event Label + * @description Issue Event Label + */ + "issue-event-label": { + name: string | null; + color: string | null; + }; + /** Issue Event Dismissed Review */ + "issue-event-dismissed-review": { + state: string; + review_id: number; + dismissal_message: string | null; + dismissal_commit_id?: string | null; + }; + /** + * Issue Event Milestone + * @description Issue Event Milestone + */ + "issue-event-milestone": { + title: string; + }; + /** + * Issue Event Project Card + * @description Issue Event Project Card + */ + "issue-event-project-card": { + /** Format: uri */ + url: string; + id: number; + /** Format: uri */ + project_url: string; + project_id: number; + column_name: string; + previous_column_name?: string; + }; + /** + * Issue Event Rename + * @description Issue Event Rename + */ + "issue-event-rename": { + from: string; + to: string; + }; + /** + * Issue Event + * @description Issue Event + */ + "issue-event": { + /** + * Format: int64 + * @example 1 + */ + id: number; + /** @example MDEwOklzc3VlRXZlbnQx */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/events/1 + */ + url: string; + actor: components["schemas"]["nullable-simple-user"]; + /** @example closed */ + event: string; + /** @example 6dcb09b5b57875f334f61aebed695e2e4193db5e */ + commit_id: string | null; + /** @example https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e */ + commit_url: string | null; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + issue?: components["schemas"]["nullable-issue"]; + label?: components["schemas"]["issue-event-label"]; + assignee?: components["schemas"]["nullable-simple-user"]; + 
assigner?: components["schemas"]["nullable-simple-user"]; + review_requester?: components["schemas"]["nullable-simple-user"]; + requested_reviewer?: components["schemas"]["nullable-simple-user"]; + requested_team?: components["schemas"]["team"]; + dismissed_review?: components["schemas"]["issue-event-dismissed-review"]; + milestone?: components["schemas"]["issue-event-milestone"]; + project_card?: components["schemas"]["issue-event-project-card"]; + rename?: components["schemas"]["issue-event-rename"]; + author_association?: components["schemas"]["author-association"]; + lock_reason?: string | null; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + }; + /** + * Labeled Issue Event + * @description Labeled Issue Event + */ + "labeled-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + label: { + name: string; + color: string; + }; + }; + /** + * Unlabeled Issue Event + * @description Unlabeled Issue Event + */ + "unlabeled-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + label: { + name: string; + color: string; + }; + }; + /** + * Assigned Issue Event + * @description Assigned Issue Event + */ + "assigned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["integration"]; + assignee: components["schemas"]["simple-user"]; + assigner: components["schemas"]["simple-user"]; + }; + /** + * Unassigned Issue Event + * @description Unassigned Issue Event + */ + "unassigned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + assignee: components["schemas"]["simple-user"]; + assigner: components["schemas"]["simple-user"]; + }; + /** + * Milestoned Issue Event + * @description Milestoned Issue Event + */ + "milestoned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + milestone: { + title: string; + }; + }; + /** + * Demilestoned Issue Event + * @description Demilestoned Issue Event + */ + "demilestoned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + milestone: { + title: string; + }; + }; + /** + * Renamed Issue Event + * @description Renamed Issue Event + */ + "renamed-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + 
commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + rename: { + from: string; + to: string; + }; + }; + /** + * Review Requested Issue Event + * @description Review Requested Issue Event + */ + "review-requested-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + review_requester: components["schemas"]["simple-user"]; + requested_team?: components["schemas"]["team"]; + requested_reviewer?: components["schemas"]["simple-user"]; + }; + /** + * Review Request Removed Issue Event + * @description Review Request Removed Issue Event + */ + "review-request-removed-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + review_requester: components["schemas"]["simple-user"]; + requested_team?: components["schemas"]["team"]; + requested_reviewer?: components["schemas"]["simple-user"]; + }; + /** + * Review Dismissed Issue Event + * @description Review Dismissed Issue Event + */ + "review-dismissed-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + dismissed_review: { + state: string; + review_id: number; + dismissal_message: string | null; + dismissal_commit_id?: string; + }; + }; + /** + * Locked Issue Event + * @description Locked Issue Event + */ + "locked-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + /** @example "off-topic" */ + lock_reason: string | null; + }; + /** + * Added to Project Issue Event + * @description Added to Project Issue Event + */ + "added-to-project-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + project_card?: { + id: number; + /** Format: uri */ + url: string; + project_id: number; + /** Format: uri */ + project_url: string; + column_name: string; + previous_column_name?: string; + }; + }; + /** + * Moved Column in Project Issue Event + * @description Moved Column in Project Issue Event + */ + "moved-column-in-project-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + project_card?: { + id: number; + /** Format: uri */ + url: string; + project_id: number; + /** Format: uri */ + project_url: string; + column_name: string; + previous_column_name?: string; + }; + }; + /** + * Removed from Project Issue Event + * @description 
Removed from Project Issue Event + */ + "removed-from-project-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + project_card?: { + id: number; + /** Format: uri */ + url: string; + project_id: number; + /** Format: uri */ + project_url: string; + column_name: string; + previous_column_name?: string; + }; + }; + /** + * Converted Note to Issue Issue Event + * @description Converted Note to Issue Issue Event + */ + "converted-note-to-issue-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["integration"]; + project_card?: { + id: number; + /** Format: uri */ + url: string; + project_id: number; + /** Format: uri */ + project_url: string; + column_name: string; + previous_column_name?: string; + }; + }; + /** + * Issue Event for Issue + * @description Issue Event for Issue + */ + "issue-event-for-issue": + | components["schemas"]["labeled-issue-event"] + | components["schemas"]["unlabeled-issue-event"] + | components["schemas"]["assigned-issue-event"] + | components["schemas"]["unassigned-issue-event"] + | components["schemas"]["milestoned-issue-event"] + | components["schemas"]["demilestoned-issue-event"] + | components["schemas"]["renamed-issue-event"] + | components["schemas"]["review-requested-issue-event"] + | components["schemas"]["review-request-removed-issue-event"] + | components["schemas"]["review-dismissed-issue-event"] + | components["schemas"]["locked-issue-event"] + | components["schemas"]["added-to-project-issue-event"] + | components["schemas"]["moved-column-in-project-issue-event"] + | components["schemas"]["removed-from-project-issue-event"] + | components["schemas"]["converted-note-to-issue-issue-event"]; + /** + * Label + * @description Color-coded labels help you categorize and filter your issues (just like labels in Gmail). + */ + label: { + /** + * Format: int64 + * @example 208045946 + */ + id: number; + /** @example MDU6TGFiZWwyMDgwNDU5NDY= */ + node_id: string; + /** + * Format: uri + * @description URL for the label + * @example https://api.github.com/repositories/42/labels/bug + */ + url: string; + /** + * @description The name of the label. + * @example bug + */ + name: string; + /** @example Something isn't working */ + description: string | null; + /** + * @description 6-character hex code, without the leading #, identifying the color + * @example FFFFFF + */ + color: string; + /** @example true */ + default: boolean; + }; + /** + * Timeline Comment Event + * @description Timeline Comment Event + */ + "timeline-comment-event": { + event: string; + actor: components["schemas"]["simple-user"]; + /** + * @description Unique identifier of the issue comment + * @example 42 + */ + id: number; + node_id: string; + /** + * Format: uri + * @description URL for the issue comment + * @example https://api.github.com/repositories/42/issues/comments/1 + */ + url: string; + /** + * @description Contents of the issue comment + * @example What version of Safari were you using when you observed this bug? 
+ */ + body?: string; + body_text?: string; + body_html?: string; + /** Format: uri */ + html_url: string; + user: components["schemas"]["simple-user"]; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + updated_at: string; + /** Format: uri */ + issue_url: string; + author_association: components["schemas"]["author-association"]; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Timeline Cross Referenced Event + * @description Timeline Cross Referenced Event + */ + "timeline-cross-referenced-event": { + event: string; + actor?: components["schemas"]["simple-user"]; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + source: { + type?: string; + issue?: components["schemas"]["issue"]; + }; + }; + /** + * Timeline Committed Event + * @description Timeline Committed Event + */ + "timeline-committed-event": { + event?: string; + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + node_id: string; + /** Format: uri */ + url: string; + /** @description Identifying information for the git-user */ + author: { + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + date: string; + /** + * @description Git email address of the user + * @example monalisa.octocat@example.com + */ + email: string; + /** + * @description Name of the git user + * @example Monalisa Octocat + */ + name: string; + }; + /** @description Identifying information for the git-user */ + committer: { + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + date: string; + /** + * @description Git email address of the user + * @example monalisa.octocat@example.com + */ + email: string; + /** + * @description Name of the git user + * @example Monalisa Octocat + */ + name: string; + }; + /** + * @description Message describing the purpose of the commit + * @example Fix #42 + */ + message: string; + tree: { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + }; + parents: { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + }[]; + verification: { + verified: boolean; + reason: string; + signature: string | null; + payload: string | null; + }; + /** Format: uri */ + html_url: string; + }; + /** + * Timeline Reviewed Event + * @description Timeline Reviewed Event + */ + "timeline-reviewed-event": { + event: string; + /** + * @description Unique identifier of the review + * @example 42 + */ + id: number; + /** @example MDE3OlB1bGxSZXF1ZXN0UmV2aWV3ODA= */ + node_id: string; + user: components["schemas"]["simple-user"]; + /** + * @description The text of the review. + * @example This looks great. 
+ */ + body: string | null; + /** @example CHANGES_REQUESTED */ + state: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/12#pullrequestreview-80 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/12 + */ + pull_request_url: string; + _links: { + html: { + href: string; + }; + pull_request: { + href: string; + }; + }; + /** Format: date-time */ + submitted_at?: string; + /** + * @description A commit SHA for the review. + * @example 54bb654c9e6025347f57900a4a5c2313a96b8035 + */ + commit_id: string; + body_html?: string; + body_text?: string; + author_association: components["schemas"]["author-association"]; + }; + /** + * Pull Request Review Comment + * @description Pull Request Review Comments are comments on a portion of the Pull Request's diff. + */ + "pull-request-review-comment": { + /** + * @description URL for the pull request review comment + * @example https://api.github.com/repos/octocat/Hello-World/pulls/comments/1 + */ + url: string; + /** + * @description The ID of the pull request review to which the comment belongs. + * @example 42 + */ + pull_request_review_id: number | null; + /** + * @description The ID of the pull request review comment. + * @example 1 + */ + id: number; + /** + * @description The node ID of the pull request review comment. + * @example MDI0OlB1bGxSZXF1ZXN0UmV2aWV3Q29tbWVudDEw + */ + node_id: string; + /** + * @description The diff of the line that the comment refers to. + * @example @@ -16,33 +16,40 @@ public class Connection : IConnection... + */ + diff_hunk: string; + /** + * @description The relative path of the file to which the comment applies. + * @example config/database.yaml + */ + path: string; + /** + * @description The line index in the diff to which the comment applies. This field is deprecated; use `line` instead. + * @example 1 + */ + position?: number; + /** + * @description The index of the original line in the diff to which the comment applies. This field is deprecated; use `original_line` instead. + * @example 4 + */ + original_position?: number; + /** + * @description The SHA of the commit to which the comment applies. + * @example 6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + commit_id: string; + /** + * @description The SHA of the original commit to which the comment applies. + * @example 9c48853fa3dc5c1c3d6f1f1cd1f2743e72652840 + */ + original_commit_id: string; + /** + * @description The comment ID to reply to. + * @example 8 + */ + in_reply_to_id?: number; + user: components["schemas"]["simple-user"]; + /** + * @description The text of the comment. + * @example We should probably include a check for null values here. + */ + body: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + updated_at: string; + /** + * Format: uri + * @description HTML URL for the pull request review comment. + * @example https://github.com/octocat/Hello-World/pull/1#discussion-diff-1 + */ + html_url: string; + /** + * Format: uri + * @description URL for the pull request that the review comment belongs to. 
+ * @example https://api.github.com/repos/octocat/Hello-World/pulls/1 + */ + pull_request_url: string; + author_association: components["schemas"]["author-association"]; + _links: { + self: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/comments/1 + */ + href: string; + }; + html: { + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1#discussion-diff-1 + */ + href: string; + }; + pull_request: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1 + */ + href: string; + }; + }; + /** + * @description The first line of the range for a multi-line comment. + * @example 2 + */ + start_line?: number | null; + /** + * @description The first line of the range for a multi-line comment. + * @example 2 + */ + original_start_line?: number | null; + /** + * @description The side of the first line of the range for a multi-line comment. + * @default RIGHT + * @enum {string|null} + */ + start_side?: "LEFT" | "RIGHT" | null; + /** + * @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment + * @example 2 + */ + line?: number; + /** + * @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment + * @example 2 + */ + original_line?: number; + /** + * @description The side of the diff to which the comment applies. The side of the last line of the range for a multi-line comment + * @default RIGHT + * @enum {string} + */ + side?: "LEFT" | "RIGHT"; + /** + * @description The level at which the comment is targeted, can be a diff line or a file. + * @enum {string} + */ + subject_type?: "line" | "file"; + reactions?: components["schemas"]["reaction-rollup"]; + /** @example "
comment body
" */ + body_html?: string; + /** @example "comment body" */ + body_text?: string; + }; + /** + * Timeline Line Commented Event + * @description Timeline Line Commented Event + */ + "timeline-line-commented-event": { + event?: string; + node_id?: string; + comments?: components["schemas"]["pull-request-review-comment"][]; + }; + /** + * Timeline Commit Commented Event + * @description Timeline Commit Commented Event + */ + "timeline-commit-commented-event": { + event?: string; + node_id?: string; + commit_id?: string; + comments?: components["schemas"]["commit-comment"][]; + }; + /** + * Timeline Assigned Issue Event + * @description Timeline Assigned Issue Event + */ + "timeline-assigned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + assignee: components["schemas"]["simple-user"]; + }; + /** + * Timeline Unassigned Issue Event + * @description Timeline Unassigned Issue Event + */ + "timeline-unassigned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + assignee: components["schemas"]["simple-user"]; + }; + /** + * State Change Issue Event + * @description State Change Issue Event + */ + "state-change-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + state_reason?: string | null; + }; + /** + * Timeline Event + * @description Timeline Event + */ + "timeline-issue-events": + | components["schemas"]["labeled-issue-event"] + | components["schemas"]["unlabeled-issue-event"] + | components["schemas"]["milestoned-issue-event"] + | components["schemas"]["demilestoned-issue-event"] + | components["schemas"]["renamed-issue-event"] + | components["schemas"]["review-requested-issue-event"] + | components["schemas"]["review-request-removed-issue-event"] + | components["schemas"]["review-dismissed-issue-event"] + | components["schemas"]["locked-issue-event"] + | components["schemas"]["added-to-project-issue-event"] + | components["schemas"]["moved-column-in-project-issue-event"] + | components["schemas"]["removed-from-project-issue-event"] + | components["schemas"]["converted-note-to-issue-issue-event"] + | components["schemas"]["timeline-comment-event"] + | components["schemas"]["timeline-cross-referenced-event"] + | components["schemas"]["timeline-committed-event"] + | components["schemas"]["timeline-reviewed-event"] + | components["schemas"]["timeline-line-commented-event"] + | components["schemas"]["timeline-commit-commented-event"] + | components["schemas"]["timeline-assigned-issue-event"] + | components["schemas"]["timeline-unassigned-issue-event"] + | components["schemas"]["state-change-issue-event"]; + /** + * Deploy Key + * @description An SSH key granting access to a single repository. 
+ */ + "deploy-key": { + id: number; + key: string; + url: string; + title: string; + verified: boolean; + created_at: string; + read_only: boolean; + added_by?: string | null; + last_used?: string | null; + }; + /** + * Language + * @description Language + */ + language: { + [key: string]: number | undefined; + }; + /** + * License Content + * @description License Content + */ + "license-content": { + name: string; + path: string; + sha: string; + size: number; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + download_url: string | null; + type: string; + content: string; + encoding: string; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + license: components["schemas"]["nullable-license-simple"]; + }; + /** + * Merged upstream + * @description Results of a successful merge upstream request + */ + "merged-upstream": { + message?: string; + /** @enum {string} */ + merge_type?: "merge" | "fast-forward" | "none"; + base_branch?: string; + }; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/milestones/1 + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/milestones/v1.0 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/milestones/1/labels + */ + labels_url: string; + /** @example 1002604 */ + id: number; + /** @example MDk6TWlsZXN0b25lMTAwMjYwNA== */ + node_id: string; + /** + * @description The number of the milestone. + * @example 42 + */ + number: number; + /** + * @description The state of the milestone. + * @default open + * @example open + * @enum {string} + */ + state: "open" | "closed"; + /** + * @description The title of the milestone. + * @example v1.0 + */ + title: string; + /** @example Tracking milestone for version 1.0 */ + description: string | null; + creator: components["schemas"]["nullable-simple-user"]; + /** @example 4 */ + open_issues: number; + /** @example 8 */ + closed_issues: number; + /** + * Format: date-time + * @example 2011-04-10T20:09:31Z + */ + created_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2013-02-12T13:22:01Z + */ + closed_at: string | null; + /** + * Format: date-time + * @example 2012-10-09T23:39:01Z + */ + due_on: string | null; + }; + /** Pages Source Hash */ + "pages-source-hash": { + branch: string; + path: string; + }; + /** Pages Https Certificate */ + "pages-https-certificate": { + /** + * @example approved + * @enum {string} + */ + state: + | "new" + | "authorization_created" + | "authorization_pending" + | "authorized" + | "authorization_revoked" + | "issued" + | "uploaded" + | "approved" + | "errored" + | "bad_authz" + | "destroy_pending" + | "dns_changed"; + /** @example Certificate is approved */ + description: string; + /** + * @description Array of the domain set and its alternate name (if it is configured) + * @example [ + * "example.com", + * "www.example.com" + * ] + */ + domains: string[]; + /** Format: date */ + expires_at?: string; + }; + /** + * GitHub Pages + * @description The configuration for GitHub Pages for a repository. 
+ */ + page: { + /** + * Format: uri + * @description The API address for accessing this Page resource. + * @example https://api.github.com/repos/github/hello-world/pages + */ + url: string; + /** + * @description The status of the most recent build of the Page. + * @example built + * @enum {string|null} + */ + status: "built" | "building" | "errored" | null; + /** + * @description The Pages site's custom domain + * @example example.com + */ + cname: string | null; + /** + * @description The state if the domain is verified + * @example pending + * @enum {string|null} + */ + protected_domain_state?: "pending" | "verified" | "unverified" | null; + /** + * Format: date-time + * @description The timestamp when a pending domain becomes unverified. + */ + pending_domain_unverified_at?: string | null; + /** + * @description Whether the Page has a custom 404 page. + * @default false + * @example false + */ + custom_404: boolean; + /** + * Format: uri + * @description The web address the Page can be accessed from. + * @example https://example.com + */ + html_url?: string; + /** + * @description The process in which the Page will be built. + * @example legacy + * @enum {string|null} + */ + build_type?: "legacy" | "workflow" | null; + source?: components["schemas"]["pages-source-hash"]; + /** + * @description Whether the GitHub Pages site is publicly visible. If set to `true`, the site is accessible to anyone on the internet. If set to `false`, the site will only be accessible to users who have at least `read` access to the repository that published the site. + * @example true + */ + public: boolean; + https_certificate?: components["schemas"]["pages-https-certificate"]; + /** + * @description Whether https is enabled on the domain + * @example true + */ + https_enforced?: boolean; + }; + /** + * Page Build + * @description Page Build + */ + "page-build": { + /** Format: uri */ + url: string; + status: string; + error: { + message: string | null; + }; + pusher: components["schemas"]["nullable-simple-user"]; + commit: string; + duration: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Page Build Status + * @description Page Build Status + */ + "page-build-status": { + /** + * Format: uri + * @example https://api.github.com/repos/github/hello-world/pages/builds/latest + */ + url: string; + /** @example queued */ + status: string; + }; + /** + * GitHub Pages + * @description The GitHub Pages deployment status. + */ + "page-deployment": { + /** + * Format: uri + * @description The URI to monitor GitHub Pages deployment status. + * @example https://api.github.com/repos/github/hello-world/pages/deployments/4fd754f7e594640989b406850d0bc8f06a121251/status + */ + status_url: string; + /** + * Format: uri + * @description The URI to the deployed GitHub Pages. + * @example hello-world.github.io + */ + page_url: string; + /** + * Format: uri + * @description The URI to the deployed GitHub Pages preview. 
+ * @example monalisa-1231a2312sa32-23sda74.drafts.github.io + */ + preview_url?: string; + }; + /** + * Pages Health Check Status + * @description Pages Health Check Status + */ + "pages-health-check": { + domain?: { + host?: string; + uri?: string; + nameservers?: string; + dns_resolves?: boolean; + is_proxied?: boolean | null; + is_cloudflare_ip?: boolean | null; + is_fastly_ip?: boolean | null; + is_old_ip_address?: boolean | null; + is_a_record?: boolean | null; + has_cname_record?: boolean | null; + has_mx_records_present?: boolean | null; + is_valid_domain?: boolean; + is_apex_domain?: boolean; + should_be_a_record?: boolean | null; + is_cname_to_github_user_domain?: boolean | null; + is_cname_to_pages_dot_github_dot_com?: boolean | null; + is_cname_to_fastly?: boolean | null; + is_pointed_to_github_pages_ip?: boolean | null; + is_non_github_pages_ip_present?: boolean | null; + is_pages_domain?: boolean; + is_served_by_pages?: boolean | null; + is_valid?: boolean; + reason?: string | null; + responds_to_https?: boolean; + enforces_https?: boolean; + https_error?: string | null; + is_https_eligible?: boolean | null; + caa_error?: string | null; + }; + alt_domain?: { + host?: string; + uri?: string; + nameservers?: string; + dns_resolves?: boolean; + is_proxied?: boolean | null; + is_cloudflare_ip?: boolean | null; + is_fastly_ip?: boolean | null; + is_old_ip_address?: boolean | null; + is_a_record?: boolean | null; + has_cname_record?: boolean | null; + has_mx_records_present?: boolean | null; + is_valid_domain?: boolean; + is_apex_domain?: boolean; + should_be_a_record?: boolean | null; + is_cname_to_github_user_domain?: boolean | null; + is_cname_to_pages_dot_github_dot_com?: boolean | null; + is_cname_to_fastly?: boolean | null; + is_pointed_to_github_pages_ip?: boolean | null; + is_non_github_pages_ip_present?: boolean | null; + is_pages_domain?: boolean; + is_served_by_pages?: boolean | null; + is_valid?: boolean; + reason?: string | null; + responds_to_https?: boolean; + enforces_https?: boolean; + https_error?: string | null; + is_https_eligible?: boolean | null; + caa_error?: string | null; + } | null; + }; + /** + * Pull Request + * @description Pull requests let you tell others about changes you've pushed to a repository on GitHub. Once a pull request is sent, interested parties can review the set of changes, discuss potential modifications, and even push follow-up commits if necessary. 
+ */ + "pull-request": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347 + */ + url: string; + /** @example 1 */ + id: number; + /** @example MDExOlB1bGxSZXF1ZXN0MQ== */ + node_id: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347 + */ + html_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347.diff + */ + diff_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347.patch + */ + patch_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347 + */ + issue_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits + */ + commits_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments + */ + review_comments_url: string; + /** @example https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number} */ + review_comment_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347/comments + */ + comments_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + statuses_url: string; + /** + * @description Number uniquely identifying the pull request within its repository. + * @example 42 + */ + number: number; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @example open + * @enum {string} + */ + state: "open" | "closed"; + /** @example true */ + locked: boolean; + /** + * @description The title of the pull request. + * @example Amazing new feature + */ + title: string; + user: components["schemas"]["simple-user"]; + /** @example Please pull these awesome changes */ + body: string | null; + labels: { + /** Format: int64 */ + id: number; + node_id: string; + url: string; + name: string; + description: string | null; + color: string; + default: boolean; + }[]; + milestone: components["schemas"]["nullable-milestone"]; + /** @example too heated */ + active_lock_reason?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + closed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + merged_at: string | null; + /** @example e5bd3914e2e596debea16f433f57875b5b90bcd6 */ + merge_commit_sha: string | null; + assignee: components["schemas"]["nullable-simple-user"]; + assignees?: components["schemas"]["simple-user"][] | null; + requested_reviewers?: components["schemas"]["simple-user"][] | null; + requested_teams?: components["schemas"]["team-simple"][] | null; + head: { + label: string; + ref: string; + repo: { + archive_url: string; + assignees_url: string; + blobs_url: string; + branches_url: string; + collaborators_url: string; + comments_url: string; + commits_url: string; + compare_url: string; + contents_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + /** Format: uri */ + forks_url: string; + full_name: string; + git_commits_url: string; + 
git_refs_url: string; + git_tags_url: string; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + issue_comment_url: string; + issue_events_url: string; + issues_url: string; + keys_url: string; + labels_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + merges_url: string; + milestones_url: string; + name: string; + notifications_url: string; + owner: { + /** Format: uri */ + avatar_url: string; + events_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string | null; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + login: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + received_events_url: string; + /** Format: uri */ + repos_url: string; + site_admin: boolean; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + type: string; + /** Format: uri */ + url: string; + }; + private: boolean; + pulls_url: string; + releases_url: string; + /** Format: uri */ + stargazers_url: string; + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + trees_url: string; + /** Format: uri */ + url: string; + clone_url: string; + default_branch: string; + forks: number; + forks_count: number; + git_url: string; + has_downloads: boolean; + has_issues: boolean; + has_projects: boolean; + has_wiki: boolean; + has_pages: boolean; + has_discussions: boolean; + /** Format: uri */ + homepage: string | null; + language: string | null; + master_branch?: string; + archived: boolean; + disabled: boolean; + /** @description The repository visibility: public, private, or internal. 
*/ + visibility?: string; + /** Format: uri */ + mirror_url: string | null; + open_issues: number; + open_issues_count: number; + permissions?: { + admin: boolean; + maintain?: boolean; + push: boolean; + triage?: boolean; + pull: boolean; + }; + temp_clone_token?: string; + allow_merge_commit?: boolean; + allow_squash_merge?: boolean; + allow_rebase_merge?: boolean; + license: { + key: string; + name: string; + /** Format: uri */ + url: string | null; + spdx_id: string | null; + node_id: string; + } | null; + /** Format: date-time */ + pushed_at: string; + size: number; + ssh_url: string; + stargazers_count: number; + /** Format: uri */ + svn_url: string; + topics?: string[]; + watchers: number; + watchers_count: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + allow_forking?: boolean; + is_template?: boolean; + web_commit_signoff_required?: boolean; + } | null; + sha: string; + user: { + /** Format: uri */ + avatar_url: string; + events_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string | null; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + login: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + received_events_url: string; + /** Format: uri */ + repos_url: string; + site_admin: boolean; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + type: string; + /** Format: uri */ + url: string; + }; + }; + base: { + label: string; + ref: string; + repo: { + archive_url: string; + assignees_url: string; + blobs_url: string; + branches_url: string; + collaborators_url: string; + comments_url: string; + commits_url: string; + compare_url: string; + contents_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + /** Format: uri */ + forks_url: string; + full_name: string; + git_commits_url: string; + git_refs_url: string; + git_tags_url: string; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + id: number; + is_template?: boolean; + node_id: string; + issue_comment_url: string; + issue_events_url: string; + issues_url: string; + keys_url: string; + labels_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + merges_url: string; + milestones_url: string; + name: string; + notifications_url: string; + owner: { + /** Format: uri */ + avatar_url: string; + events_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string | null; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + login: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + received_events_url: string; + /** Format: uri */ + repos_url: string; + site_admin: boolean; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + type: string; + /** Format: uri */ + url: string; + }; + private: boolean; + pulls_url: string; + releases_url: string; + /** Format: uri */ + stargazers_url: string; + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + trees_url: string; + /** Format: uri */ + url: 
string; + clone_url: string; + default_branch: string; + forks: number; + forks_count: number; + git_url: string; + has_downloads: boolean; + has_issues: boolean; + has_projects: boolean; + has_wiki: boolean; + has_pages: boolean; + has_discussions: boolean; + /** Format: uri */ + homepage: string | null; + language: string | null; + master_branch?: string; + archived: boolean; + disabled: boolean; + /** @description The repository visibility: public, private, or internal. */ + visibility?: string; + /** Format: uri */ + mirror_url: string | null; + open_issues: number; + open_issues_count: number; + permissions?: { + admin: boolean; + maintain?: boolean; + push: boolean; + triage?: boolean; + pull: boolean; + }; + temp_clone_token?: string; + allow_merge_commit?: boolean; + allow_squash_merge?: boolean; + allow_rebase_merge?: boolean; + license: components["schemas"]["nullable-license-simple"]; + /** Format: date-time */ + pushed_at: string; + size: number; + ssh_url: string; + stargazers_count: number; + /** Format: uri */ + svn_url: string; + topics?: string[]; + watchers: number; + watchers_count: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + allow_forking?: boolean; + web_commit_signoff_required?: boolean; + }; + sha: string; + user: { + /** Format: uri */ + avatar_url: string; + events_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string | null; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + login: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + received_events_url: string; + /** Format: uri */ + repos_url: string; + site_admin: boolean; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + type: string; + /** Format: uri */ + url: string; + }; + }; + _links: { + comments: components["schemas"]["link"]; + commits: components["schemas"]["link"]; + statuses: components["schemas"]["link"]; + html: components["schemas"]["link"]; + issue: components["schemas"]["link"]; + review_comments: components["schemas"]["link"]; + review_comment: components["schemas"]["link"]; + self: components["schemas"]["link"]; + }; + author_association: components["schemas"]["author-association"]; + auto_merge: components["schemas"]["auto-merge"]; + /** + * @description Indicates whether or not the pull request is a draft. + * @example false + */ + draft?: boolean; + merged: boolean; + /** @example true */ + mergeable: boolean | null; + /** @example true */ + rebaseable?: boolean | null; + /** @example clean */ + mergeable_state: string; + merged_by: components["schemas"]["nullable-simple-user"]; + /** @example 10 */ + comments: number; + /** @example 0 */ + review_comments: number; + /** + * @description Indicates whether maintainers can modify the pull request. 
+ * @example true + */ + maintainer_can_modify: boolean; + /** @example 3 */ + commits: number; + /** @example 100 */ + additions: number; + /** @example 3 */ + deletions: number; + /** @example 5 */ + changed_files: number; + }; + /** + * Pull Request Merge Result + * @description Pull Request Merge Result + */ + "pull-request-merge-result": { + sha: string; + merged: boolean; + message: string; + }; + /** + * Pull Request Review Request + * @description Pull Request Review Request + */ + "pull-request-review-request": { + users: components["schemas"]["simple-user"][]; + teams: components["schemas"]["team"][]; + }; + /** + * Pull Request Review + * @description Pull Request Reviews are reviews on pull requests. + */ + "pull-request-review": { + /** + * @description Unique identifier of the review + * @example 42 + */ + id: number; + /** @example MDE3OlB1bGxSZXF1ZXN0UmV2aWV3ODA= */ + node_id: string; + user: components["schemas"]["nullable-simple-user"]; + /** + * @description The text of the review. + * @example This looks great. + */ + body: string; + /** @example CHANGES_REQUESTED */ + state: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/12#pullrequestreview-80 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/12 + */ + pull_request_url: string; + _links: { + html: { + href: string; + }; + pull_request: { + href: string; + }; + }; + /** Format: date-time */ + submitted_at?: string; + /** + * @description A commit SHA for the review. If the commit object was garbage collected or forcibly deleted, then it no longer exists in Git and this value will be `null`. + * @example 54bb654c9e6025347f57900a4a5c2313a96b8035 + */ + commit_id: string | null; + body_html?: string; + body_text?: string; + author_association: components["schemas"]["author-association"]; + }; + /** + * Legacy Review Comment + * @description Legacy Review Comment + */ + "review-comment": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/comments/1 + */ + url: string; + /** @example 42 */ + pull_request_review_id: number | null; + /** @example 10 */ + id: number; + /** @example MDI0OlB1bGxSZXF1ZXN0UmV2aWV3Q29tbWVudDEw */ + node_id: string; + /** @example @@ -16,33 +16,40 @@ public class Connection : IConnection... 
*/ + diff_hunk: string; + /** @example file1.txt */ + path: string; + /** @example 1 */ + position: number | null; + /** @example 4 */ + original_position: number; + /** @example 6dcb09b5b57875f334f61aebed695e2e4193db5e */ + commit_id: string; + /** @example 9c48853fa3dc5c1c3d6f1f1cd1f2743e72652840 */ + original_commit_id: string; + /** @example 8 */ + in_reply_to_id?: number; + user: components["schemas"]["nullable-simple-user"]; + /** @example Great stuff */ + body: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + updated_at: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1#discussion-diff-1 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1 + */ + pull_request_url: string; + author_association: components["schemas"]["author-association"]; + _links: { + self: components["schemas"]["link"]; + html: components["schemas"]["link"]; + pull_request: components["schemas"]["link"]; + }; + body_text?: string; + body_html?: string; + reactions?: components["schemas"]["reaction-rollup"]; + /** + * @description The side of the first line of the range for a multi-line comment. + * @default RIGHT + * @enum {string} + */ + side?: "LEFT" | "RIGHT"; + /** + * @description The side of the first line of the range for a multi-line comment. + * @default RIGHT + * @enum {string|null} + */ + start_side?: "LEFT" | "RIGHT" | null; + /** + * @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment + * @example 2 + */ + line?: number; + /** + * @description The original line of the blob to which the comment applies. The last line of the range for a multi-line comment + * @example 2 + */ + original_line?: number; + /** + * @description The first line of the range for a multi-line comment. + * @example 2 + */ + start_line?: number | null; + /** + * @description The original first line of the range for a multi-line comment. + * @example 2 + */ + original_start_line?: number | null; + }; + /** + * Release Asset + * @description Data related to a release. + */ + "release-asset": { + /** Format: uri */ + url: string; + /** Format: uri */ + browser_download_url: string; + id: number; + node_id: string; + /** + * @description The file name of the asset. + * @example Team Environment + */ + name: string; + label: string | null; + /** + * @description State of the release asset. + * @enum {string} + */ + state: "uploaded" | "open"; + content_type: string; + size: number; + download_count: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + uploader: components["schemas"]["nullable-simple-user"]; + }; + /** + * Release + * @description A release. + */ + release: { + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + assets_url: string; + upload_url: string; + /** Format: uri */ + tarball_url: string | null; + /** Format: uri */ + zipball_url: string | null; + id: number; + node_id: string; + /** + * @description The name of the tag. + * @example v1.0.0 + */ + tag_name: string; + /** + * @description Specifies the commitish value that determines where the Git tag is created from. 
+ * @example master + */ + target_commitish: string; + name: string | null; + body?: string | null; + /** + * @description true to create a draft (unpublished) release, false to create a published one. + * @example false + */ + draft: boolean; + /** + * @description Whether to identify the release as a prerelease or a full release. + * @example false + */ + prerelease: boolean; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + published_at: string | null; + author: components["schemas"]["simple-user"]; + assets: components["schemas"]["release-asset"][]; + body_html?: string; + body_text?: string; + mentions_count?: number; + /** + * Format: uri + * @description The URL of the release discussion. + */ + discussion_url?: string; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Generated Release Notes Content + * @description Generated name and body describing a release + */ + "release-notes-content": { + /** + * @description The generated name of the release + * @example Release v1.0.0 is now available! + */ + name: string; + /** @description The generated body describing the contents of the release supporting markdown formatting */ + body: string; + }; + "secret-scanning-alert": { + number?: components["schemas"]["alert-number"]; + created_at?: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["nullable-alert-updated-at"]; + url?: components["schemas"]["alert-url"]; + html_url?: components["schemas"]["alert-html-url"]; + /** + * Format: uri + * @description The REST API URL of the code locations for this alert. + */ + locations_url?: string; + state?: components["schemas"]["secret-scanning-alert-state"]; + resolution?: components["schemas"]["secret-scanning-alert-resolution"]; + /** + * Format: date-time + * @description The time that the alert was resolved in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + resolved_at?: string | null; + resolved_by?: components["schemas"]["nullable-simple-user"]; + /** @description An optional comment to resolve an alert. */ + resolution_comment?: string | null; + /** @description The type of secret that secret scanning detected. */ + secret_type?: string; + /** + * @description User-friendly name for the detected secret, matching the `secret_type`. + * For a list of built-in patterns, see "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)." + */ + secret_type_display_name?: string; + /** @description The secret that was detected. */ + secret?: string; + /** @description Whether push protection was bypassed for the detected secret. */ + push_protection_bypassed?: boolean | null; + push_protection_bypassed_by?: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @description The time that push protection was bypassed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + push_protection_bypassed_at?: string | null; + }; + /** @description An optional comment when closing an alert. Cannot be updated or deleted. Must be `null` when changing `state` to `open`. */ + "secret-scanning-alert-resolution-comment": string | null; + /** @description Represents a 'commit' secret scanning location type. This location type shows that a secret was detected inside a commit to a repository. 
*/ + "secret-scanning-location-commit": { + /** + * @description The file path in the repository + * @example /example/secrets.txt + */ + path: string; + /** @description Line number at which the secret starts in the file */ + start_line: number; + /** @description Line number at which the secret ends in the file */ + end_line: number; + /** @description The column at which the secret starts within the start line when the file is interpreted as 8BIT ASCII */ + start_column: number; + /** @description The column at which the secret ends within the end line when the file is interpreted as 8BIT ASCII */ + end_column: number; + /** + * @description SHA-1 hash ID of the associated blob + * @example af5626b4a114abcb82d63db7c8082c3c4756e51b + */ + blob_sha: string; + /** @description The API URL to get the associated blob resource */ + blob_url: string; + /** + * @description SHA-1 hash ID of the associated commit + * @example af5626b4a114abcb82d63db7c8082c3c4756e51b + */ + commit_sha: string; + /** @description The API URL to get the associated commit resource */ + commit_url: string; + }; + /** @description Represents an 'issue_title' secret scanning location type. This location type shows that a secret was detected in the title of an issue. */ + "secret-scanning-location-issue-title": { + /** + * Format: uri + * @description The API URL to get the issue where the secret was detected. + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347 + */ + issue_title_url: string; + }; + /** @description Represents an 'issue_body' secret scanning location type. This location type shows that a secret was detected in the body of an issue. */ + "secret-scanning-location-issue-body": { + /** + * Format: uri + * @description The API URL to get the issue where the secret was detected. + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347 + */ + issue_body_url: string; + }; + /** @description Represents an 'issue_comment' secret scanning location type. This location type shows that a secret was detected in a comment on an issue. */ + "secret-scanning-location-issue-comment": { + /** + * Format: uri + * @description The API URL to get the issue comment where the secret was detected. + * @example https://api.github.com/repos/octocat/Hello-World/issues/comments/1081119451 + */ + issue_comment_url: string; + }; + "secret-scanning-location": { + /** + * @description The location type. Because secrets may be found in different types of resources (ie. code, comments, issues), this field identifies the type of resource where the secret was found. + * @example commit + * @enum {string} + */ + type: "commit" | "issue_title" | "issue_body" | "issue_comment"; + details: + | components["schemas"]["secret-scanning-location-commit"] + | components["schemas"]["secret-scanning-location-issue-title"] + | components["schemas"]["secret-scanning-location-issue-body"] + | components["schemas"]["secret-scanning-location-issue-comment"]; + }; + /** + * @description The package's language or package management ecosystem. + * @enum {string} + */ + "security-advisory-ecosystems": + | "rubygems" + | "npm" + | "pip" + | "maven" + | "nuget" + | "composer" + | "go" + | "rust" + | "erlang" + | "actions" + | "pub" + | "other"; + /** @description A product affected by the vulnerability detailed in a repository security advisory. */ + "repository-advisory-vulnerability": { + /** @description The name of the package affected by the vulnerability. 
*/ + package: { + ecosystem: components["schemas"]["security-advisory-ecosystems"]; + /** @description The unique package name within its ecosystem. */ + name: string | null; + } | null; + /** @description The range of the package versions affected by the vulnerability. */ + vulnerable_version_range: string | null; + /** @description The package version(s) that resolve the vulnerability. */ + patched_versions: string | null; + /** @description The functions in the package that are affected. */ + vulnerable_functions: string[] | null; + }; + /** + * @description The type of credit the user is receiving. + * @enum {string} + */ + "security-advisory-credit-types": + | "analyst" + | "finder" + | "reporter" + | "coordinator" + | "remediation_developer" + | "remediation_reviewer" + | "remediation_verifier" + | "tool" + | "sponsor" + | "other"; + /** @description A credit given to a user for a repository security advisory. */ + "repository-advisory-credit": { + user: components["schemas"]["simple-user"]; + type: components["schemas"]["security-advisory-credit-types"]; + /** + * @description The state of the user's acceptance of the credit. + * @enum {string} + */ + state: "accepted" | "declined" | "pending"; + }; + /** @description A repository security advisory. */ + "repository-advisory": { + /** @description The GitHub Security Advisory ID. */ + ghsa_id: string; + /** @description The Common Vulnerabilities and Exposures (CVE) ID. */ + cve_id: string | null; + /** @description The API URL for the advisory. */ + url: string; + /** + * Format: uri + * @description The URL for the advisory. + */ + html_url: string; + /** @description A short summary of the advisory. */ + summary: string; + /** @description A detailed description of what the advisory entails. */ + description: string | null; + /** + * @description The severity of the advisory. + * @enum {string|null} + */ + severity: "critical" | "high" | "medium" | "low" | null; + /** @description The author of the advisory. */ + author: components["schemas"]["simple-user"] | null; + /** @description The publisher of the advisory. */ + publisher: components["schemas"]["simple-user"] | null; + identifiers: readonly { + /** + * @description The type of identifier. + * @enum {string} + */ + type: "CVE" | "GHSA"; + /** @description The identifier value. */ + value: string; + }[]; + /** + * @description The state of the advisory. + * @enum {string} + */ + state: "published" | "closed" | "withdrawn" | "draft" | "triage"; + /** + * Format: date-time + * @description The date and time of when the advisory was created, in ISO 8601 format. + */ + created_at: string | null; + /** + * Format: date-time + * @description The date and time of when the advisory was last updated, in ISO 8601 format. + */ + updated_at: string | null; + /** + * Format: date-time + * @description The date and time of when the advisory was published, in ISO 8601 format. + */ + published_at: string | null; + /** + * Format: date-time + * @description The date and time of when the advisory was closed, in ISO 8601 format. + */ + closed_at: string | null; + /** + * Format: date-time + * @description The date and time of when the advisory was withdrawn, in ISO 8601 format. + */ + withdrawn_at: string | null; + submission: { + /** @description Whether a private vulnerability report was accepted by the repository's administrators. 
*/ + readonly accepted: boolean; + } | null; + vulnerabilities: + | components["schemas"]["repository-advisory-vulnerability"][] + | null; + cvss: { + /** @description The CVSS vector. */ + vector_string: string | null; + /** @description The CVSS score. */ + score: number | null; + } | null; + cwes: + | readonly { + /** @description The Common Weakness Enumeration (CWE) identifier. */ + cwe_id: string; + /** @description The name of the CWE. */ + name: string; + }[] + | null; + /** @description A list of only the CWE IDs. */ + cwe_ids: string[] | null; + credits: + | { + /** @description The username of the user credited. */ + login?: string; + type?: components["schemas"]["security-advisory-credit-types"]; + }[] + | null; + credits_detailed: + | readonly components["schemas"]["repository-advisory-credit"][] + | null; + }; + "repository-advisory-create": { + /** @description A short summary of the advisory. */ + summary: string; + /** @description A detailed description of what the advisory impacts. */ + description: string; + /** @description The Common Vulnerabilities and Exposures (CVE) ID. */ + cve_id?: string | null; + /** @description A product affected by the vulnerability detailed in a repository security advisory. */ + vulnerabilities: { + /** @description The name of the package affected by the vulnerability. */ + package: { + ecosystem: components["schemas"]["security-advisory-ecosystems"]; + /** @description The unique package name within its ecosystem. */ + name?: string | null; + }; + /** @description The range of the package versions affected by the vulnerability. */ + vulnerable_version_range?: string | null; + /** @description The package version(s) that resolve the vulnerability. */ + patched_versions?: string | null; + /** @description The functions in the package that are affected. */ + vulnerable_functions?: string[] | null; + }[]; + /** @description A list of Common Weakness Enumeration (CWE) IDs. */ + cwe_ids?: string[] | null; + /** @description A list of users receiving credit for their participation in the security advisory. */ + credits?: + | { + /** @description The username of the user credited. */ + login: string; + type: components["schemas"]["security-advisory-credit-types"]; + }[] + | null; + /** + * @description The severity of the advisory. You must choose between setting this field or `cvss_vector_string`. + * @enum {string|null} + */ + severity?: "critical" | "high" | "medium" | "low" | null; + /** @description The CVSS vector that calculates the severity of the advisory. You must choose between setting this field or `severity`. */ + cvss_vector_string?: string | null; + }; + "private-vulnerability-report-create": { + /** @description A short summary of the advisory. */ + summary: string; + /** @description A detailed description of what the advisory impacts. */ + description: string; + /** @description An array of products affected by the vulnerability detailed in a repository security advisory. */ + vulnerabilities?: + | { + /** @description The name of the package affected by the vulnerability. */ + package: { + ecosystem: components["schemas"]["security-advisory-ecosystems"]; + /** @description The unique package name within its ecosystem. */ + name?: string | null; + }; + /** @description The range of the package versions affected by the vulnerability. */ + vulnerable_version_range?: string | null; + /** @description The package version(s) that resolve the vulnerability. 
*/ + patched_versions?: string | null; + /** @description The functions in the package that are affected. */ + vulnerable_functions?: string[] | null; + }[] + | null; + /** @description A list of Common Weakness Enumeration (CWE) IDs. */ + cwe_ids?: string[] | null; + /** + * @description The severity of the advisory. You must choose between setting this field or `cvss_vector_string`. + * @enum {string|null} + */ + severity?: "critical" | "high" | "medium" | "low" | null; + /** @description The CVSS vector that calculates the severity of the advisory. You must choose between setting this field or `severity`. */ + cvss_vector_string?: string | null; + }; + "repository-advisory-update": { + /** @description A short summary of the advisory. */ + summary?: string; + /** @description A detailed description of what the advisory impacts. */ + description?: string; + /** @description The Common Vulnerabilities and Exposures (CVE) ID. */ + cve_id?: string | null; + /** @description A product affected by the vulnerability detailed in a repository security advisory. */ + vulnerabilities?: { + /** @description The name of the package affected by the vulnerability. */ + package: { + ecosystem: components["schemas"]["security-advisory-ecosystems"]; + /** @description The unique package name within its ecosystem. */ + name?: string | null; + }; + /** @description The range of the package versions affected by the vulnerability. */ + vulnerable_version_range?: string | null; + /** @description The package version(s) that resolve the vulnerability. */ + patched_versions?: string | null; + /** @description The functions in the package that are affected. */ + vulnerable_functions?: string[] | null; + }[]; + /** @description A list of Common Weakness Enumeration (CWE) IDs. */ + cwe_ids?: string[] | null; + /** @description A list of users receiving credit for their participation in the security advisory. */ + credits?: + | { + /** @description The username of the user credited. */ + login: string; + type: components["schemas"]["security-advisory-credit-types"]; + }[] + | null; + /** + * @description The severity of the advisory. You must choose between setting this field or `cvss_vector_string`. + * @enum {string|null} + */ + severity?: "critical" | "high" | "medium" | "low" | null; + /** @description The CVSS vector that calculates the severity of the advisory. You must choose between setting this field or `severity`. */ + cvss_vector_string?: string | null; + /** + * @description The state of the advisory. 
+ * @enum {string} + */ + state?: "published" | "closed" | "draft"; + }; + /** + * Stargazer + * @description Stargazer + */ + stargazer: { + /** Format: date-time */ + starred_at: string; + user: components["schemas"]["nullable-simple-user"]; + }; + /** + * Code Frequency Stat + * @description Code Frequency Stat + */ + "code-frequency-stat": number[]; + /** + * Commit Activity + * @description Commit Activity + */ + "commit-activity": { + /** + * @example [ + * 0, + * 3, + * 26, + * 20, + * 39, + * 1, + * 0 + * ] + */ + days: number[]; + /** @example 89 */ + total: number; + /** @example 1336280400 */ + week: number; + }; + /** + * Contributor Activity + * @description Contributor Activity + */ + "contributor-activity": { + author: components["schemas"]["nullable-simple-user"]; + /** @example 135 */ + total: number; + /** + * @example [ + * { + * "w": "1367712000", + * "a": 6898, + * "d": 77, + * "c": 10 + * } + * ] + */ + weeks: { + w?: number; + a?: number; + d?: number; + c?: number; + }[]; + }; + /** Participation Stats */ + "participation-stats": { + all: number[]; + owner: number[]; + }; + /** + * Repository Invitation + * @description Repository invitations let you manage who you collaborate with. + */ + "repository-subscription": { + /** + * @description Determines if notifications should be received from this repository. + * @example true + */ + subscribed: boolean; + /** @description Determines if all notifications should be blocked from this repository. */ + ignored: boolean; + reason: string | null; + /** + * Format: date-time + * @example 2012-10-06T21:34:12Z + */ + created_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/subscription + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example + */ + repository_url: string; + }; + /** + * Tag + * @description Tag + */ + tag: { + /** @example v0.1 */ + name: string; + commit: { + sha: string; + /** Format: uri */ + url: string; + }; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/zipball/v0.1 + */ + zipball_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/tarball/v0.1 + */ + tarball_url: string; + node_id: string; + }; + /** + * Tag protection + * @description Tag protection + */ + "tag-protection": { + /** @example 2 */ + id?: number; + /** @example 2011-01-26T19:01:12Z */ + created_at?: string; + /** @example 2011-01-26T19:01:12Z */ + updated_at?: string; + /** @example true */ + enabled?: boolean; + /** @example v1.* */ + pattern: string; + }; + /** + * Topic + * @description A topic aggregates entities that are related to a subject. + */ + topic: { + names: string[]; + }; + /** Traffic */ + traffic: { + /** Format: date-time */ + timestamp: string; + uniques: number; + count: number; + }; + /** + * Clone Traffic + * @description Clone Traffic + */ + "clone-traffic": { + /** @example 173 */ + count: number; + /** @example 128 */ + uniques: number; + clones: components["schemas"]["traffic"][]; + }; + /** + * Content Traffic + * @description Content Traffic + */ + "content-traffic": { + /** @example /github/hubot */ + path: string; + /** @example github/hubot: A customizable life embetterment robot. 
*/ + title: string; + /** @example 3542 */ + count: number; + /** @example 2225 */ + uniques: number; + }; + /** + * Referrer Traffic + * @description Referrer Traffic + */ + "referrer-traffic": { + /** @example Google */ + referrer: string; + /** @example 4 */ + count: number; + /** @example 3 */ + uniques: number; + }; + /** + * View Traffic + * @description View Traffic + */ + "view-traffic": { + /** @example 14850 */ + count: number; + /** @example 3782 */ + uniques: number; + views: components["schemas"]["traffic"][]; + }; + /** Search Result Text Matches */ + "search-result-text-matches": { + object_url?: string; + object_type?: string | null; + property?: string; + fragment?: string; + matches?: { + text?: string; + indices?: number[]; + }[]; + }[]; + /** + * Code Search Result Item + * @description Code Search Result Item + */ + "code-search-result-item": { + name: string; + path: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string; + /** Format: uri */ + html_url: string; + repository: components["schemas"]["minimal-repository"]; + score: number; + file_size?: number; + language?: string | null; + /** Format: date-time */ + last_modified_at?: string; + /** + * @example [ + * "73..77", + * "77..78" + * ] + */ + line_numbers?: string[]; + text_matches?: components["schemas"]["search-result-text-matches"]; + }; + /** + * Commit Search Result Item + * @description Commit Search Result Item + */ + "commit-search-result-item": { + /** Format: uri */ + url: string; + sha: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + comments_url: string; + commit: { + author: { + name: string; + email: string; + /** Format: date-time */ + date: string; + }; + committer: components["schemas"]["nullable-git-user"]; + comment_count: number; + message: string; + tree: { + sha: string; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + url: string; + verification?: components["schemas"]["verification"]; + }; + author: components["schemas"]["nullable-simple-user"]; + committer: components["schemas"]["nullable-git-user"]; + parents: { + url?: string; + html_url?: string; + sha?: string; + }[]; + repository: components["schemas"]["minimal-repository"]; + score: number; + node_id: string; + text_matches?: components["schemas"]["search-result-text-matches"]; + }; + /** + * Issue Search Result Item + * @description Issue Search Result Item + */ + "issue-search-result-item": { + /** Format: uri */ + url: string; + /** Format: uri */ + repository_url: string; + labels_url: string; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + node_id: string; + number: number; + title: string; + locked: boolean; + active_lock_reason?: string | null; + assignees?: components["schemas"]["simple-user"][] | null; + user: components["schemas"]["nullable-simple-user"]; + labels: { + /** Format: int64 */ + id?: number; + node_id?: string; + url?: string; + name?: string; + color?: string; + default?: boolean; + description?: string | null; + }[]; + state: string; + state_reason?: string | null; + assignee: components["schemas"]["nullable-simple-user"]; + milestone: components["schemas"]["nullable-milestone"]; + comments: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** Format: date-time */ + closed_at: string | null; + text_matches?: 
components["schemas"]["search-result-text-matches"]; + pull_request?: { + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + diff_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + patch_url: string | null; + /** Format: uri */ + url: string | null; + }; + body?: string; + score: number; + author_association: components["schemas"]["author-association"]; + draft?: boolean; + repository?: components["schemas"]["repository"]; + body_html?: string; + body_text?: string; + /** Format: uri */ + timeline_url?: string; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Label Search Result Item + * @description Label Search Result Item + */ + "label-search-result-item": { + id: number; + node_id: string; + /** Format: uri */ + url: string; + name: string; + color: string; + default: boolean; + description: string | null; + score: number; + text_matches?: components["schemas"]["search-result-text-matches"]; + }; + /** + * Repo Search Result Item + * @description Repo Search Result Item + */ + "repo-search-result-item": { + id: number; + node_id: string; + name: string; + full_name: string; + owner: components["schemas"]["nullable-simple-user"]; + private: boolean; + /** Format: uri */ + html_url: string; + description: string | null; + fork: boolean; + /** Format: uri */ + url: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** Format: date-time */ + pushed_at: string; + /** Format: uri */ + homepage: string | null; + size: number; + stargazers_count: number; + watchers_count: number; + language: string | null; + forks_count: number; + open_issues_count: number; + master_branch?: string; + default_branch: string; + score: number; + /** Format: uri */ + forks_url: string; + keys_url: string; + collaborators_url: string; + /** Format: uri */ + teams_url: string; + /** Format: uri */ + hooks_url: string; + issue_events_url: string; + /** Format: uri */ + events_url: string; + assignees_url: string; + branches_url: string; + /** Format: uri */ + tags_url: string; + blobs_url: string; + git_tags_url: string; + git_refs_url: string; + trees_url: string; + statuses_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + stargazers_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + commits_url: string; + git_commits_url: string; + comments_url: string; + issue_comment_url: string; + contents_url: string; + compare_url: string; + /** Format: uri */ + merges_url: string; + archive_url: string; + /** Format: uri */ + downloads_url: string; + issues_url: string; + pulls_url: string; + milestones_url: string; + notifications_url: string; + labels_url: string; + releases_url: string; + /** Format: uri */ + deployments_url: string; + git_url: string; + ssh_url: string; + clone_url: string; + /** Format: uri */ + svn_url: string; + forks: number; + open_issues: number; + watchers: number; + topics?: string[]; + /** Format: uri */ + mirror_url: string | null; + has_issues: boolean; + has_projects: boolean; + has_pages: boolean; + has_wiki: boolean; + has_downloads: boolean; + has_discussions?: boolean; + archived: boolean; + /** @description Returns whether or not this repository disabled. 
*/ + disabled: boolean; + /** @description The repository visibility: public, private, or internal. */ + visibility?: string; + license: components["schemas"]["nullable-license-simple"]; + permissions?: { + admin: boolean; + maintain?: boolean; + push: boolean; + triage?: boolean; + pull: boolean; + }; + text_matches?: components["schemas"]["search-result-text-matches"]; + temp_clone_token?: string; + allow_merge_commit?: boolean; + allow_squash_merge?: boolean; + allow_rebase_merge?: boolean; + allow_auto_merge?: boolean; + delete_branch_on_merge?: boolean; + allow_forking?: boolean; + is_template?: boolean; + /** @example false */ + web_commit_signoff_required?: boolean; + }; + /** + * Topic Search Result Item + * @description Topic Search Result Item + */ + "topic-search-result-item": { + name: string; + display_name: string | null; + short_description: string | null; + description: string | null; + created_by: string | null; + released: string | null; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + featured: boolean; + curated: boolean; + score: number; + repository_count?: number | null; + /** Format: uri */ + logo_url?: string | null; + text_matches?: components["schemas"]["search-result-text-matches"]; + related?: + | { + topic_relation?: { + id?: number; + name?: string; + topic_id?: number; + relation_type?: string; + }; + }[] + | null; + aliases?: + | { + topic_relation?: { + id?: number; + name?: string; + topic_id?: number; + relation_type?: string; + }; + }[] + | null; + }; + /** + * User Search Result Item + * @description User Search Result Item + */ + "user-search-result-item": { + login: string; + id: number; + node_id: string; + /** Format: uri */ + avatar_url: string; + gravatar_id: string | null; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + followers_url: string; + /** Format: uri */ + subscriptions_url: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + repos_url: string; + /** Format: uri */ + received_events_url: string; + type: string; + score: number; + following_url: string; + gists_url: string; + starred_url: string; + events_url: string; + public_repos?: number; + public_gists?: number; + followers?: number; + following?: number; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + name?: string | null; + bio?: string | null; + /** Format: email */ + email?: string | null; + location?: string | null; + site_admin: boolean; + hireable?: boolean | null; + text_matches?: components["schemas"]["search-result-text-matches"]; + blog?: string | null; + company?: string | null; + /** Format: date-time */ + suspended_at?: string | null; + }; + /** + * Private User + * @description Private User + */ + "private-user": { + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example 
https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + /** @example monalisa octocat */ + name: string | null; + /** @example GitHub */ + company: string | null; + /** @example https://github.com/blog */ + blog: string | null; + /** @example San Francisco */ + location: string | null; + /** + * Format: email + * @example octocat@github.com + */ + email: string | null; + hireable: boolean | null; + /** @example There once was... */ + bio: string | null; + /** @example monalisa */ + twitter_username?: string | null; + /** @example 2 */ + public_repos: number; + /** @example 1 */ + public_gists: number; + /** @example 20 */ + followers: number; + /** @example 0 */ + following: number; + /** + * Format: date-time + * @example 2008-01-14T04:33:35Z + */ + created_at: string; + /** + * Format: date-time + * @example 2008-01-14T04:33:35Z + */ + updated_at: string; + /** @example 81 */ + private_gists: number; + /** @example 100 */ + total_private_repos: number; + /** @example 100 */ + owned_private_repos: number; + /** @example 10000 */ + disk_usage: number; + /** @example 8 */ + collaborators: number; + /** @example true */ + two_factor_authentication: boolean; + plan?: { + collaborators: number; + name: string; + space: number; + private_repos: number; + }; + /** Format: date-time */ + suspended_at?: string | null; + business_plus?: boolean; + ldap_dn?: string; + }; + /** + * Codespaces Secret + * @description Secrets for a GitHub Codespace. + */ + "codespaces-secret": { + /** + * @description The name of the secret + * @example SECRET_NAME + */ + name: string; + /** + * Format: date-time + * @description The date and time at which the secret was created, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + */ + created_at: string; + /** + * Format: date-time + * @description The date and time at which the secret was last updated, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + */ + updated_at: string; + /** + * @description The type of repositories in the organization that the secret is visible to + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** + * Format: uri + * @description The API URL at which the list of repositories this secret is visible to can be retrieved + * @example https://api.github.com/user/secrets/SECRET_NAME/repositories + */ + selected_repositories_url: string; + }; + /** + * CodespacesUserPublicKey + * @description The public key used for setting user Codespaces' Secrets. + */ + "codespaces-user-public-key": { + /** + * @description The identifier for the key. + * @example 1234567 + */ + key_id: string; + /** + * @description The Base64 encoded public key. 
+ * @example hBT5WZEj8ZoOv6TYJsfWq7MxTEQopZO5/IT3ZCVQPzs= + */ + key: string; + }; + /** + * Fetches information about an export of a codespace. + * @description An export of a codespace. Also, latest export details for a codespace can be fetched with id = latest + */ + "codespace-export-details": { + /** + * @description State of the latest export + * @example succeeded | failed | in_progress + */ + state?: string | null; + /** + * Format: date-time + * @description Completion time of the last export operation + * @example 2021-01-01T19:01:12Z + */ + completed_at?: string | null; + /** + * @description Name of the exported branch + * @example codespace-monalisa-octocat-hello-world-g4wpq6h95q + */ + branch?: string | null; + /** + * @description Git commit SHA of the exported branch + * @example fd95a81ca01e48ede9f39c799ecbcef817b8a3b2 + */ + sha?: string | null; + /** + * @description Id for the export details + * @example latest + */ + id?: string; + /** + * @description Url for fetching export details + * @example https://api.github.com/user/codespaces/:name/exports/latest + */ + export_url?: string; + /** + * @description Web url for the exported branch + * @example https://github.com/octocat/hello-world/tree/:branch + */ + html_url?: string | null; + }; + /** + * Codespace + * @description A codespace. + */ + "codespace-with-full-repository": { + /** @example 1 */ + id: number; + /** + * @description Automatically generated name of this codespace. + * @example monalisa-octocat-hello-world-g4wpq6h95q + */ + name: string; + /** + * @description Display name for this codespace. + * @example bookish space pancake + */ + display_name?: string | null; + /** + * @description UUID identifying this codespace's environment. + * @example 26a7c758-7299-4a73-b978-5a92a7ae98a0 + */ + environment_id: string | null; + owner: components["schemas"]["simple-user"]; + billable_owner: components["schemas"]["simple-user"]; + repository: components["schemas"]["full-repository"]; + machine: components["schemas"]["nullable-codespace-machine"]; + /** + * @description Path to devcontainer.json from repo root used to create Codespace. + * @example .devcontainer/example/devcontainer.json + */ + devcontainer_path?: string | null; + /** + * @description Whether the codespace was created from a prebuild. + * @example false + */ + prebuild: boolean | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + updated_at: string; + /** + * Format: date-time + * @description Last known time this codespace was started. + * @example 2011-01-26T19:01:12Z + */ + last_used_at: string; + /** + * @description State of this codespace. + * @example Available + * @enum {string} + */ + state: + | "Unknown" + | "Created" + | "Queued" + | "Provisioning" + | "Available" + | "Awaiting" + | "Unavailable" + | "Deleted" + | "Moved" + | "Shutdown" + | "Archived" + | "Starting" + | "ShuttingDown" + | "Failed" + | "Exporting" + | "Updating" + | "Rebuilding"; + /** + * Format: uri + * @description API URL for this codespace. + */ + url: string; + /** @description Details about the codespace's git repository. */ + git_status: { + /** + * @description The number of commits the local repository is ahead of the remote. + * @example 0 + */ + ahead?: number; + /** + * @description The number of commits the local repository is behind the remote. 
+ * @example 0 + */ + behind?: number; + /** @description Whether the local repository has unpushed changes. */ + has_unpushed_changes?: boolean; + /** @description Whether the local repository has uncommitted changes. */ + has_uncommitted_changes?: boolean; + /** + * @description The current branch (or SHA if in detached HEAD state) of the local repository. + * @example main + */ + ref?: string; + }; + /** + * @description The initally assigned location of a new codespace. + * @example WestUs2 + * @enum {string} + */ + location: "EastUs" | "SouthEastAsia" | "WestEurope" | "WestUs2"; + /** + * @description The number of minutes of inactivity after which this codespace will be automatically stopped. + * @example 60 + */ + idle_timeout_minutes: number | null; + /** + * Format: uri + * @description URL to access this codespace on the web. + */ + web_url: string; + /** + * Format: uri + * @description API URL to access available alternate machine types for this codespace. + */ + machines_url: string; + /** + * Format: uri + * @description API URL to start this codespace. + */ + start_url: string; + /** + * Format: uri + * @description API URL to stop this codespace. + */ + stop_url: string; + /** + * Format: uri + * @description API URL to publish this codespace to a new repository. + */ + publish_url?: string | null; + /** + * Format: uri + * @description API URL for the Pull Request associated with this codespace, if any. + */ + pulls_url: string | null; + recent_folders: string[]; + runtime_constraints?: { + /** @description The privacy settings a user can select from when forwarding a port. */ + allowed_port_privacy_settings?: string[] | null; + }; + /** @description Whether or not a codespace has a pending async operation. This would mean that the codespace is temporarily unavailable. The only thing that you can do with a codespace in this state is delete it. */ + pending_operation?: boolean | null; + /** @description Text to show user when codespace is disabled by a pending operation */ + pending_operation_disabled_reason?: string | null; + /** @description Text to show user when codespace idle timeout minutes has been overriden by an organization policy */ + idle_timeout_notice?: string | null; + /** + * @description Duration in minutes after codespace has gone idle in which it will be deleted. Must be integer minutes between 0 and 43200 (30 days). + * @example 60 + */ + retention_period_minutes?: number | null; + /** + * Format: date-time + * @description When a codespace will be auto-deleted based on the "retention_period_minutes" and "last_used_at" + * @example 2011-01-26T20:01:12Z + */ + retention_expires_at?: string | null; + }; + /** + * Email + * @description Email + */ + email: { + /** + * Format: email + * @example octocat@github.com + */ + email: string; + /** @example true */ + primary: boolean; + /** @example true */ + verified: boolean; + /** @example public */ + visibility: string | null; + }; + /** + * GPG Key + * @description A unique encryption key + */ + "gpg-key": { + /** @example 3 */ + id: number; + /** @example Octocat's GPG Key */ + name?: string | null; + primary_key_id: number | null; + /** @example 3262EFF25BA0D270 */ + key_id: string; + /** @example xsBNBFayYZ... 
*/ + public_key: string; + /** + * @example [ + * { + * "email": "octocat@users.noreply.github.com", + * "verified": true + * } + * ] + */ + emails: { + email?: string; + verified?: boolean; + }[]; + /** + * @example [ + * { + * "id": 4, + * "primary_key_id": 3, + * "key_id": "4A595D4C72EE49C7", + * "public_key": "zsBNBFayYZ...", + * "emails": [], + * "can_sign": false, + * "can_encrypt_comms": true, + * "can_encrypt_storage": true, + * "can_certify": false, + * "created_at": "2016-03-24T11:31:04-06:00", + * "expires_at": null, + * "revoked": false + * } + * ] + */ + subkeys: { + id?: number; + primary_key_id?: number; + key_id?: string; + public_key?: string; + emails?: { + email?: string; + verified?: boolean; + }[]; + subkeys?: unknown[]; + can_sign?: boolean; + can_encrypt_comms?: boolean; + can_encrypt_storage?: boolean; + can_certify?: boolean; + created_at?: string; + expires_at?: string | null; + raw_key?: string | null; + revoked?: boolean; + }[]; + /** @example true */ + can_sign: boolean; + can_encrypt_comms: boolean; + can_encrypt_storage: boolean; + /** @example true */ + can_certify: boolean; + /** + * Format: date-time + * @example 2016-03-24T11:31:04-06:00 + */ + created_at: string; + /** Format: date-time */ + expires_at: string | null; + /** @example true */ + revoked: boolean; + raw_key: string | null; + }; + /** + * Key + * @description Key + */ + key: { + key: string; + id: number; + url: string; + title: string; + /** Format: date-time */ + created_at: string; + verified: boolean; + read_only: boolean; + }; + /** Marketplace Account */ + "marketplace-account": { + /** Format: uri */ + url: string; + id: number; + type: string; + node_id?: string; + login: string; + /** Format: email */ + email?: string | null; + /** Format: email */ + organization_billing_email?: string | null; + }; + /** + * User Marketplace Purchase + * @description User Marketplace Purchase + */ + "user-marketplace-purchase": { + /** @example monthly */ + billing_cycle: string; + /** + * Format: date-time + * @example 2017-11-11T00:00:00Z + */ + next_billing_date: string | null; + unit_count: number | null; + /** @example true */ + on_free_trial: boolean; + /** + * Format: date-time + * @example 2017-11-11T00:00:00Z + */ + free_trial_ends_on: string | null; + /** + * Format: date-time + * @example 2017-11-02T01:12:12Z + */ + updated_at: string | null; + account: components["schemas"]["marketplace-account"]; + plan: components["schemas"]["marketplace-listing-plan"]; + }; + /** + * Social account + * @description Social media account + */ + "social-account": { + /** @example linkedin */ + provider: string; + /** @example https://www.linkedin.com/company/github/ */ + url: string; + }; + /** + * SSH Signing Key + * @description A public SSH key used to sign Git commits + */ + "ssh-signing-key": { + key: string; + id: number; + title: string; + /** Format: date-time */ + created_at: string; + }; + /** + * Starred Repository + * @description Starred Repository + */ + "starred-repository": { + /** Format: date-time */ + starred_at: string; + repo: components["schemas"]["repository"]; + }; + /** + * Hovercard + * @description Hovercard + */ + hovercard: { + contexts: { + message: string; + octicon: string; + }[]; + }; + /** + * Key Simple + * @description Key Simple + */ + "key-simple": { + id: number; + key: string; + }; + /** + * Simple Installation + * @description The GitHub App installation. This property is included when the event is configured for and sent to a GitHub App. 
+ */ + "simple-installation": { + /** + * @description The ID of the installation. + * @example 1 + */ + id: number; + /** + * @description The global node ID of the installation. + * @example MDQ6VXNlcjU4MzIzMQ== + */ + node_id: string; + }; + /** @description A suite of checks performed on the code of a given code change */ + "simple-check-suite": { + /** @example d6fde92930d4715a2b49857d24b940956b26d2d3 */ + after?: string | null; + app?: components["schemas"]["integration"]; + /** @example 146e867f55c26428e5f9fade55a9bbf5e95a7912 */ + before?: string | null; + /** + * @example neutral + * @enum {string|null} + */ + conclusion?: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "skipped" + | "timed_out" + | "action_required" + | "stale" + | "startup_failure" + | null; + /** Format: date-time */ + created_at?: string; + /** @example master */ + head_branch?: string | null; + /** + * @description The SHA of the head commit that is being checked. + * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha?: string; + /** @example 5 */ + id?: number; + /** @example MDEwOkNoZWNrU3VpdGU1 */ + node_id?: string; + pull_requests?: components["schemas"]["pull-request-minimal"][]; + repository?: components["schemas"]["minimal-repository"]; + /** + * @example completed + * @enum {string} + */ + status?: "queued" | "in_progress" | "completed" | "pending" | "waiting"; + /** Format: date-time */ + updated_at?: string; + /** @example https://api.github.com/repos/github/hello-world/check-suites/5 */ + url?: string; + }; + /** + * CheckRun + * @description A check performed on the code of a given code change + */ + "check-run-with-simple-check-suite": { + app: components["schemas"]["nullable-integration"]; + check_suite: components["schemas"]["simple-check-suite"]; + /** + * Format: date-time + * @example 2018-05-04T01:14:52Z + */ + completed_at: string | null; + /** + * @example neutral + * @enum {string|null} + */ + conclusion: + | "waiting" + | "pending" + | "startup_failure" + | "stale" + | "success" + | "failure" + | "neutral" + | "cancelled" + | "skipped" + | "timed_out" + | "action_required" + | null; + deployment?: components["schemas"]["deployment-simple"]; + /** @example https://example.com */ + details_url: string; + /** @example 42 */ + external_id: string; + /** + * @description The SHA of the commit that is being checked. + * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha: string; + /** @example https://github.com/github/hello-world/runs/4 */ + html_url: string; + /** + * @description The id of the check. + * @example 21 + */ + id: number; + /** + * @description The name of the check. + * @example test-coverage + */ + name: string; + /** @example MDg6Q2hlY2tSdW40 */ + node_id: string; + output: { + annotations_count: number; + /** Format: uri */ + annotations_url: string; + summary: string | null; + text: string | null; + title: string | null; + }; + pull_requests: components["schemas"]["pull-request-minimal"][]; + /** + * Format: date-time + * @example 2018-05-04T01:14:52Z + */ + started_at: string; + /** + * @description The phase of the lifecycle that the check is currently in. + * @example queued + * @enum {string} + */ + status: "queued" | "in_progress" | "completed" | "pending"; + /** @example https://api.github.com/repos/github/hello-world/check-runs/4 */ + url: string; + }; + /** + * Discussion + * @description A Discussion in a repository. 
+ */ + discussion: { + active_lock_reason: string | null; + answer_chosen_at: string | null; + /** User */ + answer_chosen_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + answer_html_url: string | null; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + body: string; + category: { + /** Format: date-time */ + created_at: string; + description: string; + emoji: string; + id: number; + is_answerable: boolean; + name: string; + node_id?: string; + repository_id: number; + slug: string; + updated_at: string; + }; + comments: number; + /** Format: date-time */ + created_at: string; + html_url: string; + id: number; + locked: boolean; + node_id: string; + number: number; + /** Reactions */ + reactions?: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + repository_url: string; + /** + * @description The current state of the discussion. + * `converting` means that the discussion is being converted from an issue. + * `transferring` means that the discussion is being transferred from another repository. + * @enum {string} + */ + state: "open" | "closed" | "locked" | "converting" | "transferring"; + /** + * @description The reason for the current state + * @example resolved + * @enum {string|null} + */ + state_reason: "resolved" | "outdated" | "duplicate" | "reopened" | null; + timeline_url?: string; + title: string; + /** Format: date-time */ + updated_at: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** + * Merge Group + * @description A group of pull requests that the merge queue has grouped together to be merged. 
+ */ + "merge-group": { + /** @description The SHA of the merge group. */ + head_sha: string; + /** @description The full ref of the merge group. */ + head_ref: string; + /** @description The SHA of the merge group's parent commit. */ + base_sha: string; + /** @description The full ref of the branch the merge group will be merged into. */ + base_ref: string; + head_commit: components["schemas"]["simple-commit"]; + }; + /** + * Personal Access Token Request + * @description Details of a Personal Access Token Request. + */ + "personal-access-token-request": { + /** @description Unique identifier of the request for access via fine-grained personal access token. Used as the `pat_request_id` parameter in the list and review API calls. */ + id: number; + owner: components["schemas"]["simple-user"]; + /** @description New requested permissions, categorized by type of permission. */ + permissions_added: { + organization?: { + [key: string]: string | undefined; + }; + repository?: { + [key: string]: string | undefined; + }; + other?: { + [key: string]: string | undefined; + }; + }; + /** @description Requested permissions that elevate access for a previously approved request for access, categorized by type of permission. */ + permissions_upgraded: { + organization?: { + [key: string]: string | undefined; + }; + repository?: { + [key: string]: string | undefined; + }; + other?: { + [key: string]: string | undefined; + }; + }; + /** @description Permissions requested, categorized by type of permission. This field incorporates `permissions_added` and `permissions_upgraded`. */ + permissions_result: { + organization?: { + [key: string]: string | undefined; + }; + repository?: { + [key: string]: string | undefined; + }; + other?: { + [key: string]: string | undefined; + }; + }; + /** + * @description Type of repository selection requested. + * @enum {string} + */ + repository_selection: "none" | "all" | "subset"; + /** @description The number of repositories the token is requesting access to. This field is only populated when `repository_selection` is `subset`. */ + repository_count: number | null; + /** @description An array of repository objects the token is requesting access to. This field is only populated when `repository_selection` is `subset`. */ + repositories: + | { + full_name: string; + /** @description Unique identifier of the repository */ + id: number; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** @description Whether the repository is private or public. */ + private: boolean; + }[] + | null; + /** @description Date and time when the request for access was created. */ + created_at: string; + /** @description Whether the associated fine-grained personal access token has expired. */ + token_expired: boolean; + /** @description Date and time when the associated fine-grained personal access token expires. */ + token_expires_at: string | null; + /** @description Date and time when the associated fine-grained personal access token was last used for authentication. 
*/ + token_last_used_at: string | null; + }; + /** + * Projects v2 Project + * @description A projects v2 project + */ + "projects-v2": { + id: number; + node_id: string; + owner: components["schemas"]["simple-user"]; + creator: components["schemas"]["simple-user"]; + title: string; + description: string | null; + public: boolean; + /** + * Format: date-time + * @example 2022-04-28T12:00:00Z + */ + closed_at: string | null; + /** + * Format: date-time + * @example 2022-04-28T12:00:00Z + */ + created_at: string; + /** + * Format: date-time + * @example 2022-04-28T12:00:00Z + */ + updated_at: string; + number: number; + short_description: string | null; + /** + * Format: date-time + * @example 2022-04-28T12:00:00Z + */ + deleted_at: string | null; + deleted_by: components["schemas"]["nullable-simple-user"]; + }; + /** + * Projects v2 Item Content Type + * @description The type of content tracked in a project item + * @enum {string} + */ + "projects-v2-item-content-type": "Issue" | "PullRequest" | "DraftIssue"; + /** + * Projects v2 Item + * @description An item belonging to a project + */ + "projects-v2-item": { + id: number; + node_id?: string; + project_node_id?: string; + content_node_id: string; + content_type: components["schemas"]["projects-v2-item-content-type"]; + creator?: components["schemas"]["simple-user"]; + /** + * Format: date-time + * @example 2022-04-28T12:00:00Z + */ + created_at: string; + /** + * Format: date-time + * @example 2022-04-28T12:00:00Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2022-04-28T12:00:00Z + */ + archived_at: string | null; + }; + /** branch protection rule created event */ + "webhook-branch-protection-rule-created": { + /** @enum {string} */ + action: "created"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + /** + * branch protection rule + * @description The branch protection rule. Includes a `name` and all the [branch protection settings](https://docs.github.com/github/administering-a-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#about-branch-protection-settings) applied to branches that match the name. Binary settings are boolean. Multi-level configurations are one of `off`, `non_admins`, or `everyone`. Actor and build lists are arrays of strings. 
+ */ + rule: { + admin_enforced: boolean; + /** @enum {string} */ + allow_deletions_enforcement_level: "off" | "non_admins" | "everyone"; + /** @enum {string} */ + allow_force_pushes_enforcement_level: "off" | "non_admins" | "everyone"; + authorized_actor_names: string[]; + authorized_actors_only: boolean; + authorized_dismissal_actors_only: boolean; + create_protected?: boolean; + /** Format: date-time */ + created_at: string; + dismiss_stale_reviews_on_push: boolean; + id: number; + ignore_approvals_from_contributors: boolean; + /** @enum {string} */ + linear_history_requirement_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + /** @enum {string} */ + merge_queue_enforcement_level: "off" | "non_admins" | "everyone"; + name: string; + /** @enum {string} */ + pull_request_reviews_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + repository_id: number; + require_code_owner_review: boolean; + required_approving_review_count: number; + /** @enum {string} */ + required_conversation_resolution_level: + | "off" + | "non_admins" + | "everyone"; + /** @enum {string} */ + required_deployments_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + required_status_checks: string[]; + /** @enum {string} */ + required_status_checks_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + /** @enum {string} */ + signature_requirement_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + strict_required_status_checks_policy: boolean; + /** Format: date-time */ + updated_at: string; + }; + sender: components["schemas"]["simple-user"]; + }; + /** branch protection rule deleted event */ + "webhook-branch-protection-rule-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + /** + * branch protection rule + * @description The branch protection rule. Includes a `name` and all the [branch protection settings](https://docs.github.com/github/administering-a-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#about-branch-protection-settings) applied to branches that match the name. Binary settings are boolean. Multi-level configurations are one of `off`, `non_admins`, or `everyone`. Actor and build lists are arrays of strings. 
+ */ + rule: { + admin_enforced: boolean; + /** @enum {string} */ + allow_deletions_enforcement_level: "off" | "non_admins" | "everyone"; + /** @enum {string} */ + allow_force_pushes_enforcement_level: "off" | "non_admins" | "everyone"; + authorized_actor_names: string[]; + authorized_actors_only: boolean; + authorized_dismissal_actors_only: boolean; + create_protected?: boolean; + /** Format: date-time */ + created_at: string; + dismiss_stale_reviews_on_push: boolean; + id: number; + ignore_approvals_from_contributors: boolean; + /** @enum {string} */ + linear_history_requirement_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + /** @enum {string} */ + merge_queue_enforcement_level: "off" | "non_admins" | "everyone"; + name: string; + /** @enum {string} */ + pull_request_reviews_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + repository_id: number; + require_code_owner_review: boolean; + required_approving_review_count: number; + /** @enum {string} */ + required_conversation_resolution_level: + | "off" + | "non_admins" + | "everyone"; + /** @enum {string} */ + required_deployments_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + required_status_checks: string[]; + /** @enum {string} */ + required_status_checks_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + /** @enum {string} */ + signature_requirement_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + strict_required_status_checks_policy: boolean; + /** Format: date-time */ + updated_at: string; + }; + sender: components["schemas"]["simple-user"]; + }; + /** branch protection rule edited event */ + "webhook-branch-protection-rule-edited": { + /** @enum {string} */ + action: "edited"; + /** @description If the action was `edited`, the changes to the rule. */ + changes?: { + admin_enforced?: { + from: boolean | null; + }; + authorized_actor_names?: { + from: string[]; + }; + authorized_actors_only?: { + from: boolean | null; + }; + authorized_dismissal_actors_only?: { + from: boolean | null; + }; + linear_history_requirement_enforcement_level?: { + /** @enum {string} */ + from: "off" | "non_admins" | "everyone"; + }; + required_status_checks?: { + from: string[]; + }; + required_status_checks_enforcement_level?: { + /** @enum {string} */ + from: "off" | "non_admins" | "everyone"; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + /** + * branch protection rule + * @description The branch protection rule. Includes a `name` and all the [branch protection settings](https://docs.github.com/github/administering-a-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#about-branch-protection-settings) applied to branches that match the name. Binary settings are boolean. Multi-level configurations are one of `off`, `non_admins`, or `everyone`. Actor and build lists are arrays of strings. 
+ */ + rule: { + admin_enforced: boolean; + /** @enum {string} */ + allow_deletions_enforcement_level: "off" | "non_admins" | "everyone"; + /** @enum {string} */ + allow_force_pushes_enforcement_level: "off" | "non_admins" | "everyone"; + authorized_actor_names: string[]; + authorized_actors_only: boolean; + authorized_dismissal_actors_only: boolean; + create_protected?: boolean; + /** Format: date-time */ + created_at: string; + dismiss_stale_reviews_on_push: boolean; + id: number; + ignore_approvals_from_contributors: boolean; + /** @enum {string} */ + linear_history_requirement_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + /** @enum {string} */ + merge_queue_enforcement_level: "off" | "non_admins" | "everyone"; + name: string; + /** @enum {string} */ + pull_request_reviews_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + repository_id: number; + require_code_owner_review: boolean; + required_approving_review_count: number; + /** @enum {string} */ + required_conversation_resolution_level: + | "off" + | "non_admins" + | "everyone"; + /** @enum {string} */ + required_deployments_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + required_status_checks: string[]; + /** @enum {string} */ + required_status_checks_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + /** @enum {string} */ + signature_requirement_enforcement_level: + | "off" + | "non_admins" + | "everyone"; + strict_required_status_checks_policy: boolean; + /** Format: date-time */ + updated_at: string; + }; + sender: components["schemas"]["simple-user"]; + }; + /** Check Run Completed Event */ + "webhook-check-run-completed": { + /** @enum {string} */ + action?: "completed"; + check_run: components["schemas"]["check-run-with-simple-check-suite"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** + * Check Run Completed Event + * @description The check_run.completed webhook encoded with URL encoding + */ + "webhook-check-run-completed-form-encoded": { + /** @description A URL-encoded string of the check_run.completed JSON payload. The decoded payload is a JSON object. */ + payload: string; + }; + /** Check Run Created Event */ + "webhook-check-run-created": { + /** @enum {string} */ + action?: "created"; + check_run: components["schemas"]["check-run-with-simple-check-suite"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** + * Check Run Created Event + * @description The check_run.created webhook encoded with URL encoding + */ + "webhook-check-run-created-form-encoded": { + /** @description A URL-encoded string of the check_run.created JSON payload. The decoded payload is a JSON object. */ + payload: string; + }; + /** Check Run Requested Action Event */ + "webhook-check-run-requested-action": { + /** @enum {string} */ + action: "requested_action"; + check_run: components["schemas"]["check-run-with-simple-check-suite"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + /** @description The action requested by the user. 
*/ + requested_action?: { + /** @description The integrator reference of the action requested by the user. */ + identifier?: string; + }; + sender: components["schemas"]["simple-user"]; + }; + /** + * Check Run Requested Action Event + * @description The check_run.requested_action webhook encoded with URL encoding + */ + "webhook-check-run-requested-action-form-encoded": { + /** @description A URL-encoded string of the check_run.requested_action JSON payload. The decoded payload is a JSON object. */ + payload: string; + }; + /** Check Run Re-Requested Event */ + "webhook-check-run-rerequested": { + /** @enum {string} */ + action?: "rerequested"; + check_run: components["schemas"]["check-run-with-simple-check-suite"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** + * Check Run Re-Requested Event + * @description The check_run.rerequested webhook encoded with URL encoding + */ + "webhook-check-run-rerequested-form-encoded": { + /** @description A URL-encoded string of the check_run.rerequested JSON payload. The decoded payload is a JSON object. */ + payload: string; + }; + /** check_suite completed event */ + "webhook-check-suite-completed": { + /** @enum {string} */ + action: "completed"; + actions_meta?: Record | null; + /** @description The [check_suite](https://docs.github.com/rest/reference/checks#suites). */ + check_suite: { + after: string | null; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + app: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "merge_group" + | "pull_request_review_thread" + | "workflow_job" + | "merge_queue_entry" + | "security_and_analysis" + | "projects_v2_item" + | "secret_scanning_alert_location" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + 
organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + }; + before: string | null; + /** Format: uri */ + check_runs_url: string; + /** + * @description The summary conclusion for all check runs that are part of the check suite. Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has `completed`. + * @enum {string|null} + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "" + | "skipped" + | "startup_failure" + | null; + /** Format: date-time */ + created_at: string; + /** @description The head branch name the changes are on. */ + head_branch: string | null; + /** SimpleCommit */ + head_commit: { + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + author: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + }; + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + committer: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + }; + id: string; + message: string; + timestamp: string; + tree_id: string; + }; + /** @description The SHA of the head commit that is being checked. */ + head_sha: string; + id: number; + latest_check_runs_count: number; + node_id: string; + /** @description An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_sha` and `head_branch`. When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. */ + pull_requests: { + base: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + head: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + id: number; + number: number; + /** Format: uri */ + url: string; + }[]; + rerequestable?: boolean; + runs_rerequestable?: boolean; + /** + * @description The summary status for all check runs that are part of the check suite. Can be `requested`, `in_progress`, or `completed`. 
+ * @enum {string|null} + */ + status: + | "requested" + | "in_progress" + | "completed" + | "queued" + | "" + | "pending" + | null; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL that points to the check suite API resource. + */ + url: string; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** check_suite requested event */ + "webhook-check-suite-requested": { + /** @enum {string} */ + action: "requested"; + actions_meta?: Record | null; + /** @description The [check_suite](https://docs.github.com/rest/reference/checks#suites). */ + check_suite: { + after: string | null; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. + */ + app: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "pull_request_review_thread" + | "workflow_job" + | "merge_queue_entry" + | "security_and_analysis" + | "secret_scanning_alert_location" + | "projects_v2_item" + | "merge_group" + | "repository_import" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { 
+ /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + }; + before: string | null; + /** Format: uri */ + check_runs_url: string; + /** + * @description The summary conclusion for all check runs that are part of the check suite. Can be one of `success`, `failure`,` neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + * @enum {string|null} + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "" + | "skipped" + | null; + /** Format: date-time */ + created_at: string; + /** @description The head branch name the changes are on. */ + head_branch: string | null; + /** SimpleCommit */ + head_commit: { + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + author: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + }; + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + committer: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. 
*/ + name: string; + username?: string; + }; + id: string; + message: string; + timestamp: string; + tree_id: string; + }; + /** @description The SHA of the head commit that is being checked. */ + head_sha: string; + id: number; + latest_check_runs_count: number; + node_id: string; + /** @description An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_sha` and `head_branch`. When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. */ + pull_requests: { + base: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + head: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + id: number; + number: number; + /** Format: uri */ + url: string; + }[]; + rerequestable?: boolean; + runs_rerequestable?: boolean; + /** + * @description The summary status for all check runs that are part of the check suite. Can be `requested`, `in_progress`, or `completed`. + * @enum {string|null} + */ + status: + | "requested" + | "in_progress" + | "completed" + | "queued" + | "" + | null; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL that points to the check suite API resource. + */ + url: string; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** check_suite rerequested event */ + "webhook-check-suite-rerequested": { + /** @enum {string} */ + action: "rerequested"; + actions_meta?: { + rerun_info?: { + plan_id?: string; + job_ids?: string[]; + }; + } | null; + /** @description The [check_suite](https://docs.github.com/rest/reference/checks#suites). */ + check_suite: { + after: string | null; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + app: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "pull_request_review_thread" + | "merge_queue_entry" + | "workflow_job" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | 
"write"; + /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + }; + before: string | null; + /** Format: uri */ + check_runs_url: string; + /** + * @description The summary conclusion for all check runs that are part of the check suite. Can be one of `success`, `failure`,` neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + * @enum {string|null} + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "" + | null; + /** Format: date-time */ + created_at: string; + /** @description The head branch name the changes are on. */ + head_branch: string | null; + /** SimpleCommit */ + head_commit: { + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + author: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + }; + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + committer: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + }; + id: string; + message: string; + timestamp: string; + tree_id: string; + }; + /** @description The SHA of the head commit that is being checked. */ + head_sha: string; + id: number; + latest_check_runs_count: number; + node_id: string; + /** @description An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_sha` and `head_branch`. When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. */ + pull_requests: { + base: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + head: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + id: number; + number: number; + /** Format: uri */ + url: string; + }[]; + rerequestable?: boolean; + runs_rerequestable?: boolean; + /** + * @description The summary status for all check runs that are part of the check suite. Can be `requested`, `in_progress`, or `completed`. 
+ * @enum {string|null} + */ + status: + | "requested" + | "in_progress" + | "completed" + | "queued" + | "" + | null; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL that points to the check suite API resource. + */ + url: string; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** code_scanning_alert appeared_in_branch event */ + "webhook-code-scanning-alert-appeared-in-branch": { + /** @enum {string} */ + action: "appeared_in_branch"; + /** @description The code scanning alert involved in the event. */ + alert: { + /** + * Format: date-time + * @description The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * Format: date-time + * @description The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: string | null; + /** User */ + dismissed_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + * @enum {string|null} + */ + dismissed_reason: + | "false positive" + | "won't fix" + | "used in tests" + | "" + | null; + /** + * Format: uri + * @description The GitHub URL of the alert resource. + */ + html_url: string; + /** Alert Instance */ + most_recent_instance?: { + /** @description Identifies the configuration under which the analysis was executed. For example, in GitHub Actions this includes the workflow filename and job name. */ + analysis_key: string; + /** @description Identifies the configuration under which the analysis was executed. */ + category?: string; + classifications?: string[]; + commit_sha?: string; + /** @description Identifies the variable values associated with the environment in which the analysis that generated this alert instance was performed, such as the language that was analyzed. */ + environment: string; + location?: { + end_column?: number; + end_line?: number; + path?: string; + start_column?: number; + start_line?: number; + }; + message?: { + text?: string; + }; + /** @description The full Git reference, formatted as `refs/heads/`. */ + ref: string; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + state: "open" | "dismissed" | "fixed"; + } | null; + /** @description The code scanning alert number. */ + number: number; + rule: { + /** @description A short description of the rule used to detect the alert. 
*/ + description: string; + /** @description A unique identifier for the rule used to detect the alert. */ + id: string; + /** + * @description The severity of the alert. + * @enum {string|null} + */ + severity: "none" | "note" | "warning" | "error" | "" | null; + }; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + state: "open" | "dismissed" | "fixed"; + tool: { + /** @description The name of the tool used to generate the code scanning analysis alert. */ + name: string; + /** @description The version of the tool used to detect the alert. */ + version: string | null; + }; + /** Format: uri */ + url: string; + }; + /** @description The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. */ + commit_oid: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. */ + ref: string; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** code_scanning_alert closed_by_user event */ + "webhook-code-scanning-alert-closed-by-user": { + /** @enum {string} */ + action: "closed_by_user"; + /** @description The code scanning alert involved in the event. */ + alert: { + /** + * Format: date-time + * @description The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * Format: date-time + * @description The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: string; + /** User */ + dismissed_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + * @enum {string|null} + */ + dismissed_reason: + | "false positive" + | "won't fix" + | "used in tests" + | "" + | null; + /** + * Format: uri + * @description The GitHub URL of the alert resource. + */ + html_url: string; + /** Alert Instance */ + most_recent_instance?: { + /** @description Identifies the configuration under which the analysis was executed. For example, in GitHub Actions this includes the workflow filename and job name. */ + analysis_key: string; + /** @description Identifies the configuration under which the analysis was executed. 
*/ + category?: string; + classifications?: string[]; + commit_sha?: string; + /** @description Identifies the variable values associated with the environment in which the analysis that generated this alert instance was performed, such as the language that was analyzed. */ + environment: string; + location?: { + end_column?: number; + end_line?: number; + path?: string; + start_column?: number; + start_line?: number; + }; + message?: { + text?: string; + }; + /** @description The full Git reference, formatted as `refs/heads/`. */ + ref: string; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + state: "open" | "dismissed" | "fixed"; + } | null; + /** @description The code scanning alert number. */ + number: number; + rule: { + /** @description A short description of the rule used to detect the alert. */ + description: string; + full_description?: string; + help?: string | null; + /** @description A link to the documentation for the rule used to detect the alert. */ + help_uri?: string | null; + /** @description A unique identifier for the rule used to detect the alert. */ + id: string; + name?: string; + /** + * @description The severity of the alert. + * @enum {string|null} + */ + severity: "none" | "note" | "warning" | "error" | "" | null; + tags?: string[] | null; + }; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + state: "dismissed" | "fixed"; + tool: { + guid?: string | null; + /** @description The name of the tool used to generate the code scanning analysis alert. */ + name: string; + /** @description The version of the tool used to detect the alert. */ + version: string | null; + }; + /** Format: uri */ + url: string; + }; + /** @description The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. */ + commit_oid: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. */ + ref: string; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** code_scanning_alert created event */ + "webhook-code-scanning-alert-created": { + /** @enum {string} */ + action: "created"; + /** @description The code scanning alert involved in the event. */ + alert: { + /** + * Format: date-time + * @description The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string | null; + /** @description The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. */ + dismissed_at: Record | null; + dismissed_by: Record | null; + dismissed_comment?: components["schemas"]["code-scanning-alert-dismissed-comment"]; + /** @description The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. */ + dismissed_reason: Record | null; + fixed_at?: Record | null; + /** + * Format: uri + * @description The GitHub URL of the alert resource. + */ + html_url: string; + instances_url?: string; + /** Alert Instance */ + most_recent_instance?: { + /** @description Identifies the configuration under which the analysis was executed. 
For example, in GitHub Actions this includes the workflow filename and job name. */ + analysis_key: string; + /** @description Identifies the configuration under which the analysis was executed. */ + category?: string; + classifications?: string[]; + commit_sha?: string; + /** @description Identifies the variable values associated with the environment in which the analysis that generated this alert instance was performed, such as the language that was analyzed. */ + environment: string; + location?: { + end_column?: number; + end_line?: number; + path?: string; + start_column?: number; + start_line?: number; + }; + message?: { + text?: string; + }; + /** @description The full Git reference, formatted as `refs/heads/`. */ + ref: string; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + state: "open" | "dismissed" | "fixed"; + } | null; + /** @description The code scanning alert number. */ + number: number; + rule: { + /** @description A short description of the rule used to detect the alert. */ + description: string; + full_description?: string; + help?: string | null; + /** @description A link to the documentation for the rule used to detect the alert. */ + help_uri?: string | null; + /** @description A unique identifier for the rule used to detect the alert. */ + id: string; + name?: string; + /** + * @description The severity of the alert. + * @enum {string|null} + */ + severity: "none" | "note" | "warning" | "error" | "" | null; + tags?: string[] | null; + }; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + state: "open" | "dismissed"; + tool: { + guid?: string | null; + /** @description The name of the tool used to generate the code scanning analysis alert. */ + name: string; + /** @description The version of the tool used to detect the alert. */ + version: string | null; + } | null; + updated_at?: string | null; + /** Format: uri */ + url: string; + }; + /** @description The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. */ + commit_oid: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. */ + ref: string; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** code_scanning_alert fixed event */ + "webhook-code-scanning-alert-fixed": { + /** @enum {string} */ + action: "fixed"; + /** @description The code scanning alert involved in the event. */ + alert: { + /** + * Format: date-time + * @description The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * Format: date-time + * @description The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. 
+ */ + dismissed_at: string | null; + /** User */ + dismissed_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + * @enum {string|null} + */ + dismissed_reason: + | "false positive" + | "won't fix" + | "used in tests" + | "" + | null; + /** + * Format: uri + * @description The GitHub URL of the alert resource. + */ + html_url: string; + /** Format: uri */ + instances_url?: string; + /** Alert Instance */ + most_recent_instance?: { + /** @description Identifies the configuration under which the analysis was executed. For example, in GitHub Actions this includes the workflow filename and job name. */ + analysis_key: string; + /** @description Identifies the configuration under which the analysis was executed. */ + category?: string; + classifications?: string[]; + commit_sha?: string; + /** @description Identifies the variable values associated with the environment in which the analysis that generated this alert instance was performed, such as the language that was analyzed. */ + environment: string; + location?: { + end_column?: number; + end_line?: number; + path?: string; + start_column?: number; + start_line?: number; + }; + message?: { + text?: string; + }; + /** @description The full Git reference, formatted as `refs/heads/`. */ + ref: string; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + state: "open" | "dismissed" | "fixed"; + } | null; + /** @description The code scanning alert number. */ + number: number; + rule: { + /** @description A short description of the rule used to detect the alert. */ + description: string; + full_description?: string; + help?: string | null; + /** @description A link to the documentation for the rule used to detect the alert. */ + help_uri?: string | null; + /** @description A unique identifier for the rule used to detect the alert. */ + id: string; + name?: string; + /** + * @description The severity of the alert. + * @enum {string|null} + */ + severity: "none" | "note" | "warning" | "error" | "" | null; + tags?: string[] | null; + }; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + state: "fixed"; + tool: { + guid?: string | null; + /** @description The name of the tool used to generate the code scanning analysis alert. */ + name: string; + /** @description The version of the tool used to detect the alert. */ + version: string | null; + }; + /** Format: uri */ + url: string; + }; + /** @description The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. 
*/ + commit_oid: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. */ + ref: string; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** code_scanning_alert reopened event */ + "webhook-code-scanning-alert-reopened": { + /** @enum {string} */ + action: "reopened"; + /** @description The code scanning alert involved in the event. */ + alert: { + /** + * Format: date-time + * @description The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** @description The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. */ + dismissed_at: string | null; + dismissed_by: Record | null; + /** @description The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. */ + dismissed_reason: string | null; + /** + * Format: uri + * @description The GitHub URL of the alert resource. + */ + html_url: string; + /** Alert Instance */ + most_recent_instance?: { + /** @description Identifies the configuration under which the analysis was executed. For example, in GitHub Actions this includes the workflow filename and job name. */ + analysis_key: string; + /** @description Identifies the configuration under which the analysis was executed. */ + category?: string; + classifications?: string[]; + commit_sha?: string; + /** @description Identifies the variable values associated with the environment in which the analysis that generated this alert instance was performed, such as the language that was analyzed. */ + environment: string; + location?: { + end_column?: number; + end_line?: number; + path?: string; + start_column?: number; + start_line?: number; + }; + message?: { + text?: string; + }; + /** @description The full Git reference, formatted as `refs/heads/`. */ + ref: string; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + state: "open" | "dismissed" | "fixed"; + } | null; + /** @description The code scanning alert number. */ + number: number; + rule: { + /** @description A short description of the rule used to detect the alert. */ + description: string; + full_description?: string; + help?: string | null; + /** @description A link to the documentation for the rule used to detect the alert. */ + help_uri?: string | null; + /** @description A unique identifier for the rule used to detect the alert. */ + id: string; + name?: string; + /** + * @description The severity of the alert. + * @enum {string|null} + */ + severity: "none" | "note" | "warning" | "error" | "" | null; + tags?: string[] | null; + }; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + state: "open" | "dismissed" | "fixed"; + tool: { + guid?: string | null; + /** @description The name of the tool used to generate the code scanning analysis alert. */ + name: string; + /** @description The version of the tool used to detect the alert. */ + version: string | null; + }; + /** Format: uri */ + url: string; + } | null; + /** @description The commit SHA of the code scanning alert. 
When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. */ + commit_oid: string | null; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. */ + ref: string | null; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** code_scanning_alert reopened_by_user event */ + "webhook-code-scanning-alert-reopened-by-user": { + /** @enum {string} */ + action: "reopened_by_user"; + /** @description The code scanning alert involved in the event. */ + alert: { + /** + * Format: date-time + * @description The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** @description The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. */ + dismissed_at: Record | null; + dismissed_by: Record | null; + /** @description The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. */ + dismissed_reason: Record | null; + /** + * Format: uri + * @description The GitHub URL of the alert resource. + */ + html_url: string; + /** Alert Instance */ + most_recent_instance?: { + /** @description Identifies the configuration under which the analysis was executed. For example, in GitHub Actions this includes the workflow filename and job name. */ + analysis_key: string; + /** @description Identifies the configuration under which the analysis was executed. */ + category?: string; + classifications?: string[]; + commit_sha?: string; + /** @description Identifies the variable values associated with the environment in which the analysis that generated this alert instance was performed, such as the language that was analyzed. */ + environment: string; + location?: { + end_column?: number; + end_line?: number; + path?: string; + start_column?: number; + start_line?: number; + }; + message?: { + text?: string; + }; + /** @description The full Git reference, formatted as `refs/heads/`. */ + ref: string; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + state: "open" | "dismissed" | "fixed"; + } | null; + /** @description The code scanning alert number. */ + number: number; + rule: { + /** @description A short description of the rule used to detect the alert. */ + description: string; + /** @description A unique identifier for the rule used to detect the alert. */ + id: string; + /** + * @description The severity of the alert. + * @enum {string|null} + */ + severity: "none" | "note" | "warning" | "error" | "" | null; + }; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + state: "open" | "fixed"; + tool: { + /** @description The name of the tool used to generate the code scanning analysis alert. */ + name: string; + /** @description The version of the tool used to detect the alert. */ + version: string | null; + }; + /** Format: uri */ + url: string; + }; + /** @description The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. 
*/ + commit_oid: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. */ + ref: string; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** commit_comment created event */ + "webhook-commit-comment-created": { + /** + * @description The action performed. Can be `created`. + * @enum {string} + */ + action: "created"; + /** @description The [commit comment](https://docs.github.com/rest/reference/repos#get-a-commit-comment) resource. */ + comment: { + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description The text of the comment. */ + body: string; + /** @description The SHA of the commit to which the comment applies. */ + commit_id: string; + created_at: string; + /** Format: uri */ + html_url: string; + /** @description The ID of the commit comment. */ + id: number; + /** @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ + line: number | null; + /** @description The node ID of the commit comment. */ + node_id: string; + /** @description The relative path of the file to which the comment applies. */ + path: string | null; + /** @description The line index in the diff to which the comment applies. */ + position: number | null; + /** Reactions */ + reactions?: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** create event */ + "webhook-create": { + /** @description The repository's current description. 
*/ + description: string | null; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The name of the repository's default branch (usually `main`). */ + master_branch: string; + organization?: components["schemas"]["organization-simple"]; + /** @description The pusher type for the event. Can be either `user` or a deploy key. */ + pusher_type: string; + /** @description The [`git ref`](https://docs.github.com/rest/reference/git#get-a-reference) resource. */ + ref: string; + /** + * @description The type of Git ref object created in the repository. + * @enum {string} + */ + ref_type: "tag" | "branch"; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** delete event */ + "webhook-delete": { + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description The pusher type for the event. Can be either `user` or a deploy key. */ + pusher_type: string; + /** @description The [`git ref`](https://docs.github.com/rest/reference/git#get-a-reference) resource. */ + ref: string; + /** + * @description The type of Git ref object deleted in the repository. + * @enum {string} + */ + ref_type: "tag" | "branch"; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** Dependabot alert auto-dismissed event */ + "webhook-dependabot-alert-auto-dismissed": { + /** @enum {string} */ + action: "auto_dismissed"; + alert: components["schemas"]["dependabot-alert"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + enterprise?: components["schemas"]["enterprise"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** Dependabot alert auto-reopened event */ + "webhook-dependabot-alert-auto-reopened": { + /** @enum {string} */ + action: "auto_reopened"; + alert: components["schemas"]["dependabot-alert"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + enterprise?: components["schemas"]["enterprise"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** Dependabot alert created event */ + "webhook-dependabot-alert-created": { + /** @enum {string} */ + action: "created"; + alert: components["schemas"]["dependabot-alert"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + enterprise?: components["schemas"]["enterprise"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** Dependabot alert dismissed event */ + "webhook-dependabot-alert-dismissed": { + /** @enum {string} */ + action: "dismissed"; + alert: components["schemas"]["dependabot-alert"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + enterprise?: components["schemas"]["enterprise"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** Dependabot alert fixed event */ + "webhook-dependabot-alert-fixed": { + /** @enum {string} */ + action: "fixed"; + alert: components["schemas"]["dependabot-alert"]; + installation?: 
components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + enterprise?: components["schemas"]["enterprise"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** Dependabot alert reintroduced event */ + "webhook-dependabot-alert-reintroduced": { + /** @enum {string} */ + action: "reintroduced"; + alert: components["schemas"]["dependabot-alert"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + enterprise?: components["schemas"]["enterprise"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** Dependabot alert reopened event */ + "webhook-dependabot-alert-reopened": { + /** @enum {string} */ + action: "reopened"; + alert: components["schemas"]["dependabot-alert"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + enterprise?: components["schemas"]["enterprise"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** deploy_key created event */ + "webhook-deploy-key-created": { + /** @enum {string} */ + action: "created"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The [`deploy key`](https://docs.github.com/rest/reference/deployments#get-a-deploy-key) resource. */ + key: { + added_by?: string | null; + created_at: string; + id: number; + key: string; + last_used?: string | null; + read_only: boolean; + title: string; + /** Format: uri */ + url: string; + verified: boolean; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** deploy_key deleted event */ + "webhook-deploy-key-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The [`deploy key`](https://docs.github.com/rest/reference/deployments#get-a-deploy-key) resource. */ + key: { + added_by?: string | null; + created_at: string; + id: number; + key: string; + last_used?: string | null; + read_only: boolean; + title: string; + /** Format: uri */ + url: string; + verified: boolean; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** deployment created event */ + "webhook-deployment-created": { + /** @enum {string} */ + action: "created"; + /** + * Deployment + * @description The [deployment](https://docs.github.com/rest/reference/deployments#list-deployments). 
+ */ + deployment: { + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + environment: string; + id: number; + node_id: string; + original_environment: string; + payload: Record | string; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. + */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "workflow_job" + | "pull_request_review_thread" + | "merge_queue_entry" + | "secret_scanning_alert_location" + | "merge_group" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** 
Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + production_environment?: boolean; + ref: string; + /** Format: uri */ + repository_url: string; + sha: string; + /** Format: uri */ + statuses_url: string; + task: string; + transient_environment?: boolean; + updated_at: string; + /** Format: uri */ + url: string; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + /** Workflow */ + workflow: { + /** Format: uri */ + badge_url: string; + /** Format: date-time */ + created_at: string; + /** Format: uri */ + html_url: string; + id: number; + name: string; + node_id: string; + path: string; + state: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + /** Deployment Workflow Run */ + workflow_run: { + /** User */ + actor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + 
followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + artifacts_url?: string; + cancel_url?: string; + check_suite_id: number; + check_suite_node_id: string; + check_suite_url?: string; + /** @enum {string|null} */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "" + | null; + /** Format: date-time */ + created_at: string; + display_title: string; + event: string; + head_branch: string; + head_commit?: Record | null; + head_repository?: { + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + contents_url?: string; + contributors_url?: string; + deployments_url?: string; + description?: Record | null; + downloads_url?: string; + events_url?: string; + fork?: boolean; + forks_url?: string; + full_name?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + hooks_url?: string; + html_url?: string; + id?: number; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + name?: string; + node_id?: string; + notifications_url?: string; + owner?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + private?: boolean; + pulls_url?: string; + releases_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; + subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + url?: string; + }; + head_sha: string; + /** Format: uri */ + html_url: string; + id: number; + jobs_url?: string; + logs_url?: string; + name: string; + node_id: string; + path: string; + previous_attempt_url?: Record | null; + pull_requests: { + base: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + head: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + id: number; + number: number; + /** Format: uri */ + url: string; + }[]; + referenced_workflows?: + | { + path: string; + ref?: string; + sha: string; + }[] + | null; + repository?: { + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + 
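+ /* Descriptive note: this inline "repository" object is an abbreviated repository summary (identifiers and URL templates only); the full repository shape is still delivered at the top level of the payload via components["schemas"]["repository"]. */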
contents_url?: string; + contributors_url?: string; + deployments_url?: string; + description?: Record | null; + downloads_url?: string; + events_url?: string; + fork?: boolean; + forks_url?: string; + full_name?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + hooks_url?: string; + html_url?: string; + id?: number; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + name?: string; + node_id?: string; + notifications_url?: string; + owner?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + private?: boolean; + pulls_url?: string; + releases_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; + subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + url?: string; + }; + rerun_url?: string; + run_attempt: number; + run_number: number; + /** Format: date-time */ + run_started_at: string; + /** @enum {string} */ + status: + | "requested" + | "in_progress" + | "completed" + | "queued" + | "waiting" + | "pending"; + /** User */ + triggering_actor?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + workflow_id: number; + workflow_url?: string; + } | null; + }; + /** deployment protection rule requested event */ + "webhook-deployment-protection-rule-requested": { + /** @enum {string} */ + action?: "requested"; + /** @description The name of the environment that has the deployment protection rule. */ + environment?: string; + /** @description The event that triggered the deployment protection rule. */ + event?: string; + /** + * Format: uri + * @description The URL to review the deployment protection rule. 
+ */ + deployment_callback_url?: string; + deployment?: components["schemas"]["deployment"]; + pull_requests?: components["schemas"]["pull-request"][]; + repository?: components["schemas"]["repository"]; + organization?: components["schemas"]["organization-simple"]; + installation?: components["schemas"]["simple-installation"]; + sender?: components["schemas"]["simple-user"]; + }; + /** deployment_status created event */ + "webhook-deployment-status-created": { + /** @enum {string} */ + action: "created"; + check_run?: { + /** Format: date-time */ + completed_at: string | null; + /** + * @description The result of the completed check run. Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + * @enum {string|null} + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | "" + | null; + /** Format: uri */ + details_url: string; + external_id: string; + /** @description The SHA of the commit that is being checked. */ + head_sha: string; + /** Format: uri */ + html_url: string; + /** @description The id of the check. */ + id: number; + /** @description The name of the check run. */ + name: string; + node_id: string; + /** Format: date-time */ + started_at: string; + /** + * @description The current status of the check run. Can be `queued`, `in_progress`, or `completed`. + * @enum {string} + */ + status: "queued" | "in_progress" | "completed" | "waiting" | "pending"; + /** Format: uri */ + url: string; + } | null; + /** + * Deployment + * @description The [deployment](https://docs.github.com/rest/reference/deployments#list-deployments). + */ + deployment: { + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + environment: string; + id: number; + node_id: string; + original_environment: string; + payload: string | Record; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "merge_queue_entry" + | "workflow_job" + | "pull_request_review_thread" + | "secret_scanning_alert_location" + | "merge_group" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; 
+ /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + production_environment?: boolean; + ref: string; + /** Format: uri */ + repository_url: string; + sha: string; + /** Format: uri */ + statuses_url: string; + task: string; + transient_environment?: boolean; + updated_at: string; + /** Format: uri */ + url: string; + }; + /** @description The [deployment status](https://docs.github.com/rest/reference/deployments#list-deployment-statuses). */ + deployment_status: { + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + deployment_url: string; + /** @description The optional human-readable description added to the status. */ + description: string; + environment: string; + /** Format: uri */ + environment_url?: string; + id: number; + /** Format: uri */ + log_url?: string; + node_id: string; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "pull_request_review_thread" + | "merge_queue_entry" + | "workflow_job" + | "merge_group" + | "secret_scanning_alert_location" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; 
+ /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + /** Format: uri */ + repository_url: string; + /** @description The new state. Can be `pending`, `success`, `failure`, or `error`. */ + state: string; + /** @description The optional link added to the status. */ + target_url: string; + updated_at: string; + /** Format: uri */ + url: string; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + /** Workflow */ + workflow?: { + /** Format: uri */ + badge_url: string; + /** Format: date-time */ + created_at: string; + /** Format: uri */ + html_url: string; + id: number; + name: string; + node_id: string; + path: string; + state: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + /** Deployment Workflow Run */ + workflow_run?: { + /** User */ + actor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + artifacts_url?: string; + cancel_url?: string; + check_suite_id: number; + check_suite_node_id: string; + check_suite_url?: string; + /** @enum {string|null} */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "" + | "startup_failure" + | null; + /** Format: date-time */ + created_at: string; + display_title: string; + event: string; + head_branch: string; + head_commit?: Record | null; + head_repository?: { + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; 
+ compare_url?: string; + contents_url?: string; + contributors_url?: string; + deployments_url?: string; + description?: Record | null; + downloads_url?: string; + events_url?: string; + fork?: boolean; + forks_url?: string; + full_name?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + hooks_url?: string; + html_url?: string; + id?: number; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + name?: string; + node_id?: string; + notifications_url?: string; + owner?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + private?: boolean; + pulls_url?: string; + releases_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; + subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + url?: string; + }; + head_sha: string; + /** Format: uri */ + html_url: string; + id: number; + jobs_url?: string; + logs_url?: string; + name: string; + node_id: string; + path: string; + previous_attempt_url?: Record | null; + pull_requests: { + base: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + head: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + id: number; + number: number; + /** Format: uri */ + url: string; + }[]; + referenced_workflows?: + | { + path: string; + ref?: string; + sha: string; + }[] + | null; + repository?: { + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + contents_url?: string; + contributors_url?: string; + deployments_url?: string; + description?: Record | null; + downloads_url?: string; + events_url?: string; + fork?: boolean; + forks_url?: string; + full_name?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + hooks_url?: string; + html_url?: string; + id?: number; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + name?: string; + node_id?: string; + notifications_url?: string; + owner?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + private?: boolean; + pulls_url?: string; + releases_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; + subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + 
url?: string; + }; + rerun_url?: string; + run_attempt: number; + run_number: number; + /** Format: date-time */ + run_started_at: string; + /** @enum {string} */ + status: + | "requested" + | "in_progress" + | "completed" + | "queued" + | "waiting" + | "pending"; + /** User */ + triggering_actor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + workflow_id: number; + workflow_url?: string; + } | null; + }; + /** discussion answered event */ + "webhook-discussion-answered": { + /** @enum {string} */ + action: "answered"; + answer: { + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + body: string; + child_comment_count: number; + /** Format: date-time */ + created_at: string; + discussion_id: number; + html_url: string; + id: number; + node_id: string; + parent_id: Record | null; + /** Reactions */ + reactions?: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + repository_url: string; + /** Format: date-time */ + updated_at: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion category changed event */ + "webhook-discussion-category-changed": { + /** @enum {string} */ + action: "category_changed"; + changes: { + category: { + from: 
{ + /** Format: date-time */ + created_at: string; + description: string; + emoji: string; + id: number; + is_answerable: boolean; + name: string; + node_id?: string; + repository_id: number; + slug: string; + updated_at: string; + }; + }; + }; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion closed event */ + "webhook-discussion-closed": { + /** @enum {string} */ + action: "closed"; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion_comment created event */ + "webhook-discussion-comment-created": { + /** @enum {string} */ + action: "created"; + comment: { + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + body: string; + child_comment_count: number; + created_at: string; + discussion_id: number; + html_url: string; + id: number; + node_id: string; + parent_id: number | null; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + repository_url: string; + updated_at: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion_comment deleted event */ + "webhook-discussion-comment-deleted": { + /** @enum {string} */ + action: "deleted"; + comment: { + /** + * AuthorAssociation + * @description How the author is associated with the repository. 
+ * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + body: string; + child_comment_count: number; + created_at: string; + discussion_id: number; + html_url: string; + id: number; + node_id: string; + parent_id: number | null; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + repository_url: string; + updated_at: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion_comment edited event */ + "webhook-discussion-comment-edited": { + /** @enum {string} */ + action: "edited"; + changes: { + body: { + from: string; + }; + }; + comment: { + /** + * AuthorAssociation + * @description How the author is associated with the repository. 
+ * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + body: string; + child_comment_count: number; + created_at: string; + discussion_id: number; + html_url: string; + id: number; + node_id: string; + parent_id: number | null; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + repository_url: string; + updated_at: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion created event */ + "webhook-discussion-created": { + /** @enum {string} */ + action: "created"; + discussion: { + active_lock_reason: string | null; + answer_chosen_at: string | null; + /** User */ + answer_chosen_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + answer_html_url: string | null; + /** + * AuthorAssociation + * @description How the author is associated with the repository. 
+ * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + body: string | null; + category: { + /** Format: date-time */ + created_at: string; + description: string; + emoji: string; + id: number; + is_answerable: boolean; + name: string; + node_id?: string; + repository_id: number; + slug: string; + updated_at: string; + }; + comments: number; + /** Format: date-time */ + created_at: string; + html_url: string; + id: number; + locked: boolean; + node_id: string; + number: number; + /** Reactions */ + reactions?: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + repository_url: string; + /** @enum {string} */ + state: "open" | "locked" | "converting" | "transferring"; + timeline_url?: string; + title: string; + /** Format: date-time */ + updated_at: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + } & { + active_lock_reason?: Record | null; + answer_chosen_at: Record | null; + answer_chosen_by: Record | null; + answer_html_url: string | null; + author_association?: string; + body?: string | null; + category?: { + created_at?: string; + description?: string; + emoji?: string; + id?: number; + is_answerable?: boolean; + name?: string; + node_id?: string; + repository_id?: number; + slug?: string; + updated_at?: string; + }; + comments?: number; + created_at?: string; + html_url?: string; + id?: number; + /** @enum {boolean} */ + locked: false; + node_id?: string; + number?: number; + reactions?: { + "+1"?: number; + "-1"?: number; + confused?: number; + eyes?: number; + heart?: number; + hooray?: number; + laugh?: number; + rocket?: number; + total_count?: number; + url?: string; + }; + repository_url?: string; + /** @enum {string} */ + state: "open" | "converting" | "transferring"; + timeline_url?: string; + title?: string; + updated_at?: string; + user?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: 
components["schemas"]["simple-user"]; + }; + /** discussion deleted event */ + "webhook-discussion-deleted": { + /** @enum {string} */ + action: "deleted"; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion edited event */ + "webhook-discussion-edited": { + /** @enum {string} */ + action: "edited"; + changes?: { + body?: { + from: string; + }; + title?: { + from: string; + }; + }; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion labeled event */ + "webhook-discussion-labeled": { + /** @enum {string} */ + action: "labeled"; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** Label */ + label: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion locked event */ + "webhook-discussion-locked": { + /** @enum {string} */ + action: "locked"; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion pinned event */ + "webhook-discussion-pinned": { + /** @enum {string} */ + action: "pinned"; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion reopened event */ + "webhook-discussion-reopened": { + /** @enum {string} */ + action: "reopened"; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion transferred event */ + "webhook-discussion-transferred": { + /** @enum {string} */ + action: "transferred"; + changes: { + new_discussion: components["schemas"]["discussion"]; + new_repository: components["schemas"]["repository"]; + }; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: 
components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion unanswered event */ + "webhook-discussion-unanswered": { + /** @enum {string} */ + action: "unanswered"; + discussion: components["schemas"]["discussion"]; + old_answer: { + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + body: string; + child_comment_count: number; + /** Format: date-time */ + created_at: string; + discussion_id: number; + html_url: string; + id: number; + node_id: string; + parent_id: Record | null; + /** Reactions */ + reactions?: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + repository_url: string; + /** Format: date-time */ + updated_at: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** discussion unlabeled event */ + "webhook-discussion-unlabeled": { + /** @enum {string} */ + action: "unlabeled"; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** Label */ + label: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. 
*/ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion unlocked event */ + "webhook-discussion-unlocked": { + /** @enum {string} */ + action: "unlocked"; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** discussion unpinned event */ + "webhook-discussion-unpinned": { + /** @enum {string} */ + action: "unpinned"; + discussion: components["schemas"]["discussion"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** + * fork event + * @description A user forks a repository. + */ + "webhook-fork": { + enterprise?: components["schemas"]["enterprise"]; + /** @description The created [`repository`](https://docs.github.com/rest/reference/repos#get-a-repository) resource. */ + forkee: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + } & { + allow_forking?: boolean; + archive_url?: string; + archived?: boolean; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + clone_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + contents_url?: string; + contributors_url?: string; + created_at?: string; + default_branch?: string; + deployments_url?: string; + description?: string | null; + disabled?: boolean; + downloads_url?: string; + events_url?: string; + /** @enum {boolean} */ + fork?: true; + forks?: number; + forks_count?: number; + forks_url?: string; + full_name?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + git_url?: string; + has_downloads?: boolean; + has_issues?: boolean; + has_pages?: boolean; + has_projects?: boolean; + has_wiki?: boolean; + homepage?: string | null; + hooks_url?: string; + html_url?: string; + id?: number; + is_template?: boolean; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + language?: Record | null; + languages_url?: string; + license?: Record | null; + merges_url?: string; + milestones_url?: string; + mirror_url?: Record | null; + name?: string; + node_id?: string; + notifications_url?: string; + open_issues?: number; + open_issues_count?: number; + owner?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + private?: boolean; + public?: boolean; + pulls_url?: string; + pushed_at?: string; + releases_url?: string; + size?: number; + ssh_url?: string; + stargazers_count?: number; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; + subscription_url?: string; + svn_url?: string; + tags_url?: string; + teams_url?: string; + topics?: (Record | null)[]; + trees_url?: string; + updated_at?: string; + url?: string; + visibility?: string; + watchers?: number; + watchers_count?: number; + }; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: 
components["schemas"]["simple-user"]; + }; + /** github_app_authorization revoked event */ + "webhook-github-app-authorization-revoked": { + /** @enum {string} */ + action: "revoked"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** gollum event */ + "webhook-gollum": { + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description The pages that were updated. */ + pages: { + /** + * @description The action that was performed on the page. Can be `created` or `edited`. + * @enum {string} + */ + action: "created" | "edited"; + /** + * Format: uri + * @description Points to the HTML wiki page. + */ + html_url: string; + /** @description The name of the page. */ + page_name: string; + /** @description The latest commit SHA of the page. */ + sha: string; + summary: string | null; + /** @description The current page title. */ + title: string; + }[]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** installation created event */ + "webhook-installation-created": { + /** @enum {string} */ + action: "created"; + enterprise?: components["schemas"]["enterprise"]; + installation: components["schemas"]["installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description An array of repository objects that the installation can access. */ + repositories?: { + full_name: string; + /** @description Unique identifier of the repository */ + id: number; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** @description Whether the repository is private or public. */ + private: boolean; + }[]; + repository?: components["schemas"]["repository"]; + /** User */ + requester?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + sender: components["schemas"]["simple-user"]; + }; + /** installation deleted event */ + "webhook-installation-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + installation: components["schemas"]["installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description An array of repository objects that the installation can access. */ + repositories?: { + full_name: string; + /** @description Unique identifier of the repository */ + id: number; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** @description Whether the repository is private or public. */ + private: boolean; + }[]; + repository?: components["schemas"]["repository"]; + requester?: Record | null; + sender: components["schemas"]["simple-user"]; + }; + /** installation new_permissions_accepted event */ + "webhook-installation-new-permissions-accepted": { + /** @enum {string} */ + action: "new_permissions_accepted"; + enterprise?: components["schemas"]["enterprise"]; + installation: components["schemas"]["installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description An array of repository objects that the installation can access. */ + repositories?: { + full_name: string; + /** @description Unique identifier of the repository */ + id: number; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** @description Whether the repository is private or public. */ + private: boolean; + }[]; + repository?: components["schemas"]["repository"]; + requester?: Record | null; + sender: components["schemas"]["simple-user"]; + }; + /** installation_repositories added event */ + "webhook-installation-repositories-added": { + /** @enum {string} */ + action: "added"; + enterprise?: components["schemas"]["enterprise"]; + installation: components["schemas"]["installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description An array of repository objects, which were added to the installation. */ + repositories_added: { + full_name: string; + /** @description Unique identifier of the repository */ + id: number; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** @description Whether the repository is private or public. */ + private: boolean; + }[]; + /** @description An array of repository objects, which were removed from the installation. */ + repositories_removed: { + full_name?: string; + /** @description Unique identifier of the repository */ + id?: number; + /** @description The name of the repository. */ + name?: string; + node_id?: string; + /** @description Whether the repository is private or public. 
*/ + private?: boolean; + }[]; + repository?: components["schemas"]["repository"]; + /** + * @description Describe whether all repositories have been selected or there's a selection involved + * @enum {string} + */ + repository_selection: "all" | "selected"; + /** User */ + requester: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + sender: components["schemas"]["simple-user"]; + }; + /** installation_repositories removed event */ + "webhook-installation-repositories-removed": { + /** @enum {string} */ + action: "removed"; + enterprise?: components["schemas"]["enterprise"]; + installation: components["schemas"]["installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description An array of repository objects, which were added to the installation. */ + repositories_added: { + full_name: string; + /** @description Unique identifier of the repository */ + id: number; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** @description Whether the repository is private or public. */ + private: boolean; + }[]; + /** @description An array of repository objects, which were removed from the installation. */ + repositories_removed: { + full_name: string; + /** @description Unique identifier of the repository */ + id: number; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + }[]; + repository?: components["schemas"]["repository"]; + /** + * @description Describe whether all repositories have been selected or there's a selection involved + * @enum {string} + */ + repository_selection: "all" | "selected"; + /** User */ + requester: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + sender: components["schemas"]["simple-user"]; + }; + /** installation suspend event */ + "webhook-installation-suspend": { + /** @enum {string} */ + action: "suspend"; + enterprise?: components["schemas"]["enterprise"]; + installation: components["schemas"]["installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description An array of repository objects that the installation can access. */ + repositories?: { + full_name: string; + /** @description Unique identifier of the repository */ + id: number; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + }[]; + repository?: components["schemas"]["repository"]; + requester?: Record | null; + sender: components["schemas"]["simple-user"]; + }; + "webhook-installation-target-renamed": { + account: { + avatar_url: string; + created_at?: string; + description?: Record | null; + events_url?: string; + followers?: number; + followers_url?: string; + following?: number; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + has_organization_projects?: boolean; + has_repository_projects?: boolean; + hooks_url?: string; + html_url: string; + id: number; + is_verified?: boolean; + issues_url?: string; + login?: string; + members_url?: string; + name?: string; + node_id: string; + organizations_url?: string; + public_gists?: number; + public_members_url?: string; + public_repos?: number; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + slug?: string; + starred_url?: string; + subscriptions_url?: string; + type?: string; + updated_at?: string; + url?: string; + website_url?: Record | null; + }; + action: string; + changes: { + login?: { + from: string; + }; + slug?: { + from: string; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + target_type: string; + }; + /** installation unsuspend event */ + "webhook-installation-unsuspend": { + /** @enum {string} */ + action: "unsuspend"; + enterprise?: components["schemas"]["enterprise"]; + installation: components["schemas"]["installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description An array of repository objects that the installation can access. */ + repositories?: { + full_name: string; + /** @description Unique identifier of the repository */ + id: number; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** @description Whether the repository is private or public. */ + private: boolean; + }[]; + repository?: components["schemas"]["repository"]; + requester?: Record | null; + sender: components["schemas"]["simple-user"]; + }; + /** issue_comment created event */ + "webhook-issue-comment-created": { + /** @enum {string} */ + action: "created"; + /** + * issue comment + * @description The [comment](https://docs.github.com/rest/reference/issues#comments) itself. + */ + comment: { + /** + * AuthorAssociation + * @description How the author is associated with the repository. 
+ * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue comment */ + body: string; + /** Format: date-time */ + created_at: string; + /** Format: uri */ + html_url: string; + /** + * Format: int64 + * @description Unique identifier of the issue comment + */ + id: number; + /** Format: uri */ + issue_url: string; + node_id: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue comment + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The [issue](https://docs.github.com/rest/reference/issues) the comment belongs to. 
*/ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "reminder" + | "pull_request_review_thread" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; 
+ /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + } & { + active_lock_reason?: string | null; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: 
string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees?: (Record | null)[]; + author_association?: string; + body?: string | null; + closed_at?: string | null; + comments?: number; + comments_url?: string; + created_at?: string; + events_url?: string; + html_url?: string; + id?: number; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + labels_url?: string; + locked: boolean; + milestone?: Record | null; + node_id?: string; + number?: number; + performed_via_github_app?: Record | null; + reactions?: { + "+1"?: number; + "-1"?: number; + confused?: number; + eyes?: number; + heart?: number; + hooray?: number; + laugh?: number; + rocket?: number; + total_count?: number; + url?: string; + }; + repository_url?: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state: "open" | "closed"; + timeline_url?: string; + title?: string; + updated_at?: string; + url?: string; + user?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issue_comment deleted event */ + "webhook-issue-comment-deleted": { + /** @enum {string} */ + action: "deleted"; + /** + * issue comment + * @description The [comment](https://docs.github.com/rest/reference/issues#comments) itself. + */ + comment: { + /** + * AuthorAssociation + * @description How the author is associated with the repository. 
+ * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue comment */ + body: string; + /** Format: date-time */ + created_at: string; + /** Format: uri */ + html_url: string; + /** + * Format: int64 + * @description Unique identifier of the issue comment + */ + id: number; + /** Format: uri */ + issue_url: string; + node_id: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue comment + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The [issue](https://docs.github.com/rest/reference/issues) the comment belongs to. 
*/ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + 
/** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + } & { + active_lock_reason?: string | null; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | 
"Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees?: (Record | null)[]; + author_association?: string; + body?: string | null; + closed_at?: string | null; + comments?: number; + comments_url?: string; + created_at?: string; + events_url?: string; + html_url?: string; + id?: number; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + labels_url?: string; + locked: boolean; + milestone?: Record | null; + node_id?: string; + number?: number; + performed_via_github_app?: Record | null; + reactions?: { + "+1"?: number; + "-1"?: number; + confused?: number; + eyes?: number; + heart?: number; + hooray?: number; + laugh?: number; + rocket?: number; + total_count?: number; + url?: string; + }; + repository_url?: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state: "open" | "closed"; + timeline_url?: string; + title?: string; + updated_at?: string; + url?: string; + user?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issue_comment edited event */ + "webhook-issue-comment-edited": { + /** @enum {string} */ + action: "edited"; + /** @description The changes to the comment. */ + changes: { + body?: { + /** @description The previous version of the body. */ + from: string; + }; + }; + /** + * issue comment + * @description The [comment](https://docs.github.com/rest/reference/issues#comments) itself. + */ + comment: { + /** + * AuthorAssociation + * @description How the author is associated with the repository. 
+ * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue comment */ + body: string; + /** Format: date-time */ + created_at: string; + /** Format: uri */ + html_url: string; + /** + * Format: int64 + * @description Unique identifier of the issue comment + */ + id: number; + /** Format: uri */ + issue_url: string; + node_id: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue comment + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The [issue](https://docs.github.com/rest/reference/issues) the comment belongs to. 
*/ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "reminder" + | "pull_request_review_thread" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; 
+ /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + } & { + active_lock_reason?: string | null; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** 
@enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees?: (Record | null)[]; + author_association?: string; + body?: string | null; + closed_at?: string | null; + comments?: number; + comments_url?: string; + created_at?: string; + events_url?: string; + html_url?: string; + id?: number; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + labels_url?: string; + locked: boolean; + milestone?: Record | null; + node_id?: string; + number?: number; + performed_via_github_app?: Record | null; + reactions?: { + "+1"?: number; + "-1"?: number; + confused?: number; + eyes?: number; + heart?: number; + hooray?: number; + laugh?: number; + rocket?: number; + total_count?: number; + url?: string; + }; + repository_url?: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state: "open" | "closed"; + timeline_url?: string; + title?: string; + updated_at?: string; + url?: string; + user?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues assigned event */ + "webhook-issues-assigned": { + /** + * @description The action that was performed. + * @enum {string} + */ + action: "assigned"; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Issue + * @description The [issue](https://docs.github.com/rest/reference/issues) itself. 
+ */ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "reminder" + | "pull_request_review_thread" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; 
+ /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues closed event */ + "webhook-issues-closed": { + /** + * @description The action that was performed. + * @enum {string} + */ + action: "closed"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The [issue](https://docs.github.com/rest/reference/issues) itself. 
*/ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "security_and_analysis" + | "reminder" + | "pull_request_review_thread" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + 
organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + } & { + active_lock_reason?: string | null; + assignee?: Record | null; + assignees?: (Record | null)[]; + author_association?: string; + body?: string | null; + closed_at: string | null; + comments?: number; + comments_url?: string; + created_at?: string; + events_url?: string; + html_url?: string; + id?: number; + labels?: (Record | null)[]; + labels_url?: string; + locked?: boolean; + milestone?: Record | null; + node_id?: string; + number?: number; + performed_via_github_app?: Record | null; + reactions?: { + "+1"?: number; + "-1"?: number; + confused?: number; + eyes?: number; + heart?: number; + hooray?: number; + laugh?: number; + rocket?: number; + total_count?: number; + url?: string; + }; + repository_url?: string; + 
/** @enum {string} */ + state: "closed" | "open"; + timeline_url?: string; + title?: string; + updated_at?: string; + url?: string; + user?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues deleted event */ + "webhook-issues-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Issue + * @description The [issue](https://docs.github.com/rest/reference/issues) itself. + */ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. 
+ * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "reminder" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" 
| "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues demilestoned event */ + "webhook-issues-demilestoned": { + /** @enum {string} */ + action: "demilestoned"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template 
*/ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | 
"write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + } & { + active_lock_reason?: string | null; + assignee?: Record | null; + assignees?: (Record | null)[]; + author_association?: string; + body?: string | null; + closed_at?: string | null; + comments?: number; + comments_url?: string; + created_at?: string; + events_url?: string; + html_url?: string; + id?: number; + labels?: (Record | null)[]; + labels_url?: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id?: string; + number?: number; + performed_via_github_app?: Record | null; + reactions?: { + "+1"?: number; + "-1"?: number; + confused?: number; + eyes?: number; + heart?: number; + hooray?: number; + laugh?: number; + rocket?: number; + total_count?: number; + url?: string; + }; + repository_url?: string; + state?: string; + timeline_url?: string; + title?: string; + updated_at?: string; + url?: string; + user?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + }; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone?: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues edited event */ + "webhook-issues-edited": { + /** @enum {string} */ + action: "edited"; + /** @description The changes to the issue. */ + changes: { + body?: { + /** @description The previous version of the body. */ + from: string; + }; + title?: { + /** @description The previous version of the title. */ + from: string; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Issue + * @description The [issue](https://docs.github.com/rest/reference/issues) itself. 
+ */ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "security_and_analysis" + | "pull_request_review_thread" + | "reminder" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + 
organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Label */ + label?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. 
*/ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues labeled event */ + "webhook-issues-labeled": { + /** @enum {string} */ + action: "labeled"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Issue + * @description The [issue](https://docs.github.com/rest/reference/issues) itself. + */ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. 
*/ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "pull_request_review_thread" + | "reminder" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; 
+ /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Label */ + label?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. 
*/ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues locked event */ + "webhook-issues-locked": { + /** @enum {string} */ + action: "locked"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "reminder" + | "security_and_analysis" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum 
{string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + } & { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + assignee?: Record | null; + assignees?: (Record | null)[]; + author_association?: string; + body?: string | null; + closed_at?: string | null; + comments?: number; + comments_url?: string; + created_at?: string; + events_url?: string; + html_url?: string; + id?: number; + labels?: (Record | null)[]; + labels_url?: string; + /** @enum {boolean} */ + locked: true; + milestone?: Record | null; + node_id?: string; + number?: number; + performed_via_github_app?: Record | null; + reactions?: { + "+1"?: number; + "-1"?: number; + confused?: number; + eyes?: number; + heart?: number; + hooray?: number; + laugh?: number; + rocket?: number; + total_count?: number; + url?: 
string; + }; + repository_url?: string; + state?: string; + timeline_url?: string; + title?: string; + updated_at?: string; + url?: string; + user?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues milestoned event */ + "webhook-issues-milestoned": { + /** @enum {string} */ + action: "milestoned"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. 
+ * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "reminder" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + 
packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + } & { + active_lock_reason?: string | null; + assignee?: Record | null; + assignees?: (Record | null)[]; + author_association?: string; + body?: string | null; + closed_at?: string | null; + comments?: number; + comments_url?: string; + created_at?: string; + events_url?: string; + html_url?: string; + id?: number; + labels?: (Record | null)[]; + labels_url?: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + node_id?: string; + number?: number; + performed_via_github_app?: Record | null; + reactions?: { + "+1"?: number; + "-1"?: number; + confused?: number; + eyes?: number; + heart?: number; + hooray?: number; + laugh?: number; + rocket?: number; + total_count?: number; + url?: string; + }; + repository_url?: string; + state?: string; + timeline_url?: string; + title?: string; + updated_at?: string; + url?: string; + user?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + }; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues opened event */ + "webhook-issues-opened": { + /** @enum {string} */ + action: "opened"; + changes?: { + /** + * Issue + * @description The [issue](https://docs.github.com/rest/reference/issues) itself. 
+ */ + old_issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + 
/** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + } | null; + /** + * Repository + * @description A git repository + */ + old_repository: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Issue + * @description The [issue](https://docs.github.com/rest/reference/issues) itself. 
+ */ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "security_and_analysis" + | "pull_request_review_thread" + | "reminder" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + 
organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues pinned event */ + "webhook-issues-pinned": { + /** @enum {string} */ + action: "pinned"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Issue + * @description The [issue](https://docs.github.com/rest/reference/issues) itself. 
+ */ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + 
/** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues reopened event */ + "webhook-issues-reopened": { + /** @enum {string} */ + action: "reopened"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; 
+ gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "pull_request_review_thread" + | "reminder" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; 
+ /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + } & { + active_lock_reason?: string | null; + assignee?: Record | null; + assignees?: (Record | null)[]; + author_association?: string; + body?: string | null; + closed_at?: string | null; + comments?: number; + comments_url?: string; + created_at?: string; + events_url?: string; + html_url?: string; + id?: number; + labels?: (Record | null)[]; + labels_url?: string; + locked?: boolean; + milestone?: Record | null; + node_id?: string; + number?: number; + performed_via_github_app?: Record | null; + reactions?: { + "+1"?: number; + "-1"?: number; + confused?: number; + eyes?: number; + heart?: number; + hooray?: number; + laugh?: number; + rocket?: number; + total_count?: number; + url?: string; + }; + repository_url?: string; + /** @enum {string} */ + state: "open" 
| "closed"; + timeline_url?: string; + title?: string; + updated_at?: string; + url?: string; + user?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues transferred event */ + "webhook-issues-transferred": { + /** @enum {string} */ + action: "transferred"; + changes: { + /** + * Issue + * @description The [issue](https://docs.github.com/rest/reference/issues) itself. + */ + new_issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. 
+ * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + 
/** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** + * Repository + * @description A git repository + */ + new_repository: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Issue + * @description The [issue](https://docs.github.com/rest/reference/issues) itself. 
+ */ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + 
/** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues unassigned event */ + "webhook-issues-unassigned": { + /** + * @description The action that was performed. 
+ * @enum {string} + */ + action: "unassigned"; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Issue + * @description The [issue](https://docs.github.com/rest/reference/issues) itself. + */ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. 
+ * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "reminder" + | "pull_request_review_thread" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; 
+ /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues unlabeled event */ + "webhook-issues-unlabeled": { + /** @enum {string} */ + action: "unlabeled"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Issue + * @description The [issue](https://docs.github.com/rest/reference/issues) itself. 
+ */ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "reminder" + | "pull_request_review_thread" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write" | "admin"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; 
+ /** @enum {string} */ + packages?: "read" | "write"; + /** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Label */ + label?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. 
*/ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues unlocked event */ + "webhook-issues-unlocked": { + /** @enum {string} */ + action: "unlocked"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + 
/** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + } & { + active_lock_reason: Record | null; + assignee?: Record | null; + assignees?: (Record | null)[]; + author_association?: string; + body?: string | null; + closed_at?: string | null; + comments?: number; + comments_url?: string; + created_at?: string; + events_url?: string; + html_url?: string; + id?: number; + labels?: (Record | null)[]; + labels_url?: string; + /** @enum {boolean} */ + locked: false; + milestone?: Record | null; + node_id?: string; + number?: number; + performed_via_github_app?: Record | null; + reactions?: { + "+1"?: number; + "-1"?: number; + confused?: number; + eyes?: number; + heart?: number; + hooray?: number; + laugh?: number; + rocket?: number; + total_count?: number; + url?: string; + }; + repository_url?: string; + state?: string; + timeline_url?: string; + title?: string; + updated_at?: string; + url?: string; 
+ user?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** issues unpinned event */ + "webhook-issues-unpinned": { + /** @enum {string} */ + action: "unpinned"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Issue + * @description The [issue](https://docs.github.com/rest/reference/issues) itself. + */ + issue: { + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. 
+ * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description Contents of the issue */ + body: string | null; + /** Format: date-time */ + closed_at: string | null; + comments: number; + /** Format: uri */ + comments_url: string; + /** Format: date-time */ + created_at: string; + draft?: boolean; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** Format: int64 */ + id: number; + labels?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + /** Format: uri-template */ + labels_url: string; + locked?: boolean; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** + * App + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ + performed_via_github_app?: { + /** Format: date-time */ + created_at: string | null; + description: string | null; + /** @description The list of events for the GitHub app */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + )[]; + /** Format: uri */ + external_url: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the GitHub app */ + id: number | null; + /** @description The name of the GitHub app */ + name: string; + node_id: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The set of permissions for the GitHub app */ + permissions?: { + /** @enum {string} */ + actions?: "read" | "write"; + /** @enum {string} */ + administration?: "read" | "write"; + /** @enum {string} */ + checks?: "read" | "write"; + /** @enum {string} */ + content_references?: "read" | "write"; + /** @enum {string} */ + contents?: "read" | "write"; + /** @enum {string} */ + deployments?: "read" | "write"; + /** @enum {string} */ + discussions?: "read" | "write"; + /** @enum {string} */ + emails?: "read" | "write"; + /** @enum {string} */ + environments?: "read" | "write"; + /** @enum {string} */ + issues?: "read" | "write"; + /** @enum {string} */ + keys?: "read" | "write"; + /** @enum {string} */ + members?: "read" | "write"; + /** @enum {string} */ + metadata?: "read" | "write"; + /** @enum {string} */ + organization_administration?: "read" | "write"; + /** @enum {string} */ + organization_hooks?: "read" | "write"; + /** @enum {string} */ + organization_packages?: "read" | "write"; + /** @enum {string} */ + organization_plan?: "read" | "write"; + /** @enum {string} */ + organization_projects?: "read" | "write"; + /** @enum {string} */ + organization_secrets?: "read" | "write"; + /** @enum {string} */ + organization_self_hosted_runners?: "read" | "write"; + /** @enum {string} */ + organization_user_blocking?: "read" | "write"; + /** @enum {string} */ + packages?: "read" | "write"; + 
/** @enum {string} */ + pages?: "read" | "write"; + /** @enum {string} */ + pull_requests?: "read" | "write"; + /** @enum {string} */ + repository_hooks?: "read" | "write"; + /** @enum {string} */ + repository_projects?: "read" | "write"; + /** @enum {string} */ + secret_scanning_alerts?: "read" | "write"; + /** @enum {string} */ + secrets?: "read" | "write"; + /** @enum {string} */ + security_events?: "read" | "write"; + /** @enum {string} */ + security_scanning_alert?: "read" | "write"; + /** @enum {string} */ + single_file?: "read" | "write"; + /** @enum {string} */ + statuses?: "read" | "write"; + /** @enum {string} */ + team_discussions?: "read" | "write"; + /** @enum {string} */ + vulnerability_alerts?: "read" | "write"; + /** @enum {string} */ + workflows?: "read" | "write"; + }; + /** @description The slug name of the GitHub app */ + slug?: string; + /** Format: date-time */ + updated_at: string | null; + } | null; + pull_request?: { + /** Format: uri */ + diff_url?: string; + /** Format: uri */ + html_url?: string; + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + patch_url?: string; + /** Format: uri */ + url?: string; + }; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + repository_url: string; + /** + * @description State of the issue; either 'open' or 'closed' + * @enum {string} + */ + state?: "open" | "closed"; + state_reason?: string | null; + /** Format: uri */ + timeline_url?: string; + /** @description Title of the issue */ + title: string; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the issue + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** label created event */ + "webhook-label-created": { + /** @enum {string} */ + action: "created"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** Label */ + label: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. 
*/ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** label deleted event */ + "webhook-label-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** Label */ + label: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** label edited event */ + "webhook-label-edited": { + /** @enum {string} */ + action: "edited"; + /** @description The changes to the label if the action was `edited`. */ + changes?: { + color?: { + /** @description The previous version of the color if the action was `edited`. */ + from: string; + }; + description?: { + /** @description The previous version of the description if the action was `edited`. */ + from: string; + }; + name?: { + /** @description The previous version of the name if the action was `edited`. */ + from: string; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** Label */ + label: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. 
*/ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** marketplace_purchase cancelled event */ + "webhook-marketplace-purchase-cancelled": { + /** @enum {string} */ + action: "cancelled"; + effective_date: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + marketplace_purchase: { + account: { + id: number; + login: string; + node_id: string; + organization_billing_email: string | null; + type: string; + }; + billing_cycle: string; + free_trial_ends_on: string | null; + next_billing_date?: string | null; + on_free_trial: boolean; + plan: { + bullets: string[]; + description: string; + has_free_trial: boolean; + id: number; + monthly_price_in_cents: number; + name: string; + /** @enum {string} */ + price_model: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name: string | null; + yearly_price_in_cents: number; + }; + unit_count: number; + } & { + account?: { + id?: number; + login?: string; + node_id?: string; + organization_billing_email?: string | null; + type?: string; + }; + billing_cycle?: string; + free_trial_ends_on?: string | null; + next_billing_date: string | null; + on_free_trial?: boolean; + plan?: { + bullets?: (string | null)[]; + description?: string; + has_free_trial?: boolean; + id?: number; + monthly_price_in_cents?: number; + name?: string; + /** @enum {string} */ + price_model?: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name?: string | null; + yearly_price_in_cents?: number; + }; + unit_count?: number; + }; + organization?: components["schemas"]["organization-simple"]; + /** Marketplace Purchase */ + previous_marketplace_purchase?: { + account: { + id: number; + login: string; + node_id: string; + organization_billing_email: string | null; + type: string; + }; + billing_cycle: string; + free_trial_ends_on: Record | null; + next_billing_date?: string | null; + on_free_trial: boolean; + plan: { + bullets: string[]; + description: string; + has_free_trial: boolean; + id: number; + monthly_price_in_cents: number; + name: string; + /** @enum {string} */ + price_model: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name: string | null; + yearly_price_in_cents: number; + }; + unit_count: number; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** marketplace_purchase changed event */ + "webhook-marketplace-purchase-changed": { + /** @enum {string} */ + action: "changed"; + effective_date: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + marketplace_purchase: { + account: { + id: number; + login: string; + node_id: string; + organization_billing_email: string | null; + type: string; + }; + billing_cycle: string; + free_trial_ends_on: string | null; + next_billing_date?: string | null; + on_free_trial: boolean; + plan: { + bullets: string[]; + description: string; + has_free_trial: boolean; + id: number; + monthly_price_in_cents: number; + name: string; + /** @enum {string} */ + price_model: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name: string | null; + yearly_price_in_cents: number; + }; + unit_count: number; + } & { + account?: { + id?: number; + login?: string; + node_id?: string; + organization_billing_email?: string | null; + type?: string; 
+ }; + billing_cycle?: string; + free_trial_ends_on?: string | null; + next_billing_date: string | null; + on_free_trial?: boolean; + plan?: { + bullets?: (string | null)[]; + description?: string; + has_free_trial?: boolean; + id?: number; + monthly_price_in_cents?: number; + name?: string; + /** @enum {string} */ + price_model?: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name?: string | null; + yearly_price_in_cents?: number; + }; + unit_count?: number; + }; + organization?: components["schemas"]["organization-simple"]; + /** Marketplace Purchase */ + previous_marketplace_purchase?: { + account: { + id: number; + login: string; + node_id: string; + organization_billing_email: string | null; + type: string; + }; + billing_cycle: string; + free_trial_ends_on: string | null; + next_billing_date?: string | null; + on_free_trial: boolean | null; + plan: { + bullets: string[]; + description: string; + has_free_trial: boolean; + id: number; + monthly_price_in_cents: number; + name: string; + /** @enum {string} */ + price_model: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name: string | null; + yearly_price_in_cents: number; + }; + unit_count: number; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** marketplace_purchase pending_change event */ + "webhook-marketplace-purchase-pending-change": { + /** @enum {string} */ + action: "pending_change"; + effective_date: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + marketplace_purchase: { + account: { + id: number; + login: string; + node_id: string; + organization_billing_email: string | null; + type: string; + }; + billing_cycle: string; + free_trial_ends_on: string | null; + next_billing_date?: string | null; + on_free_trial: boolean; + plan: { + bullets: string[]; + description: string; + has_free_trial: boolean; + id: number; + monthly_price_in_cents: number; + name: string; + /** @enum {string} */ + price_model: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name: string | null; + yearly_price_in_cents: number; + }; + unit_count: number; + } & { + account?: { + id?: number; + login?: string; + node_id?: string; + organization_billing_email?: string | null; + type?: string; + }; + billing_cycle?: string; + free_trial_ends_on?: string | null; + next_billing_date: string | null; + on_free_trial?: boolean; + plan?: { + bullets?: (string | null)[]; + description?: string; + has_free_trial?: boolean; + id?: number; + monthly_price_in_cents?: number; + name?: string; + /** @enum {string} */ + price_model?: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name?: string | null; + yearly_price_in_cents?: number; + }; + unit_count?: number; + }; + organization?: components["schemas"]["organization-simple"]; + /** Marketplace Purchase */ + previous_marketplace_purchase?: { + account: { + id: number; + login: string; + node_id: string; + organization_billing_email: string | null; + type: string; + }; + billing_cycle: string; + free_trial_ends_on: string | null; + next_billing_date?: string | null; + on_free_trial: boolean; + plan: { + bullets: string[]; + description: string; + has_free_trial: boolean; + id: number; + monthly_price_in_cents: number; + name: string; + /** @enum {string} */ + price_model: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name: string | null; + yearly_price_in_cents: number; + }; + unit_count: number; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; 
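The marketplace_purchase schemas above all carry a literal `action` property, so a consumer can treat them as a discriminated union and narrow on `action` to reach the per-event shape. The following is a minimal, illustrative sketch only: the import specifier and the `MarketplacePurchaseEvent` alias are assumptions (this file only shows the generated `components` interface; the actual package entry point and any exported union names may differ), while the schema keys and field names are taken from the definitions above.

// Assumed import path; substitute the package that actually exports `components`.
import type { components } from "@octokit/openapi-webhooks-types";

// Hypothetical union built from the schema names defined in this file.
type MarketplacePurchaseEvent =
  | components["schemas"]["webhook-marketplace-purchase-purchased"]
  | components["schemas"]["webhook-marketplace-purchase-changed"]
  | components["schemas"]["webhook-marketplace-purchase-cancelled"]
  | components["schemas"]["webhook-marketplace-purchase-pending-change"]
  | components["schemas"]["webhook-marketplace-purchase-pending-change-cancelled"];

// Switching on the `action` discriminant narrows the payload type, giving
// typed access to `marketplace_purchase.plan`, `account`, and `effective_date`.
export function describeMarketplaceEvent(event: MarketplacePurchaseEvent): string {
  const { account, plan } = event.marketplace_purchase;
  switch (event.action) {
    case "purchased":
    case "changed":
      return `${account.login} is on "${plan.name}" (${plan.price_model})`;
    case "pending_change":
    case "pending_change_cancelled":
      return `Pending change to "${plan.name}" effective ${event.effective_date}`;
    case "cancelled":
      return `"${plan.name}" cancelled effective ${event.effective_date}`;
  }
}

Because each schema pins `action` to a single string literal, the switch above is exhaustive and TypeScript checks that every variant is handled.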
+ /** marketplace_purchase pending_change_cancelled event */ + "webhook-marketplace-purchase-pending-change-cancelled": { + /** @enum {string} */ + action: "pending_change_cancelled"; + effective_date: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + marketplace_purchase: { + account: { + id: number; + login: string; + node_id: string; + organization_billing_email: string | null; + type: string; + }; + billing_cycle: string; + free_trial_ends_on: Record | null; + next_billing_date?: string | null; + on_free_trial: boolean; + plan: { + bullets: string[]; + description: string; + has_free_trial: boolean; + id: number; + monthly_price_in_cents: number; + name: string; + /** @enum {string} */ + price_model: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name: string | null; + yearly_price_in_cents: number; + }; + unit_count: number; + } & { + next_billing_date: string; + }; + organization?: components["schemas"]["organization-simple"]; + /** Marketplace Purchase */ + previous_marketplace_purchase?: { + account: { + id: number; + login: string; + node_id: string; + organization_billing_email: string | null; + type: string; + }; + billing_cycle: string; + free_trial_ends_on: Record | null; + next_billing_date?: string | null; + on_free_trial: boolean; + plan: { + bullets: string[]; + description: string; + has_free_trial: boolean; + id: number; + monthly_price_in_cents: number; + name: string; + /** @enum {string} */ + price_model: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name: string | null; + yearly_price_in_cents: number; + }; + unit_count: number; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** marketplace_purchase purchased event */ + "webhook-marketplace-purchase-purchased": { + /** @enum {string} */ + action: "purchased"; + effective_date: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + marketplace_purchase: { + account: { + id: number; + login: string; + node_id: string; + organization_billing_email: string | null; + type: string; + }; + billing_cycle: string; + free_trial_ends_on: string | null; + next_billing_date?: string | null; + on_free_trial: boolean; + plan: { + bullets: string[]; + description: string; + has_free_trial: boolean; + id: number; + monthly_price_in_cents: number; + name: string; + /** @enum {string} */ + price_model: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name: string | null; + yearly_price_in_cents: number; + }; + unit_count: number; + } & { + account?: { + id?: number; + login?: string; + node_id?: string; + organization_billing_email?: string | null; + type?: string; + }; + billing_cycle?: string; + free_trial_ends_on?: string | null; + next_billing_date: string | null; + on_free_trial?: boolean; + plan?: { + bullets?: (string | null)[]; + description?: string; + has_free_trial?: boolean; + id?: number; + monthly_price_in_cents?: number; + name?: string; + /** @enum {string} */ + price_model?: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name?: string | null; + yearly_price_in_cents?: number; + }; + unit_count?: number; + }; + organization?: components["schemas"]["organization-simple"]; + /** Marketplace Purchase */ + previous_marketplace_purchase?: { + account: { + id: number; + login: string; + node_id: string; + organization_billing_email: string | null; + type: string; + }; + billing_cycle: string; + free_trial_ends_on: Record | null; + 
next_billing_date?: string | null; + on_free_trial: boolean; + plan: { + bullets: string[]; + description: string; + has_free_trial: boolean; + id: number; + monthly_price_in_cents: number; + name: string; + /** @enum {string} */ + price_model: "FREE" | "FLAT_RATE" | "PER_UNIT"; + unit_name: string | null; + yearly_price_in_cents: number; + }; + unit_count: number; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** member added event */ + "webhook-member-added": { + /** @enum {string} */ + action: "added"; + changes?: { + permission?: { + /** @enum {string} */ + to: "write" | "admin" | "read"; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** User */ + member: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** member edited event */ + "webhook-member-edited": { + /** @enum {string} */ + action: "edited"; + /** @description The changes to the collaborator permissions */ + changes: { + old_permission?: { + /** @description The previous permissions of the collaborator if the action was edited. 
*/ + from: string; + }; + permission?: { + from?: string | null; + to?: string | null; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** User */ + member: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** member removed event */ + "webhook-member-removed": { + /** @enum {string} */ + action: "removed"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** User */ + member: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** membership added event */ + "webhook-membership-added": { + /** @enum {string} */ + action: "added"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** User */ + member: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + organization: 
components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + /** + * @description The scope of the membership. Currently, can only be `team`. + * @enum {string} + */ + scope: "team"; + /** User */ + sender: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Team + * @description Groups of organization members that gives permissions on specified repositories. + */ + team: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** @enum {string} */ + notification_setting?: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }; + }; + /** membership removed event */ + "webhook-membership-removed": { + /** @enum {string} */ + action: "removed"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** User */ + member: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** 
Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + organization: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + /** + * @description The scope of the membership. Currently, can only be `team`. + * @enum {string} + */ + scope: "team" | "organization"; + /** User */ + sender: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Team + * @description Groups of organization members that gives permissions on specified repositories. + */ + team: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** @enum {string} */ + notification_setting?: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }; + }; + "webhook-merge-group-checks-requested": { + /** @enum {string} */ + action: "checks_requested"; + installation?: components["schemas"]["simple-installation"]; + merge_group: components["schemas"]["merge-group"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; 
+ }; + "webhook-merge-group-destroyed": { + /** @enum {string} */ + action: "destroyed"; + /** + * @description Explains why the merge group is being destroyed. The group could have been merged, removed from the queue (dequeued), or invalidated by an earlier queue entry being dequeued (invalidated). + * @enum {string} + */ + reason?: "merged" | "invalidated" | "dequeued"; + installation?: components["schemas"]["simple-installation"]; + merge_group: components["schemas"]["merge-group"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** meta deleted event */ + "webhook-meta-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + /** @description The modified webhook. This will contain different keys based on the type of webhook it is: repository, organization, business, app, or GitHub Marketplace. */ + hook: { + active: boolean; + config: { + /** @enum {string} */ + content_type: "json" | "form"; + insecure_ssl: string; + secret?: string; + /** Format: uri */ + url: string; + }; + created_at: string; + events: ( + | "*" + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "create" + | "delete" + | "deployment" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "meta" + | "milestone" + | "organization" + | "org_block" + | "package" + | "page_build" + | "project" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "pull_request_review_thread" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_import" + | "repository_vulnerability_alert" + | "secret_scanning_alert" + | "secret_scanning_alert_location" + | "security_and_analysis" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_job" + | "workflow_run" + | "repository_dispatch" + | "projects_v2_item" + )[]; + id: number; + name: string; + type: string; + updated_at: string; + }; + /** @description The id of the modified webhook. */ + hook_id: number; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["nullable-repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** milestone closed event */ + "webhook-milestone-closed": { + /** @enum {string} */ + action: "closed"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** milestone created event */ + "webhook-milestone-created": { + /** @enum {string} */ + action: "created"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** milestone deleted event */ + "webhook-milestone-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** milestone edited event */ + "webhook-milestone-edited": { + /** @enum {string} */ + action: "edited"; + /** @description The changes to the milestone if the action was `edited`. */ + changes: { + description?: { + /** @description The previous version of the description if the action was `edited`. */ + from: string; + }; + due_on?: { + /** @description The previous version of the due date if the action was `edited`. */ + from: string; + }; + title?: { + /** @description The previous version of the title if the action was `edited`. */ + from: string; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** milestone opened event */ + "webhook-milestone-opened": { + /** @enum {string} */ + action: "opened"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** org_block blocked event */ + "webhook-org-block-blocked": { + /** @enum {string} */ + action: "blocked"; + /** User */ + blocked_user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** org_block unblocked event */ + "webhook-org-block-unblocked": { + /** @enum {string} */ + action: "unblocked"; + /** User */ + blocked_user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** organization deleted event */ + "webhook-organization-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Membership + * @description The membership between the user and the organization. Not present when the action is `member_invited`. 
+ */ + membership?: { + /** Format: uri */ + organization_url: string; + role: string; + state: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + organization: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** organization member_added event */ + "webhook-organization-member-added": { + /** @enum {string} */ + action: "member_added"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Membership + * @description The membership between the user and the organization. Not present when the action is `member_invited`. + */ + membership: { + /** Format: uri */ + organization_url: string; + role: string; + state: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + organization: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** organization member_invited event */ + "webhook-organization-member-invited": { + /** @enum {string} */ + action: "member_invited"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The invitation for the user or email if the action is `member_invited`. 
*/ + invitation: { + /** Format: date-time */ + created_at: string; + email: string | null; + /** Format: date-time */ + failed_at: string | null; + failed_reason: string | null; + id: number; + /** Format: uri */ + invitation_teams_url: string; + /** User */ + inviter: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + login: string | null; + node_id: string; + role: string; + team_count: number; + invitation_source?: string; + }; + organization: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + /** User */ + user?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** organization member_removed event */ + "webhook-organization-member-removed": { + /** @enum {string} */ + action: "member_removed"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Membership + * @description The membership between the user and the organization. Not present when the action is `member_invited`. 
+ */ + membership: { + /** Format: uri */ + organization_url: string; + role: string; + state: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + organization: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** organization renamed event */ + "webhook-organization-renamed": { + /** @enum {string} */ + action: "renamed"; + changes?: { + login?: { + from?: string; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** + * Membership + * @description The membership between the user and the organization. Not present when the action is `member_invited`. + */ + membership?: { + /** Format: uri */ + organization_url: string; + role: string; + state: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + organization: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** Ruby Gems metadata */ + "webhook-rubygems-metadata": { + name?: string; + description?: string; + readme?: string; + homepage?: string; + version_info?: { + version?: string; + }; + platform?: string; + metadata?: { + [key: string]: string | undefined; + }; + repo?: string; + dependencies?: { + [key: string]: string | undefined; + }[]; + commit_oid?: string; + }; + /** package published event */ + "webhook-package-published": { + /** @enum {string} */ + action: "published"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description Information about the package. 
*/
+      package: {
+        created_at: string | null;
+        description: string | null;
+        ecosystem: string;
+        /** Format: uri */
+        html_url: string;
+        id: number;
+        name: string;
+        namespace: string;
+        /** User */
+        owner: {
+          /** Format: uri */
+          avatar_url?: string;
+          deleted?: boolean;
+          email?: string | null;
+          /** Format: uri-template */
+          events_url?: string;
+          /** Format: uri */
+          followers_url?: string;
+          /** Format: uri-template */
+          following_url?: string;
+          /** Format: uri-template */
+          gists_url?: string;
+          gravatar_id?: string;
+          /** Format: uri */
+          html_url?: string;
+          id: number;
+          login: string;
+          name?: string;
+          node_id?: string;
+          /** Format: uri */
+          organizations_url?: string;
+          /** Format: uri */
+          received_events_url?: string;
+          /** Format: uri */
+          repos_url?: string;
+          site_admin?: boolean;
+          /** Format: uri-template */
+          starred_url?: string;
+          /** Format: uri */
+          subscriptions_url?: string;
+          /** @enum {string} */
+          type?: "Bot" | "User" | "Organization";
+          /** Format: uri */
+          url?: string;
+        } | null;
+        package_type: string;
+        package_version: {
+          /** User */
+          author?: {
+            /** Format: uri */
+            avatar_url?: string;
+            deleted?: boolean;
+            email?: string | null;
+            /** Format: uri-template */
+            events_url?: string;
+            /** Format: uri */
+            followers_url?: string;
+            /** Format: uri-template */
+            following_url?: string;
+            /** Format: uri-template */
+            gists_url?: string;
+            gravatar_id?: string;
+            /** Format: uri */
+            html_url?: string;
+            id: number;
+            login: string;
+            name?: string;
+            node_id?: string;
+            /** Format: uri */
+            organizations_url?: string;
+            /** Format: uri */
+            received_events_url?: string;
+            /** Format: uri */
+            repos_url?: string;
+            site_admin?: boolean;
+            /** Format: uri-template */
+            starred_url?: string;
+            /** Format: uri */
+            subscriptions_url?: string;
+            /** @enum {string} */
+            type?: "Bot" | "User" | "Organization";
+            /** Format: uri */
+            url?: string;
+          } | null;
+          body?: string | Record<string, unknown>;
+          body_html?: string;
+          container_metadata?: {
+            labels?: Record<string, unknown> | null;
+            manifest?: Record<string, unknown> | null;
+            tag?: {
+              digest?: string;
+              name?: string;
+            };
+          } | null;
+          created_at?: string;
+          description: string;
+          docker_metadata?: {
+            tags?: string[];
+          }[];
+          draft?: boolean;
+          /** Format: uri */
+          html_url: string;
+          id: number;
+          installation_command: string;
+          manifest?: string;
+          metadata: {
+            [key: string]: unknown;
+          }[];
+          name: string;
+          npm_metadata?: {
+            name?: string;
+            version?: string;
+            npm_user?: string;
+            author?: Record<string, unknown> | null;
+            bugs?: Record<string, unknown> | null;
+            dependencies?: Record<string, unknown>;
+            dev_dependencies?: Record<string, unknown>;
+            peer_dependencies?: Record<string, unknown>;
+            optional_dependencies?: Record<string, unknown>;
+            description?: string;
+            dist?: Record<string, unknown> | null;
+            git_head?: string;
+            homepage?: string;
+            license?: string;
+            main?: string;
+            repository?: Record<string, unknown> | null;
+            scripts?: Record<string, unknown>;
+            id?: string;
+            node_version?: string;
+            npm_version?: string;
+            has_shrinkwrap?: boolean;
+            maintainers?: Record<string, unknown>[];
+            contributors?: Record<string, unknown>[];
+            engines?: Record<string, unknown>;
+            keywords?: string[];
+            files?: string[];
+            bin?: Record<string, unknown>;
+            man?: Record<string, unknown>;
+            directories?: Record<string, unknown> | null;
+            os?: string[];
+            cpu?: string[];
+            readme?: string;
+            installation_command?: string;
+            release_id?: number;
+            commit_oid?: string;
+            published_via_actions?: boolean;
+            deleted_by_id?: number;
+          } | null;
+          nuget_metadata?:
+            | {
+                id?: number | string;
+                name?: string;
+                value?: OneOf<
+                  [
+                    boolean,
+                    string,
+                    number,
+                    {
+                      url?: string;
+                      branch?: string;
+                      commit?: string;
+                      type?: string;
+                    },
+                  ]
+                >;
+              }[]
+            | null;
+          package_files: {
+
content_type: string; + created_at: string; + /** Format: uri */ + download_url: string; + id: number; + md5: string | null; + name: string; + sha1: string | null; + sha256: string | null; + size: number; + state: string | null; + updated_at: string; + }[]; + package_url?: string; + prerelease?: boolean; + release?: { + /** User */ + author: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + created_at: string; + draft: boolean; + /** Format: uri */ + html_url: string; + id: number; + name: string | null; + prerelease: boolean; + published_at: string; + tag_name: string; + target_commitish: string; + /** Format: uri */ + url: string; + }; + rubygems_metadata?: components["schemas"]["webhook-rubygems-metadata"][]; + source_url?: string; + summary: string; + tag_name?: string; + target_commitish?: string; + target_oid?: string; + updated_at?: string; + version: string; + } | null; + registry: { + /** Format: uri */ + about_url: string; + name: string; + type: string; + /** Format: uri */ + url: string; + vendor: string; + } | null; + updated_at: string | null; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** package updated event */ + "webhook-package-updated": { + /** @enum {string} */ + action: "updated"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** @description Information about the package. 
*/ + package: { + created_at: string; + description: string | null; + ecosystem: string; + /** Format: uri */ + html_url: string; + id: number; + name: string; + namespace: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + package_type: string; + package_version: { + /** User */ + author: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + body: string; + body_html: string; + created_at: string; + description: string; + docker_metadata?: { + tags?: string[]; + }[]; + draft?: boolean; + /** Format: uri */ + html_url: string; + id: number; + installation_command: string; + manifest?: string; + metadata: { + [key: string]: unknown; + }[]; + name: string; + package_files: { + content_type: string; + created_at: string; + /** Format: uri */ + download_url: string; + id: number; + md5: string | null; + name: string; + sha1: string | null; + sha256: string; + size: number; + state: string; + updated_at: string; + }[]; + package_url?: string; + prerelease?: boolean; + release?: { + /** User */ + author: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + created_at: string; + draft: boolean; + /** Format: uri */ + html_url: string; + id: number; + name: string; + prerelease: boolean; + 
published_at: string; + tag_name: string; + target_commitish: string; + /** Format: uri */ + url: string; + }; + rubygems_metadata?: components["schemas"]["webhook-rubygems-metadata"][]; + /** Format: uri */ + source_url?: string; + summary: string; + tag_name?: string; + target_commitish: string; + target_oid: string; + updated_at: string; + version: string; + }; + registry: { + /** Format: uri */ + about_url: string; + name: string; + type: string; + /** Format: uri */ + url: string; + vendor: string; + } | null; + updated_at: string; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** page_build event */ + "webhook-page-build": { + /** @description The [List GitHub Pages builds](https://docs.github.com/rest/reference/repos#list-github-pages-builds) itself. */ + build: { + commit: string | null; + created_at: string; + duration: number; + error: { + message: string | null; + }; + /** User */ + pusher: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + status: string; + updated_at: string; + /** Format: uri */ + url: string; + }; + enterprise?: components["schemas"]["enterprise"]; + id: number; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** personal_access_token_request approved event */ + "webhook-personal-access-token-request-approved": { + /** @enum {string} */ + action: "approved"; + personal_access_token_request: components["schemas"]["personal-access-token-request"]; + organization: components["schemas"]["organization-simple"]; + sender: components["schemas"]["simple-user"]; + installation: components["schemas"]["simple-installation"]; + }; + /** personal_access_token_request cancelled event */ + "webhook-personal-access-token-request-cancelled": { + /** @enum {string} */ + action: "cancelled"; + personal_access_token_request: components["schemas"]["personal-access-token-request"]; + organization: components["schemas"]["organization-simple"]; + sender: components["schemas"]["simple-user"]; + installation: components["schemas"]["simple-installation"]; + }; + /** personal_access_token_request created event */ + "webhook-personal-access-token-request-created": { + /** @enum {string} */ + action: "created"; + personal_access_token_request: components["schemas"]["personal-access-token-request"]; + organization: components["schemas"]["organization-simple"]; + sender: components["schemas"]["simple-user"]; + installation: components["schemas"]["simple-installation"]; + }; + /** personal_access_token_request denied event */ + "webhook-personal-access-token-request-denied": { + /** 
@enum {string} */ + action: "denied"; + personal_access_token_request: components["schemas"]["personal-access-token-request"]; + organization: components["schemas"]["organization-simple"]; + sender: components["schemas"]["simple-user"]; + installation: components["schemas"]["simple-installation"]; + }; + "webhook-ping": { + /** + * Webhook + * @description The webhook that is being pinged + */ + hook?: { + /** @description Determines whether the hook is actually triggered for the events it subscribes to. */ + active: boolean; + /** @description Only included for GitHub Apps. When you register a new GitHub App, GitHub sends a ping event to the webhook URL you specified during registration. The GitHub App ID sent in this field is required for authenticating an app. */ + app_id?: number; + config: { + content_type?: components["schemas"]["webhook-config-content-type"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + secret?: components["schemas"]["webhook-config-secret"]; + url?: components["schemas"]["webhook-config-url"]; + }; + /** Format: date-time */ + created_at: string; + /** Format: uri */ + deliveries_url?: string; + /** @description Determines what events the hook is triggered for. Default: ['push']. */ + events: string[]; + /** @description Unique identifier of the webhook. */ + id: number; + last_response?: components["schemas"]["hook-response"]; + /** + * @description The type of webhook. The only valid value is 'web'. + * @enum {string} + */ + name: "web"; + /** Format: uri */ + ping_url?: string; + /** Format: uri */ + test_url?: string; + type: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url?: string; + }; + /** @description The ID of the webhook that triggered the ping. */ + hook_id?: number; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + /** @description Random string of GitHub zen. */ + zen?: string; + }; + /** @description The webhooks ping payload encoded with URL encoding. */ + "webhook-ping-form-encoded": { + /** @description A URL-encoded string of the ping JSON payload. The decoded payload is a JSON object. 
*/ + payload: string; + }; + /** project_card converted event */ + "webhook-project-card-converted": { + /** @enum {string} */ + action: "converted"; + changes: { + note: { + from: string; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Project Card */ + project_card: { + after_id?: number | null; + /** @description Whether or not the card is archived */ + archived: boolean; + column_id: number; + /** Format: uri */ + column_url: string; + /** Format: uri */ + content_url?: string; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The project card's ID */ + id: number; + node_id: string; + note: string | null; + /** Format: uri */ + project_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** project_card created event */ + "webhook-project-card-created": { + /** @enum {string} */ + action: "created"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Project Card */ + project_card: { + after_id?: number | null; + /** @description Whether or not the card is archived */ + archived: boolean; + column_id: number; + /** Format: uri */ + column_url: string; + /** Format: uri */ + content_url?: string; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The project card's ID */ + id: number; + node_id: string; + note: string | null; + /** Format: uri */ + project_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; 
+ repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** project_card deleted event */ + "webhook-project-card-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Project Card */ + project_card: { + after_id?: number | null; + /** @description Whether or not the card is archived */ + archived: boolean; + column_id: number | null; + /** Format: uri */ + column_url: string; + /** Format: uri */ + content_url?: string; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + /** @description The project card's ID */ + id: number; + node_id: string; + note: string | null; + /** Format: uri */ + project_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + repository?: components["schemas"]["nullable-repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** project_card edited event */ + "webhook-project-card-edited": { + /** @enum {string} */ + action: "edited"; + changes: { + note: { + from: string | null; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Project Card */ + project_card: { + after_id?: number | null; + /** @description Whether or not the card is archived */ + archived: boolean; + column_id: number; + /** Format: uri */ + column_url: string; + /** Format: uri */ + content_url?: string; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description The project card's ID */ + id: number; + node_id: string; + note: string | null; + /** Format: uri */ + 
project_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** project_card moved event */ + "webhook-project-card-moved": { + /** @enum {string} */ + action: "moved"; + changes?: { + column_id: { + from: number; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + project_card: { + after_id?: number | null; + /** @description Whether or not the card is archived */ + archived: boolean; + column_id: number; + /** Format: uri */ + column_url: string; + /** Format: uri */ + content_url?: string; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + /** @description The project card's ID */ + id: number; + node_id: string; + note: string | null; + /** Format: uri */ + project_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } & { + after_id: number | null; + archived?: boolean; + column_id?: number; + column_url?: string; + created_at?: string; + creator?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + } | null; + id?: number; + node_id?: string; + note?: string | null; + project_url?: string; + updated_at?: string; + url?: string; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** project closed event */ + "webhook-project-closed": { + /** @enum {string} */ + action: "closed"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Project */ + project: { + /** @description Body of the project */ + body: string | null; + /** Format: uri */ + columns_url: string; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + 
/** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + html_url: string; + id: number; + /** @description Name of the project */ + name: string; + node_id: string; + number: number; + /** Format: uri */ + owner_url: string; + /** + * @description State of the project; either 'open' or 'closed' + * @enum {string} + */ + state: "open" | "closed"; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** project_column created event */ + "webhook-project-column-created": { + /** @enum {string} */ + action: "created"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Project Column */ + project_column: { + after_id?: number | null; + /** Format: uri */ + cards_url: string; + /** Format: date-time */ + created_at: string; + /** @description The unique identifier of the project column */ + id: number; + /** @description Name of the project column */ + name: string; + node_id: string; + /** Format: uri */ + project_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + repository?: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** project_column deleted event */ + "webhook-project-column-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Project Column */ + project_column: { + after_id?: number | null; + /** Format: uri */ + cards_url: string; + /** Format: date-time */ + created_at: string; + /** @description The unique identifier of the project column */ + id: number; + /** @description Name of the project column */ + name: string; + node_id: string; + /** Format: uri */ + project_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + repository?: components["schemas"]["nullable-repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** project_column edited event */ + "webhook-project-column-edited": { + /** @enum {string} */ + action: "edited"; + changes: { + name?: { + from: string; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Project Column */ + project_column: { + after_id?: number | null; + /** Format: uri */ + cards_url: string; + /** Format: date-time */ + created_at: string; + /** @description The unique identifier of the project column */ + id: number; + /** @description Name of the project column */ + name: string; + node_id: string; + /** Format: uri */ + project_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + 
}; + repository?: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** project_column moved event */ + "webhook-project-column-moved": { + /** @enum {string} */ + action: "moved"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Project Column */ + project_column: { + after_id?: number | null; + /** Format: uri */ + cards_url: string; + /** Format: date-time */ + created_at: string; + /** @description The unique identifier of the project column */ + id: number; + /** @description Name of the project column */ + name: string; + node_id: string; + /** Format: uri */ + project_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** project created event */ + "webhook-project-created": { + /** @enum {string} */ + action: "created"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Project */ + project: { + /** @description Body of the project */ + body: string | null; + /** Format: uri */ + columns_url: string; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + html_url: string; + id: number; + /** @description Name of the project */ + name: string; + node_id: string; + number: number; + /** Format: uri */ + owner_url: string; + /** + * @description State of the project; either 'open' or 'closed' + * @enum {string} + */ + state: "open" | "closed"; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** project deleted event */ + "webhook-project-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Project */ + project: { + /** @description Body of the project */ + body: string | null; + /** Format: uri */ + columns_url: string; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** 
Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + html_url: string; + id: number; + /** @description Name of the project */ + name: string; + node_id: string; + number: number; + /** Format: uri */ + owner_url: string; + /** + * @description State of the project; either 'open' or 'closed' + * @enum {string} + */ + state: "open" | "closed"; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + repository?: components["schemas"]["nullable-repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** project edited event */ + "webhook-project-edited": { + /** @enum {string} */ + action: "edited"; + /** @description The changes to the project if the action was `edited`. */ + changes?: { + body?: { + /** @description The previous version of the body if the action was `edited`. */ + from: string; + }; + name?: { + /** @description The changes to the project if the action was `edited`. */ + from: string; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Project */ + project: { + /** @description Body of the project */ + body: string | null; + /** Format: uri */ + columns_url: string; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + html_url: string; + id: number; + /** @description Name of the project */ + name: string; + node_id: string; + number: number; + /** Format: uri */ + owner_url: string; + /** + * @description State of the project; either 'open' or 'closed' + * @enum {string} + */ + state: "open" | "closed"; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + repository?: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** project reopened event */ + "webhook-project-reopened": { + /** @enum {string} */ + action: "reopened"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Project */ + 
project: { + /** @description Body of the project */ + body: string | null; + /** Format: uri */ + columns_url: string; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + html_url: string; + id: number; + /** @description Name of the project */ + name: string; + node_id: string; + number: number; + /** Format: uri */ + owner_url: string; + /** + * @description State of the project; either 'open' or 'closed' + * @enum {string} + */ + state: "open" | "closed"; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** Projects v2 Project Closed Event */ + "webhook-projects-v2-project-closed": { + /** @enum {string} */ + action: "closed"; + organization: components["schemas"]["organization-simple"]; + projects_v2: components["schemas"]["projects-v2"]; + sender: components["schemas"]["simple-user"]; + }; + /** @description A project was created */ + "webhook-projects-v2-project-created": { + /** @enum {string} */ + action: "created"; + organization: components["schemas"]["organization-simple"]; + projects_v2: components["schemas"]["projects-v2"]; + sender: components["schemas"]["simple-user"]; + }; + /** Projects v2 Project Deleted Event */ + "webhook-projects-v2-project-deleted": { + /** @enum {string} */ + action: "deleted"; + organization: components["schemas"]["organization-simple"]; + projects_v2: components["schemas"]["projects-v2"]; + sender: components["schemas"]["simple-user"]; + }; + /** Projects v2 Project Edited Event */ + "webhook-projects-v2-project-edited": { + /** @enum {string} */ + action: "edited"; + changes: { + description?: { + from?: string | null; + to?: string | null; + }; + public?: { + from?: boolean; + to?: boolean; + }; + short_description?: { + from?: string | null; + to?: string | null; + }; + title?: { + from?: string; + to?: string; + }; + }; + organization: components["schemas"]["organization-simple"]; + projects_v2: components["schemas"]["projects-v2"]; + sender: components["schemas"]["simple-user"]; + }; + /** Projects v2 Item Archived Event */ + "webhook-projects-v2-item-archived": { + /** @enum {string} */ + action: "archived"; + changes: { + archived_at?: { + /** Format: date-time */ + from?: string | null; + /** Format: date-time */ + to?: string | null; + }; + }; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + projects_v2_item: components["schemas"]["projects-v2-item"]; + sender: components["schemas"]["simple-user"]; + }; + /** Projects v2 Item Converted Event */ + 
"webhook-projects-v2-item-converted": { + /** @enum {string} */ + action: "converted"; + changes: { + content_type?: { + from?: string | null; + to?: string; + }; + }; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + projects_v2_item: components["schemas"]["projects-v2-item"]; + sender: components["schemas"]["simple-user"]; + }; + /** Projects v2 Item Created Event */ + "webhook-projects-v2-item-created": { + /** @enum {string} */ + action: "created"; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + projects_v2_item: components["schemas"]["projects-v2-item"]; + sender: components["schemas"]["simple-user"]; + }; + /** Projects v2 Item Deleted Event */ + "webhook-projects-v2-item-deleted": { + /** @enum {string} */ + action: "deleted"; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + projects_v2_item: components["schemas"]["projects-v2-item"]; + sender: components["schemas"]["simple-user"]; + }; + /** Projects v2 Item Edited Event */ + "webhook-projects-v2-item-edited": { + /** @enum {string} */ + action: "edited"; + changes?: OneOf< + [ + { + field_value: { + field_node_id?: string; + field_type?: string; + }; + }, + { + body: { + from?: string | null; + to?: string | null; + }; + }, + ] + >; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + projects_v2_item: components["schemas"]["projects-v2-item"]; + sender: components["schemas"]["simple-user"]; + }; + /** Projects v2 Item Reordered Event */ + "webhook-projects-v2-item-reordered": { + /** @enum {string} */ + action: "reordered"; + changes: { + previous_projects_v2_item_node_id?: { + from?: string | null; + to?: string | null; + }; + }; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + projects_v2_item: components["schemas"]["projects-v2-item"]; + sender: components["schemas"]["simple-user"]; + }; + /** Projects v2 Item Restored Event */ + "webhook-projects-v2-item-restored": { + /** @enum {string} */ + action: "restored"; + changes: { + archived_at?: { + /** Format: date-time */ + from?: string | null; + /** Format: date-time */ + to?: string | null; + }; + }; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + projects_v2_item: components["schemas"]["projects-v2-item"]; + sender: components["schemas"]["simple-user"]; + }; + /** Projects v2 Project Reopened Event */ + "webhook-projects-v2-project-reopened": { + /** @enum {string} */ + action: "reopened"; + organization: components["schemas"]["organization-simple"]; + projects_v2: components["schemas"]["projects-v2"]; + sender: components["schemas"]["simple-user"]; + }; + /** public event */ + "webhook-public": { + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request assigned event */ + "webhook-pull-request-assigned": { + /** @enum {string} */ + action: "assigned"; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** 
Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. */ + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * 
AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string | null; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + } | null; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: 
uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request auto_merge_disabled event */ + "webhook-pull-request-auto-merge-disabled": { + /** @enum {string} */ + action: "auto_merge_disabled"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: 
string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. 
*/ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * 
@description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + reason: string; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request auto_merge_enabled event */ + "webhook-pull-request-auto-merge-enabled": { + /** @enum {string} */ + action: "auto_merge_enabled"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: 
string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. 
*/ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead.**
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead.**
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: 
uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + reason?: string; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request closed event */ + "webhook-pull-request-closed": { + /** @enum {string} */ + action: "closed"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. 
*/ + number: number; + organization?: components["schemas"]["organization-simple"]; + pull_request: components["schemas"]["pull-request"] & { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow updating the pull request's branch. */ + allow_update_branch?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged. + * @default false + */ + delete_branch_on_merge?: boolean; + /** + * @description The default value for a merge commit message. + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., "Merge pull request #123 from branch-name"). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a squash merge commit message: + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead.** + * @default false + */ + use_squash_pr_title_as_default?: boolean; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request converted_to_draft event */ + "webhook-pull-request-converted-to-draft": { + /** @enum {string} */ + action: "converted_to_draft"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. */ + number: number; + organization?: components["schemas"]["organization-simple"]; + pull_request: components["schemas"]["pull-request"] & { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow updating the pull request's branch. */ + allow_update_branch?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged. + * @default false + */ + delete_branch_on_merge?: boolean; + /** + * @description The default value for a merge commit message. + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * - `PR_TITLE` - default to the pull request's title. 
+ * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., "Merge pull request #123 from branch-name"). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a squash merge commit message: + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead.** + * @default false + */ + use_squash_pr_title_as_default?: boolean; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request demilestoned event */ + "webhook-pull-request-demilestoned": { + /** @enum {string} */ + action: "demilestoned"; + enterprise?: components["schemas"]["enterprise"]; + milestone?: components["schemas"]["milestone"]; + /** @description The pull request number. */ + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: 
uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * 
@description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** pull_request dequeued event */ + "webhook-pull-request-dequeued": { + /** @enum {string} */ + action: "dequeued"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + 
organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. 
*/ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * 
@description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + reason: string; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request edited event */ + "webhook-pull-request-edited": { + /** @enum {string} */ + action: "edited"; + /** @description The changes to the comment if the action was `edited`. */ + changes: { + base?: { + ref: { + from: string; + }; + sha: { + from: string; + }; + }; + body?: { + /** @description The previous version of the body if the action was `edited`. 
*/ + from: string; + }; + title?: { + /** @description The previous version of the title if the action was `edited`. */ + from: string; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. */ + number: number; + organization?: components["schemas"]["organization-simple"]; + pull_request: components["schemas"]["pull-request"] & { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow updating the pull request's branch. */ + allow_update_branch?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged. + * @default false + */ + delete_branch_on_merge?: boolean; + /** + * @description The default value for a merge commit message. + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., "Merge pull request #123 from branch-name"). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a squash merge commit message: + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + }; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** pull_request enqueued event */ + "webhook-pull-request-enqueued": { + /** @enum {string} */ + action: "enqueued"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. 
*/ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. 
+ * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. 
+ * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. 
+ * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. 
+ * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * 
@description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request labeled event */ + "webhook-pull-request-labeled": { + /** @enum {string} */ + action: "labeled"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** Label */ + label?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }; + /** @description The pull request number. */ + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: 
string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string | null; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + } | null; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: 
uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request locked event */ + "webhook-pull-request-locked": { + /** @enum {string} */ + action: "locked"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. 
*/ + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. 
*/ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. 
+ * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. 
+ * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string | null; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. 
+ * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. 
+ * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + } | null; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * 
@description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request milestoned event */ + "webhook-pull-request-milestoned": { + /** @enum {string} */ + action: "milestoned"; + enterprise?: components["schemas"]["enterprise"]; + milestone?: components["schemas"]["milestone"]; + /** @description The pull request number. 
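The `requested_reviewers` arrays in the payloads above mix two shapes behind the file's `OneOf` helper: a user-like entry (carrying `login`) and a team-like entry (carrying `name` and usually `slug`). A minimal, self-contained sketch of how a consumer might tell them apart structurally; the local `UserReviewer`, `TeamReviewer`, and `Reviewer` names are hypothetical stand-ins for the inline types in the diff, not exports of this file:

// Hypothetical local mirrors of the user/team reviewer shapes shown above.
type UserReviewer = { login: string; id: number };
type TeamReviewer = { name: string; id: number; slug?: string };
type Reviewer = UserReviewer | TeamReviewer | null;

function reviewerLabel(reviewer: Reviewer): string {
  if (reviewer === null) return "(deleted account)";
  // Users carry `login`; teams carry `name` (and usually `slug`).
  return "login" in reviewer ? `@${reviewer.login}` : `team:${reviewer.name}`;
}

const labels = [
  { login: "octocat", id: 1 },
  { name: "maintainers", id: 2, slug: "maintainers" },
].map(reviewerLabel);
console.log(labels); // ["@octocat", "team:maintainers"]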
*/ + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. 
*/ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. 
+ * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. 
+ * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. 
+ * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). 
+ * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * 
@description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** pull_request opened event */ + "webhook-pull-request-opened": { + /** @enum {string} */ + action: "opened"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. 
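The `webhook-pull-request-milestoned` schema declared above is most useful once narrowed from a raw delivery. A minimal sketch, assuming this generated file is importable as a hypothetical "./types" module exposing the `components` interface being declared in the diff; it reads only fields visible in the schema (`number` and the nullable `pull_request.milestone`):

import type { components } from "./types"; // assumed module path

type PullRequestMilestoned =
  components["schemas"]["webhook-pull-request-milestoned"];

// Summarise a `pull_request.milestoned` delivery.
function describeMilestoned(event: PullRequestMilestoned): string {
  const milestone = event.pull_request.milestone;
  return milestone
    ? `PR #${event.number} was added to milestone "${milestone.title}"`
    : `PR #${event.number} was milestoned, but the payload carries no milestone details`;
}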
*/ + number: number; + organization?: components["schemas"]["organization-simple"]; + pull_request: components["schemas"]["pull-request"] & { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow updating the pull request's branch. */ + allow_update_branch?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged. + * @default false + */ + delete_branch_on_merge?: boolean; + /** + * @description The default value for a merge commit message. + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a squash merge commit message: + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead.** + * @default false + */ + use_squash_pr_title_as_default?: boolean; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request ready_for_review event */ + "webhook-pull-request-ready-for-review": { + /** @enum {string} */ + action: "ready_for_review"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. */ + number: number; + organization?: components["schemas"]["organization-simple"]; + pull_request: components["schemas"]["pull-request"] & { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow updating the pull request's branch. */ + allow_update_branch?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged. + * @default false + */ + delete_branch_on_merge?: boolean; + /** + * @description The default value for a merge commit message. + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * - `PR_TITLE` - default to the pull request's title. 
+ * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., "Merge pull request #123 from branch-name"). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a squash merge commit message: + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead.** + * @default false + */ + use_squash_pr_title_as_default?: boolean; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request reopened event */ + "webhook-pull-request-reopened": { + /** @enum {string} */ + action: "reopened"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. */ + number: number; + organization?: components["schemas"]["organization-simple"]; + pull_request: components["schemas"]["pull-request"] & { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow updating the pull request's branch. */ + allow_update_branch?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged. + * @default false + */ + delete_branch_on_merge?: boolean; + /** + * @description The default value for a merge commit message. + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., "Merge pull request #123 from branch-name"). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a squash merge commit message: + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. 
Please use `squash_merge_commit_title` instead.** + * @default false + */ + use_squash_pr_title_as_default?: boolean; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request_review_comment created event */ + "webhook-pull-request-review-comment-created": { + /** @enum {string} */ + action: "created"; + /** + * Pull Request Review Comment + * @description The [comment](https://docs.github.com/rest/reference/pulls#comments) itself. + */ + comment: { + _links: { + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + pull_request: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + }; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description The text of the comment. */ + body: string; + /** @description The SHA of the commit to which the comment applies. */ + commit_id: string; + /** Format: date-time */ + created_at: string; + /** @description The diff of the line that the comment refers to. */ + diff_hunk: string; + /** + * Format: uri + * @description HTML URL for the pull request review comment. + */ + html_url: string; + /** @description The ID of the pull request review comment. */ + id: number; + /** @description The comment ID to reply to. */ + in_reply_to_id?: number; + /** @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ + line: number | null; + /** @description The node ID of the pull request review comment. */ + node_id: string; + /** @description The SHA of the original commit to which the comment applies. */ + original_commit_id: string; + /** @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ + original_line: number | null; + /** @description The index of the original line in the diff to which the comment applies. */ + original_position: number; + /** @description The first line of the range for a multi-line comment. */ + original_start_line: number | null; + /** @description The relative path of the file to which the comment applies. */ + path: string; + /** @description The line index in the diff to which the comment applies. */ + position: number | null; + /** @description The ID of the pull request review to which the comment belongs. */ + pull_request_review_id: number | null; + /** + * Format: uri + * @description URL for the pull request that the review comment belongs to. + */ + pull_request_url: string; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** + * @description The side of the first line of the range for a multi-line comment. + * @enum {string} + */ + side: "LEFT" | "RIGHT"; + /** @description The first line of the range for a multi-line comment. */ + start_line: number | null; + /** + * @description The side of the first line of the range for a multi-line comment. 
+ * @default RIGHT + * @enum {string|null} + */ + start_side: "LEFT" | "RIGHT" | "" | null; + /** + * @description The level at which the comment is targeted, can be a diff line or a file. + * @enum {string} + */ + subject_type?: "line" | "file"; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the pull request review comment + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ 
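(Editorial aside, not part of the generated schema above: a minimal sketch of how a consumer might interpret the multi-line range fields of a review comment payload — `line`, `start_line`, `side`, `start_side` — that the preceding descriptions define. The type and helper names below are assumptions for illustration only.)

// Illustrative sketch only: local shape mirroring the range fields described above.
interface ReviewCommentRangeSketch {
  /** Last line of the range, or the only commented line. */
  line: number | null;
  /** First line of the range; null for single-line comments. */
  start_line: number | null;
  side: "LEFT" | "RIGHT";
  start_side: "LEFT" | "RIGHT" | "" | null;
}

function isMultiLineComment(c: ReviewCommentRangeSketch): boolean {
  // Per the descriptions above, start_line is only populated for multi-line ranges,
  // so a comment spans multiple lines when both ends are present and differ.
  return c.start_line !== null && c.line !== null && c.start_line !== c.line;
}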
+ organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge?: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. 
*/ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + closed_at: string | null; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + commits_url: string; + created_at: string; + /** Format: uri */ + diff_url: string; + draft?: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions?: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + } | null; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + merge_commit_sha: string | null; + merged_at: string | null; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** Format: uri */ + patch_url: string; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ 
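(Editorial aside, not part of the generated schema above: `requested_reviewers` is typed as a OneOf union mixing user-shaped and team-shaped entries, as in the schema just shown. The aliases and type guard below are assumptions for the example, not exported types from this file.)

// Illustrative sketch only: narrowing a mixed reviewer entry to its team shape.
type ReviewerUserSketch = { id: number; login: string };
type ReviewerTeamSketch = { id: number; name: string; slug?: string };

function isTeamReviewer(
  r: ReviewerUserSketch | ReviewerTeamSketch
): r is ReviewerTeamSketch {
  // Team entries carry a name and (usually) a slug; user entries carry a login instead.
  return !("login" in r);
}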
+ repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + /** Format: uri */ + review_comments_url: string; + /** @enum {string} */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + title: string; + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request_review_comment deleted event */ + "webhook-pull-request-review-comment-deleted": { + /** @enum {string} */ + action: "deleted"; + /** + * Pull Request Review Comment + * @description The [comment](https://docs.github.com/rest/reference/pulls#comments) itself. + */ + comment: { + _links: { + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + pull_request: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + }; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description The text of the comment. */ + body: string; + /** @description The SHA of the commit to which the comment applies. */ + commit_id: string; + /** Format: date-time */ + created_at: string; + /** @description The diff of the line that the comment refers to. */ + diff_hunk: string; + /** + * Format: uri + * @description HTML URL for the pull request review comment. + */ + html_url: string; + /** @description The ID of the pull request review comment. */ + id: number; + /** @description The comment ID to reply to. */ + in_reply_to_id?: number; + /** @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ + line: number | null; + /** @description The node ID of the pull request review comment. */ + node_id: string; + /** @description The SHA of the original commit to which the comment applies. */ + original_commit_id: string; + /** @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ + original_line: number; + /** @description The index of the original line in the diff to which the comment applies. */ + original_position: number; + /** @description The first line of the range for a multi-line comment. 
*/ + original_start_line: number | null; + /** @description The relative path of the file to which the comment applies. */ + path: string; + /** @description The line index in the diff to which the comment applies. */ + position: number | null; + /** @description The ID of the pull request review to which the comment belongs. */ + pull_request_review_id: number | null; + /** + * Format: uri + * @description URL for the pull request that the review comment belongs to. + */ + pull_request_url: string; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** + * @description The side of the first line of the range for a multi-line comment. + * @enum {string} + */ + side: "LEFT" | "RIGHT"; + /** @description The first line of the range for a multi-line comment. */ + start_line: number | null; + /** + * @description The side of the first line of the range for a multi-line comment. + * @default RIGHT + * @enum {string|null} + */ + start_side: "LEFT" | "RIGHT" | "" | null; + /** + * @description The level at which the comment is targeted, can be a diff line or a file. + * @enum {string} + */ + subject_type?: "line" | "file"; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the pull request review comment + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + 
following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge?: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. 
+ * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. 
+ * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + closed_at: string | null; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + commits_url: string; + created_at: string; + /** Format: uri */ + diff_url: string; + draft?: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + } | null; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + merge_commit_sha: string | null; + merged_at: string | null; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** Format: uri */ + patch_url: string; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + 
repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + /** Format: uri */ + review_comments_url: string; + /** @enum {string} */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + title: string; + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request_review_comment edited event */ + "webhook-pull-request-review-comment-edited": { + /** @enum {string} */ + action: "edited"; + /** @description The changes to the comment. */ + changes: { + body?: { + /** @description The previous version of the body. */ + from: string; + }; + }; + /** + * Pull Request Review Comment + * @description The [comment](https://docs.github.com/rest/reference/pulls#comments) itself. + */ + comment: { + _links: { + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + pull_request: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + }; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description The text of the comment. */ + body: string; + /** @description The SHA of the commit to which the comment applies. */ + commit_id: string; + /** Format: date-time */ + created_at: string; + /** @description The diff of the line that the comment refers to. */ + diff_hunk: string; + /** + * Format: uri + * @description HTML URL for the pull request review comment. + */ + html_url: string; + /** @description The ID of the pull request review comment. */ + id: number; + /** @description The comment ID to reply to. */ + in_reply_to_id?: number; + /** @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ + line: number | null; + /** @description The node ID of the pull request review comment. */ + node_id: string; + /** @description The SHA of the original commit to which the comment applies. */ + original_commit_id: string; + /** @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ + original_line: number; + /** @description The index of the original line in the diff to which the comment applies. 
*/ + original_position: number; + /** @description The first line of the range for a multi-line comment. */ + original_start_line: number | null; + /** @description The relative path of the file to which the comment applies. */ + path: string; + /** @description The line index in the diff to which the comment applies. */ + position: number | null; + /** @description The ID of the pull request review to which the comment belongs. */ + pull_request_review_id: number | null; + /** + * Format: uri + * @description URL for the pull request that the review comment belongs to. + */ + pull_request_url: string; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** + * @description The side of the first line of the range for a multi-line comment. + * @enum {string} + */ + side: "LEFT" | "RIGHT"; + /** @description The first line of the range for a multi-line comment. */ + start_line: number | null; + /** + * @description The side of the first line of the range for a multi-line comment. + * @default RIGHT + * @enum {string|null} + */ + start_side: "LEFT" | "RIGHT" | "" | null; + /** + * @description The level at which the comment is targeted, can be a diff line or a file. + * @enum {string} + */ + subject_type?: "line" | "file"; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the pull request review comment + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + 
events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge?: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. 
+ * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. 
+ * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + closed_at: string | null; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + commits_url: string; + created_at: string; + /** Format: uri */ + diff_url: string; + draft?: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + } | null; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + merge_commit_sha: string | null; + merged_at: string | null; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** Format: uri */ + patch_url: string; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + 
repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + /** Format: uri */ + review_comments_url: string; + /** @enum {string} */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + title: string; + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request_review dismissed event */ + "webhook-pull-request-review-dismissed": { + /** @enum {string} */ + action: "dismissed"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Simple Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** 
Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + closed_at: string | null; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + commits_url: string; + created_at: string; + /** Format: uri */ + diff_url: string; + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + } | null; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + merge_commit_sha: string | null; + merged_at: string | null; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** Format: uri */ + patch_url: string; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ 
+ repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + /** Format: uri */ + review_comments_url: string; + /** @enum {string} */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + title: string; + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + /** @description The review that was affected. */ + review: { + _links: { + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + pull_request: { + /** Format: uri-template */ + href: string; + }; + }; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description The text of the review. */ + body: string | null; + /** @description A commit SHA for the review. */ + commit_id: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the review */ + id: number; + node_id: string; + /** Format: uri */ + pull_request_url: string; + /** @enum {string} */ + state: "dismissed" | "approved" | "changes_requested"; + /** Format: date-time */ + submitted_at: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request_review edited event */ + "webhook-pull-request-review-edited": { + /** @enum {string} */ + action: "edited"; + changes: { + body?: { + /** @description The previous version of the body if the action was `edited`. 
*/ + from: string; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Simple Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. 
*/ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. 
+ * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + closed_at: string | null; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + commits_url: string; + created_at: string; + /** Format: uri */ + diff_url: string; + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. 
*/ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + } | null; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + merge_commit_sha: string | null; + merged_at: string | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** Format: uri */ + patch_url: string; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have 
for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + /** Format: uri */ + review_comments_url: string; + /** @enum {string} */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + title: string; + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + /** @description The review that was affected. */ + review: { + _links: { + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + pull_request: { + /** Format: uri-template */ + href: string; + }; + }; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description The text of the review. */ + body: string | null; + /** @description A commit SHA for the review. 
*/ + commit_id: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the review */ + id: number; + node_id: string; + /** Format: uri */ + pull_request_url: string; + state: string; + /** Format: date-time */ + submitted_at: string | null; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request review_request_removed event */ + "webhook-pull-request-review-request-removed": OneOf< + [ + { + /** @enum {string} */ + action: "review_request_removed"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. */ + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + 
/** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title. + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for 
the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + /** User */ + requested_reviewer: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + sender: components["schemas"]["simple-user"]; + }, + { + /** @enum {string} */ + action: "review_request_removed"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. 
*/ + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. 
*/ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. 
+ * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. 
+ * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. 
+ * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. 
+ * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for 
the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + /** + * Team + * @description Groups of organization members that gives permissions on specified repositories. 
+ */ + requested_team: { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }; + sender: components["schemas"]["simple-user"]; + }, + ] + >; + /** pull_request review_requested event */ + "webhook-pull-request-review-requested": OneOf< + [ + { + /** @enum {string} */ + action: "review_requested"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. */ + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; 
+ email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * 
@description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + /** User */ + requested_reviewer: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + sender: components["schemas"]["simple-user"]; + }, + { + /** @enum {string} */ + action: "review_requested"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. 
*/ + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. 
*/ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. 
+ * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. 
+ * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. 
+ * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. 
+ * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * 
@description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + /** + * Team + * @description Groups of organization members that gives permissions on specified repositories. 
+ */ + requested_team: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }; + sender: components["schemas"]["simple-user"]; + }, + ] + >; + /** pull_request_review submitted event */ + "webhook-pull-request-review-submitted": { + /** @enum {string} */ + action: "submitted"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Simple Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** 
Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + closed_at: string | null; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + commits_url: string; + created_at: string; + /** Format: uri */ + diff_url: string; + draft: boolean; + head: { + label: string | null; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + } | null; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + merge_commit_sha: string | null; + merged_at: string | null; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** Format: uri */ + patch_url: string; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ 
+ repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + /** Format: uri */ + review_comments_url: string; + /** @enum {string} */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + title: string; + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + /** @description The review that was affected. */ + review: { + _links: { + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + pull_request: { + /** Format: uri-template */ + href: string; + }; + }; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description The text of the review. */ + body: string | null; + /** @description A commit SHA for the review. 
*/ + commit_id: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the review */ + id: number; + node_id: string; + /** Format: uri */ + pull_request_url: string; + state: string; + /** Format: date-time */ + submitted_at: string | null; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request_review_thread resolved event */ + "webhook-pull-request-review-thread-resolved": { + /** @enum {string} */ + action: "resolved"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Simple Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: 
string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. 
*/ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + closed_at: string | null; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + commits_url: string; + created_at: string; + /** Format: uri */ + diff_url: string; + draft: boolean; + head: { + label: string | null; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + } | null; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. 
*/ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + merge_commit_sha: string | null; + merged_at: string | null; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** Format: uri */ + patch_url: string; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: 
string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + /** Format: uri */ + review_comments_url: string; + /** @enum {string} */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + title: string; + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + thread: { + comments: { + _links: { + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + pull_request: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + }; + /** + * AuthorAssociation + * @description How the author is associated with the repository. 
+ * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description The text of the comment. */ + body: string; + /** @description The SHA of the commit to which the comment applies. */ + commit_id: string; + /** Format: date-time */ + created_at: string; + /** @description The diff of the line that the comment refers to. */ + diff_hunk: string; + /** + * Format: uri + * @description HTML URL for the pull request review comment. + */ + html_url: string; + /** @description The ID of the pull request review comment. */ + id: number; + /** @description The comment ID to reply to. */ + in_reply_to_id?: number; + /** @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ + line: number | null; + /** @description The node ID of the pull request review comment. */ + node_id: string; + /** @description The SHA of the original commit to which the comment applies. */ + original_commit_id: string; + /** @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ + original_line: number | null; + /** @description The index of the original line in the diff to which the comment applies. */ + original_position: number; + /** @description The first line of the range for a multi-line comment. */ + original_start_line: number | null; + /** @description The relative path of the file to which the comment applies. */ + path: string; + /** @description The line index in the diff to which the comment applies. */ + position: number | null; + /** @description The ID of the pull request review to which the comment belongs. */ + pull_request_review_id: number | null; + /** + * Format: uri + * @description URL for the pull request that the review comment belongs to. + */ + pull_request_url: string; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** + * @description The side of the first line of the range for a multi-line comment. + * @enum {string} + */ + side: "LEFT" | "RIGHT"; + /** @description The first line of the range for a multi-line comment. */ + start_line: number | null; + /** + * @description The side of the first line of the range for a multi-line comment. + * @default RIGHT + * @enum {string|null} + */ + start_side: "LEFT" | "RIGHT" | "" | null; + /** + * @description The level at which the comment is targeted, can be a diff line or a file. 
+ * @enum {string} + */ + subject_type?: "line" | "file"; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the pull request review comment + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }[]; + node_id: string; + }; + }; + /** pull_request_review_thread unresolved event */ + "webhook-pull-request-review-thread-unresolved": { + /** @enum {string} */ + action: "unresolved"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** Simple Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: 
string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. 
*/ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + closed_at: string | null; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + commits_url: string; + created_at: string; + /** Format: uri */ + diff_url: string; + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. 
*/ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + merge_commit_sha: string | null; + merged_at: string | null; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + number: number; + /** Format: uri */ + patch_url: string; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + 
/** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + /** Format: uri */ + review_comments_url: string; + /** @enum {string} */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + title: string; + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + thread: { + comments: { + _links: { + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + pull_request: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + }; + /** + * AuthorAssociation + * @description How the author is associated with the repository. 
+ * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** @description The text of the comment. */ + body: string; + /** @description The SHA of the commit to which the comment applies. */ + commit_id: string; + /** Format: date-time */ + created_at: string; + /** @description The diff of the line that the comment refers to. */ + diff_hunk: string; + /** + * Format: uri + * @description HTML URL for the pull request review comment. + */ + html_url: string; + /** @description The ID of the pull request review comment. */ + id: number; + /** @description The comment ID to reply to. */ + in_reply_to_id?: number; + /** @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ + line: number | null; + /** @description The node ID of the pull request review comment. */ + node_id: string; + /** @description The SHA of the original commit to which the comment applies. */ + original_commit_id: string; + /** @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment */ + original_line: number; + /** @description The index of the original line in the diff to which the comment applies. */ + original_position: number; + /** @description The first line of the range for a multi-line comment. */ + original_start_line: number | null; + /** @description The relative path of the file to which the comment applies. */ + path: string; + /** @description The line index in the diff to which the comment applies. */ + position: number | null; + /** @description The ID of the pull request review to which the comment belongs. */ + pull_request_review_id: number | null; + /** + * Format: uri + * @description URL for the pull request that the review comment belongs to. + */ + pull_request_url: string; + /** Reactions */ + reactions: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** + * @description The side of the first line of the range for a multi-line comment. + * @enum {string} + */ + side: "LEFT" | "RIGHT"; + /** @description The first line of the range for a multi-line comment. */ + start_line: number | null; + /** + * @description The side of the first line of the range for a multi-line comment. + * @default RIGHT + * @enum {string|null} + */ + start_side: "LEFT" | "RIGHT" | "" | null; + /** + * @description The level at which the comment is targeted, can be a diff line or a file. 
+ * @enum {string} + */ + subject_type?: "line" | "file"; + /** Format: date-time */ + updated_at: string; + /** + * Format: uri + * @description URL for the pull request review comment + */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }[]; + node_id: string; + }; + }; + /** pull_request synchronize event */ + "webhook-pull-request-synchronize": { + /** @enum {string} */ + action: "synchronize"; + after: string; + before: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. */ + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: 
uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit message title. + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. 
*/ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. 
*/ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team 
+ */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request unassigned event */ + "webhook-pull-request-unassigned": { + /** @enum {string} */ + action: "unassigned"; + /** User */ + assignee?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. 
*/ + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. 
*/ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string | null; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. 
+ * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. 
+ * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string | null; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. 
+ * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. 
+ * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + } | null; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: 
uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. 
*/ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** pull_request unlabeled event */ + "webhook-pull-request-unlabeled": { + /** @enum {string} */ + action: "unlabeled"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** Label */ + label?: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }; + /** @description The pull request number. */ + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri */ + 
avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string | null; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string | null; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit message title. + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + } | null; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. 
*/ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. 
*/ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team 
+ */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization" | "Mannequin"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** pull_request unlocked event */ + "webhook-pull-request-unlocked": { + /** @enum {string} */ + action: "unlocked"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + /** @description The pull request number. */ + number: number; + organization?: components["schemas"]["organization-simple"]; + /** Pull Request */ + pull_request: { + _links: { + /** Link */ + comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + commits: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + html: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + issue: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comment: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + review_comments: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + self: { + /** Format: uri-template */ + href: string; + }; + /** Link */ + statuses: { + /** Format: uri-template */ + href: string; + }; + }; + /** @enum {string|null} */ + active_lock_reason: + | "resolved" + | "off-topic" + | "too heated" + | "spam" + | "" + | null; + additions?: number; + /** User */ + assignee: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + assignees: ({ + /** Format: uri 
*/ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null)[]; + /** + * AuthorAssociation + * @description How the author is associated with the repository. + * @enum {string} + */ + author_association: + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** + * PullRequestAutoMerge + * @description The status of auto merging a pull request. + */ + auto_merge: { + /** @description Commit message for the merge commit. */ + commit_message: string | null; + /** @description Title for the merge commit message. */ + commit_title: string; + /** User */ + enabled_by: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + } | null; + base: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. 
+ * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + body: string | null; + changed_files?: number; + /** Format: date-time */ + closed_at: string | null; + comments?: number; + /** Format: uri */ + comments_url: string; + commits?: number; + /** Format: uri */ + commits_url: string; + /** Format: date-time */ + created_at: string; + deletions?: number; + /** Format: uri */ + diff_url: string; + /** @description Indicates whether or not the pull request is a draft. */ + draft: boolean; + head: { + label: string; + ref: string; + /** + * Repository + * @description A git repository + */ + repo: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. 
*/ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. + * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: + | "PR_BODY" + | "COMMIT_MESSAGES" + | "BLANK"; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. 
+ * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + } | null; + sha: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + issue_url: string; + labels: { + /** @description 6-character hex code, without the leading #, identifying the color */ + color: string; + default: boolean; + description: string | null; + id: number; + /** @description The name of the label. */ + name: string; + node_id: string; + /** + * Format: uri + * @description URL for the label + */ + url: string; + }[]; + locked: boolean; + /** @description Indicates whether maintainers can modify the pull request. */ + maintainer_can_modify?: boolean; + merge_commit_sha: string | null; + mergeable?: boolean | null; + mergeable_state?: string; + merged?: boolean | null; + /** Format: date-time */ + merged_at: string | null; + /** User */ + merged_by?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Milestone + * @description A collection of related issues and pull requests. 
+ */ + milestone: { + /** Format: date-time */ + closed_at: string | null; + closed_issues: number; + /** Format: date-time */ + created_at: string; + /** User */ + creator: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + description: string | null; + /** Format: date-time */ + due_on: string | null; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + labels_url: string; + node_id: string; + /** @description The number of the milestone. */ + number: number; + open_issues: number; + /** + * @description The state of the milestone. + * @enum {string} + */ + state: "open" | "closed"; + /** @description The title of the milestone. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + node_id: string; + /** @description Number uniquely identifying the pull request within its repository. */ + number: number; + /** Format: uri */ + patch_url: string; + rebaseable?: boolean | null; + requested_reviewers: OneOf< + [ + { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null, + { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL 
for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }, + ] + >[]; + requested_teams: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }[]; + /** Format: uri-template */ + review_comment_url: string; + review_comments?: number; + /** Format: uri */ + review_comments_url: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state: "open" | "closed"; + /** Format: uri */ + statuses_url: string; + /** @description The title of the pull request. */ + title: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** User */ + user: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** push event */ + "webhook-push": { + /** @description The SHA of the most recent commit on `ref` after the push. */ + after: string; + base_ref: string | null; + /** @description The SHA of the most recent commit on `ref` before the push. */ + before: string; + /** @description An array of commit objects describing the pushed commits. 
(Pushed commits are all commits that are included in the `compare` between the `before` commit and the `after` commit.) The array includes a maximum of 20 commits. If necessary, you can use the [Commits API](https://docs.github.com/rest/reference/repos#commits) to fetch additional commits. This limit is applied to timeline events only and isn't applied to webhook deliveries. */ + commits: { + /** @description An array of files added in the commit. */ + added?: string[]; + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + author: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + }; + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + committer: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + }; + /** @description Whether this commit is distinct from any that have been pushed before. */ + distinct: boolean; + id: string; + /** @description The commit message. */ + message: string; + /** @description An array of files modified by the commit. */ + modified?: string[]; + /** @description An array of files removed in the commit. */ + removed?: string[]; + /** + * Format: date-time + * @description The ISO 8601 timestamp of the commit. + */ + timestamp: string; + tree_id: string; + /** + * Format: uri + * @description URL that points to the commit API resource. + */ + url: string; + }[]; + /** @description URL that shows the changes in this `ref` update, from the `before` commit to the `after` commit. For a newly created `ref` that is directly based on the default branch, this is the comparison between the head of the default branch and the `after` commit. Otherwise, this shows all commits until the `after` commit. */ + compare: string; + /** @description Whether this push created the `ref`. */ + created: boolean; + /** @description Whether this push deleted the `ref`. */ + deleted: boolean; + enterprise?: components["schemas"]["enterprise"]; + /** @description Whether this push was a force push of the `ref`. */ + forced: boolean; + /** Commit */ + head_commit: { + /** @description An array of files added in the commit. */ + added?: string[]; + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + author: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + }; + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + committer: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + }; + /** @description Whether this commit is distinct from any that have been pushed before. */ + distinct: boolean; + id: string; + /** @description The commit message. */ + message: string; + /** @description An array of files modified by the commit. */ + modified?: string[]; + /** @description An array of files removed in the commit. */ + removed?: string[]; + /** + * Format: date-time + * @description The ISO 8601 timestamp of the commit. 
+ */ + timestamp: string; + tree_id: string; + /** + * Format: uri + * @description URL that points to the commit API resource. + */ + url: string; + } | null; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + pusher: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email?: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + }; + /** @description The full git ref that was pushed. Example: `refs/heads/main` or `refs/tags/v3.14.1`. */ + ref: string; + /** + * Repository + * @description A git repository + */ + repository: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + /** + * @description Whether discussions are enabled. 
+ * @default false + */ + has_discussions: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + /** @description Whether to require contributors to sign off on web-based commits */ + web_commit_signoff_required?: boolean; + }; + sender?: components["schemas"]["simple-user"]; + }; + "webhook-registry-package-published": { + /** @enum {string} */ + action: "published"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + registry_package: { + created_at: string | null; + description: string | null; + ecosystem: string; + html_url: string; + id: number; + name: string; + namespace: string; + owner: { + avatar_url: string; + events_url: string; + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string; + html_url: string; + id: number; + login: string; + node_id: string; + organizations_url: string; + received_events_url: string; + repos_url: string; + site_admin: boolean; + starred_url: string; + subscriptions_url: string; + type: string; + url: string; + }; + package_type: string; + package_version: { + author?: { + avatar_url: string; + events_url: string; + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string; + html_url: string; + id: number; + login: string; + node_id: string; + organizations_url: string; + received_events_url: string; + repos_url: string; + site_admin: boolean; + starred_url: string; + subscriptions_url: string; + type: string; + url: string; + }; + body?: string | Record<string, unknown>; + body_html?: string; + container_metadata?: { + labels?: Record<string, unknown> | null; + manifest?: Record<string, unknown> | null; + tag?: { + digest?: string; + name?: string; + }; + }; + created_at?: string; + description: string; + docker_metadata?: { + tags?: string[]; + }[]; + draft?: boolean; + html_url: string; + id: number; + installation_command: string; + manifest?: string; + metadata: { + [key: string]: unknown; + }[]; + name: string; + npm_metadata?: { + name?: string; + version?: string; + npm_user?: string; + author?: string | Record<string, unknown>; + bugs?: string | Record<string, unknown>; + dependencies?: Record<string, unknown>; + dev_dependencies?: Record<string, unknown>; + peer_dependencies?: Record<string, unknown>; + optional_dependencies?: Record<string, unknown>; + description?: string; + dist?: string | Record<string, unknown>; + git_head?: string; + homepage?: string; + license?: string; + main?: string; + repository?: string | Record<string, unknown>; + scripts?: Record<string, unknown>; + id?: string; + node_version?: string; + npm_version?: string; + has_shrinkwrap?: boolean; + maintainers?: string[]; + contributors?: string[]; + engines?: Record<string, unknown>; + keywords?: string[]; + files?: string[]; + bin?: Record<string, unknown>; + man?: Record<string, unknown>; + directories?: string | Record<string, unknown>; + os?: string[]; +
cpu?: string[]; + readme?: string; + installation_command?: string; + release_id?: number; + commit_oid?: string; + published_via_actions?: boolean; + deleted_by_id?: number; + } | null; + nuget_metadata?: + | { + id?: string | Record<string, unknown> | number; + name?: string; + value?: OneOf< + [ + boolean, + string, + number, + { + url?: string; + branch?: string; + commit?: string; + type?: string; + }, + ] + >; + }[] + | null; + package_files: { + content_type: string; + created_at: string; + download_url: string; + id: number; + md5: string | null; + name: string; + sha1: string | null; + sha256: string | null; + size: number; + state: string | null; + updated_at: string; + }[]; + package_url: string; + prerelease?: boolean; + release?: { + author?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + created_at?: string; + draft?: boolean; + html_url?: string; + id?: number; + name?: string | null; + prerelease?: boolean; + published_at?: string; + tag_name?: string; + target_commitish?: string; + url?: string; + }; + rubygems_metadata?: components["schemas"]["webhook-rubygems-metadata"][]; + summary: string; + tag_name?: string; + target_commitish?: string; + target_oid?: string; + updated_at?: string; + version: string; + } | null; + registry: { + about_url?: string; + name?: string; + type?: string; + url?: string; + vendor?: string; + } | null; + updated_at: string | null; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + "webhook-registry-package-updated": { + action: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + registry_package: { + created_at: string; + description: Record<string, unknown> | null; + ecosystem: string; + html_url: string; + id: number; + name: string; + namespace: string; + owner: { + avatar_url: string; + events_url: string; + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string; + html_url: string; + id: number; + login: string; + node_id: string; + organizations_url: string; + received_events_url: string; + repos_url: string; + site_admin: boolean; + starred_url: string; + subscriptions_url: string; + type: string; + url: string; + }; + package_type: string; + package_version: { + author: { + avatar_url: string; + events_url: string; + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string; + html_url: string; + id: number; + login: string; + node_id: string; + organizations_url: string; + received_events_url: string; + repos_url: string; + site_admin: boolean; + starred_url: string; + subscriptions_url: string; + type: string; + url: string; + }; + body: string; + body_html: string; + created_at: string; + description: string; + docker_metadata?: ({ + tags?: string[]; + } | null)[]; + draft?: boolean; + html_url: string; + id: number; + installation_command: string; + manifest?: string; + metadata: { + [key: string]: unknown; + }[]; + name: string; + package_files: { + content_type?: string; + created_at?: string; + download_url?: string; + id?: number;
+ md5?: string | null; + name?: string; + sha1?: string | null; + sha256?: string; + size?: number; + state?: string; + updated_at?: string; + }[]; + package_url: string; + prerelease?: boolean; + release?: { + author: { + avatar_url: string; + events_url: string; + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string; + html_url: string; + id: number; + login: string; + node_id: string; + organizations_url: string; + received_events_url: string; + repos_url: string; + site_admin: boolean; + starred_url: string; + subscriptions_url: string; + type: string; + url: string; + }; + created_at: string; + draft: boolean; + html_url: string; + id: number; + name: string; + prerelease: boolean; + published_at: string; + tag_name: string; + target_commitish: string; + url: string; + }; + rubygems_metadata?: components["schemas"]["webhook-rubygems-metadata"][]; + summary: string; + tag_name?: string; + target_commitish: string; + target_oid: string; + updated_at: string; + version: string; + }; + registry: Record<string, unknown> | null; + updated_at: string; + }; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** release created event */ + "webhook-release-created": { + /** @enum {string} */ + action: "created"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** + * Release + * @description The [release](https://docs.github.com/rest/reference/repos/#get-a-release) object. + */ + release: { + assets: { + /** Format: uri */ + browser_download_url: string; + content_type: string; + /** Format: date-time */ + created_at: string; + download_count: number; + id: number; + label: string | null; + /** @description The file name of the asset. */ + name: string; + node_id: string; + size: number; + /** + * @description State of the release asset.
+ * @enum {string} + */ + state: "uploaded"; + /** Format: date-time */ + updated_at: string; + /** User */ + uploader?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + url: string; + }[]; + /** Format: uri */ + assets_url: string; + /** User */ + author: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + body: string | null; + /** Format: date-time */ + created_at: string | null; + /** Format: uri */ + discussion_url?: string; + /** @description Whether the release is a draft or published */ + draft: boolean; + /** Format: uri */ + html_url: string; + id: number; + name: string | null; + node_id: string; + /** @description Whether the release is identified as a prerelease or a full release. */ + prerelease: boolean; + /** Format: date-time */ + published_at: string | null; + /** Reactions */ + reactions?: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** @description The name of the tag. */ + tag_name: string; + /** Format: uri */ + tarball_url: string | null; + /** @description Specifies the commitish value that determines where the Git tag is created from. */ + target_commitish: string; + /** Format: uri-template */ + upload_url: string; + /** Format: uri */ + url: string; + /** Format: uri */ + zipball_url: string | null; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** release deleted event */ + "webhook-release-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** + * Release + * @description The [release](https://docs.github.com/rest/reference/repos/#get-a-release) object. 
+ */ + release: { + assets: { + /** Format: uri */ + browser_download_url: string; + content_type: string; + /** Format: date-time */ + created_at: string; + download_count: number; + id: number; + label: string | null; + /** @description The file name of the asset. */ + name: string; + node_id: string; + size: number; + /** + * @description State of the release asset. + * @enum {string} + */ + state: "uploaded"; + /** Format: date-time */ + updated_at: string; + /** User */ + uploader?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + url: string; + }[]; + /** Format: uri */ + assets_url: string; + /** User */ + author: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + body: string | null; + /** Format: date-time */ + created_at: string | null; + /** Format: uri */ + discussion_url?: string; + /** @description Whether the release is a draft or published */ + draft: boolean; + /** Format: uri */ + html_url: string; + id: number; + name: string | null; + node_id: string; + /** @description Whether the release is identified as a prerelease or a full release. */ + prerelease: boolean; + /** Format: date-time */ + published_at: string | null; + /** Reactions */ + reactions?: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** @description The name of the tag. */ + tag_name: string; + /** Format: uri */ + tarball_url: string | null; + /** @description Specifies the commitish value that determines where the Git tag is created from. 
*/ + target_commitish: string; + /** Format: uri-template */ + upload_url: string; + /** Format: uri */ + url: string; + /** Format: uri */ + zipball_url: string | null; + }; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** release edited event */ + "webhook-release-edited": { + /** @enum {string} */ + action: "edited"; + changes: { + body?: { + /** @description The previous version of the body if the action was `edited`. */ + from: string; + }; + name?: { + /** @description The previous version of the name if the action was `edited`. */ + from: string; + }; + make_latest?: { + /** @description Whether this release was explicitly `edited` to be the latest. */ + to: boolean; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** + * Release + * @description The [release](https://docs.github.com/rest/reference/repos/#get-a-release) object. + */ + release: { + assets: { + /** Format: uri */ + browser_download_url: string; + content_type: string; + /** Format: date-time */ + created_at: string; + download_count: number; + id: number; + label: string | null; + /** @description The file name of the asset. */ + name: string; + node_id: string; + size: number; + /** + * @description State of the release asset. + * @enum {string} + */ + state: "uploaded"; + /** Format: date-time */ + updated_at: string; + /** User */ + uploader?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + url: string; + }[]; + /** Format: uri */ + assets_url: string; + /** User */ + author: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + body: string | null; + /** Format: date-time */ + created_at: string | null; + /** Format: uri */ + discussion_url?: string; + /** @description Whether the release is a draft or published */ + draft: boolean; + /** Format: uri */ + html_url: string; 
+ id: number; + name: string | null; + node_id: string; + /** @description Whether the release is identified as a prerelease or a full release. */ + prerelease: boolean; + /** Format: date-time */ + published_at: string | null; + /** Reactions */ + reactions?: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** @description The name of the tag. */ + tag_name: string; + /** Format: uri */ + tarball_url: string | null; + /** @description Specifies the commitish value that determines where the Git tag is created from. */ + target_commitish: string; + /** Format: uri-template */ + upload_url: string; + /** Format: uri */ + url: string; + /** Format: uri */ + zipball_url: string | null; + }; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** release prereleased event */ + "webhook-release-prereleased": { + /** @enum {string} */ + action: "prereleased"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + release: { + assets: { + /** Format: uri */ + browser_download_url: string; + content_type: string; + /** Format: date-time */ + created_at: string; + download_count: number; + id: number; + label: string | null; + /** @description The file name of the asset. */ + name: string; + node_id: string; + size: number; + /** + * @description State of the release asset. + * @enum {string} + */ + state: "uploaded"; + /** Format: date-time */ + updated_at: string; + /** User */ + uploader?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + url: string; + }[]; + /** Format: uri */ + assets_url: string; + /** User */ + author: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + body: string | null; + /** Format: date-time */ 
+ created_at: string | null; + /** Format: uri */ + discussion_url?: string; + /** @description Whether the release is a draft or published */ + draft: boolean; + /** Format: uri */ + html_url: string; + id: number; + name: string | null; + node_id: string; + /** @description Whether the release is identified as a prerelease or a full release. */ + prerelease: boolean; + /** Format: date-time */ + published_at: string | null; + /** Reactions */ + reactions?: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** @description The name of the tag. */ + tag_name: string; + /** Format: uri */ + tarball_url: string | null; + /** @description Specifies the commitish value that determines where the Git tag is created from. */ + target_commitish: string; + /** Format: uri-template */ + upload_url: string; + /** Format: uri */ + url: string; + /** Format: uri */ + zipball_url: string | null; + } & { + assets?: (Record | null)[]; + assets_url?: string; + author?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + body?: string | null; + created_at?: string; + draft?: boolean; + html_url?: string; + id?: number; + name?: string | null; + node_id?: string; + /** + * @description Whether the release is identified as a prerelease or a full release. + * @enum {boolean} + */ + prerelease: true; + published_at?: string | null; + tag_name?: string; + tarball_url?: string | null; + target_commitish?: string; + upload_url?: string; + url?: string; + zipball_url?: string | null; + }; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** release published event */ + "webhook-release-published": { + /** @enum {string} */ + action: "published"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + release: { + assets: { + /** Format: uri */ + browser_download_url: string; + content_type: string; + /** Format: date-time */ + created_at: string; + download_count: number; + id: number; + label: string | null; + /** @description The file name of the asset. */ + name: string; + node_id: string; + size: number; + /** + * @description State of the release asset. 
+ * @enum {string} + */ + state: "uploaded"; + /** Format: date-time */ + updated_at: string; + /** User */ + uploader?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + url: string; + }[]; + /** Format: uri */ + assets_url: string; + /** User */ + author: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + body: string | null; + /** Format: date-time */ + created_at: string | null; + /** Format: uri */ + discussion_url?: string; + /** @description Whether the release is a draft or published */ + draft: boolean; + /** Format: uri */ + html_url: string; + id: number; + name: string | null; + node_id: string; + /** @description Whether the release is identified as a prerelease or a full release. */ + prerelease: boolean; + /** Format: date-time */ + published_at: string | null; + /** Reactions */ + reactions?: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** @description The name of the tag. */ + tag_name: string; + /** Format: uri */ + tarball_url: string | null; + /** @description Specifies the commitish value that determines where the Git tag is created from. 
*/ + target_commitish: string; + /** Format: uri-template */ + upload_url: string; + /** Format: uri */ + url: string; + /** Format: uri */ + zipball_url: string | null; + } & { + assets?: (Record | null)[]; + assets_url?: string; + author?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + body?: string | null; + created_at?: string; + draft?: boolean; + html_url?: string; + id?: number; + name?: string | null; + node_id?: string; + prerelease?: boolean; + /** Format: date-time */ + published_at: string | null; + tag_name?: string; + tarball_url?: string | null; + target_commitish?: string; + upload_url?: string; + url?: string; + zipball_url?: string | null; + }; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** release released event */ + "webhook-release-released": { + /** @enum {string} */ + action: "released"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + /** + * Release + * @description The [release](https://docs.github.com/rest/reference/repos/#get-a-release) object. + */ + release: { + assets: { + /** Format: uri */ + browser_download_url: string; + content_type: string; + /** Format: date-time */ + created_at: string; + download_count: number; + id: number; + label: string | null; + /** @description The file name of the asset. */ + name: string; + node_id: string; + size: number; + /** + * @description State of the release asset. 
+ * @enum {string} + */ + state: "uploaded"; + /** Format: date-time */ + updated_at: string; + /** User */ + uploader?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + url: string; + }[]; + /** Format: uri */ + assets_url: string; + /** User */ + author: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + body: string | null; + /** Format: date-time */ + created_at: string | null; + /** Format: uri */ + discussion_url?: string; + /** @description Whether the release is a draft or published */ + draft: boolean; + /** Format: uri */ + html_url: string; + id: number; + name: string | null; + node_id: string; + /** @description Whether the release is identified as a prerelease or a full release. */ + prerelease: boolean; + /** Format: date-time */ + published_at: string | null; + /** Reactions */ + reactions?: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** @description The name of the tag. */ + tag_name: string; + /** Format: uri */ + tarball_url: string | null; + /** @description Specifies the commitish value that determines where the Git tag is created from. 
*/ + target_commitish: string; + /** Format: uri-template */ + upload_url: string; + /** Format: uri */ + url: string; + /** Format: uri */ + zipball_url: string | null; + }; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** release unpublished event */ + "webhook-release-unpublished": { + /** @enum {string} */ + action: "unpublished"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + release: { + assets: { + /** Format: uri */ + browser_download_url: string; + content_type: string; + /** Format: date-time */ + created_at: string; + download_count: number; + id: number; + label: string | null; + /** @description The file name of the asset. */ + name: string; + node_id: string; + size: number; + /** + * @description State of the release asset. + * @enum {string} + */ + state: "uploaded"; + /** Format: date-time */ + updated_at: string; + /** User */ + uploader?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + url: string; + }[]; + /** Format: uri */ + assets_url: string; + /** User */ + author: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + body: string | null; + /** Format: date-time */ + created_at: string | null; + /** Format: uri */ + discussion_url?: string; + /** @description Whether the release is a draft or published */ + draft: boolean; + /** Format: uri */ + html_url: string; + id: number; + name: string | null; + node_id: string; + /** @description Whether the release is identified as a prerelease or a full release. 
*/ + prerelease: boolean; + /** Format: date-time */ + published_at: string | null; + /** Reactions */ + reactions?: { + "+1": number; + "-1": number; + confused: number; + eyes: number; + heart: number; + hooray: number; + laugh: number; + rocket: number; + total_count: number; + /** Format: uri */ + url: string; + }; + /** @description The name of the tag. */ + tag_name: string; + /** Format: uri */ + tarball_url: string | null; + /** @description Specifies the commitish value that determines where the Git tag is created from. */ + target_commitish: string; + /** Format: uri-template */ + upload_url: string; + /** Format: uri */ + url: string; + /** Format: uri */ + zipball_url: string | null; + } & { + assets?: (Record | null)[]; + assets_url?: string; + author?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + body?: string | null; + created_at?: string; + draft?: boolean; + html_url?: string; + id?: number; + name?: string | null; + node_id?: string; + prerelease?: boolean; + published_at: string | null; + tag_name?: string; + tarball_url?: string | null; + target_commitish?: string; + upload_url?: string; + url?: string; + zipball_url?: string | null; + }; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** Repository advisory published event */ + "webhook-repository-advisory-published": { + /** @enum {string} */ + action: "published"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + repository_advisory: components["schemas"]["repository-advisory"]; + sender?: components["schemas"]["simple-user"]; + }; + /** Repository advisory reported event */ + "webhook-repository-advisory-reported": { + /** @enum {string} */ + action: "reported"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + repository_advisory: components["schemas"]["repository-advisory"]; + sender?: components["schemas"]["simple-user"]; + }; + /** repository archived event */ + "webhook-repository-archived": { + /** @enum {string} */ + action: "archived"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** repository created event */ + "webhook-repository-created": { + /** @enum {string} */ + action: "created"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** repository deleted event */ + "webhook-repository-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: 
components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** repository_dispatch event */ + "webhook-repository-dispatch-sample": { + action: string; + branch: string; + client_payload: { + [key: string]: unknown; + } | null; + enterprise?: components["schemas"]["enterprise"]; + installation: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** repository edited event */ + "webhook-repository-edited": { + /** @enum {string} */ + action: "edited"; + changes: { + default_branch?: { + from: string; + }; + description?: { + from: string | null; + }; + homepage?: { + from: string | null; + }; + topics?: { + from?: string[] | null; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** repository_import event */ + "webhook-repository-import": { + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + /** @enum {string} */ + status: "success" | "cancelled" | "failure"; + }; + /** repository privatized event */ + "webhook-repository-privatized": { + /** @enum {string} */ + action: "privatized"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** repository publicized event */ + "webhook-repository-publicized": { + /** @enum {string} */ + action: "publicized"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** repository renamed event */ + "webhook-repository-renamed": { + /** @enum {string} */ + action: "renamed"; + changes: { + repository: { + name: { + from: string; + }; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** repository transferred event */ + "webhook-repository-transferred": { + /** @enum {string} */ + action: "transferred"; + changes: { + owner: { + from: { + /** Organization */ + organization?: { + /** Format: uri */ + avatar_url: string; + description: string | null; + /** Format: uri */ + events_url: string; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url?: string; + id: number; + /** Format: uri */ + issues_url: string; + login: string; + /** Format: uri-template */ + members_url: string; + node_id: string; + /** 
Format: uri-template */ + public_members_url: string; + /** Format: uri */ + repos_url: string; + /** Format: uri */ + url: string; + }; + /** User */ + user?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + }; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** repository unarchived event */ + "webhook-repository-unarchived": { + /** @enum {string} */ + action: "unarchived"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** repository_vulnerability_alert create event */ + "webhook-repository-vulnerability-alert-create": { + /** @enum {string} */ + action: "create"; + alert: { + affected_package_name: string; + affected_range: string; + created_at: string; + dismiss_reason?: string; + dismissed_at?: string; + /** User */ + dismisser?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + external_identifier: string; + /** Format: uri */ + external_reference: string | null; + fix_reason?: string; + /** Format: date-time */ + fixed_at?: string; + fixed_in?: string; + ghsa_id: string; + id: number; + node_id: string; + number: number; + severity: string; + /** @enum {string} */ + state: "open" | "dismissed" | "fixed"; + } & { + affected_package_name?: string; + affected_range?: string; + created_at?: string; + external_identifier?: string; + external_reference?: string | null; + fixed_in?: string; + ghsa_id?: string; + id?: number; + node_id?: string; + number?: number; + severity?: string; + /** @enum {string} */ + state: "open"; + }; + enterprise?: components["schemas"]["enterprise"]; + 
installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** repository_vulnerability_alert dismiss event */ + "webhook-repository-vulnerability-alert-dismiss": { + /** @enum {string} */ + action: "dismiss"; + alert: { + affected_package_name: string; + affected_range: string; + created_at: string; + dismiss_comment?: string | null; + dismiss_reason?: string; + dismissed_at?: string; + /** User */ + dismisser?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + external_identifier: string; + /** Format: uri */ + external_reference: string | null; + fix_reason?: string; + /** Format: date-time */ + fixed_at?: string; + fixed_in?: string; + ghsa_id: string; + id: number; + node_id: string; + number: number; + severity: string; + /** @enum {string} */ + state: "open" | "dismissed" | "fixed"; + } & { + affected_package_name?: string; + affected_range?: string; + created_at?: string; + dismiss_comment?: string | null; + dismiss_reason: string; + dismissed_at: string; + /** User */ + dismisser: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + external_identifier?: string; + external_reference?: string | null; + fixed_in?: string; + ghsa_id?: string; + id?: number; + node_id?: string; + number?: number; + severity?: string; + /** @enum {string} */ + state: "dismissed"; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** repository_vulnerability_alert reopen event */ + "webhook-repository-vulnerability-alert-reopen": { + /** @enum {string} */ + action: "reopen"; + alert: { + affected_package_name: string; + affected_range: string; + created_at: string; + 
dismiss_reason?: string; + dismissed_at?: string; + /** User */ + dismisser?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + external_identifier: string; + /** Format: uri */ + external_reference: string | null; + fix_reason?: string; + /** Format: date-time */ + fixed_at?: string; + fixed_in?: string; + ghsa_id: string; + id: number; + node_id: string; + number: number; + severity: string; + /** @enum {string} */ + state: "open" | "dismissed" | "fixed"; + } & { + affected_package_name?: string; + affected_range?: string; + created_at?: string; + external_identifier?: string; + external_reference?: string | null; + fixed_in?: string; + ghsa_id?: string; + id?: number; + node_id?: string; + number?: number; + severity?: string; + /** @enum {string} */ + state: "open"; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** repository_vulnerability_alert resolve event */ + "webhook-repository-vulnerability-alert-resolve": { + /** @enum {string} */ + action: "resolve"; + alert: { + affected_package_name: string; + affected_range: string; + created_at: string; + dismiss_reason?: string; + dismissed_at?: string; + /** User */ + dismisser?: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + external_identifier: string; + /** Format: uri */ + external_reference: string | null; + fix_reason?: string; + /** Format: date-time */ + fixed_at?: string; + fixed_in?: string; + ghsa_id: string; + id: number; + node_id: string; + number: number; + severity: string; + /** @enum {string} */ + state: "open" | "dismissed" | "fixed"; + } & { + affected_package_name?: string; + affected_range?: string; + created_at?: string; + external_identifier?: string; + external_reference?: string | null; + fix_reason?: string; + /** Format: date-time */ + 
fixed_at?: string; + fixed_in?: string; + ghsa_id?: string; + id?: number; + node_id?: string; + number?: number; + severity?: string; + /** @enum {string} */ + state: "fixed" | "open"; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** secret_scanning_alert created event */ + "webhook-secret-scanning-alert-created": { + /** @enum {string} */ + action: "created"; + alert: components["schemas"]["secret-scanning-alert"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** Secret Scanning Alert Location Created Event */ + "webhook-secret-scanning-alert-location-created": { + /** @enum {string} */ + action?: "created"; + alert: components["schemas"]["secret-scanning-alert"]; + installation?: components["schemas"]["simple-installation"]; + location: components["schemas"]["secret-scanning-location"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** Secret Scanning Alert Location Created Event */ + "webhook-secret-scanning-alert-location-created-form-encoded": { + /** @description A URL-encoded string of the secret_scanning_alert_location.created JSON payload. The decoded payload is a JSON object. */ + payload: string; + }; + /** secret_scanning_alert reopened event */ + "webhook-secret-scanning-alert-reopened": { + /** @enum {string} */ + action: "reopened"; + alert: components["schemas"]["secret-scanning-alert"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** secret_scanning_alert resolved event */ + "webhook-secret-scanning-alert-resolved": { + /** @enum {string} */ + action: "resolved"; + alert: { + created_at?: components["schemas"]["alert-created-at"]; + html_url?: components["schemas"]["alert-html-url"]; + /** + * Format: uri + * @description The REST API URL of the code locations for this alert. + */ + locations_url?: string; + number?: components["schemas"]["alert-number"]; + /** @description Whether push protection was bypassed for the detected secret. */ + push_protection_bypassed?: boolean | null; + /** + * Format: date-time + * @description The time that push protection was bypassed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + push_protection_bypassed_at?: string | null; + push_protection_bypassed_by?: components["schemas"]["nullable-simple-user"]; + /** + * @description **Required when the `state` is `resolved`.** The reason for resolving the alert. + * @enum {string|null} + */ + resolution?: + | "" + | "false_positive" + | "wont_fix" + | "revoked" + | "used_in_tests" + | "pattern_deleted" + | "pattern_edited" + | null; + /** + * Format: date-time + * @description The time that the alert was resolved in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. 
+ */ + resolved_at?: string | null; + resolved_by?: components["schemas"]["nullable-simple-user"]; + /** @description An optional comment to resolve an alert. */ + resolution_comment?: string | null; + /** @description The secret that was detected. */ + secret?: string; + /** @description The type of secret that secret scanning detected. */ + secret_type?: string; + /** + * @description User-friendly name for the detected secret, matching the `secret_type`. + * For a list of built-in patterns, see "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)." + */ + secret_type_display_name?: string; + state?: components["schemas"]["secret-scanning-alert-state"]; + updated_at?: components["schemas"]["alert-updated-at"]; + url?: components["schemas"]["alert-url"]; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** secret_scanning_alert revoked event */ + "webhook-secret-scanning-alert-revoked": { + /** @enum {string} */ + action: "revoked"; + alert: components["schemas"]["secret-scanning-alert"]; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** security_advisory published event */ + "webhook-security-advisory-published": { + /** @enum {string} */ + action: "published"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + /** @description The details of the security advisory, including summary, description, and severity. */ + security_advisory: { + cvss: { + score: number; + vector_string: string | null; + }; + cwes: { + cwe_id: string; + name: string; + }[]; + description: string; + ghsa_id: string; + identifiers: { + type: string; + value: string; + }[]; + published_at: string; + references: { + /** Format: uri */ + url: string; + }[]; + severity: string; + summary: string; + updated_at: string; + vulnerabilities: { + first_patched_version: { + identifier: string; + } | null; + package: { + ecosystem: string; + name: string; + }; + severity: string; + vulnerable_version_range: string; + }[]; + withdrawn_at: string | null; + }; + sender?: components["schemas"]["simple-user"]; + }; + /** security_advisory updated event */ + "webhook-security-advisory-updated": { + /** @enum {string} */ + action: "updated"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + /** @description The details of the security advisory, including summary, description, and severity. 
*/ + security_advisory: { + cvss: { + score: number; + vector_string: string | null; + }; + cwes: { + cwe_id: string; + name: string; + }[]; + description: string; + ghsa_id: string; + identifiers: { + type: string; + value: string; + }[]; + published_at: string; + references: { + /** Format: uri */ + url: string; + }[]; + severity: string; + summary: string; + updated_at: string; + vulnerabilities: { + first_patched_version: { + identifier: string; + } | null; + package: { + ecosystem: string; + name: string; + }; + severity: string; + vulnerable_version_range: string; + }[]; + withdrawn_at: string | null; + }; + sender?: components["schemas"]["simple-user"]; + }; + /** security_advisory withdrawn event */ + "webhook-security-advisory-withdrawn": { + /** @enum {string} */ + action: "withdrawn"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + /** @description The details of the security advisory, including summary, description, and severity. */ + security_advisory: { + cvss: { + score: number; + vector_string: string | null; + }; + cwes: { + cwe_id: string; + name: string; + }[]; + description: string; + ghsa_id: string; + identifiers: { + type: string; + value: string; + }[]; + published_at: string; + references: { + /** Format: uri */ + url: string; + }[]; + severity: string; + summary: string; + updated_at: string; + vulnerabilities: { + first_patched_version: { + identifier: string; + } | null; + package: { + ecosystem: string; + name: string; + }; + severity: string; + vulnerable_version_range: string; + }[]; + withdrawn_at: string; + }; + sender?: components["schemas"]["simple-user"]; + }; + /** security_and_analysis event */ + "webhook-security-and-analysis": { + changes: { + from?: { + security_and_analysis?: components["schemas"]["security-and-analysis"]; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["full-repository"]; + sender?: components["schemas"]["simple-user"]; + }; + /** sponsorship cancelled event */ + "webhook-sponsorship-cancelled": { + /** @enum {string} */ + action: "cancelled"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + sponsorship: { + created_at: string; + maintainer?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + node_id: string; + privacy_level: string; + /** User */ + sponsor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** 
Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** User */ + sponsorable: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Sponsorship Tier + * @description The `tier_changed` and `pending_tier_change` will include the original tier before the change or pending change. For more information, see the pending tier change payload. + */ + tier: { + created_at: string; + description: string; + is_custom_ammount?: boolean; + is_custom_amount?: boolean; + is_one_time: boolean; + monthly_price_in_cents: number; + monthly_price_in_dollars: number; + name: string; + node_id: string; + }; + }; + }; + /** sponsorship created event */ + "webhook-sponsorship-created": { + /** @enum {string} */ + action: "created"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + sponsorship: { + created_at: string; + maintainer?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + node_id: string; + privacy_level: string; + /** User */ + sponsor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** 
Format: uri */ + url?: string; + } | null; + /** User */ + sponsorable: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Sponsorship Tier + * @description The `tier_changed` and `pending_tier_change` will include the original tier before the change or pending change. For more information, see the pending tier change payload. + */ + tier: { + created_at: string; + description: string; + is_custom_ammount?: boolean; + is_custom_amount?: boolean; + is_one_time: boolean; + monthly_price_in_cents: number; + monthly_price_in_dollars: number; + name: string; + node_id: string; + }; + }; + }; + /** sponsorship edited event */ + "webhook-sponsorship-edited": { + /** @enum {string} */ + action: "edited"; + changes: { + privacy_level?: { + /** @description The `edited` event types include the details about the change when someone edits a sponsorship to change the privacy. */ + from: string; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + sponsorship: { + created_at: string; + maintainer?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + node_id: string; + privacy_level: string; + /** User */ + sponsor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** User */ + sponsorable: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: 
string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Sponsorship Tier + * @description The `tier_changed` and `pending_tier_change` will include the original tier before the change or pending change. For more information, see the pending tier change payload. + */ + tier: { + created_at: string; + description: string; + is_custom_ammount?: boolean; + is_custom_amount?: boolean; + is_one_time: boolean; + monthly_price_in_cents: number; + monthly_price_in_dollars: number; + name: string; + node_id: string; + }; + }; + }; + /** sponsorship pending_cancellation event */ + "webhook-sponsorship-pending-cancellation": { + /** @enum {string} */ + action: "pending_cancellation"; + /** @description The `pending_cancellation` and `pending_tier_change` event types will include the date the cancellation or tier change will take effect. */ + effective_date?: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + sponsorship: { + created_at: string; + maintainer?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + node_id: string; + privacy_level: string; + /** User */ + sponsor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** User */ + sponsorable: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + 
node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Sponsorship Tier + * @description The `tier_changed` and `pending_tier_change` will include the original tier before the change or pending change. For more information, see the pending tier change payload. + */ + tier: { + created_at: string; + description: string; + is_custom_ammount?: boolean; + is_custom_amount?: boolean; + is_one_time: boolean; + monthly_price_in_cents: number; + monthly_price_in_dollars: number; + name: string; + node_id: string; + }; + }; + }; + /** sponsorship pending_tier_change event */ + "webhook-sponsorship-pending-tier-change": { + /** @enum {string} */ + action: "pending_tier_change"; + changes: { + tier: { + /** + * Sponsorship Tier + * @description The `tier_changed` and `pending_tier_change` will include the original tier before the change or pending change. For more information, see the pending tier change payload. + */ + from: { + created_at: string; + description: string; + is_custom_ammount?: boolean; + is_custom_amount?: boolean; + is_one_time: boolean; + monthly_price_in_cents: number; + monthly_price_in_dollars: number; + name: string; + node_id: string; + }; + }; + }; + /** @description The `pending_cancellation` and `pending_tier_change` event types will include the date the cancellation or tier change will take effect. */ + effective_date?: string; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + sponsorship: { + created_at: string; + maintainer?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + node_id: string; + privacy_level: string; + /** User */ + sponsor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** User */ + sponsorable: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + 
/** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Sponsorship Tier + * @description The `tier_changed` and `pending_tier_change` will include the original tier before the change or pending change. For more information, see the pending tier change payload. + */ + tier: { + created_at: string; + description: string; + is_custom_ammount?: boolean; + is_custom_amount?: boolean; + is_one_time: boolean; + monthly_price_in_cents: number; + monthly_price_in_dollars: number; + name: string; + node_id: string; + }; + }; + }; + /** sponsorship tier_changed event */ + "webhook-sponsorship-tier-changed": { + /** @enum {string} */ + action: "tier_changed"; + changes: { + tier: { + /** + * Sponsorship Tier + * @description The `tier_changed` and `pending_tier_change` will include the original tier before the change or pending change. For more information, see the pending tier change payload. + */ + from: { + created_at: string; + description: string; + is_custom_ammount?: boolean; + is_custom_amount?: boolean; + is_one_time: boolean; + monthly_price_in_cents: number; + monthly_price_in_dollars: number; + name: string; + node_id: string; + }; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + sponsorship: { + created_at: string; + maintainer?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + node_id: string; + privacy_level: string; + /** User */ + sponsor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** User */ + sponsorable: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | 
null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** + * Sponsorship Tier + * @description The `tier_changed` and `pending_tier_change` will include the original tier before the change or pending change. For more information, see the pending tier change payload. + */ + tier: { + created_at: string; + description: string; + is_custom_ammount?: boolean; + is_custom_amount?: boolean; + is_one_time: boolean; + monthly_price_in_cents: number; + monthly_price_in_dollars: number; + name: string; + node_id: string; + }; + }; + }; + /** star created event */ + "webhook-star-created": { + /** @enum {string} */ + action: "created"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + /** @description The time the star was created. This is a timestamp in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. Will be `null` for the `deleted` action. */ + starred_at: string | null; + }; + /** star deleted event */ + "webhook-star-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + /** @description The time the star was created. This is a timestamp in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. Will be `null` for the `deleted` action. */ + starred_at: Record<string, unknown> | null; + }; + /** status event */ + "webhook-status": { + /** Format: uri */ + avatar_url?: string | null; + /** @description An array of branch objects containing the status' SHA. Each branch contains the given SHA, but the SHA may or may not be the head of the branch. The array includes a maximum of 10 branches. 
*/ + branches: { + commit: { + sha: string | null; + /** Format: uri */ + url: string | null; + }; + name: string; + protected: boolean; + }[]; + commit: { + /** User */ + author: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id?: number; + login?: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + comments_url: string; + commit: { + author: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + } & { + date: string; + email?: string; + name?: string; + }; + comment_count: number; + committer: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + } & { + date: string; + email?: string; + name?: string; + }; + message: string; + tree: { + sha: string; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + url: string; + verification: { + payload: string | null; + /** @enum {string} */ + reason: + | "expired_key" + | "not_signing_key" + | "gpgverify_error" + | "gpgverify_unavailable" + | "unsigned" + | "unknown_signature_type" + | "no_user" + | "unverified_email" + | "bad_email" + | "unknown_key" + | "malformed_signature" + | "invalid" + | "valid" + | "bad_cert" + | "ocsp_pending"; + signature: string | null; + verified: boolean; + }; + }; + /** User */ + committer: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id?: number; + login?: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + html_url: string; + node_id: string; + parents: { + /** Format: uri */ + html_url: string; + sha: string; + /** Format: uri */ + url: string; + }[]; + sha: string; + /** Format: uri */ + url: string; + }; + context: string; + created_at: string; + /** @description The optional human-readable description added to the status. */ + description: string | null; + enterprise?: components["schemas"]["enterprise"]; + /** @description The unique identifier of the status. 
*/ + id: number; + installation?: components["schemas"]["simple-installation"]; + name: string; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + /** @description The Commit SHA. */ + sha: string; + /** + * @description The new state. Can be `pending`, `success`, `failure`, or `error`. + * @enum {string} + */ + state: "pending" | "success" | "failure" | "error"; + /** @description The optional link added to the status. */ + target_url: string | null; + updated_at: string; + }; + /** team_add event */ + "webhook-team-add": { + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + /** + * Team + * @description Groups of organization members that gives permissions on specified repositories. + */ + team: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting?: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }; + }; + /** team added_to_repository event */ + "webhook-team-added-to-repository": { + /** @enum {string} */ + action: "added_to_repository"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + /** + * Repository + * @description A git repository + */ + repository?: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. 
+ * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + }; + sender?: components["schemas"]["simple-user"]; + /** + * Team + * @description Groups of organization members that gives permissions on specified repositories. 
+ */ + team: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting?: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }; + }; + /** team created event */ + "webhook-team-created": { + /** @enum {string} */ + action: "created"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + /** + * Repository + * @description A git repository + */ + repository?: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. 
*/ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + }; + sender: components["schemas"]["simple-user"]; + /** + * Team + * @description Groups of organization members that gives permissions on specified repositories. + */ + team: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting?: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }; + }; + /** team deleted event */ + "webhook-team-deleted": { + /** @enum {string} */ + action: "deleted"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + /** + * Repository + * @description A git repository + */ + repository?: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. 
+ * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + }; + sender?: components["schemas"]["simple-user"]; + /** + * Team + * @description Groups of organization members that gives permissions on specified repositories. 
+ */ + team: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting?: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }; + }; + /** team edited event */ + "webhook-team-edited": { + /** @enum {string} */ + action: "edited"; + /** @description The changes to the team if the action was `edited`. */ + changes: { + description?: { + /** @description The previous version of the description if the action was `edited`. */ + from: string; + }; + name?: { + /** @description The previous version of the name if the action was `edited`. */ + from: string; + }; + privacy?: { + /** @description The previous version of the team's privacy if the action was `edited`. */ + from: string; + }; + notification_setting?: { + /** @description The previous version of the team's notification setting if the action was `edited`. */ + from: string; + }; + repository?: { + permissions: { + from: { + /** @description The previous version of the team member's `admin` permission on a repository, if the action was `edited`. */ + admin?: boolean; + /** @description The previous version of the team member's `pull` permission on a repository, if the action was `edited`. */ + pull?: boolean; + /** @description The previous version of the team member's `push` permission on a repository, if the action was `edited`. */ + push?: boolean; + }; + }; + }; + }; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + /** + * Repository + * @description A git repository + */ + repository?: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. 
+ * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. */ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. */ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + }; + sender: components["schemas"]["simple-user"]; + /** + * Team + * @description Groups of organization members that gives permissions on specified repositories. 
+ */ + team: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting?: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }; + }; + /** team removed_from_repository event */ + "webhook-team-removed-from-repository": { + /** @enum {string} */ + action: "removed_from_repository"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization: components["schemas"]["organization-simple"]; + /** + * Repository + * @description A git repository + */ + repository?: { + /** + * @description Whether to allow auto-merge for pull requests. + * @default false + */ + allow_auto_merge?: boolean; + /** @description Whether to allow private forks */ + allow_forking?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + */ + allow_squash_merge?: boolean; + allow_update_branch?: boolean; + /** Format: uri-template */ + archive_url: string; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri */ + clone_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + created_at: number | string; + /** @description The default branch of the repository. 
*/ + default_branch: string; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + */ + delete_branch_on_merge?: boolean; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** @description Returns whether or not this repository is disabled. */ + disabled?: boolean; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + forks: number; + forks_count: number; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + git_url: string; + /** + * @description Whether downloads are enabled. + * @default true + */ + has_downloads: boolean; + /** + * @description Whether issues are enabled. + * @default true + */ + has_issues: boolean; + has_pages: boolean; + /** + * @description Whether projects are enabled. + * @default true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + */ + has_wiki: boolean; + homepage: string | null; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + is_template?: boolean; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + language: string | null; + /** Format: uri */ + languages_url: string; + /** License */ + license: { + key: string; + name: string; + node_id: string; + spdx_id: string; + /** Format: uri */ + url: string | null; + } | null; + master_branch?: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** Format: uri */ + mirror_url: string | null; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + open_issues: number; + open_issues_count: number; + organization?: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + permissions?: { + admin: boolean; + maintain?: boolean; + pull: boolean; + push: boolean; + triage?: boolean; + }; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + public?: boolean; + /** Format: uri-template */ + pulls_url: string; + pushed_at: number | string; + /** Format: uri-template */ + releases_url: string; + role_name?: string | null; + size: number; + ssh_url: string; + stargazers?: number; + stargazers_count: number; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + svn_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + topics: string[]; + /** Format: uri-template */ + trees_url: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + /** @enum {string} */ + visibility: "public" | "private" | "internal"; + watchers: number; + watchers_count: number; + }; + sender: components["schemas"]["simple-user"]; + /** + * Team + * @description Groups of organization members that gives permissions on specified repositories. + */ + team: { + deleted?: boolean; + /** @description Description of the team */ + description?: string | null; + /** Format: uri */ + html_url?: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url?: string; + /** @description Name of the team */ + name: string; + node_id?: string; + parent?: { + /** @description Description of the team */ + description: string | null; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the team */ + id: number; + /** Format: uri-template */ + members_url: string; + /** @description Name of the team */ + name: string; + node_id: string; + /** @description Permission that the team will have for its repositories */ + permission: string; + /** @enum {string} */ + privacy: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url: string; + slug: string; + /** + * Format: uri + * @description URL for the team + */ + url: string; + } | null; + /** @description Permission that the team will have for its repositories */ + permission?: string; + /** @enum {string} */ + privacy?: "open" | "closed" | "secret"; + /** + * @description Whether team members will receive notifications when their team is @mentioned + * @enum {string} + */ + notification_setting?: + | "notifications_enabled" + | "notifications_disabled"; + /** Format: uri */ + repositories_url?: string; + slug?: string; + /** + * Format: uri + * @description URL for the team + */ + url?: string; + }; + }; + /** watch started event */ + "webhook-watch-started": { + /** @enum {string} */ + action: "started"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + }; + /** workflow_dispatch event */ + "webhook-workflow-dispatch": { + enterprise?: components["schemas"]["enterprise"]; + inputs: { + [key: string]: unknown; + } | null; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + ref: string; + repository: components["schemas"]["repository"]; + sender: 
components["schemas"]["simple-user"]; + workflow: string; + }; + /** workflow_job completed event */ + "webhook-workflow-job-completed": { + /** @enum {string} */ + action: "completed"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + workflow_job: { + /** Format: uri */ + check_run_url: string; + completed_at: string | null; + /** @enum {string|null} */ + conclusion: + | "success" + | "failure" + | "" + | "skipped" + | "cancelled" + | "action_required" + | "neutral" + | "timed_out" + | null; + /** @description The time that the job created. */ + created_at: string; + head_sha: string; + /** Format: uri */ + html_url: string; + id: number; + /** @description Custom labels for the job. Specified by the [`"runs-on"` attribute](https://docs.github.com/actions/reference/workflow-syntax-for-github-actions#jobsjob_idruns-on) in the workflow YAML. */ + labels: string[]; + name: string; + node_id: string; + run_attempt: number; + run_id: number; + /** Format: uri */ + run_url: string; + /** @description The ID of the runner group that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. */ + runner_group_id: number | null; + /** @description The name of the runner group that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. */ + runner_group_name: string | null; + /** @description The ID of the runner that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. */ + runner_id: number | null; + /** @description The name of the runner that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. */ + runner_name: string | null; + started_at: string; + /** + * @description The current status of the job. Can be `queued`, `in_progress`, `waiting`, or `completed`. + * @enum {string} + */ + status: "queued" | "in_progress" | "completed" | "waiting"; + /** @description The name of the current branch. */ + head_branch: string | null; + /** @description The name of the workflow. */ + workflow_name: string | null; + steps: { + completed_at: string | null; + /** @enum {string|null} */ + conclusion: + | "failure" + | "skipped" + | "success" + | "cancelled" + | "" + | null; + name: string; + number: number; + started_at: string | null; + /** @enum {string} */ + status: "in_progress" | "completed" | "queued"; + }[]; + /** Format: uri */ + url: string; + } & { + check_run_url?: string; + completed_at?: string; + /** @enum {string} */ + conclusion: + | "success" + | "failure" + | "skipped" + | "cancelled" + | "action_required" + | "neutral" + | "timed_out"; + /** @description The time that the job created. */ + created_at?: string; + head_sha?: string; + html_url?: string; + id?: number; + labels?: (string | null)[]; + name?: string; + node_id?: string; + run_attempt?: number; + run_id?: number; + run_url?: string; + runner_group_id?: number | null; + runner_group_name?: string | null; + runner_id?: number | null; + runner_name?: string | null; + started_at?: string; + status?: string; + /** @description The name of the current branch. */ + head_branch?: string | null; + /** @description The name of the workflow. 
*/ + workflow_name?: string | null; + steps?: (Record<string, unknown> | null)[]; + url?: string; + }; + deployment?: components["schemas"]["deployment"]; + }; + /** workflow_job in_progress event */ + "webhook-workflow-job-in-progress": { + /** @enum {string} */ + action: "in_progress"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + workflow_job: { + /** Format: uri */ + check_run_url: string; + completed_at: string | null; + /** @enum {string|null} */ + conclusion: "success" | "failure" | "" | "cancelled" | "neutral" | null; + /** @description The time that the job created. */ + created_at: string; + head_sha: string; + /** Format: uri */ + html_url: string; + id: number; + /** @description Custom labels for the job. Specified by the [`"runs-on"` attribute](https://docs.github.com/actions/reference/workflow-syntax-for-github-actions#jobsjob_idruns-on) in the workflow YAML. */ + labels: string[]; + name: string; + node_id: string; + run_attempt: number; + run_id: number; + /** Format: uri */ + run_url: string; + /** @description The ID of the runner group that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. */ + runner_group_id: number | null; + /** @description The name of the runner group that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. */ + runner_group_name: string | null; + /** @description The ID of the runner that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. */ + runner_id: number | null; + /** @description The name of the runner that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. */ + runner_name: string | null; + started_at: string; + /** + * @description The current status of the job. Can be `queued`, `in_progress`, or `completed`. + * @enum {string} + */ + status: "queued" | "in_progress" | "completed"; + /** @description The name of the current branch. */ + head_branch: string | null; + /** @description The name of the workflow. */ + workflow_name: string | null; + steps: { + completed_at: string | null; + /** @enum {string|null} */ + conclusion: + | "failure" + | "skipped" + | "success" + | "" + | "cancelled" + | null; + name: string; + number: number; + started_at: string | null; + /** @enum {string} */ + status: "in_progress" | "completed" | "queued" | "pending"; + }[]; + /** Format: uri */ + url: string; + } & { + check_run_url?: string; + completed_at?: string | null; + conclusion?: string | null; + /** @description The time that the job created. */ + created_at?: string; + head_sha?: string; + html_url?: string; + id?: number; + labels?: string[]; + name?: string; + node_id?: string; + run_attempt?: number; + run_id?: number; + run_url?: string; + runner_group_id?: number | null; + runner_group_name?: string | null; + runner_id?: number | null; + runner_name?: string | null; + started_at?: string; + /** @enum {string} */ + status: "in_progress" | "completed" | "queued"; + /** @description The name of the current branch. */ + head_branch?: string | null; + /** @description The name of the workflow. 
*/ + workflow_name?: string | null; + steps: { + completed_at: string | null; + conclusion: string | null; + name: string; + number: number; + started_at: string | null; + /** @enum {string} */ + status: "in_progress" | "completed" | "pending" | "queued"; + }[]; + url?: string; + }; + deployment?: components["schemas"]["deployment"]; + }; + /** workflow_job queued event */ + "webhook-workflow-job-queued": { + /** @enum {string} */ + action: "queued"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + workflow_job: { + /** Format: uri */ + check_run_url: string; + completed_at: string | null; + conclusion: string | null; + /** @description The time that the job created. */ + created_at: string; + head_sha: string; + /** Format: uri */ + html_url: string; + id: number; + labels: string[]; + name: string; + node_id: string; + run_attempt: number; + run_id: number; + /** Format: uri */ + run_url: string; + runner_group_id: number | null; + runner_group_name: string | null; + runner_id: number | null; + runner_name: string | null; + /** Format: date-time */ + started_at: string; + /** @enum {string} */ + status: "queued" | "in_progress" | "completed" | "waiting"; + /** @description The name of the current branch. */ + head_branch: string | null; + /** @description The name of the workflow. */ + workflow_name: string | null; + steps: { + completed_at: string | null; + /** @enum {string|null} */ + conclusion: + | "failure" + | "skipped" + | "success" + | "cancelled" + | "" + | null; + name: string; + number: number; + started_at: string | null; + /** @enum {string} */ + status: "completed" | "in_progress" | "queued" | "pending"; + }[]; + /** Format: uri */ + url: string; + }; + deployment?: components["schemas"]["deployment"]; + }; + /** workflow_job waiting event */ + "webhook-workflow-job-waiting": { + /** @enum {string} */ + action: "waiting"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + workflow_job: { + /** Format: uri */ + check_run_url: string; + completed_at: string | null; + conclusion: string | null; + /** @description The time that the job created. */ + created_at: string; + head_sha: string; + /** Format: uri */ + html_url: string; + id: number; + labels: string[]; + name: string; + node_id: string; + run_attempt: number; + run_id: number; + /** Format: uri */ + run_url: string; + runner_group_id: number | null; + runner_group_name: string | null; + runner_id: number | null; + runner_name: string | null; + /** Format: date-time */ + started_at: string; + /** @description The name of the current branch. */ + head_branch: string | null; + /** @description The name of the workflow. 
*/ + workflow_name: string | null; + /** @enum {string} */ + status: "queued" | "in_progress" | "completed" | "waiting"; + steps: { + completed_at: string | null; + /** @enum {string|null} */ + conclusion: + | "failure" + | "skipped" + | "success" + | "cancelled" + | "" + | null; + name: string; + number: number; + started_at: string | null; + /** @enum {string} */ + status: + | "completed" + | "in_progress" + | "queued" + | "pending" + | "waiting"; + }[]; + /** Format: uri */ + url: string; + }; + deployment?: components["schemas"]["deployment"]; + }; + /** workflow_run completed event */ + "webhook-workflow-run-completed": { + /** @enum {string} */ + action: "completed"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + /** Workflow */ + workflow: { + /** Format: uri */ + badge_url: string; + /** Format: date-time */ + created_at: string; + /** Format: uri */ + html_url: string; + id: number; + name: string; + node_id: string; + path: string; + state: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + workflow_run: { + /** User */ + actor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + artifacts_url: string; + /** Format: uri */ + cancel_url: string; + check_suite_id: number; + check_suite_node_id: string; + /** Format: uri */ + check_suite_url: string; + /** @enum {string|null} */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "" + | "skipped" + | null; + /** Format: date-time */ + created_at: string; + event: string; + head_branch: string | null; + /** SimpleCommit */ + head_commit: { + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + author: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + }; + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + committer: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. 
*/ + name: string; + username?: string; + }; + id: string; + message: string; + timestamp: string; + tree_id: string; + }; + /** Repository Lite */ + head_repository: { + /** Format: uri-template */ + archive_url: string; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + /** Format: uri-template */ + pulls_url: string; + /** Format: uri-template */ + releases_url: string; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + /** Format: uri-template */ + trees_url: string; + /** Format: uri */ + url: string; + }; + head_sha: string; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + jobs_url: string; + /** Format: uri */ + logs_url: string; + name: string | null; + node_id: string; + path: string; + /** Format: uri */ + previous_attempt_url: string | null; + pull_requests: { + base: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + head: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + id: number; + number: number; + /** Format: uri */ + url: string; + }[]; + referenced_workflows?: + | { + path: string; + ref?: string; + sha: string; + }[] + | null; + /** Repository Lite */ + repository: { + /** Format: uri-template */ + archive_url: string; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description Whether the repository is private or public. */ + private: boolean; + /** Format: uri-template */ + pulls_url: string; + /** Format: uri-template */ + releases_url: string; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + /** Format: uri-template */ + trees_url: string; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + rerun_url: string; + run_attempt: number; + run_number: number; + /** Format: date-time */ + run_started_at: string; + /** @enum {string} */ + status: + | "requested" + | "in_progress" + | "completed" + | "queued" + | "pending" + | "waiting"; + /** User */ + triggering_actor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + workflow_id: number; + /** Format: uri */ + workflow_url: string; + } & { + actor?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + artifacts_url?: string; + cancel_url?: string; + check_suite_id?: number; + check_suite_node_id?: string; + check_suite_url?: string; + /** @enum {string} */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped"; + created_at?: string; + 
event?: string; + head_branch?: string | null; + head_commit?: { + author?: { + email?: string; + name?: string; + }; + committer?: { + email?: string; + name?: string; + }; + id?: string; + message?: string; + timestamp?: string; + tree_id?: string; + }; + head_repository?: { + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + contents_url?: string; + contributors_url?: string; + deployments_url?: string; + description?: string | null; + downloads_url?: string; + events_url?: string; + fork?: boolean; + forks_url?: string; + full_name?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + hooks_url?: string; + html_url?: string; + id?: number; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + name?: string; + node_id?: string; + notifications_url?: string; + owner?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + private?: boolean; + pulls_url?: string; + releases_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; + subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + url?: string; + }; + head_sha?: string; + html_url?: string; + id?: number; + jobs_url?: string; + logs_url?: string; + name?: string | null; + node_id?: string; + path?: string; + previous_attempt_url?: string | null; + pull_requests?: (Record | null)[]; + referenced_workflows?: + | { + path: string; + ref?: string; + sha: string; + }[] + | null; + repository?: { + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + contents_url?: string; + contributors_url?: string; + deployments_url?: string; + description?: string | null; + downloads_url?: string; + events_url?: string; + fork?: boolean; + forks_url?: string; + full_name?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + hooks_url?: string; + html_url?: string; + id?: number; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + name?: string; + node_id?: string; + notifications_url?: string; + owner?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + private?: boolean; + pulls_url?: string; + releases_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; 
+ subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + url?: string; + }; + rerun_url?: string; + run_attempt?: number; + run_number?: number; + run_started_at?: string; + status?: string; + triggering_actor?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + } | null; + updated_at?: string; + url?: string; + workflow_id?: number; + workflow_url?: string; + }; + }; + /** workflow_run in_progress event */ + "webhook-workflow-run-in-progress": { + /** @enum {string} */ + action: "in_progress"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + /** Workflow */ + workflow: { + /** Format: uri */ + badge_url: string; + /** Format: date-time */ + created_at: string; + /** Format: uri */ + html_url: string; + id: number; + name: string; + node_id: string; + path: string; + state: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + workflow_run: { + /** User */ + actor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + artifacts_url: string; + /** Format: uri */ + cancel_url: string; + check_suite_id: number; + check_suite_node_id: string; + /** Format: uri */ + check_suite_url: string; + /** @enum {string|null} */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | "" + | null; + /** Format: date-time */ + created_at: string; + event: string; + head_branch: string | null; + /** SimpleCommit */ + head_commit: { + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + author: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + }; + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + committer: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. 
*/ + name: string; + username?: string; + }; + id: string; + message: string; + timestamp: string; + tree_id: string; + }; + /** Repository Lite */ + head_repository: { + /** Format: uri-template */ + archive_url: string; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + /** Format: uri-template */ + pulls_url: string; + /** Format: uri-template */ + releases_url: string; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + /** Format: uri-template */ + trees_url: string; + /** Format: uri */ + url: string; + }; + head_sha: string; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + jobs_url: string; + /** Format: uri */ + logs_url: string; + name: string | null; + node_id: string; + path: string; + /** Format: uri */ + previous_attempt_url: string | null; + pull_requests: { + base: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + head: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + id: number; + number: number; + /** Format: uri */ + url: string; + }[]; + referenced_workflows?: + | { + path: string; + ref?: string; + sha: string; + }[] + | null; + /** Repository Lite */ + repository: { + /** Format: uri-template */ + archive_url: string; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description Whether the repository is private or public. */ + private: boolean; + /** Format: uri-template */ + pulls_url: string; + /** Format: uri-template */ + releases_url: string; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + /** Format: uri-template */ + trees_url: string; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + rerun_url: string; + run_attempt: number; + run_number: number; + /** Format: date-time */ + run_started_at: string; + /** @enum {string} */ + status: + | "requested" + | "in_progress" + | "completed" + | "queued" + | "pending"; + /** User */ + triggering_actor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + workflow_id: number; + /** Format: uri */ + workflow_url: string; + } & { + actor?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + artifacts_url?: string; + cancel_url?: string; + check_suite_id?: number; + check_suite_node_id?: string; + check_suite_url?: string; + /** @enum {string|null} */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "skipped" + | "stale" + | null; + created_at?: string; + 
event?: string; + head_branch?: string | null; + head_commit?: { + author?: { + email?: string; + name?: string; + }; + committer?: { + email?: string; + name?: string; + }; + id?: string; + message?: string; + timestamp?: string; + tree_id?: string; + }; + head_repository?: { + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + contents_url?: string; + contributors_url?: string; + deployments_url?: string; + description?: string | null; + downloads_url?: string; + events_url?: string; + fork?: boolean; + forks_url?: string; + full_name?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + hooks_url?: string; + html_url?: string; + id?: number; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + name?: string | null; + node_id?: string; + notifications_url?: string; + owner?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + private?: boolean; + pulls_url?: string; + releases_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; + subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + url?: string; + }; + head_sha?: string; + html_url?: string; + id?: number; + jobs_url?: string; + logs_url?: string; + name?: string | null; + node_id?: string; + path?: string; + previous_attempt_url?: string | null; + pull_requests?: (Record | null)[]; + referenced_workflows?: + | { + path: string; + ref?: string; + sha: string; + }[] + | null; + repository?: { + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + contents_url?: string; + contributors_url?: string; + deployments_url?: string; + description?: string | null; + downloads_url?: string; + events_url?: string; + fork?: boolean; + forks_url?: string; + full_name?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + hooks_url?: string; + html_url?: string; + id?: number; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + name?: string; + node_id?: string; + notifications_url?: string; + owner?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + private?: boolean; + pulls_url?: string; + releases_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: 
string; + subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + url?: string; + }; + rerun_url?: string; + run_attempt?: number; + run_number?: number; + run_started_at?: string; + status?: string; + triggering_actor?: { + avatar_url?: string; + events_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + html_url?: string; + id?: number; + login?: string; + node_id?: string; + organizations_url?: string; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + starred_url?: string; + subscriptions_url?: string; + type?: string; + url?: string; + }; + updated_at?: string; + url?: string; + workflow_id?: number; + workflow_url?: string; + }; + }; + /** workflow_run requested event */ + "webhook-workflow-run-requested": { + /** @enum {string} */ + action: "requested"; + enterprise?: components["schemas"]["enterprise"]; + installation?: components["schemas"]["simple-installation"]; + organization?: components["schemas"]["organization-simple"]; + repository: components["schemas"]["repository"]; + sender: components["schemas"]["simple-user"]; + /** Workflow */ + workflow: { + /** Format: uri */ + badge_url: string; + /** Format: date-time */ + created_at: string; + /** Format: uri */ + html_url: string; + id: number; + name: string; + node_id: string; + path: string; + state: string; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + } | null; + /** Workflow Run */ + workflow_run: { + /** User */ + actor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: uri */ + artifacts_url: string; + /** Format: uri */ + cancel_url: string; + check_suite_id: number; + check_suite_node_id: string; + /** Format: uri */ + check_suite_url: string; + /** @enum {string|null} */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "" + | "skipped" + | "startup_failure" + | null; + /** Format: date-time */ + created_at: string; + event: string; + head_branch: string | null; + /** SimpleCommit */ + head_commit: { + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + author: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. */ + name: string; + username?: string; + }; + /** + * Committer + * @description Metaproperties for Git author/committer information. + */ + committer: { + /** Format: date-time */ + date?: string; + /** Format: email */ + email: string | null; + /** @description The git author's name. 
*/ + name: string; + username?: string; + }; + id: string; + message: string; + timestamp: string; + tree_id: string; + }; + /** Repository Lite */ + head_repository: { + /** Format: uri-template */ + archive_url: string; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** @description The name of the repository. */ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description Whether the repository is private or public. 
*/ + private: boolean; + /** Format: uri-template */ + pulls_url: string; + /** Format: uri-template */ + releases_url: string; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + /** Format: uri-template */ + trees_url: string; + /** Format: uri */ + url: string; + }; + head_sha: string; + /** Format: uri */ + html_url: string; + id: number; + /** Format: uri */ + jobs_url: string; + /** Format: uri */ + logs_url: string; + name: string | null; + node_id: string; + path: string; + /** Format: uri */ + previous_attempt_url: string | null; + pull_requests: { + base: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + head: { + ref: string; + /** Repo Ref */ + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + sha: string; + }; + id: number; + number: number; + /** Format: uri */ + url: string; + }[]; + referenced_workflows?: + | { + path: string; + ref?: string; + sha: string; + }[] + | null; + /** Repository Lite */ + repository: { + /** Format: uri-template */ + archive_url: string; + /** Format: uri-template */ + assignees_url: string; + /** Format: uri-template */ + blobs_url: string; + /** Format: uri-template */ + branches_url: string; + /** Format: uri-template */ + collaborators_url: string; + /** Format: uri-template */ + comments_url: string; + /** Format: uri-template */ + commits_url: string; + /** Format: uri-template */ + compare_url: string; + /** Format: uri-template */ + contents_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + /** Format: uri */ + forks_url: string; + full_name: string; + /** Format: uri-template */ + git_commits_url: string; + /** Format: uri-template */ + git_refs_url: string; + /** Format: uri-template */ + git_tags_url: string; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + /** @description Unique identifier of the repository */ + id: number; + /** Format: uri-template */ + issue_comment_url: string; + /** Format: uri-template */ + issue_events_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + labels_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + merges_url: string; + /** Format: uri-template */ + milestones_url: string; + /** @description The name of the repository. 
*/ + name: string; + node_id: string; + /** Format: uri-template */ + notifications_url: string; + /** User */ + owner: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** @description Whether the repository is private or public. */ + private: boolean; + /** Format: uri-template */ + pulls_url: string; + /** Format: uri-template */ + releases_url: string; + /** Format: uri */ + stargazers_url: string; + /** Format: uri-template */ + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + /** Format: uri-template */ + trees_url: string; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + rerun_url: string; + run_attempt: number; + run_number: number; + /** Format: date-time */ + run_started_at: string; + /** @enum {string} */ + status: + | "requested" + | "in_progress" + | "completed" + | "queued" + | "pending" + | "waiting"; + /** User */ + triggering_actor: { + /** Format: uri */ + avatar_url?: string; + deleted?: boolean; + email?: string | null; + /** Format: uri-template */ + events_url?: string; + /** Format: uri */ + followers_url?: string; + /** Format: uri-template */ + following_url?: string; + /** Format: uri-template */ + gists_url?: string; + gravatar_id?: string; + /** Format: uri */ + html_url?: string; + id: number; + login: string; + name?: string; + node_id?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + received_events_url?: string; + /** Format: uri */ + repos_url?: string; + site_admin?: boolean; + /** Format: uri-template */ + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** @enum {string} */ + type?: "Bot" | "User" | "Organization"; + /** Format: uri */ + url?: string; + } | null; + /** Format: date-time */ + updated_at: string; + /** Format: uri */ + url: string; + workflow_id: number; + /** Format: uri */ + workflow_url: string; + display_title: string; + }; + }; + }; + responses: { + /** @description Resource not found */ + not_found: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** @description Validation failed, or the endpoint has been spammed. */ + validation_failed_simple: { + content: { + "application/json": components["schemas"]["validation-error-simple"]; + }; + }; + /** @description Bad Request */ + bad_request: { + content: { + "application/json": components["schemas"]["basic-error"]; + "application/scim+json": components["schemas"]["scim-error"]; + }; + }; + /** @description Validation failed, or the endpoint has been spammed. 
*/ + validation_failed: { + content: { + "application/json": components["schemas"]["validation-error"]; + }; + }; + /** @description Accepted */ + accepted: { + content: { + "application/json": Record; + }; + }; + /** @description Not modified */ + not_modified: never; + /** @description Requires authentication */ + requires_authentication: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** @description Forbidden */ + forbidden: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** @description Service unavailable */ + service_unavailable: { + content: { + "application/json": { + code?: string; + message?: string; + documentation_url?: string; + }; + }; + }; + /** @description Forbidden Gist */ + forbidden_gist: { + content: { + "application/json": { + block?: { + reason?: string; + created_at?: string; + html_url?: string | null; + }; + message?: string; + documentation_url?: string; + }; + }; + }; + /** @description Moved permanently */ + moved_permanently: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** @description Internal Error */ + internal_error: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** @description A header with no content is returned. */ + no_content: never; + /** @description Conflict */ + conflict: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** @description Response */ + actions_runner_jitconfig: { + content: { + "application/json": { + runner: components["schemas"]["runner"]; + /** @description The base64 encoded runner configuration. */ + encoded_jit_config: string; + }; + }; + }; + /** @description Response */ + actions_runner_labels: { + content: { + "application/json": { + total_count: number; + labels: components["schemas"]["runner-label"][]; + }; + }; + }; + /** @description Response */ + actions_runner_labels_readonly: { + content: { + "application/json": { + total_count: number; + labels: components["schemas"]["runner-label"][]; + }; + }; + }; + /** @description The value of `per_page` multiplied by `page` cannot be greater than 10000. */ + package_es_list_error: never; + /** @description Gone */ + gone: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** @description Temporary Redirect */ + temporary_redirect: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** @description Response if GitHub Advanced Security is not enabled for this repository */ + code_scanning_forbidden_read: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** @description Response if the repository is archived or if GitHub Advanced Security is not enabled for this repository */ + code_scanning_forbidden_write: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** @description Found */ + found: never; + /** @description Response if there is already a validation run in progress with a different default setup configuration */ + code_scanning_conflict: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** @description Unavailable due to service under maintenance. */ + porter_maintenance: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + parameters: { + /** @description The number of results per page (max 100). 
*/ + "per-page"?: number; + /** @description Used for pagination: the starting delivery from which the page of deliveries is fetched. Refer to the `link` header for the next and previous page cursors. */ + cursor?: string; + "delivery-id": number; + /** @description Page number of the results to fetch. */ + page?: number; + /** @description Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: string; + /** @description The unique identifier of the installation. */ + "installation-id": number; + /** @description The client ID of the GitHub app. */ + "client-id": string; + "app-slug": string; + /** @description The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: string; + /** + * @description A comma-separated list of states. If specified, only alerts with these states will be returned. + * + * Can be: `auto_dismissed`, `dismissed`, `fixed`, `open` + */ + "dependabot-alert-comma-separated-states"?: string; + /** + * @description A comma-separated list of severities. If specified, only alerts with these severities will be returned. + * + * Can be: `low`, `medium`, `high`, `critical` + */ + "dependabot-alert-comma-separated-severities"?: string; + /** + * @description A comma-separated list of ecosystems. If specified, only alerts for these ecosystems will be returned. + * + * Can be: `composer`, `go`, `maven`, `npm`, `nuget`, `pip`, `pub`, `rubygems`, `rust` + */ + "dependabot-alert-comma-separated-ecosystems"?: string; + /** @description A comma-separated list of package names. If specified, only alerts for these packages will be returned. */ + "dependabot-alert-comma-separated-packages"?: string; + /** @description The scope of the vulnerable dependency. If specified, only alerts with this scope will be returned. */ + "dependabot-alert-scope"?: "development" | "runtime"; + /** + * @description The property by which to sort the results. + * `created` means when the alert was created. + * `updated` means when the alert's state last changed. + */ + "dependabot-alert-sort"?: "created" | "updated"; + /** @description The direction to sort the results by. */ + direction?: "asc" | "desc"; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api#using-link-headers). If specified, the query only searches for results before this cursor. */ + "pagination-before"?: string; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api#using-link-headers). If specified, the query only searches for results after this cursor. */ + "pagination-after"?: string; + /** + * @description **Deprecated**. The number of results per page (max 100), starting from the first matching result. + * This parameter must not be used in combination with `last`. + * Instead, use `per_page` in combination with `after` to fetch the first page of results. + */ + "pagination-first"?: number; + /** + * @description **Deprecated**. The number of results per page (max 100), starting from the last matching result. + * This parameter must not be used in combination with `first`. + * Instead, use `per_page` in combination with `before` to fetch the last page of results. + */ + "pagination-last"?: number; + /** @description Set to `open` or `resolved` to only list secret scanning alerts in a specific state. 
*/ + "secret-scanning-alert-state"?: "open" | "resolved"; + /** + * @description A comma-separated list of secret types to return. By default all secret types are returned. + * See "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)" + * for a complete list of secret types. + */ + "secret-scanning-alert-secret-type"?: string; + /** @description A comma-separated list of resolutions. Only secret scanning alerts with one of these resolutions are listed. Valid resolutions are `false_positive`, `wont_fix`, `revoked`, `pattern_edited`, `pattern_deleted` or `used_in_tests`. */ + "secret-scanning-alert-resolution"?: string; + /** @description The property to sort the results by. `created` means when the alert was created. `updated` means when the alert was updated or resolved. */ + "secret-scanning-alert-sort"?: "created" | "updated"; + /** @description The unique identifier of the gist. */ + "gist-id": string; + /** @description The unique identifier of the comment. */ + "comment-id": number; + /** @description A list of comma separated label names. Example: `bug,ui,@high` */ + labels?: string; + /** @description account_id parameter */ + "account-id": number; + /** @description The unique identifier of the plan. */ + "plan-id": number; + /** @description The property to sort the results by. */ + sort?: "created" | "updated"; + /** @description The account owner of the repository. The name is not case sensitive. */ + owner: string; + /** @description The name of the repository without the `.git` extension. The name is not case sensitive. */ + repo: string; + /** @description If `true`, show notifications marked as read. */ + all?: boolean; + /** @description If `true`, only shows notifications in which the user is directly participating or mentioned. */ + participating?: boolean; + /** @description Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + before?: string; + /** @description The unique identifier of the notification thread. This corresponds to the value returned in the `id` field when you retrieve notifications (for example with the [`GET /notifications` operation](https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user)). */ + "thread-id": number; + /** @description An organization ID. Only return organizations with an ID greater than this ID. */ + "since-org"?: number; + /** @description The organization name. The name is not case sensitive. */ + org: string; + /** @description The property by which to sort the results. */ + "personal-access-token-sort"?: "created_at"; + /** @description A list of owner usernames to use to filter the results. */ + "personal-access-token-owner"?: string[]; + /** @description The name of the repository to use to filter the results. */ + "personal-access-token-repository"?: string; + /** @description The permission to use to filter the results. */ + "personal-access-token-permission"?: string; + /** @description Only show fine-grained personal access tokens used before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + "personal-access-token-before"?: string; + /** @description Only show fine-grained personal access tokens used after the given time. 
This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + "personal-access-token-after"?: string; + /** @description The unique identifier of the fine-grained personal access token. */ + "fine-grained-personal-access-token-id": number; + /** @description The unique identifier of the repository. */ + "repository-id": number; + /** @description The unique identifier of the required workflow. */ + "required-workflow-id": number; + /** @description Unique identifier of the self-hosted runner. */ + "runner-id": number; + /** @description The name of a self-hosted runner's custom label. */ + "runner-label-name": string; + /** @description The name of the secret. */ + "secret-name": string; + /** @description The number of results per page (max 30). */ + "variables-per-page"?: number; + /** @description The name of the variable. */ + "variable-name": string; + /** @description The handle for the GitHub user account. */ + username: string; + /** @description The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ + "tool-name"?: components["schemas"]["code-scanning-analysis-tool-name"]; + /** @description The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either `tool_guid` or `tool_name`, but not both. */ + "tool-guid"?: components["schemas"]["code-scanning-analysis-tool-guid"]; + /** @description The unique identifier of the hook. */ + "hook-id": number; + /** @description The unique identifier of the invitation. */ + "invitation-id": number; + /** @description The name of the codespace. */ + "codespace-name": string; + /** @description The unique identifier of the migration. */ + "migration-id": number; + /** @description repo_name parameter */ + "repo-name": string; + /** + * @description The selected visibility of the packages. This parameter is optional and only filters an existing result set. + * + * The `internal` visibility is only supported for GitHub Packages registries that allow for granular permissions. For other ecosystems `internal` is synonymous with `private`. + * For the list of GitHub Packages registries that support granular permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + "package-visibility"?: "public" | "private" | "internal"; + /** @description The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + "package-type": + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** @description The name of the package. */ + "package-name": string; + /** @description Unique identifier of the package version. */ + "package-version-id": number; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api#using-link-headers). If specified, the query only searches for events before this cursor. 
To receive an initial cursor on your first request, include an empty "before" query string. */ + "secret-scanning-pagination-before-org-repo"?: string; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api#using-link-headers). If specified, the query only searches for events after this cursor. To receive an initial cursor on your first request, include an empty "after" query string. */ + "secret-scanning-pagination-after-org-repo"?: string; + /** @description The slug of the team name. */ + "team-slug": string; + /** @description The number that identifies the discussion. */ + "discussion-number": number; + /** @description The number that identifies the comment. */ + "comment-number": number; + /** @description The unique identifier of the reaction. */ + "reaction-id": number; + /** @description The unique identifier of the project. */ + "project-id": number; + /** @description The security feature to enable or disable. */ + "security-product": + | "dependency_graph" + | "dependabot_alerts" + | "dependabot_security_updates" + | "advanced_security" + | "code_scanning_default_setup" + | "secret_scanning" + | "secret_scanning_push_protection"; + /** + * @description The action to take. + * + * `enable_all` means to enable the specified security feature for all repositories in the organization. + * `disable_all` means to disable the specified security feature for all repositories in the organization. + */ + "org-security-product-enablement": "enable_all" | "disable_all"; + /** @description The unique identifier of the card. */ + "card-id": number; + /** @description The unique identifier of the column. */ + "column-id": number; + /** @description The ID of the required workflow that has run at least once in a repository. */ + "repo-required-workflow-id": number; + /** @description The unique identifier of the artifact. */ + "artifact-id": number; + /** @description The full Git reference for narrowing down the cache. The `ref` for a branch should be formatted as `refs/heads/<branch name>`. To reference a pull request use `refs/pull/<number>/merge`. */ + "actions-cache-git-ref-full"?: string; + /** @description An explicit key or prefix for identifying the cache */ + "actions-cache-key"?: string; + /** @description The property to sort the results by. `created_at` means when the cache was created. `last_accessed_at` means when the cache was last accessed. `size_in_bytes` is the size of the cache in bytes. */ + "actions-cache-list-sort"?: + | "created_at" + | "last_accessed_at" + | "size_in_bytes"; + /** @description A key for identifying the cache. */ + "actions-cache-key-required": string; + /** @description The unique identifier of the GitHub Actions cache. */ + "cache-id": number; + /** @description The unique identifier of the job. */ + "job-id": number; + /** @description Returns someone's workflow runs. Use the login for the user who created the `push` associated with the check suite or workflow run. */ + actor?: string; + /** @description Returns workflow runs associated with a branch. Use the name of the branch of the `push`. */ + "workflow-run-branch"?: string; + /** @description Returns workflow runs triggered by the event you specify. For example, `push`, `pull_request` or `issue`. For more information, see "[Events that trigger workflows](https://docs.github.com/actions/automating-your-workflow-with-github-actions/events-that-trigger-workflows)."
*/ + event?: string; + /** @description Returns workflow runs with the check run `status` or `conclusion` that you specify. For example, a conclusion can be `success` or a status can be `in_progress`. Only GitHub can set a status of `waiting` or `requested`. */ + "workflow-run-status"?: + | "completed" + | "action_required" + | "cancelled" + | "failure" + | "neutral" + | "skipped" + | "stale" + | "success" + | "timed_out" + | "in_progress" + | "queued" + | "requested" + | "waiting" + | "pending"; + /** @description Returns workflow runs created within the given date-time range. For more information on the syntax, see "[Understanding the search syntax](https://docs.github.com/search-github/getting-started-with-searching-on-github/understanding-the-search-syntax#query-for-dates)." */ + created?: string; + /** @description If `true`, pull requests are omitted from the response (empty array). */ + "exclude-pull-requests"?: boolean; + /** @description Returns workflow runs with the `check_suite_id` that you specify. */ + "workflow-run-check-suite-id"?: number; + /** @description Only returns workflow runs that are associated with the specified `head_sha`. */ + "workflow-run-head-sha"?: string; + /** @description The unique identifier of the workflow run. */ + "run-id": number; + /** @description The attempt number of the workflow run. */ + "attempt-number": number; + /** @description The ID of the workflow. You can also pass the workflow file name as a string. */ + "workflow-id": number | string; + /** @description The unique identifier of the autolink. */ + "autolink-id": number; + /** @description The name of the branch. Cannot contain wildcard characters. To use wildcard characters in branch names, use [the GraphQL API](https://docs.github.com/graphql). */ + branch: string; + /** @description The unique identifier of the check run. */ + "check-run-id": number; + /** @description The unique identifier of the check suite. */ + "check-suite-id": number; + /** @description Returns check runs with the specified `name`. */ + "check-name"?: string; + /** @description Returns check runs with the specified `status`. */ + status?: "queued" | "in_progress" | "completed"; + /** @description The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/<branch name>` or simply `<branch name>`. To reference a pull request use `refs/pull/<number>/merge`. */ + "git-ref"?: components["schemas"]["code-scanning-ref"]; + /** @description The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ + "alert-number": components["schemas"]["alert-number"]; + /** @description The SHA of the commit. */ + "commit-sha": string; + /** @description A comma-separated list of full manifest paths. If specified, only alerts for these manifests will be returned. */ + "dependabot-alert-comma-separated-manifests"?: string; + /** + * @description The number that identifies a Dependabot alert in its repository. + * You can find this at the end of the URL for a Dependabot alert within GitHub, + * or in `number` fields in the response from the + * `GET /repos/{owner}/{repo}/dependabot/alerts` operation. + */ + "dependabot-alert-number": components["schemas"]["alert-number"]; + /** @description The full path, relative to the repository root, of the dependency manifest file.
*/ + "manifest-path"?: string; + /** @description deployment_id parameter */ + "deployment-id": number; + /** @description The name of the environment. */ + "environment-name": string; + /** @description The unique identifier of the branch policy. */ + "branch-policy-id": number; + /** @description The unique identifier of the protection rule. */ + "protection-rule-id": number; + /** @description A user ID. Only return users with an ID greater than this ID. */ + "since-user"?: number; + /** @description The number that identifies the issue. */ + "issue-number": number; + /** @description The unique identifier of the key. */ + "key-id": number; + /** @description The number that identifies the milestone. */ + "milestone-number": number; + /** @description The number that identifies the pull request. */ + "pull-number": number; + /** @description The unique identifier of the review. */ + "review-id": number; + /** @description The unique identifier of the asset. */ + "asset-id": number; + /** @description The unique identifier of the release. */ + "release-id": number; + /** @description The GHSA (GitHub Security Advisory) identifier of the advisory. */ + ghsa_id: string; + /** @description The unique identifier of the tag protection. */ + "tag-protection-id": number; + /** @description The time frame to display results for. */ + per?: "day" | "week"; + /** @description A repository ID. Only return repositories with an ID greater than this ID. */ + "since-repo"?: number; + /** @description Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order?: "desc" | "asc"; + /** @description The unique identifier of the team. */ + "team-id": number; + /** @description ID of the Repository to filter on */ + "repository-id-in-query"?: number; + /** @description The ID of the export operation, or `latest`. Currently only `latest` is currently supported. */ + "export-id": string; + /** @description The unique identifier of the GPG key. */ + "gpg-key-id": number; + /** @description Only show repositories updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + "since-repo-date"?: string; + /** @description Only show repositories updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + "before-repo-date"?: string; + /** @description The unique identifier of the SSH signing key. */ + "ssh-signing-key-id": number; + /** @description The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. 
*/ + "sort-starred"?: "created" | "updated"; + }; + requestBodies: never; + headers: { + /** @example ; rel="next", ; rel="last" */ + link: string; + /** @example text/html */ + "content-type": string; + /** @example 0.17.4 */ + "x-common-marker-version": string; + /** @example 5000 */ + "x-rate-limit-limit": number; + /** @example 4999 */ + "x-rate-limit-remaining": number; + /** @example 1590701888 */ + "x-rate-limit-reset": number; + /** @example https://pipelines.actions.githubusercontent.com/OhgS4QRKqmgx7bKC27GKU83jnQjyeqG8oIMTge8eqtheppcmw8/_apis/pipelines/1/runs/176/signedlogcontent?urlExpires=2020-01-24T18%3A10%3A31.5729946Z&urlSigningMethod=HMACV1&urlSignature=agG73JakPYkHrh06seAkvmH7rBR4Ji4c2%2B6a2ejYh3E%3D */ + location: string; + }; + pathItems: never; +} + +export type external = Record; + +export interface operations { + /** + * GitHub API Root + * @description Get Hypermedia links to resources accessible in GitHub's REST API + */ + "meta/root": { + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["root"]; + }; + }; + }; + }; + /** + * Get the authenticated app + * @description Returns the GitHub App associated with the authentication credentials used. To see how many app installations are associated with this GitHub App, see the `installations_count` in the response. For more details about your app's installations, see the "[List installations for the authenticated app](https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-authenticated": { + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"]; + }; + }; + }; + }; + /** + * Create a GitHub App from a manifest + * @description Use this endpoint to complete the handshake necessary when implementing the [GitHub App Manifest flow](https://docs.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/). When you create a GitHub App with the manifest flow, you receive a temporary `code` used to retrieve the GitHub App's `id`, `pem` (private key), and `webhook_secret`. + */ + "apps/create-from-manifest": { + parameters: { + path: { + code: string; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["integration"] & { + client_id: string; + client_secret: string; + webhook_secret: string | null; + pem: string; + [key: string]: unknown; + }; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Get a webhook configuration for an app + * @description Returns the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-webhook-config-for-app": { + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + }; + /** + * Update a webhook configuration for an app + * @description Updates the webhook configuration for a GitHub App. 
For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/update-webhook-config-for-app": { + requestBody: { + content: { + "application/json": { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + }; + /** + * List deliveries for an app webhook + * @description Returns a list of webhook deliveries for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/list-webhook-deliveries": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + cursor?: components["parameters"]["cursor"]; + redelivery?: boolean; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery-item"][]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a delivery for an app webhook + * @description Returns a delivery for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-webhook-delivery": { + parameters: { + path: { + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery"]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Redeliver a delivery for an app webhook + * @description Redeliver a delivery for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/redeliver-webhook-delivery": { + parameters: { + path: { + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List installation requests for the authenticated app + * @description Lists all the pending installation requests for the authenticated GitHub App. 
+ */ + "apps/list-installation-requests-for-authenticated-app": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description List of integration installation requests */ + 200: { + content: { + "application/json": components["schemas"]["integration-installation-request"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + }; + }; + /** + * List installations for the authenticated app + * @description You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + * + * The permissions the installation has are included under the `permissions` key. + */ + "apps/list-installations": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + since?: components["parameters"]["since"]; + outdated?: string; + }; + }; + responses: { + /** @description The permissions the installation has are included under the `permissions` key. */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["installation"][]; + }; + }; + }; + }; + /** + * Get an installation for the authenticated app + * @description Enables an authenticated GitHub App to find an installation's information using the installation id. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-installation": { + parameters: { + path: { + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["installation"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete an installation for the authenticated app + * @description Uninstalls a GitHub App on a user, organization, or business account. If you prefer to temporarily suspend an app's access to your account's resources, then we recommend the "[Suspend an app installation](https://docs.github.com/rest/reference/apps/#suspend-an-app-installation)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/delete-installation": { + parameters: { + path: { + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create an installation access token for an app + * @description Creates an installation access token that enables a GitHub App to make authenticated API requests for the app's installation on an organization or individual account. Installation tokens expire one hour from the time you create them. Using an expired token produces a status code of `401 - Unauthorized`, and requires creating a new installation token. By default the installation token has access to all repositories that the installation can access. To restrict the access to specific repositories, you can provide the `repository_ids` when creating the token. When you omit `repository_ids`, the response does not contain the `repositories` key. 
+ * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/create-installation-access-token": { + parameters: { + path: { + installation_id: components["parameters"]["installation-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description List of repository names that the token should have access to */ + repositories?: string[]; + /** + * @description List of repository IDs that the token should have access to + * @example [ + * 1 + * ] + */ + repository_ids?: number[]; + permissions?: components["schemas"]["app-permissions"]; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["installation-token"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Suspend an app installation + * @description Suspends a GitHub App on a user, organization, or business account, which blocks the app from accessing the account's resources. When a GitHub App is suspended, the app's access to the GitHub API or webhook events is blocked for that account. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/suspend-installation": { + parameters: { + path: { + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Unsuspend an app installation + * @description Removes a GitHub App installation suspension. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/unsuspend-installation": { + parameters: { + path: { + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete an app authorization + * @description OAuth and GitHub application owners can revoke a grant for their application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid OAuth `access_token` as an input parameter and the grant for the token's owner will be deleted. + * Deleting an application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). + */ + "apps/delete-authorization": { + parameters: { + path: { + client_id: components["parameters"]["client-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The OAuth access token used to authenticate to the GitHub API. 
*/ + access_token: string; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Check a token + * @description OAuth applications and GitHub applications with OAuth authorizations can use this API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) to use this endpoint, where the username is the application `client_id` and the password is its `client_secret`. Invalid tokens will return `404 NOT FOUND`. + */ + "apps/check-token": { + parameters: { + path: { + client_id: components["parameters"]["client-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The access_token of the OAuth or GitHub application. */ + access_token: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Delete an app token + * @description OAuth or GitHub application owners can revoke a single token for an OAuth application or a GitHub application with an OAuth authorization. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the application's `client_id` and `client_secret` as the username and password. + */ + "apps/delete-token": { + parameters: { + path: { + client_id: components["parameters"]["client-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The OAuth access token used to authenticate to the GitHub API. */ + access_token: string; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Reset a token + * @description OAuth applications and GitHub applications with OAuth authorizations can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. + */ + "apps/reset-token": { + parameters: { + path: { + client_id: components["parameters"]["client-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The access_token of the OAuth or GitHub application. */ + access_token: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Create a scoped access token + * @description Use a non-scoped user-to-server access token to create a repository scoped and/or permission scoped user-to-server access token. You can specify which repositories the token can access and which permissions are granted to the token. 
You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the `client_id` and `client_secret` of the GitHub App as the username and password. Invalid tokens will return `404 NOT FOUND`. + */ + "apps/scope-token": { + parameters: { + path: { + client_id: components["parameters"]["client-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The access token used to authenticate to the GitHub API. + * @example e72e16c7e42f292c6912e7710c838347ae178b4a + */ + access_token: string; + /** + * @description The name of the user or organization to scope the user-to-server access token to. **Required** unless `target_id` is specified. + * @example octocat + */ + target?: string; + /** + * @description The ID of the user or organization to scope the user-to-server access token to. **Required** unless `target` is specified. + * @example 1 + */ + target_id?: number; + /** @description The list of repository names to scope the user-to-server access token to. `repositories` may not be specified if `repository_ids` is specified. */ + repositories?: string[]; + /** + * @description The list of repository IDs to scope the user-to-server access token to. `repository_ids` may not be specified if `repositories` is specified. + * @example [ + * 1 + * ] + */ + repository_ids?: number[]; + permissions?: components["schemas"]["app-permissions"]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get an app + * @description **Note**: The `:app_slug` is just the URL-friendly name of your GitHub App. You can find this on the settings page for your GitHub App (e.g., `https://github.com/settings/apps/:app_slug`). + * + * If the GitHub App you specify is public, you can access this endpoint without authenticating. If the GitHub App you specify is private, you must authenticate with a [personal access token](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. 
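// --- Illustrative sketch (editorial, not part of the generated file) ---
// Body for "apps/scope-token". Per the description above, one of `target` /
// `target_id` is required and `repositories` / `repository_ids` are mutually
// exclusive; the generated type cannot encode that, it is only documented.
// The repository name below is hypothetical; the token value is the spec's own example.
type ScopeTokenBody =
  operations["apps/scope-token"]["requestBody"]["content"]["application/json"];

const scopeRequest: ScopeTokenBody = {
  access_token: "e72e16c7e42f292c6912e7710c838347ae178b4a",
  target: "octocat",
  repositories: ["hello-world"], // hypothetical repository name
};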
+ */ + "apps/get-by-slug": { + parameters: { + path: { + app_slug: components["parameters"]["app-slug"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Get all codes of conduct */ + "codes-of-conduct/get-all-codes-of-conduct": { + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["code-of-conduct"][]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** Get a code of conduct */ + "codes-of-conduct/get-conduct-code": { + parameters: { + path: { + key: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["code-of-conduct"]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get emojis + * @description Lists all the emojis available to use on GitHub. + */ + "emojis/get": { + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + [key: string]: string | undefined; + }; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * List Dependabot alerts for an enterprise + * @description Lists Dependabot alerts for repositories that are owned by the specified enterprise. + * To use this endpoint, you must be a member of the enterprise, and you must use an + * access token with the `repo` scope or `security_events` scope. + * Alerts are only returned for organizations in the enterprise for which you are an organization owner or a security manager. For more information about security managers, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + */ + "dependabot/list-alerts-for-enterprise": { + parameters: { + query?: { + state?: components["parameters"]["dependabot-alert-comma-separated-states"]; + severity?: components["parameters"]["dependabot-alert-comma-separated-severities"]; + ecosystem?: components["parameters"]["dependabot-alert-comma-separated-ecosystems"]; + package?: components["parameters"]["dependabot-alert-comma-separated-packages"]; + scope?: components["parameters"]["dependabot-alert-scope"]; + sort?: components["parameters"]["dependabot-alert-sort"]; + direction?: components["parameters"]["direction"]; + before?: components["parameters"]["pagination-before"]; + after?: components["parameters"]["pagination-after"]; + first?: components["parameters"]["pagination-first"]; + last?: components["parameters"]["pagination-last"]; + per_page?: components["parameters"]["per-page"]; + }; + path: { + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-alert-with-repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * List secret scanning alerts for an enterprise + * @description Lists secret scanning alerts for eligible repositories in an enterprise, from newest to oldest. 
+ * To use this endpoint, you must be a member of the enterprise, and you must use an access token with the `repo` scope or `security_events` scope. Alerts are only returned for organizations in the enterprise for which you are an organization owner or a [security manager](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization). + */ + "secret-scanning/list-alerts-for-enterprise": { + parameters: { + query?: { + state?: components["parameters"]["secret-scanning-alert-state"]; + secret_type?: components["parameters"]["secret-scanning-alert-secret-type"]; + resolution?: components["parameters"]["secret-scanning-alert-resolution"]; + sort?: components["parameters"]["secret-scanning-alert-sort"]; + direction?: components["parameters"]["direction"]; + per_page?: components["parameters"]["per-page"]; + before?: components["parameters"]["pagination-before"]; + after?: components["parameters"]["pagination-after"]; + }; + path: { + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["organization-secret-scanning-alert"][]; + }; + }; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List public events + * @description We delay the public events feed by five minutes, which means the most recent event returned by the public events API actually occurred at least five minutes ago. + */ + "activity/list-public-events": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Get feeds + * @description GitHub provides several timeline resources in [Atom](http://en.wikipedia.org/wiki/Atom_(standard)) format. The Feeds API lists all the feeds available to the authenticated user: + * + * * **Timeline**: The GitHub global public timeline + * * **User**: The public timeline for any user, using [URI template](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) + * * **Current user public**: The public timeline for the authenticated user + * * **Current user**: The private timeline for the authenticated user + * * **Current user actor**: The private timeline for activity created by the authenticated user + * * **Current user organizations**: The private timeline for the organizations the authenticated user is a member of. + * * **Security advisories**: A collection of public announcements that provide information about security-related vulnerabilities in software on GitHub. + * + * **Note**: Private feeds are only returned when [authenticating via Basic Auth](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) since current feed URIs use the older, non revocable auth tokens. 
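// --- Illustrative sketch (editorial, not part of the generated file) ---
// Minimal typed call for "activity/get-feeds", assuming a global `fetch`
// (Node 18+ or a browser) and that the generated types are imported from this
// module. The wrapper below is hypothetical, not part of any client library.
type FeedsResponse =
  operations["activity/get-feeds"]["responses"][200]["content"]["application/json"];

async function getFeeds(token: string): Promise<FeedsResponse> {
  const res = await fetch("https://api.github.com/feeds", {
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
    },
  });
  if (!res.ok) throw new Error(`GitHub API error: ${res.status}`);
  return (await res.json()) as FeedsResponse;
}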
+ */ + "activity/get-feeds": { + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["feed"]; + }; + }; + }; + }; + /** + * List gists for the authenticated user + * @description Lists the authenticated user's gists or if called anonymously, this endpoint returns all public gists: + */ + "gists/list": { + parameters: { + query?: { + since?: components["parameters"]["since"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["base-gist"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Create a gist + * @description Allows you to add a new gist with one or more files. + * + * **Note:** Don't name your files "gistfile" with a numerical suffix. This is the format of the automatic naming scheme that Gist uses internally. + */ + "gists/create": { + requestBody: { + content: { + "application/json": { + /** + * @description Description of the gist + * @example Example Ruby script + */ + description?: string; + /** + * @description Names and content for the files that make up the gist + * @example { + * "hello.rb": { + * "content": "puts \"Hello, World!\"" + * } + * } + */ + files: { + [key: string]: + | { + /** @description Content of the file */ + content: string; + } + | undefined; + }; + public?: boolean | ("true" | "false"); + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/gists/aa5a315d61ae9438b18d */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["gist-simple"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List public gists + * @description List public gists sorted by most recently updated to least recently updated. + * + * Note: With [pagination](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination), you can fetch up to 3000 gists. For example, you can fetch 100 pages with 30 gists per page or 30 pages with 100 gists per page. 
+ */ + "gists/list-public": { + parameters: { + query?: { + since?: components["parameters"]["since"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["base-gist"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List starred gists + * @description List the authenticated user's starred gists: + */ + "gists/list-starred": { + parameters: { + query?: { + since?: components["parameters"]["since"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["base-gist"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Get a gist */ + "gists/get": { + parameters: { + path: { + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-simple"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden_gist"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Delete a gist */ + "gists/delete": { + parameters: { + path: { + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Update a gist + * @description Allows you to update a gist's description and to update, delete, or rename gist files. Files from the previous version of the gist that aren't explicitly changed during an edit are unchanged. + */ + "gists/update": { + parameters: { + path: { + gist_id: components["parameters"]["gist-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The description of the gist. + * @example Example Ruby script + */ + description?: string; + /** + * @description The gist files to be updated, renamed, or deleted. Each `key` must match the current filename + * (including extension) of the targeted gist file. For example: `hello.py`. + * + * To delete a file, set the whole file to null. For example: `hello.py : null`. + * @example { + * "hello.rb": { + * "content": "blah", + * "filename": "goodbye.rb" + * } + * } + */ + files?: { + [key: string]: + | { + /** @description The new content of the file. */ + content?: string; + /** @description The new filename for the file. 
*/ + filename?: string | null; + } + | undefined; + }; + } | null; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-simple"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** List gist comments */ + "gists/list-comments": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["gist-comment"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Create a gist comment */ + "gists/create-comment": { + parameters: { + path: { + gist_id: components["parameters"]["gist-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The comment text. + * @example Body of the attachment + */ + body: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/gists/a6db0bec360bb87e9418/comments/1 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["gist-comment"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Get a gist comment */ + "gists/get-comment": { + parameters: { + path: { + gist_id: components["parameters"]["gist-id"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-comment"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden_gist"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Delete a gist comment */ + "gists/delete-comment": { + parameters: { + path: { + gist_id: components["parameters"]["gist-id"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Update a gist comment */ + "gists/update-comment": { + parameters: { + path: { + gist_id: components["parameters"]["gist-id"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The comment text. 
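// --- Illustrative sketch (editorial, not part of the generated file) ---
// Request body for "gists/update": rename `hello.rb` to `goodbye.rb` while
// replacing its content, mirroring the JSDoc example above. (Deleting a file
// by setting it to null is described above but is not representable in this
// generated type, which only allows omitting the entry.) Names are hypothetical.
type UpdateGistBody = NonNullable<
  operations["gists/update"]["requestBody"]["content"]["application/json"]
>;

const gistUpdate: UpdateGistBody = {
  files: {
    "hello.rb": { content: "blah", filename: "goodbye.rb" },
  },
};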
+ * @example Body of the attachment + */ + body: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** List gist commits */ + "gists/list-commits": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + /** @example <https://api.github.com/gists/aa5a315d61ae9438b18d/commits?page=2>; rel="next" */ + Link?: string; + }; + content: { + "application/json": components["schemas"]["gist-commit"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** List gist forks */ + "gists/list-forks": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["gist-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Fork a gist */ + "gists/fork": { + parameters: { + path: { + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/gists/aa5a315d61ae9438b18d */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["base-gist"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Check if a gist is starred */ + "gists/check-is-starred": { + parameters: { + path: { + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** @description Response if gist is starred */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + /** @description Not Found if gist is not starred */ + 404: { + content: { + "application/json": Record<string, never>; + }; + }; + }; + }; + /** + * Star a gist + * @description Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
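// --- Illustrative sketch (editorial, not part of the generated file) ---
// "gists/star" takes no request body, but per the note above the request must
// carry `Content-Length: 0`. Minimal call assuming a global `fetch` (Node 18+
// or a browser); some fetch implementations set or strip this header
// themselves for a body-less PUT. The wrapper name is hypothetical.
async function starGist(token: string, gistId: string): Promise<void> {
  const res = await fetch(`https://api.github.com/gists/${gistId}/star`, {
    method: "PUT",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
      "Content-Length": "0", // may be ignored by spec-compliant fetch implementations
    },
  });
  if (res.status !== 204) throw new Error(`Unexpected status: ${res.status}`);
}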
+ */ + "gists/star": { + parameters: { + path: { + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Unstar a gist */ + "gists/unstar": { + parameters: { + path: { + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Get a gist revision */ + "gists/get-revision": { + parameters: { + path: { + gist_id: components["parameters"]["gist-id"]; + sha: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-simple"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get all gitignore templates + * @description List all templates available to pass as an option when [creating a repository](https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user). + */ + "gitignore/get-all-templates": { + responses: { + /** @description Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Get a gitignore template + * @description The API also allows fetching the source of a single template. + * Use the raw [media type](https://docs.github.com/rest/overview/media-types/) to get the raw contents. + */ + "gitignore/get-template": { + parameters: { + path: { + name: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["gitignore-template"]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * List repositories accessible to the app installation + * @description List repositories that an app installation can access. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + "apps/list-repos-accessible-to-installation": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["repository"][]; + /** @example selected */ + repository_selection?: string; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Revoke an installation access token + * @description Revokes the installation token you're using to authenticate as an installation and access this endpoint. + * + * Once an installation token is revoked, the token is invalidated and cannot be used. Other endpoints that require the revoked installation token must have a new installation token to work. 
You can create a new token using the "[Create an installation access token for an app](https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app)" endpoint. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + "apps/revoke-installation-access-token": { + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List issues assigned to the authenticated user + * @description List issues assigned to the authenticated user across all visible repositories including owned repositories, member + * repositories, and organization repositories. You can use the `filter` query parameter to fetch issues that are not + * necessarily assigned to you. + * + * + * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/list": { + parameters: { + query?: { + /** @description Indicates which sorts of issues to return. `assigned` means issues assigned to you. `created` means issues created by you. `mentioned` means issues mentioning you. `subscribed` means issues you're subscribed to updates for. `all` or `repos` means all issues you can see, regardless of participation or creation. */ + filter?: + | "assigned" + | "created" + | "mentioned" + | "subscribed" + | "repos" + | "all"; + /** @description Indicates the state of the issues to return. */ + state?: "open" | "closed" | "all"; + labels?: components["parameters"]["labels"]; + /** @description What to sort results by. 
*/ + sort?: "created" | "updated" | "comments"; + direction?: components["parameters"]["direction"]; + since?: components["parameters"]["since"]; + collab?: boolean; + orgs?: boolean; + owned?: boolean; + pulls?: boolean; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["issue"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Get all commonly used licenses */ + "licenses/get-all-commonly-used": { + parameters: { + query?: { + featured?: boolean; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["license-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** Get a license */ + "licenses/get": { + parameters: { + path: { + license: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["license"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Render a Markdown document */ + "markdown/render": { + requestBody: { + content: { + "application/json": { + /** @description The Markdown text to render in HTML. */ + text: string; + /** + * @description The rendering mode. + * @default markdown + * @example markdown + * @enum {string} + */ + mode?: "markdown" | "gfm"; + /** @description The repository context to use when creating references in `gfm` mode. For example, setting `context` to `octo-org/octo-repo` will change the text `#42` into an HTML link to issue 42 in the `octo-org/octo-repo` repository. */ + context?: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + "Content-Type": components["headers"]["content-type"]; + /** @example 279 */ + "Content-Length"?: string; + "X-CommonMarker-Version": components["headers"]["x-common-marker-version"]; + }; + content: { + "text/html": string; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Render a Markdown document in raw mode + * @description You must send Markdown as plain text (using a `Content-Type` header of `text/plain` or `text/x-markdown`) to this endpoint, rather than using JSON format. In raw mode, [GitHub Flavored Markdown](https://github.github.com/gfm/) is not supported and Markdown will be rendered in plain format like a README.md file. Markdown content must be 400 KB or less. + */ + "markdown/render-raw": { + requestBody?: { + content: { + "text/plain": string; + "text/x-markdown": string; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + "X-CommonMarker-Version": components["headers"]["x-common-marker-version"]; + }; + content: { + "text/html": string; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Get a subscription plan for an account + * @description Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. 
When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/get-subscription-plan-for-account": { + parameters: { + path: { + account_id: components["parameters"]["account-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["marketplace-purchase"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + /** @description Not Found when the account has not purchased the listing */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * List plans + * @description Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/list-plans": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["marketplace-listing-plan"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List accounts for a plan + * @description Returns user and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/list-accounts-for-plan": { + parameters: { + query?: { + sort?: components["parameters"]["sort"]; + /** @description To return the oldest accounts first, set to `asc`. Ignored without the `sort` parameter. 
*/ + direction?: "asc" | "desc"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + plan_id: components["parameters"]["plan-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["marketplace-purchase"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a subscription plan for an account (stubbed) + * @description Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/get-subscription-plan-for-account-stubbed": { + parameters: { + path: { + account_id: components["parameters"]["account-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["marketplace-purchase"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + /** @description Not Found when the account has not purchased the listing */ + 404: never; + }; + }; + /** + * List plans (stubbed) + * @description Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/list-plans-stubbed": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["marketplace-listing-plan"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + }; + }; + /** + * List accounts for a plan (stubbed) + * @description Returns repository and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. 
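+ * + * #### Example using the generated types + * + * A minimal consumer-side sketch of pulling the 200 response type out of this operation; it assumes the enclosing interface in this file is exported as `operations`. + * + * ``` + * type StubbedPlanAccounts = + *   operations["apps/list-accounts-for-plan-stubbed"]["responses"][200]["content"]["application/json"]; + * ```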
+ */ + "apps/list-accounts-for-plan-stubbed": { + parameters: { + query?: { + sort?: components["parameters"]["sort"]; + /** @description To return the oldest accounts first, set to `asc`. Ignored without the `sort` parameter. */ + direction?: "asc" | "desc"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + plan_id: components["parameters"]["plan-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["marketplace-purchase"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + }; + }; + /** + * Get GitHub meta information + * @description Returns meta information about GitHub, including a list of GitHub's IP addresses. For more information, see "[About GitHub's IP addresses](https://docs.github.com/articles/about-github-s-ip-addresses/)." + * + * The API's response also includes a list of GitHub's domain names. + * + * The values shown in the documentation's response are example values. You must always query the API directly to get the latest values. + * + * **Note:** This endpoint returns both IPv4 and IPv6 addresses. However, not all features support IPv6. You should refer to the specific documentation for each feature to determine if IPv6 is supported. + */ + "meta/get": { + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["api-overview"]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** List public events for a network of repositories */ + "activity/list-public-events-for-repo-network": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List notifications for the authenticated user + * @description List all notifications for the current user, sorted by most recently updated. + */ + "activity/list-notifications-for-authenticated-user": { + parameters: { + query?: { + all?: components["parameters"]["all"]; + participating?: components["parameters"]["participating"]; + since?: components["parameters"]["since"]; + before?: components["parameters"]["before"]; + page?: components["parameters"]["page"]; + /** @description The number of results per page (max 50). */ + per_page?: number; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["thread"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Mark notifications as read + * @description Marks all notifications as "read" for the current user. 
If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. + */ + "activity/mark-notifications-as-read": { + requestBody?: { + content: { + "application/json": { + /** + * Format: date-time + * @description Describes the last point that notifications were checked. Anything updated since this time will not be marked as read. If you omit this parameter, all notifications are marked as read. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. Default: The current timestamp. + */ + last_read_at?: string; + /** @description Whether the notification has been read. */ + read?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 202: { + content: { + "application/json": { + message?: string; + }; + }; + }; + /** @description Reset Content */ + 205: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Get a thread + * @description Gets information about a notification thread. + */ + "activity/get-thread": { + parameters: { + path: { + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["thread"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Mark a thread as read + * @description Marks a thread as "read." Marking a thread as "read" is equivalent to clicking a notification in your notification inbox on GitHub: https://github.com/notifications. + */ + "activity/mark-thread-as-read": { + parameters: { + path: { + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** @description Reset Content */ + 205: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Get a thread subscription for the authenticated user + * @description This checks to see if the current user is subscribed to a thread. You can also [get a repository subscription](https://docs.github.com/rest/reference/activity#get-a-repository-subscription). + * + * Note that subscriptions are only generated if a user is participating in a conversation--for example, they've replied to the thread, were **@mentioned**, or manually subscribe to a thread. + */ + "activity/get-thread-subscription-for-authenticated-user": { + parameters: { + path: { + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["thread-subscription"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Set a thread subscription + * @description If you are watching a repository, you receive notifications for all threads by default. 
Use this endpoint to ignore future notifications for threads until you comment on the thread or get an **@mention**. + * + * You can also use this endpoint to subscribe to threads that you are currently not receiving notifications for or to subscribe to threads that you have previously ignored. + * + * Unsubscribing from a conversation in a repository that you are not watching is functionally equivalent to the [Delete a thread subscription](https://docs.github.com/rest/reference/activity#delete-a-thread-subscription) endpoint. + */ + "activity/set-thread-subscription": { + parameters: { + path: { + thread_id: components["parameters"]["thread-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description Whether to block all notifications from a thread. + * @default false + */ + ignored?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["thread-subscription"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Delete a thread subscription + * @description Mutes all future notifications for a conversation until you comment on the thread or get an **@mention**. If you are watching the repository of the thread, you will still receive notifications. To ignore future notifications for a repository you are watching, use the [Set a thread subscription](https://docs.github.com/rest/reference/activity#set-a-thread-subscription) endpoint and set `ignored` to `true`. + */ + "activity/delete-thread-subscription": { + parameters: { + path: { + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Get Octocat + * @description Get the octocat as ASCII art + */ + "meta/get-octocat": { + parameters: { + query?: { + /** @description The words to show in Octocat's speech bubble */ + s?: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/octocat-stream": string; + }; + }; + }; + }; + /** + * List organizations + * @description Lists all organizations, in the order that they were created on GitHub. + * + * **Note:** Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api#using-link-headers) to get the URL for the next page of organizations. + */ + "orgs/list": { + parameters: { + query?: { + since?: components["parameters"]["since-org"]; + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + /** @example ; rel="next" */ + Link?: string; + }; + content: { + "application/json": components["schemas"]["organization-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * List requests to access organization resources with fine-grained personal access tokens + * @description Lists requests from organization members to access organization resources with a fine-grained personal access token. Only GitHub Apps can call this API, + * using the `organization_personal_access_token_requests: read` permission. + * + * **Note**: Fine-grained PATs are in public beta.
Related APIs, events, and functionality are subject to change. + */ + "orgs/list-pat-grant-requests": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + sort?: components["parameters"]["personal-access-token-sort"]; + direction?: components["parameters"]["direction"]; + owner?: components["parameters"]["personal-access-token-owner"]; + repository?: components["parameters"]["personal-access-token-repository"]; + permission?: components["parameters"]["personal-access-token-permission"]; + last_used_before?: components["parameters"]["personal-access-token-before"]; + last_used_after?: components["parameters"]["personal-access-token-after"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["organization-programmatic-access-grant-request"][]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Review requests to access organization resources with fine-grained personal access tokens + * @description Approves or denies multiple pending requests to access organization resources via a fine-grained personal access token. Only GitHub Apps can call this API, + * using the `organization_personal_access_token_requests: write` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + "orgs/review-pat-grant-requests-in-bulk": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Unique identifiers of the requests for access via fine-grained personal access token. Must be formed of between 1 and 100 `pat_request_id` values. */ + pat_request_ids?: number[]; + /** + * @description Action to apply to the requests. + * @enum {string} + */ + action: "approve" | "deny"; + /** @description Reason for approving or denying the requests. Max 1024 characters. */ + reason?: string | null; + }; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Review a request to access organization resources with a fine-grained personal access token + * @description Approves or denies a pending request to access organization resources via a fine-grained personal access token. Only GitHub Apps can call this API, + * using the `organization_personal_access_token_requests: write` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + "orgs/review-pat-grant-request": { + parameters: { + path: { + org: components["parameters"]["org"]; + /** @description Unique identifier of the request for access via fine-grained personal access token. */ + pat_request_id: number; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Action to apply to the request. + * @enum {string} + */ + action: "approve" | "deny"; + /** @description Reason for approving or denying the request. Max 1024 characters. 
*/ + reason?: string | null; + }; + }; + }; + responses: { + 204: components["responses"]["no_content"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * List repositories requested to be accessed by a fine-grained personal access token + * @description Lists the repositories a fine-grained personal access token request is requesting access to. Only GitHub Apps can call this API, + * using the `organization_personal_access_token_requests: read` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + "orgs/list-pat-grant-request-repositories": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + /** @description Unique identifier of the request for access via fine-grained personal access token. */ + pat_request_id: number; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * List fine-grained personal access tokens with access to organization resources + * @description Lists approved fine-grained personal access tokens owned by organization members that can access organization resources. Only GitHub Apps can call this API, + * using the `organization_personal_access_tokens: read` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + "orgs/list-pat-grants": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + sort?: components["parameters"]["personal-access-token-sort"]; + direction?: components["parameters"]["direction"]; + owner?: components["parameters"]["personal-access-token-owner"]; + repository?: components["parameters"]["personal-access-token-repository"]; + permission?: components["parameters"]["personal-access-token-permission"]; + last_used_before?: components["parameters"]["personal-access-token-before"]; + last_used_after?: components["parameters"]["personal-access-token-after"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["organization-programmatic-access-grant"][]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Update the access to organization resources via fine-grained personal access tokens + * @description Updates the access organization members have to organization resources via fine-grained personal access tokens. Limited to revoking a token's existing access. Only GitHub Apps can call this API, + * using the `organization_personal_access_tokens: write` permission. + * + * **Note**: Fine-grained PATs are in public beta. 
Related APIs, events, and functionality are subject to change. + */ + "orgs/update-pat-accesses": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Action to apply to the fine-grained personal access token. + * @enum {string} + */ + action: "revoke"; + /** @description The IDs of the fine-grained personal access tokens. */ + pat_ids: number[]; + }; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Update the access a fine-grained personal access token has to organization resources + * @description Updates the access an organization member has to organization resources via a fine-grained personal access token. Limited to revoking the token's existing access. Only GitHub Apps can call this API, + * using the `organization_personal_access_tokens: write` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + "orgs/update-pat-access": { + parameters: { + path: { + org: components["parameters"]["org"]; + pat_id: components["parameters"]["fine-grained-personal-access-token-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Action to apply to the fine-grained personal access token. + * @enum {string} + */ + action: "revoke"; + }; + }; + }; + responses: { + 204: components["responses"]["no_content"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * List repositories a fine-grained personal access token has access to + * @description Lists the repositories a fine-grained personal access token has access to. Only GitHub Apps can call this API, + * using the `organization_personal_access_tokens: read` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + "orgs/list-pat-grant-repositories": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + /** @description Unique identifier of the fine-grained personal access token. */ + pat_id: number; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Get an organization + * @description To see many of the organization response values, you need to be an authenticated organization owner with the `admin:org` scope. When the value of `two_factor_requirement_enabled` is `true`, the organization requires all members, billing managers, and outside collaborators to enable [two-factor authentication](https://docs.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/).
+ * + * GitHub Apps with the `Organization plan` permission can use this endpoint to retrieve information about an organization's GitHub plan. See "[Authenticating with GitHub Apps](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/)" for details. For an example response, see 'Response with GitHub plan information' below. + */ + "orgs/get": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-full"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete an organization + * @description Deletes an organization and all its repositories. + * + * The organization login will be unavailable for 90 days after deletion. + * + * Please review the Terms of Service regarding account deletion before using this endpoint: + * + * https://docs.github.com/site-policy/github-terms/github-terms-of-service + */ + "orgs/delete": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Update an organization + * @description **Parameter Deprecation Notice:** GitHub will replace and discontinue `members_allowed_repository_creation_type` in favor of more granular permissions. The new input parameters are `members_can_create_public_repositories`, `members_can_create_private_repositories` for all organizations and `members_can_create_internal_repositories` for organizations associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see the [blog post](https://developer.github.com/changes/2019-12-03-internal-visibility-changes). + * + * Enables an authenticated organization owner with the `admin:org` scope or the `repo` scope to update the organization's profile and member privileges. + */ + "orgs/update": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description Billing email address. This address is not publicized. */ + billing_email?: string; + /** @description The company name. */ + company?: string; + /** @description The publicly visible email address. */ + email?: string; + /** @description The Twitter username of the company. */ + twitter_username?: string; + /** @description The location. */ + location?: string; + /** @description The shorthand name of the company. */ + name?: string; + /** @description The description of the company. */ + description?: string; + /** @description Whether an organization can use organization projects. */ + has_organization_projects?: boolean; + /** @description Whether repositories that belong to the organization can use repository projects. */ + has_repository_projects?: boolean; + /** + * @description Default permission level members have for organization repositories. + * @default read + * @enum {string} + */ + default_repository_permission?: "read" | "write" | "admin" | "none"; + /** + * @description Whether non-admin organization members can create repositories. **Note:** The `members_allowed_repository_creation_type` parameter can override this parameter. See `members_allowed_repository_creation_type` in this table for details.
+ * @default true + */ + members_can_create_repositories?: boolean; + /** @description Whether organization members can create internal repositories, which are visible to all enterprise members. You can only allow members to create internal repositories if your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see "[Restricting repository creation in your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation. */ + members_can_create_internal_repositories?: boolean; + /** @description Whether organization members can create private repositories, which are visible to organization members with permission. For more information, see "[Restricting repository creation in your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation. */ + members_can_create_private_repositories?: boolean; + /** @description Whether organization members can create public repositories, which are visible to anyone. For more information, see "[Restricting repository creation in your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation. */ + members_can_create_public_repositories?: boolean; + /** + * @description Specifies which types of repositories non-admin organization members can create. `private` is only available to repositories that are part of an organization on GitHub Enterprise Cloud. + * **Note:** This parameter is deprecated and will be removed in the future. Its return value ignores internal repositories. Using this parameter overrides values set in `members_can_create_repositories`. See the parameter deprecation notice in the operation description for details. + * @enum {string} + */ + members_allowed_repository_creation_type?: "all" | "private" | "none"; + /** + * @description Whether organization members can create GitHub Pages sites. Existing published sites will not be impacted. + * @default true + */ + members_can_create_pages?: boolean; + /** + * @description Whether organization members can create public GitHub Pages sites. Existing published sites will not be impacted. + * @default true + */ + members_can_create_public_pages?: boolean; + /** + * @description Whether organization members can create private GitHub Pages sites. Existing published sites will not be impacted. + * @default true + */ + members_can_create_private_pages?: boolean; + /** + * @description Whether organization members can fork private organization repositories. + * @default false + */ + members_can_fork_private_repositories?: boolean; + /** + * @description Whether contributors to organization repositories are required to sign off on commits they make through GitHub's web interface. + * @default false + */ + web_commit_signoff_required?: boolean; + /** @example "http://github.blog" */ + blog?: string; + /** + * @description Whether GitHub Advanced Security is automatically enabled for new repositories. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. 
For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * You can check which security and analysis features are currently enabled by using a `GET /orgs/{org}` request. + */ + advanced_security_enabled_for_new_repositories?: boolean; + /** + * @description Whether Dependabot alerts is automatically enabled for new repositories. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * You can check which security and analysis features are currently enabled by using a `GET /orgs/{org}` request. + */ + dependabot_alerts_enabled_for_new_repositories?: boolean; + /** + * @description Whether Dependabot security updates is automatically enabled for new repositories. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * You can check which security and analysis features are currently enabled by using a `GET /orgs/{org}` request. + */ + dependabot_security_updates_enabled_for_new_repositories?: boolean; + /** + * @description Whether dependency graph is automatically enabled for new repositories. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * You can check which security and analysis features are currently enabled by using a `GET /orgs/{org}` request. + */ + dependency_graph_enabled_for_new_repositories?: boolean; + /** + * @description Whether secret scanning is automatically enabled for new repositories. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * You can check which security and analysis features are currently enabled by using a `GET /orgs/{org}` request. + */ + secret_scanning_enabled_for_new_repositories?: boolean; + /** + * @description Whether secret scanning push protection is automatically enabled for new repositories. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. 
For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * You can check which security and analysis features are currently enabled by using a `GET /orgs/{org}` request. + */ + secret_scanning_push_protection_enabled_for_new_repositories?: boolean; + /** @description Whether a custom link is shown to contributors who are blocked from pushing a secret by push protection. */ + secret_scanning_push_protection_custom_link_enabled?: boolean; + /** @description If `secret_scanning_push_protection_custom_link_enabled` is true, the URL that will be displayed to contributors who are blocked from pushing a secret. */ + secret_scanning_push_protection_custom_link?: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-full"]; + }; + }; + 409: components["responses"]["conflict"]; + /** @description Validation failed */ + 422: { + content: { + "application/json": + | components["schemas"]["validation-error"] + | components["schemas"]["validation-error-simple"]; + }; + }; + }; + }; + /** + * Get GitHub Actions cache usage for an organization + * @description Gets the total GitHub Actions cache usage for an organization. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_admistration:read` permission to use this endpoint. + */ + "actions/get-actions-cache-usage-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["actions-cache-usage-org-enterprise"]; + }; + }; + }; + }; + /** + * List repositories with GitHub Actions cache usage for an organization + * @description Lists repositories and their GitHub Actions cache usage for an organization. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_admistration:read` permission to use this endpoint. + */ + "actions/get-actions-cache-usage-by-repo-for-org": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + repository_cache_usages: components["schemas"]["actions-cache-usage-by-repository"][]; + }; + }; + }; + }; + }; + /** + * Get the customization template for an OIDC subject claim for an organization + * @description Gets the customization template for an OpenID Connect (OIDC) subject claim. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. + * GitHub Apps must have the `organization_administration:write` permission to use this endpoint. 
+ */ + "oidc/get-oidc-custom-sub-template-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description A JSON serialized template for OIDC subject claim customization */ + 200: { + content: { + "application/json": components["schemas"]["oidc-custom-sub"]; + }; + }; + }; + }; + /** + * Set the customization template for an OIDC subject claim for an organization + * @description Creates or updates the customization template for an OpenID Connect (OIDC) subject claim. + * You must authenticate using an access token with the `write:org` scope to use this endpoint. + * GitHub Apps must have the `admin:org` permission to use this endpoint. + */ + "oidc/update-oidc-custom-sub-template-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["oidc-custom-sub"]; + }; + }; + responses: { + /** @description Empty response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get GitHub Actions permissions for an organization + * @description Gets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/get-github-actions-permissions-organization": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-organization-permissions"]; + }; + }; + }; + }; + /** + * Set GitHub Actions permissions for an organization + * @description Sets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/set-github-actions-permissions-organization": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + enabled_repositories: components["schemas"]["enabled-repositories"]; + allowed_actions?: components["schemas"]["allowed-actions"]; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List selected repositories enabled for GitHub Actions in an organization + * @description Lists the selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. 
+ */ + "actions/list-selected-repositories-enabled-github-actions-organization": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["repository"][]; + }; + }; + }; + }; + }; + /** + * Set selected repositories enabled for GitHub Actions in an organization + * @description Replaces the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/set-selected-repositories-enabled-github-actions-organization": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description List of repository IDs to enable for GitHub Actions. */ + selected_repository_ids: number[]; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Enable a selected repository for GitHub Actions in an organization + * @description Adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/enable-selected-repository-github-actions-organization": { + parameters: { + path: { + org: components["parameters"]["org"]; + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Disable a selected repository for GitHub Actions in an organization + * @description Removes a repository from the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/disable-selected-repository-github-actions-organization": { + parameters: { + path: { + org: components["parameters"]["org"]; + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get allowed actions and reusable workflows for an organization + * @description Gets the selected actions and reusable workflows that are allowed in an organization. 
To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/get-allowed-actions-organization": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + }; + /** + * Set allowed actions and reusable workflows for an organization + * @description Sets the actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/set-allowed-actions-organization": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get default workflow permissions for an organization + * @description Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, + * as well as whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/get-github-actions-default-workflow-permissions-organization": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-get-default-workflow-permissions"]; + }; + }; + }; + }; + /** + * Set default workflow permissions for an organization + * @description Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, and sets whether GitHub Actions + * can submit approving pull request reviews. For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API.
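+ * + * #### Example using the generated types + * + * A minimal sketch of typing the request body for this operation, assuming the enclosing interface in this file is exported as `operations`; `NonNullable` is needed because the request body is optional. + * + * ``` + * type DefaultWorkflowPermissionsBody = NonNullable< + *   operations["actions/set-github-actions-default-workflow-permissions-organization"]["requestBody"] + * >["content"]["application/json"]; + * ```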
+ */ + "actions/set-github-actions-default-workflow-permissions-organization": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["actions-set-default-workflow-permissions"]; + }; + }; + responses: { + /** @description Success response */ + 204: never; + }; + }; + /** + * List required workflows + * @description List all required workflows in an organization. + * + * You must authenticate using an access token with the `read:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + "actions/list-required-workflows": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + required_workflows: components["schemas"]["required-workflow"][]; + }; + }; + }; + }; + }; + /** + * Create a required workflow + * @description Create a required workflow in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + "actions/create-required-workflow": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Path of the workflow file to be configured as a required workflow. */ + workflow_file_path: string; + /** @description The ID of the repository that contains the workflow file. */ + repository_id: string; + /** + * @description Enable the required workflow for all repositories or selected repositories in the organization. + * @default all + * @enum {string} + */ + scope?: "selected" | "all"; + /** @description A list of repository IDs where you want to enable the required workflow. You can only provide a list of repository ids when the `scope` is set to `selected`. */ + selected_repository_ids?: number[]; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["required-workflow"]; + }; + }; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Get a required workflow + * @description Get a required workflow configured in an organization. + * + * You must authenticate using an access token with the `read:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + "actions/get-required-workflow": { + parameters: { + path: { + org: components["parameters"]["org"]; + required_workflow_id: components["parameters"]["required-workflow-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["required-workflow"]; + }; + }; + }; + }; + /** + * Delete a required workflow + * @description Deletes a required workflow configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." 
+ */ + "actions/delete-required-workflow": { + parameters: { + path: { + org: components["parameters"]["org"]; + required_workflow_id: components["parameters"]["required-workflow-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update a required workflow + * @description Update a required workflow in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + "actions/update-required-workflow": { + parameters: { + path: { + org: components["parameters"]["org"]; + required_workflow_id: components["parameters"]["required-workflow-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Path of the workflow file to be configured as a required workflow. */ + workflow_file_path?: string; + /** @description The ID of the repository that contains the workflow file. */ + repository_id?: string; + /** + * @description Enable the required workflow for all repositories or selected repositories in the organization. + * @default all + * @enum {string} + */ + scope?: "selected" | "all"; + /** @description A list of repository IDs where you want to enable the required workflow. A list of repository IDs where you want to enable the required workflow. You can only provide a list of repository ids when the `scope` is set to `selected`. */ + selected_repository_ids?: number[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["required-workflow"]; + }; + }; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * List selected repositories for a required workflow + * @description Lists the selected repositories that are configured for a required workflow in an organization. To use this endpoint, the required workflow must be configured to run on selected repositories. + * + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + "actions/list-selected-repositories-required-workflow": { + parameters: { + path: { + org: components["parameters"]["org"]; + required_workflow_id: components["parameters"]["required-workflow-id"]; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["repository"][]; + }; + }; + }; + /** @description Resource Not Found */ + 404: never; + }; + }; + /** + * Sets repositories for a required workflow + * @description Sets the repositories for a required workflow that is required for selected repositories. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." 
+ */ + "actions/set-selected-repos-to-required-workflow": { + parameters: { + path: { + org: components["parameters"]["org"]; + required_workflow_id: components["parameters"]["required-workflow-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The IDs of the repositories for which the workflow should be required. */ + selected_repository_ids: number[]; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Add a repository to a required workflow + * @description Adds a repository to a required workflow. To use this endpoint, the required workflow must be configured to run on selected repositories. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + "actions/add-selected-repo-to-required-workflow": { + parameters: { + path: { + org: components["parameters"]["org"]; + required_workflow_id: components["parameters"]["required-workflow-id"]; + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** @description Success */ + 204: never; + /** @description Resource Not Found */ + 404: never; + /** @description Validation Error */ + 422: never; + }; + }; + /** + * Remove a selected repository from required workflow + * @description Removes a repository from a required workflow. To use this endpoint, the required workflow must be configured to run on selected repositories. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + "actions/remove-selected-repo-from-required-workflow": { + parameters: { + path: { + org: components["parameters"]["org"]; + required_workflow_id: components["parameters"]["required-workflow-id"]; + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** @description Success */ + 204: never; + /** @description Resource Not Found */ + 404: never; + /** @description Validation Error */ + 422: never; + }; + }; + /** + * List self-hosted runners for an organization + * @description Lists all self-hosted runners configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-self-hosted-runners-for-org": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + runners: components["schemas"]["runner"][]; + }; + }; + }; + }; + }; + /** + * List runner applications for an organization + * @description Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
+ */ + "actions/list-runner-applications-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-application"][]; + }; + }; + }; + }; + /** + * Create configuration for a just-in-time runner for an organization + * @description Generates a configuration that can be passed to the runner application at startup. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/generate-runner-jitconfig-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the new runner. */ + name: string; + /** @description The ID of the runner group to register the runner to. */ + runner_group_id: number; + /** @description The names of the custom labels to add to the runner. **Minimum items**: 1. **Maximum items**: 100. */ + labels: string[]; + /** + * @description The working directory to be used for job execution, relative to the runner install directory. + * @default _work + */ + work_folder?: string; + }; + }; + }; + responses: { + 201: components["responses"]["actions_runner_jitconfig"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Create a registration token for an organization + * @description Returns a token that you can pass to the `config` script. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/octo-org --token TOKEN + * ``` + */ + "actions/create-registration-token-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Create a remove token for an organization + * @description Returns a token that you can pass to the `config` script to remove a self-hosted runner from an organization. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from an organization, replace `TOKEN` with the remove token provided by this + * endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + "actions/create-remove-token-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Get a self-hosted runner for an organization + * @description Gets a specific self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
+ */ + "actions/get-self-hosted-runner-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["runner"]; + }; + }; + }; + }; + /** + * Delete a self-hosted runner from an organization + * @description Forces the removal of a self-hosted runner from an organization. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/delete-self-hosted-runner-from-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List labels for a self-hosted runner for an organization + * @description Lists all labels for a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-labels-for-self-hosted-runner-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Set custom labels for a self-hosted runner for an organization + * @description Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/set-custom-labels-for-self-hosted-runner-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + runner_id: components["parameters"]["runner-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to set for the runner. You can pass an empty array to remove all custom labels. */ + labels: string[]; + }; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Add custom labels to a self-hosted runner for an organization + * @description Add custom labels to a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/add-custom-labels-to-self-hosted-runner-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + runner_id: components["parameters"]["runner-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to add to the runner. */ + labels: string[]; + }; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Remove all custom labels from a self-hosted runner for an organization + * @description Remove all custom labels from a self-hosted runner configured in an + * organization. Returns the remaining read-only labels from the runner. 
+ * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/remove-all-custom-labels-from-self-hosted-runner-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels_readonly"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove a custom label from a self-hosted runner for an organization + * @description Remove a custom label from a self-hosted runner configured + * in an organization. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/remove-custom-label-from-self-hosted-runner-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + runner_id: components["parameters"]["runner-id"]; + name: components["parameters"]["runner-label-name"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * List organization secrets + * @description Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + "actions/list-org-secrets": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["organization-actions-secret"][]; + }; + }; + }; + }; + }; + /** + * Get an organization public key + * @description Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + "actions/get-org-public-key": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-public-key"]; + }; + }; + }; + }; + /** + * Get an organization secret + * @description Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + "actions/get-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-actions-secret"]; + }; + }; + }; + }; + /** + * Create or update an organization secret + * @description Creates or updates an organization secret with an encrypted value. 
Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to + * use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "actions/create-or-update-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an organization public key](https://docs.github.com/rest/reference/actions#get-an-organization-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + /** + * @description Which type of organization repositories have access to the organization secret. `selected` means only the repositories specified by `selected_repository_ids` can access the secret. 
+ * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** @description An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. You can manage the list of selected repositories using the [List selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#list-selected-repositories-for-an-organization-secret), [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#set-selected-repositories-for-an-organization-secret), and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids?: (number | string)[]; + }; + }; + }; + responses: { + /** @description Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** @description Response when updating a secret */ + 204: never; + }; + }; + /** + * Delete an organization secret + * @description Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + "actions/delete-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List selected repositories for an organization secret + * @description Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + "actions/list-selected-repos-for-org-secret": { + parameters: { + query?: { + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + }; + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + }; + /** + * Set selected repositories for an organization secret + * @description Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + "actions/set-selected-repos-for-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. 
You can add and remove individual repositories using the [Add selected repository to an organization secret](https://docs.github.com/rest/actions/secrets#add-selected-repository-to-an-organization-secret) and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids: number[]; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Add selected repository to an organization secret + * @description Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + "actions/add-selected-repo-to-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** @description No Content when repository was added to the selected list */ + 204: never; + /** @description Conflict when visibility type is not set to selected */ + 409: never; + }; + }; + /** + * Remove selected repository from an organization secret + * @description Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + "actions/remove-selected-repo-from-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** @description Response when repository was removed from the selected list */ + 204: never; + /** @description Conflict when visibility type not set to selected */ + 409: never; + }; + }; + /** + * List organization variables + * @description Lists all organization variables. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:read` organization permission to use this endpoint. + */ + "actions/list-org-variables": { + parameters: { + query?: { + per_page?: components["parameters"]["variables-per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + variables: components["schemas"]["organization-actions-variable"][]; + }; + }; + }; + }; + }; + /** + * Create an organization variable + * @description Creates an organization variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. 
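+   *
+   * #### Example creating a variable
+   *
+   * A sketch with `@octokit/core`, using the request-body fields documented below. The
+   * route string comes from the public GitHub REST docs and is an assumption here.
+   *
+   * ```
+   * import { Octokit } from "@octokit/core";
+   *
+   * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // `admin:org` scope
+   *
+   * octokit.request("POST /orgs/{org}/actions/variables", {
+   *   org: "octo-org",
+   *   name: "DEPLOY_REGION",
+   *   value: "eu-west-1",
+   *   visibility: "all", // or "private" / "selected" plus selected_repository_ids
+   * });
+   * ```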
+ */ + "actions/create-org-variable": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the variable. */ + name: string; + /** @description The value of the variable. */ + value: string; + /** + * @description The type of repositories in the organization that can access the variable. `selected` means only the repositories specified by `selected_repository_ids` can access the variable. + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** @description An array of repository ids that can access the organization variable. You can only provide a list of repository ids when the `visibility` is set to `selected`. */ + selected_repository_ids?: number[]; + }; + }; + }; + responses: { + /** @description Response when creating a variable */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + }; + }; + /** + * Get an organization variable + * @description Gets a specific variable in an organization. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:read` organization permission to use this endpoint. + */ + "actions/get-org-variable": { + parameters: { + path: { + org: components["parameters"]["org"]; + name: components["parameters"]["variable-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-actions-variable"]; + }; + }; + }; + }; + /** + * Delete an organization variable + * @description Deletes an organization variable using the variable name. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + "actions/delete-org-variable": { + parameters: { + path: { + org: components["parameters"]["org"]; + name: components["parameters"]["variable-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update an organization variable + * @description Updates an organization variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + "actions/update-org-variable": { + parameters: { + path: { + org: components["parameters"]["org"]; + name: components["parameters"]["variable-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the variable. */ + name?: string; + /** @description The value of the variable. */ + value?: string; + /** + * @description The type of repositories in the organization that can access the variable. `selected` means only the repositories specified by `selected_repository_ids` can access the variable. + * @enum {string} + */ + visibility?: "all" | "private" | "selected"; + /** @description An array of repository ids that can access the organization variable. You can only provide a list of repository ids when the `visibility` is set to `selected`. 
*/ + selected_repository_ids?: number[]; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List selected repositories for an organization variable + * @description Lists all repositories that can access an organization variable that is available to selected repositories. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:read` organization permission to use this endpoint. + */ + "actions/list-selected-repos-for-org-variable": { + parameters: { + query?: { + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + }; + path: { + org: components["parameters"]["org"]; + name: components["parameters"]["variable-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + /** @description Response when the visibility of the variable is not set to `selected` */ + 409: never; + }; + }; + /** + * Set selected repositories for an organization variable + * @description Replaces all repositories for an organization variable that is available to selected repositories. Organization variables that are available to selected repositories have their `visibility` field set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + "actions/set-selected-repos-for-org-variable": { + parameters: { + path: { + org: components["parameters"]["org"]; + name: components["parameters"]["variable-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The IDs of the repositories that can access the organization variable. */ + selected_repository_ids: number[]; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + /** @description Response when the visibility of the variable is not set to `selected` */ + 409: never; + }; + }; + /** + * Add selected repository to an organization variable + * @description Adds a repository to an organization variable that is available to selected repositories. Organization variables that are available to selected repositories have their `visibility` field set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + "actions/add-selected-repo-to-org-variable": { + parameters: { + path: { + org: components["parameters"]["org"]; + name: components["parameters"]["variable-name"]; + repository_id: number; + }; + }; + responses: { + /** @description Response */ + 204: never; + /** @description Response when the visibility of the variable is not set to `selected` */ + 409: never; + }; + }; + /** + * Remove selected repository from an organization variable + * @description Removes a repository from an organization variable that is available to selected repositories. Organization variables that are available to selected repositories have their `visibility` field set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. 
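+   *
+   * #### Example removing a repository from a variable
+   *
+   * A sketch with `@octokit/core`. The route string mirrors the public GitHub REST docs
+   * for this operation and is an assumption, not something this file declares.
+   *
+   * ```
+   * import { Octokit } from "@octokit/core";
+   *
+   * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // `admin:org` scope
+   *
+   * octokit.request(
+   *   "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}",
+   *   { org: "octo-org", name: "DEPLOY_REGION", repository_id: 123456 }
+   * );
+   * ```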
+ */ + "actions/remove-selected-repo-from-org-variable": { + parameters: { + path: { + org: components["parameters"]["org"]; + name: components["parameters"]["variable-name"]; + repository_id: number; + }; + }; + responses: { + /** @description Response */ + 204: never; + /** @description Response when the visibility of the variable is not set to `selected` */ + 409: never; + }; + }; + /** + * List users blocked by an organization + * @description List the users blocked by an organization. + */ + "orgs/list-blocked-users": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + /** Check if a user is blocked by an organization */ + "orgs/check-blocked-user": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description If the user is blocked */ + 204: never; + /** @description If the user is not blocked */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** Block a user from an organization */ + "orgs/block-user": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Unblock a user from an organization */ + "orgs/unblock-user": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List code scanning alerts for an organization + * @description Lists code scanning alerts for the default branch for all eligible repositories in an organization. Eligible repositories are repositories that are owned by organizations that you own or for which you are a security manager. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an owner or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + "code-scanning/list-alerts-for-org": { + parameters: { + query?: { + tool_name?: components["parameters"]["tool-name"]; + tool_guid?: components["parameters"]["tool-guid"]; + before?: components["parameters"]["pagination-before"]; + after?: components["parameters"]["pagination-after"]; + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + direction?: components["parameters"]["direction"]; + /** @description If specified, only code scanning alerts with this state will be returned. */ + state?: components["schemas"]["code-scanning-alert-state"]; + /** @description The property by which to sort the results. */ + sort?: "created" | "updated"; + /** @description If specified, only code scanning alerts with this severity will be returned. 
*/ + severity?: components["schemas"]["code-scanning-alert-severity"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["code-scanning-organization-alert-items"][]; + }; + }; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List codespaces for the organization + * @description Lists the codespaces associated with a specified organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/list-in-organization": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + codespaces: components["schemas"]["codespace"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Manage access control for organization codespaces + * @description Sets which users can access codespaces in an organization. This is synonymous with granting or revoking codespaces billing permissions for users according to the visibility. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/set-codespaces-billing": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Which users can access codespaces in the organization. `disabled` means that no users can access codespaces in the organization. + * @enum {string} + */ + visibility: + | "disabled" + | "selected_members" + | "all_members" + | "all_members_and_outside_collaborators"; + /** @description The usernames of the organization members who should have access to codespaces in the organization. Required when `visibility` is `selected_members`. The provided list of usernames will replace any existing value. */ + selected_usernames?: string[]; + }; + }; + }; + responses: { + /** @description Response when successfully modifying permissions. */ + 204: never; + 304: components["responses"]["not_modified"]; + /** @description Users are neither members nor collaborators of this organization. */ + 400: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Add users to Codespaces billing for an organization + * @description Codespaces for the specified users will be billed to the organization. + * To use this endpoint, the billing settings for the organization must be set to `selected_members`. For information on how to change this setting, please see [these docs](https://docs.github.com/rest/codespaces/organizations#manage-access-control-for-organization-codespaces). You must authenticate using an access token with the `admin:org` scope to use this endpoint.
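+   *
+   * #### Example adding users to Codespaces billing
+   *
+   * A sketch with `@octokit/core`, using the `selected_usernames` body field defined
+   * below. The route string is taken from the public GitHub REST docs, not from this
+   * file, so treat it as an assumption.
+   *
+   * ```
+   * import { Octokit } from "@octokit/core";
+   *
+   * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // `admin:org` scope
+   *
+   * octokit.request("POST /orgs/{org}/codespaces/billing/selected_users", {
+   *   org: "octo-org",
+   *   selected_usernames: ["octocat", "hubot"],
+   * });
+   * ```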
+ */ + "codespaces/set-codespaces-billing-users": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The usernames of the organization members whose codespaces be billed to the organization. */ + selected_usernames: string[]; + }; + }; + }; + responses: { + /** @description Response when successfully modifying permissions. */ + 204: never; + 304: components["responses"]["not_modified"]; + /** @description Users are neither members nor collaborators of this organization. */ + 400: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Removes users from Codespaces billing for an organization + * @description Codespaces for the specified users will no longer be billed to the organization. + * To use this endpoint, the billing settings for the organization must be set to `selected_members`. For information on how to change this setting please see [these docs].(https://docs.github.com/rest/codespaces/organizations#manage-access-control-for-organization-codespaces) You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/delete-codespaces-billing-users": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The usernames of the organization members whose codespaces should not be billed to the organization. */ + selected_usernames: string[]; + }; + }; + }; + responses: { + /** @description Response when successfully modifying permissions. */ + 204: never; + 304: components["responses"]["not_modified"]; + /** @description Users are neither members nor collaborators of this organization. */ + 400: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * List organization secrets + * @description Lists all Codespaces secrets available at the organization-level without revealing their encrypted values. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/list-org-secrets": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["codespaces-org-secret"][]; + }; + }; + }; + }; + }; + /** + * Get an organization public key + * @description Gets a public key for an organization, which is required in order to encrypt secrets. You need to encrypt the value of a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/get-org-public-key": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["codespaces-public-key"]; + }; + }; + }; + }; + /** + * Get an organization secret + * @description Gets an organization secret without revealing its encrypted value. 
+ * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/get-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["codespaces-org-secret"]; + }; + }; + }; + }; + /** + * Create or update an organization secret + * @description Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. 
+ * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "codespaces/create-or-update-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an organization public key](https://docs.github.com/rest/reference/codespaces#get-an-organization-public-key) endpoint. */ + encrypted_value?: string; + /** @description The ID of the key you used to encrypt the secret. */ + key_id?: string; + /** + * @description Which type of organization repositories have access to the organization secret. `selected` means only the repositories specified by `selected_repository_ids` can access the secret. + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** @description An array of repository IDs that can access the organization secret. You can only provide a list of repository IDs when the `visibility` is set to `selected`. You can manage the list of selected repositories using the [List selected repositories for an organization secret](https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-an-organization-secret), [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-an-organization-secret), and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/codespaces#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids?: number[]; + }; + }; + }; + responses: { + /** @description Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** @description Response when updating a secret */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Delete an organization secret + * @description Deletes an organization secret using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/delete-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List selected repositories for an organization secret + * @description Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
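+   *
+   * #### Example listing the selected repositories
+   *
+   * A sketch with `@octokit/core`; the route string follows the public GitHub REST docs
+   * for this operation and is an assumption here, as is the example secret name.
+   *
+   * ```
+   * import { Octokit } from "@octokit/core";
+   *
+   * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // `admin:org` scope
+   *
+   * octokit
+   *   .request("GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", {
+   *     org: "octo-org",
+   *     secret_name: "GH_CODESPACES_TOKEN",
+   *   })
+   *   .then(({ data }) =>
+   *     console.log(data.total_count, data.repositories.map((r) => r.full_name))
+   *   );
+   * ```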
+ */ + "codespaces/list-selected-repos-for-org-secret": { + parameters: { + query?: { + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + }; + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Set selected repositories for an organization secret + * @description Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/set-selected-repos-for-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. You can add and remove individual repositories using the [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-an-organization-secret) and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/codespaces#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids: number[]; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + /** @description Conflict when visibility type not set to selected */ + 409: never; + }; + }; + /** + * Add selected repository to an organization secret + * @description Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/add-selected-repo-to-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** @description No Content when repository was added to the selected list */ + 204: never; + 404: components["responses"]["not_found"]; + /** @description Conflict when visibility type is not set to selected */ + 409: never; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Remove selected repository from an organization secret + * @description Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
+ */ + "codespaces/remove-selected-repo-from-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** @description Response when repository was removed from the selected list */ + 204: never; + 404: components["responses"]["not_found"]; + /** @description Conflict when visibility type not set to selected */ + 409: never; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List Dependabot alerts for an organization + * @description Lists Dependabot alerts for an organization. + * + * To use this endpoint, you must be an owner or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have **Dependabot alerts** read permission to use this endpoint. + */ + "dependabot/list-alerts-for-org": { + parameters: { + query?: { + state?: components["parameters"]["dependabot-alert-comma-separated-states"]; + severity?: components["parameters"]["dependabot-alert-comma-separated-severities"]; + ecosystem?: components["parameters"]["dependabot-alert-comma-separated-ecosystems"]; + package?: components["parameters"]["dependabot-alert-comma-separated-packages"]; + scope?: components["parameters"]["dependabot-alert-scope"]; + sort?: components["parameters"]["dependabot-alert-sort"]; + direction?: components["parameters"]["direction"]; + before?: components["parameters"]["pagination-before"]; + after?: components["parameters"]["pagination-after"]; + first?: components["parameters"]["pagination-first"]; + last?: components["parameters"]["pagination-last"]; + per_page?: components["parameters"]["per-page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-alert-with-repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 400: components["responses"]["bad_request"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * List organization secrets + * @description Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + "dependabot/list-org-secrets": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["organization-dependabot-secret"][]; + }; + }; + }; + }; + }; + /** + * Get an organization public key + * @description Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. 
+ */ + "dependabot/get-org-public-key": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-public-key"]; + }; + }; + }; + }; + /** + * Get an organization secret + * @description Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + "dependabot/get-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-dependabot-secret"]; + }; + }; + }; + }; + /** + * Create or update an organization secret + * @description Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization + * permission to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. 
+ * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "dependabot/create-or-update-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an organization public key](https://docs.github.com/rest/reference/dependabot#get-an-organization-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + /** + * @description Which type of organization repositories have access to the organization secret. `selected` means only the repositories specified by `selected_repository_ids` can access the secret. + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** @description An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. You can manage the list of selected repositories using the [List selected repositories for an organization secret](https://docs.github.com/rest/reference/dependabot#list-selected-repositories-for-an-organization-secret), [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/dependabot#set-selected-repositories-for-an-organization-secret), and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/dependabot#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids?: (string | number)[]; + }; + }; + }; + responses: { + /** @description Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** @description Response when updating a secret */ + 204: never; + }; + }; + /** + * Delete an organization secret + * @description Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + "dependabot/delete-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List selected repositories for an organization secret + * @description Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. 
+ */ + "dependabot/list-selected-repos-for-org-secret": { + parameters: { + query?: { + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + }; + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + }; + /** + * Set selected repositories for an organization secret + * @description Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + "dependabot/set-selected-repos-for-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. You can add and remove individual repositories using the [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/dependabot#set-selected-repositories-for-an-organization-secret) and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/dependabot#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids: number[]; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Add selected repository to an organization secret + * @description Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + "dependabot/add-selected-repo-to-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** @description No Content when repository was added to the selected list */ + 204: never; + /** @description Conflict when visibility type is not set to selected */ + 409: never; + }; + }; + /** + * Remove selected repository from an organization secret + * @description Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. 
+ */ + "dependabot/remove-selected-repo-from-org-secret": { + parameters: { + path: { + org: components["parameters"]["org"]; + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** @description Response when repository was removed from the selected list */ + 204: never; + /** @description Conflict when visibility type not set to selected */ + 409: never; + }; + }; + /** + * Get list of conflicting packages during Docker migration for organization + * @description Lists all packages that are in a specific organization, are readable by the requesting user, and that encountered a conflict during a Docker migration. + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. + */ + "packages/list-docker-migration-conflicting-packages-for-organization": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** List public organization events */ + "activity/list-public-org-events": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** + * List failed organization invitations + * @description The return hash contains `failed_at` and `failed_reason` fields which represent the time at which the invitation failed and the reason for the failure. + */ + "orgs/list-failed-invitations": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["organization-invitation"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** List organization webhooks */ + "orgs/list-webhooks": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["org-hook"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create an organization webhook + * @description Here's how you can create a hook that posts payloads in JSON format: + */ + "orgs/create-webhook": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Must be passed as "web". */ + name: string; + /** @description Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/orgs#create-hook-config-params). 
*/ + config: { + url: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + /** @example "kdaigle" */ + username?: string; + /** @example "password" */ + password?: string; + }; + /** + * @description Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. Set to `["*"]` to receive all possible events. + * @default [ + * "push" + * ] + */ + events?: string[]; + /** + * @description Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. + * @default true + */ + active?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/orgs/octocat/hooks/1 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["org-hook"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get an organization webhook + * @description Returns a webhook configured in an organization. To get only the webhook `config` properties, see "[Get a webhook configuration for an organization](/rest/reference/orgs#get-a-webhook-configuration-for-an-organization)." + */ + "orgs/get-webhook": { + parameters: { + path: { + org: components["parameters"]["org"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["org-hook"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Delete an organization webhook */ + "orgs/delete-webhook": { + parameters: { + path: { + org: components["parameters"]["org"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Update an organization webhook + * @description Updates a webhook configured in an organization. When you update a webhook, the `secret` will be overwritten. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for an organization](/rest/reference/orgs#update-a-webhook-configuration-for-an-organization)." + */ + "orgs/update-webhook": { + parameters: { + path: { + org: components["parameters"]["org"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/orgs#update-hook-config-params). */ + config?: { + url: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + /** + * @description Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. + * @default [ + * "push" + * ] + */ + events?: string[]; + /** + * @description Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. 
+ * @default true + */ + active?: boolean; + /** @example "web" */ + name?: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["org-hook"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a webhook configuration for an organization + * @description Returns the webhook configuration for an organization. To get more information about the webhook, including the `active` state and `events`, use "[Get an organization webhook ](/rest/reference/orgs#get-an-organization-webhook)." + * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:read` permission. + */ + "orgs/get-webhook-config-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + }; + /** + * Update a webhook configuration for an organization + * @description Updates the webhook configuration for an organization. To update more information about the webhook, including the `active` state and `events`, use "[Update an organization webhook ](/rest/reference/orgs#update-an-organization-webhook)." + * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:write` permission. + */ + "orgs/update-webhook-config-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + }; + /** + * List deliveries for an organization webhook + * @description Returns a list of webhook deliveries for a webhook configured in an organization. + */ + "orgs/list-webhook-deliveries": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + cursor?: components["parameters"]["cursor"]; + redelivery?: boolean; + }; + path: { + org: components["parameters"]["org"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery-item"][]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a webhook delivery for an organization webhook + * @description Returns a delivery for a webhook configured in an organization. 
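A sketch pairing `orgs/list-webhook-deliveries` with the `orgs/redeliver-webhook-delivery` operation typed just below: scan recent deliveries for a hook and retry the failed ones. Field names such as `status_code` come from the `hook-delivery-item` schema referenced above; the routes follow the public REST docs.

```ts
// Sketch: redeliver failed deliveries for an organization webhook.
import { Octokit } from "@octokit/core";

async function redeliverFailed(octokit: Octokit, org: string, hookId: number) {
  const { data: deliveries } = await octokit.request(
    "GET /orgs/{org}/hooks/{hook_id}/deliveries",
    { org, hook_id: hookId, per_page: 50 }
  );
  for (const delivery of deliveries) {
    if (delivery.status_code >= 400) {
      // Queue a redelivery attempt; the API answers 202 Accepted.
      await octokit.request(
        "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts",
        { org, hook_id: hookId, delivery_id: delivery.id }
      );
    }
  }
}
```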
+ */ + "orgs/get-webhook-delivery": { + parameters: { + path: { + org: components["parameters"]["org"]; + hook_id: components["parameters"]["hook-id"]; + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery"]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Redeliver a delivery for an organization webhook + * @description Redeliver a delivery for a webhook configured in an organization. + */ + "orgs/redeliver-webhook-delivery": { + parameters: { + path: { + org: components["parameters"]["org"]; + hook_id: components["parameters"]["hook-id"]; + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Ping an organization webhook + * @description This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. + */ + "orgs/ping-webhook": { + parameters: { + path: { + org: components["parameters"]["org"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get an organization installation for the authenticated app + * @description Enables an authenticated GitHub App to find the organization's installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-org-installation": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["installation"]; + }; + }; + }; + }; + /** + * List app installations for an organization + * @description Lists all GitHub Apps in an organization. The installation count includes all GitHub Apps installed on repositories in the organization. You must be an organization owner with `admin:read` scope to use this endpoint. + */ + "orgs/list-app-installations": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + installations: components["schemas"]["installation"][]; + }; + }; + }; + }; + }; + /** + * Get interaction restrictions for an organization + * @description Shows which type of GitHub user can interact with this organization and when the restriction expires. If there is no restrictions, you will see an empty response. + */ + "interactions/get-restrictions-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": + | components["schemas"]["interaction-limit-response"] + | Record; + }; + }; + }; + }; + /** + * Set interaction restrictions for an organization + * @description Temporarily restricts interactions to a certain type of GitHub user in any public repository in the given organization. 
You must be an organization owner to set these restrictions. Setting the interaction limit at the organization level will overwrite any interaction limits that are set for individual repositories owned by the organization. + */ + "interactions/set-restrictions-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["interaction-limit"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["interaction-limit-response"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Remove interaction restrictions for an organization + * @description Removes all interaction restrictions from public repositories in the given organization. You must be an organization owner to remove restrictions. + */ + "interactions/remove-restrictions-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List pending organization invitations + * @description The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, or `hiring_manager`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + */ + "orgs/list-pending-invitations": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + /** @description Filter invitations by their member role. */ + role?: + | "all" + | "admin" + | "direct_member" + | "billing_manager" + | "hiring_manager"; + /** @description Filter invitations by their invitation source. */ + invitation_source?: "all" | "member" | "scim"; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["organization-invitation"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create an organization invitation + * @description Invite people to an organization by using their GitHub user ID or their email address. In order to create invitations in an organization, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "orgs/create-invitation": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description **Required unless you provide `email`**. GitHub user ID for the person you are inviting. */ + invitee_id?: number; + /** @description **Required unless you provide `invitee_id`**. Email address of the person you are inviting, which can be an existing GitHub user. */ + email?: string; + /** + * @description The role for the new member. 
+ * * `admin` - Organization owners with full administrative rights to the organization and complete access to all repositories and teams. + * * `direct_member` - Non-owner organization members with ability to see other members and join teams by invitation. + * * `billing_manager` - Non-owner organization members with ability to manage the billing settings of your organization. + * @default direct_member + * @enum {string} + */ + role?: "admin" | "direct_member" | "billing_manager"; + /** @description Specify IDs for the teams you want to invite new members to. */ + team_ids?: number[]; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["organization-invitation"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Cancel an organization invitation + * @description Cancel an organization invitation. In order to cancel an organization invitation, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). + */ + "orgs/cancel-invitation": { + parameters: { + path: { + org: components["parameters"]["org"]; + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List organization invitation teams + * @description List all teams associated with an invitation. In order to see invitations in an organization, the authenticated user must be an organization owner. + */ + "orgs/list-invitation-teams": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List organization issues assigned to the authenticated user + * @description List issues in an organization assigned to the authenticated user. + * + * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/list-for-org": { + parameters: { + query?: { + /** @description Indicates which sorts of issues to return. `assigned` means issues assigned to you. `created` means issues created by you. `mentioned` means issues mentioning you. `subscribed` means issues you're subscribed to updates for. `all` or `repos` means all issues you can see, regardless of participation or creation. */ + filter?: + | "assigned" + | "created" + | "mentioned" + | "subscribed" + | "repos" + | "all"; + /** @description Indicates the state of the issues to return. 
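A sketch of the `orgs/create-invitation` operation defined above, inviting by email with an explicit role. The route string and `@octokit/core` usage follow the public REST docs; the email and team ids are caller-supplied placeholders.

```ts
// Sketch: invite someone to the org by email ("orgs/create-invitation").
import { Octokit } from "@octokit/core";

async function inviteByEmail(
  octokit: Octokit,
  org: string,
  email: string,
  teamIds: number[] = []
) {
  // Either `email` or `invitee_id` is required; this sketch uses email.
  return octokit.request("POST /orgs/{org}/invitations", {
    org,
    email,
    role: "direct_member", // "admin" | "direct_member" | "billing_manager"
    team_ids: teamIds,
  });
}
```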
*/ + state?: "open" | "closed" | "all"; + labels?: components["parameters"]["labels"]; + /** @description What to sort results by. */ + sort?: "created" | "updated" | "comments"; + direction?: components["parameters"]["direction"]; + since?: components["parameters"]["since"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["issue"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List organization members + * @description List all users who are members of an organization. If the authenticated user is also a member of this organization then both concealed and public members will be returned. + */ + "orgs/list-members": { + parameters: { + query?: { + /** @description Filter members returned in the list. `2fa_disabled` means that only members without [two-factor authentication](https://github.com/blog/1614-two-factor-authentication) enabled will be returned. This options is only available for organization owners. */ + filter?: "2fa_disabled" | "all"; + /** @description Filter members returned by their role. */ + role?: "all" | "admin" | "member"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Check organization membership for a user + * @description Check if a user is, publicly or privately, a member of the organization. + */ + "orgs/check-membership-for-user": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response if requester is an organization member and user is a member */ + 204: never; + /** @description Response if requester is not an organization member */ + 302: never; + /** @description Not Found if requester is an organization member and user is not a member */ + 404: never; + }; + }; + /** + * Remove an organization member + * @description Removing a user from this list will remove them from all teams and they will no longer have any access to the organization's repositories. + */ + "orgs/remove-member": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * List codespaces for a user in organization + * @description Lists the codespaces that a member of an organization has for repositories in that organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
+ */ + "codespaces/get-codespaces-for-user-in-org": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + codespaces: components["schemas"]["codespace"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Delete a codespace from the organization + * @description Deletes a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/delete-from-organization": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Stop a codespace for an organization user + * @description Stops a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/stop-in-organization": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Get organization membership for a user + * @description In order to get a user's membership with an organization, the authenticated user must be an organization member. The `state` parameter in the response can be used to identify the user's membership status. + */ + "orgs/get-membership-for-user": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["org-membership"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Set organization membership for a user + * @description Only authenticated organization owners can add a member to the organization or update the member's role. + * + * * If the authenticated user is _adding_ a member to the organization, the invited user will receive an email inviting them to the organization. The user's [membership status](https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user) will be `pending` until they accept the invitation. 
+ * + * * Authenticated users can _update_ a user's membership by passing the `role` parameter. If the authenticated user changes a member's role to `admin`, the affected user will receive an email notifying them that they've been made an organization owner. If the authenticated user changes an owner's role to `member`, no email will be sent. + * + * **Rate limits** + * + * To prevent abuse, the authenticated user is limited to 50 organization invitations per 24 hour period. If the organization is more than one month old or on a paid plan, the limit is 500 invitations per 24 hour period. + */ + "orgs/set-membership-for-user": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description The role to give the user in the organization. Can be one of: + * * `admin` - The user will become an owner of the organization. + * * `member` - The user will become a non-owner member of the organization. + * @default member + * @enum {string} + */ + role?: "admin" | "member"; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["org-membership"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Remove organization membership for a user + * @description In order to remove a user's membership with an organization, the authenticated user must be an organization owner. + * + * If the specified user is an active member of the organization, this will remove them from the organization. If the specified user has been invited to the organization, this will cancel their invitation. The specified user will receive an email notification in both cases. + */ + "orgs/remove-membership-for-user": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List organization migrations + * @description Lists the most recent migrations, including both exports (which can be started through the REST API) and imports (which cannot be started using the REST API). + * + * A list of `repositories` is only returned for export migrations. + */ + "migrations/list-for-org": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + /** @description Exclude attributes from the API response to improve performance */ + exclude?: "repositories"[]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["migration"][]; + }; + }; + }; + }; + /** + * Start an organization migration + * @description Initiates the generation of a migration archive. + */ + "migrations/start-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description A list of arrays indicating which repositories should be migrated. */ + repositories: string[]; + /** + * @description Indicates whether repositories should be locked (to prevent manipulation) while migrating data. 
+ * @default false + * @example true + */ + lock_repositories?: boolean; + /** + * @description Indicates whether metadata should be excluded and only git source should be included for the migration. + * @default false + */ + exclude_metadata?: boolean; + /** + * @description Indicates whether the repository git data should be excluded from the migration. + * @default false + */ + exclude_git_data?: boolean; + /** + * @description Indicates whether attachments should be excluded from the migration (to reduce migration archive file size). + * @default false + * @example true + */ + exclude_attachments?: boolean; + /** + * @description Indicates whether releases should be excluded from the migration (to reduce migration archive file size). + * @default false + * @example true + */ + exclude_releases?: boolean; + /** + * @description Indicates whether projects owned by the organization or users should be excluded. from the migration. + * @default false + * @example true + */ + exclude_owner_projects?: boolean; + /** + * @description Indicates whether this should only include organization metadata (repositories array should be empty and will ignore other flags). + * @default false + * @example true + */ + org_metadata_only?: boolean; + /** @description Exclude related items from being returned in the response in order to improve performance of the request. The array can include any of: `"repositories"`. */ + exclude?: "repositories"[]; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["migration"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get an organization migration status + * @description Fetches the status of a migration. + * + * The `state` of a migration can be one of the following values: + * + * * `pending`, which means the migration hasn't started yet. + * * `exporting`, which means the migration is in progress. + * * `exported`, which means the migration finished successfully. + * * `failed`, which means the migration failed. + */ + "migrations/get-status-for-org": { + parameters: { + query?: { + /** @description Exclude attributes from the API response to improve performance */ + exclude?: "repositories"[]; + }; + path: { + org: components["parameters"]["org"]; + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** + * @description * `pending`, which means the migration hasn't started yet. + * * `exporting`, which means the migration is in progress. + * * `exported`, which means the migration finished successfully. + * * `failed`, which means the migration failed. + */ + 200: { + content: { + "application/json": components["schemas"]["migration"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Download an organization migration archive + * @description Fetches the URL to a migration archive. + */ + "migrations/download-archive-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** @description Response */ + 302: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete an organization migration archive + * @description Deletes a previous migration archive. Migration archives are automatically deleted after seven days. 
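A sketch tying together `migrations/start-for-org` and `migrations/get-status-for-org` from above: start an export, then poll until the archive is ready. The polling interval and repository names are placeholders; the `id`/`state` fields come from the `migration` schema.

```ts
// Sketch: export org repositories and wait for the migration archive.
import { Octokit } from "@octokit/core";

async function exportRepos(octokit: Octokit, org: string, repositories: string[]) {
  const { data: migration } = await octokit.request("POST /orgs/{org}/migrations", {
    org,
    repositories,              // e.g. ["octo-org/hello-world"]
    lock_repositories: false,  // keep repos writable while exporting
    exclude_attachments: true, // smaller archive
  });
  // Poll the migration state: pending -> exporting -> exported (or failed).
  for (;;) {
    const { data: status } = await octokit.request(
      "GET /orgs/{org}/migrations/{migration_id}",
      { org, migration_id: migration.id }
    );
    if (status.state === "exported") return status;
    if (status.state === "failed") throw new Error(`Migration ${migration.id} failed`);
    await new Promise((resolve) => setTimeout(resolve, 30_000)); // wait 30s between checks
  }
}
```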
+ */ + "migrations/delete-archive-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Unlock an organization repository + * @description Unlocks a repository that was locked for migration. You should unlock each migrated repository and [delete them](https://docs.github.com/rest/repos/repos#delete-a-repository) when the migration is complete and you no longer need the source data. + */ + "migrations/unlock-repo-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + migration_id: components["parameters"]["migration-id"]; + repo_name: components["parameters"]["repo-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List repositories in an organization migration + * @description List all the repositories for this organization migration. + */ + "migrations/list-repos-for-org": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List outside collaborators for an organization + * @description List all users who are outside collaborators of an organization. + */ + "orgs/list-outside-collaborators": { + parameters: { + query?: { + /** @description Filter the list of outside collaborators. `2fa_disabled` means that only outside collaborators without [two-factor authentication](https://github.com/blog/1614-two-factor-authentication) enabled will be returned. */ + filter?: "2fa_disabled" | "all"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + /** + * Convert an organization member to outside collaborator + * @description When an organization member is converted to an outside collaborator, they'll only have access to the repositories that their current team membership allows. The user will no longer be a member of the organization. For more information, see "[Converting an organization member to an outside collaborator](https://docs.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)". Converting an organization member to an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." 
+ */ + "orgs/convert-member-to-outside-collaborator": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description When set to `true`, the request will be performed asynchronously. Returns a 202 status code when the job is successfully queued. + * @default false + */ + async?: boolean; + }; + }; + }; + responses: { + /** @description User is getting converted asynchronously */ + 202: { + content: { + "application/json": Record; + }; + }; + /** @description User was converted */ + 204: never; + /** @description Forbidden if user is the last owner of the organization, not a member of the organization, or if the enterprise enforces a policy for inviting outside collaborators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." */ + 403: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove outside collaborator from an organization + * @description Removing a user from this list will remove them from all the organization's repositories. + */ + "orgs/remove-outside-collaborator": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + /** @description Unprocessable Entity if user is a member of the organization */ + 422: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + }; + }; + /** + * List packages for an organization + * @description Lists packages in an organization readable by the user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + "packages/list-packages-for-organization": { + parameters: { + query: { + /** @description The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + visibility?: components["parameters"]["package-visibility"]; + /** @description Page number of the results to fetch. */ + page?: number; + /** @description The number of results per page (max 100). 
*/ + per_page?: number; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package"][]; + }; + }; + 400: components["responses"]["package_es_list_error"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Get a package for an organization + * @description Gets a specific package in an organization. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + "packages/get-package-for-organization": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package"]; + }; + }; + }; + }; + /** + * Delete a package for an organization + * @description Deletes an entire package in an organization. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `delete:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package you want to delete. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + "packages/delete-package-for-org": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restore a package for an organization + * @description Restores an entire package in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. 
If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `write:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package you want to restore. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + "packages/restore-package-for-org": { + parameters: { + query?: { + /** @description package token */ + token?: string; + }; + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List package versions for a package owned by an organization + * @description Lists package versions for a package owned by an organization. + * + * If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + "packages/get-all-package-versions-for-package-owned-by-org": { + parameters: { + query?: { + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + /** @description The state of the package, either active or deleted. */ + state?: "active" | "deleted"; + }; + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get a package version for an organization + * @description Gets a specific package version in an organization. + * + * You must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. 
For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + "packages/get-package-version-for-organization": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + org: components["parameters"]["org"]; + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"]; + }; + }; + }; + }; + /** + * Delete package version for an organization + * @description Deletes a specific package version in an organization. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `delete:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package whose version you want to delete. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + "packages/delete-package-version-for-org": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + org: components["parameters"]["org"]; + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restore package version for an organization + * @description Restores a specific package version in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `write:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. 
For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package whose version you want to restore. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + "packages/restore-package-version-for-org": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + org: components["parameters"]["org"]; + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List organization projects + * @description Lists the projects in an organization. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + "projects/list-for-org": { + parameters: { + query?: { + /** @description Indicates the state of the projects to return. */ + state?: "open" | "closed" | "all"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["project"][]; + }; + }; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Create an organization project + * @description Creates an organization project board. Returns a `410 Gone` status if projects are disabled in the organization or if the organization does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + "projects/create-for-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the project. */ + name: string; + /** @description The description of the project. */ + body?: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * List public organization members + * @description Members of an organization can choose to have their membership publicized or not. 
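A sketch built on the package-version operations typed above (`packages/get-all-package-versions-for-package-owned-by-org` and `packages/delete-package-version-for-org`): keep the most recent container versions and delete the rest. It assumes the default newest-first ordering of the versions endpoint and a token with `read:packages` plus `delete:packages`; the package name and retention count are placeholders.

```ts
// Sketch: prune old container versions of an org package.
import { Octokit } from "@octokit/core";

async function pruneContainerVersions(
  octokit: Octokit,
  org: string,
  packageName: string,
  keep: number
) {
  const { data: versions } = await octokit.request(
    "GET /orgs/{org}/packages/{package_type}/{package_name}/versions",
    { org, package_type: "container", package_name: packageName, per_page: 100 }
  );
  // Assumes versions arrive newest first; everything past the `keep` newest is deleted.
  for (const version of versions.slice(keep)) {
    await octokit.request(
      "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}",
      { org, package_type: "container", package_name: packageName, package_version_id: version.id }
    );
  }
}
```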
+ */ + "orgs/list-public-members": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + /** + * Check public organization membership for a user + * @description Check if the provided user is a public member of the organization. + */ + "orgs/check-public-membership-for-user": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response if user is a public member */ + 204: never; + /** @description Not Found if user is not a public member */ + 404: never; + }; + }; + /** + * Set public organization membership for the authenticated user + * @description The user can publicize their own membership. (A user cannot publicize the membership for another user.) + * + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + "orgs/set-public-membership-for-authenticated-user": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Remove public organization membership for the authenticated user + * @description Removes the public membership for the authenticated user from the specified organization, unless public visibility is enforced by default. + */ + "orgs/remove-public-membership-for-authenticated-user": { + parameters: { + path: { + org: components["parameters"]["org"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List organization repositories + * @description Lists repositories for the specified organization. + * + * **Note:** In order to see the `security_and_analysis` block for a repository you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + */ + "repos/list-for-org": { + parameters: { + query?: { + /** @description Specifies the types of repositories you want returned. */ + type?: "all" | "public" | "private" | "forks" | "sources" | "member"; + /** @description The property to sort the results by. */ + sort?: "created" | "updated" | "pushed" | "full_name"; + /** @description The order to sort by. Default: `asc` when using `full_name`, otherwise `desc`. 
*/ + direction?: "asc" | "desc"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + /** + * Create an organization repository + * @description Creates a new repository in the specified organization. The authenticated user must be a member of the organization. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository + */ + "repos/create-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the repository. */ + name: string; + /** @description A short description of the repository. */ + description?: string; + /** @description A URL with more information about the repository. */ + homepage?: string; + /** + * @description Whether the repository is private. + * @default false + */ + private?: boolean; + /** + * @description The visibility of the repository. + * @enum {string} + */ + visibility?: "public" | "private"; + /** + * @description Either `true` to enable issues for this repository or `false` to disable them. + * @default true + */ + has_issues?: boolean; + /** + * @description Either `true` to enable projects for this repository or `false` to disable them. **Note:** If you're creating a repository in an organization that has disabled repository projects, the default is `false`, and if you pass `true`, the API returns an error. + * @default true + */ + has_projects?: boolean; + /** + * @description Either `true` to enable the wiki for this repository or `false` to disable it. + * @default true + */ + has_wiki?: boolean; + /** + * @description Whether downloads are enabled. + * @default true + * @example true + */ + has_downloads?: boolean; + /** + * @description Either `true` to make this repo available as a template repository or `false` to prevent it. + * @default false + */ + is_template?: boolean; + /** @description The id of the team that will be granted access to this repository. This is only valid when creating a repository in an organization. */ + team_id?: number; + /** + * @description Pass `true` to create an initial commit with empty README. + * @default false + */ + auto_init?: boolean; + /** @description Desired language or platform [.gitignore template](https://github.com/github/gitignore) to apply. Use the name of the template without the extension. For example, "Haskell". */ + gitignore_template?: string; + /** @description Choose an [open source license template](https://choosealicense.com/) that best suits your needs, and then use the [license keyword](https://docs.github.com/articles/licensing-a-repository/#searching-github-by-license-type) as the `license_template` string. For example, "mit" or "mpl-2.0". */ + license_template?: string; + /** + * @description Either `true` to allow squash-merging pull requests, or `false` to prevent squash-merging. 
+ * @default true + */ + allow_squash_merge?: boolean; + /** + * @description Either `true` to allow merging pull requests with a merge commit, or `false` to prevent merging pull requests with merge commits. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Either `true` to allow rebase-merging pull requests, or `false` to prevent rebase-merging. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Either `true` to allow auto-merge on pull requests, or `false` to disallow auto-merge. + * @default false + */ + allow_auto_merge?: boolean; + /** + * @description Either `true` to allow automatically deleting head branches when pull requests are merged, or `false` to prevent automatic deletion. **The authenticated user must be an organization owner to set this property to `true`.** + * @default false + */ + delete_branch_on_merge?: boolean; + /** + * @deprecated + * @description Either `true` to allow squash-merge commits to use pull request title, or `false` to use commit message. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["repository"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get all organization repository rulesets + * @description Get all the repository rulesets for an organization. 
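+ *
+ * A minimal usage sketch (placeholder names), assuming `@octokit/core` and a token with sufficient scope:
+ *
+ * @example
+ * import { Octokit } from "@octokit/core";
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Page through the rulesets defined at the organization level.
+ * const { data: rulesets } = await octokit.request("GET /orgs/{org}/rulesets", {
+ *   org: "acme", // placeholder organization
+ *   per_page: 30,
+ * });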
+ */ + "repos/get-org-rulesets": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-ruleset"][]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Create an organization repository ruleset + * @description Create a repository ruleset for an organization. + */ + "repos/create-org-ruleset": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + /** @description Request body */ + requestBody: { + content: { + "application/json": { + /** @description The name of the ruleset. */ + name: string; + /** + * @description The target of the ruleset. + * @enum {string} + */ + target?: "branch" | "tag"; + enforcement: components["schemas"]["repository-rule-enforcement"]; + /** @description The actors that can bypass the rules in this ruleset */ + bypass_actors?: components["schemas"]["repository-ruleset-bypass-actor"][]; + conditions?: components["schemas"]["org-ruleset-conditions"]; + /** @description An array of rules within the ruleset. */ + rules?: components["schemas"]["repository-rule"][]; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["repository-ruleset"]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Get an organization repository ruleset + * @description Get a repository ruleset for an organization. + */ + "repos/get-org-ruleset": { + parameters: { + path: { + org: components["parameters"]["org"]; + /** @description The ID of the ruleset. */ + ruleset_id: number; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-ruleset"]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Update an organization repository ruleset + * @description Update a ruleset for an organization. + */ + "repos/update-org-ruleset": { + parameters: { + path: { + org: components["parameters"]["org"]; + /** @description The ID of the ruleset. */ + ruleset_id: number; + }; + }; + /** @description Request body */ + requestBody?: { + content: { + "application/json": { + /** @description The name of the ruleset. */ + name?: string; + /** + * @description The target of the ruleset. + * @enum {string} + */ + target?: "branch" | "tag"; + enforcement?: components["schemas"]["repository-rule-enforcement"]; + /** @description The actors that can bypass the rules in this ruleset */ + bypass_actors?: components["schemas"]["repository-ruleset-bypass-actor"][]; + conditions?: components["schemas"]["org-ruleset-conditions"]; + /** @description An array of rules within the ruleset. */ + rules?: components["schemas"]["repository-rule"][]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-ruleset"]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Delete an organization repository ruleset + * @description Delete a ruleset for an organization. 
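+ *
+ * Illustrative sketch (placeholder IDs), assuming an authenticated `@octokit/core` client:
+ *
+ * @example
+ * import { Octokit } from "@octokit/core";
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // A 204 response indicates the ruleset was deleted.
+ * await octokit.request("DELETE /orgs/{org}/rulesets/{ruleset_id}", {
+ *   org: "acme",
+ *   ruleset_id: 42,
+ * });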
+ */ + "repos/delete-org-ruleset": { + parameters: { + path: { + org: components["parameters"]["org"]; + /** @description The ID of the ruleset. */ + ruleset_id: number; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * List secret scanning alerts for an organization + * @description Lists secret scanning alerts for eligible repositories in an organization, from newest to oldest. + * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + "secret-scanning/list-alerts-for-org": { + parameters: { + query?: { + state?: components["parameters"]["secret-scanning-alert-state"]; + secret_type?: components["parameters"]["secret-scanning-alert-secret-type"]; + resolution?: components["parameters"]["secret-scanning-alert-resolution"]; + sort?: components["parameters"]["secret-scanning-alert-sort"]; + direction?: components["parameters"]["direction"]; + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + before?: components["parameters"]["secret-scanning-pagination-before-org-repo"]; + after?: components["parameters"]["secret-scanning-pagination-after-org-repo"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["organization-secret-scanning-alert"][]; + }; + }; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List security manager teams + * @description Lists teams that are security managers for an organization. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `read:org` scope. + * + * GitHub Apps must have the `administration` organization read permission to use this endpoint. + */ + "orgs/list-security-manager-teams": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-simple"][]; + }; + }; + }; + }; + /** + * Add a security manager team + * @description Adds a team as a security manager for an organization. For more information, see "[Managing security for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization) for an organization." + * + * To use this endpoint, you must be an administrator for the organization, and you must use an access token with the `write:org` scope. + * + * GitHub Apps must have the `administration` organization read-write permission to use this endpoint. 
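+ *
+ * Illustrative sketch (placeholder slugs), assuming `@octokit/core` and a token with the `write:org` scope mentioned above:
+ *
+ * @example
+ * import { Octokit } from "@octokit/core";
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Grant the security manager role to a team; expect 204, or 409 at the team limit.
+ * await octokit.request("PUT /orgs/{org}/security-managers/teams/{team_slug}", {
+ *   org: "acme",
+ *   team_slug: "appsec",
+ * });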
+ */ + "orgs/add-security-manager-team": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + /** @description The organization has reached the maximum number of security manager teams. */ + 409: never; + }; + }; + /** + * Remove a security manager team + * @description Removes the security manager role from a team for an organization. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization) team from an organization." + * + * To use this endpoint, you must be an administrator for the organization, and you must use an access token with the `admin:org` scope. + * + * GitHub Apps must have the `administration` organization read-write permission to use this endpoint. + */ + "orgs/remove-security-manager-team": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get GitHub Actions billing for an organization + * @description Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + "billing/get-github-actions-billing-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-billing-usage"]; + }; + }; + }; + }; + /** + * Get GitHub Packages billing for an organization + * @description Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + "billing/get-github-packages-billing-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["packages-billing-usage"]; + }; + }; + }; + }; + /** + * Get shared storage billing for an organization + * @description Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. 
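+ *
+ * Illustrative sketch (placeholder organization), assuming `@octokit/core` and a token with the scopes above:
+ *
+ * @example
+ * import { Octokit } from "@octokit/core";
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Returns the estimated paid and total storage usage for the month.
+ * const { data: storage } = await octokit.request(
+ *   "GET /orgs/{org}/settings/billing/shared-storage",
+ *   { org: "acme" }
+ * );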
+ */ + "billing/get-shared-storage-billing-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["combined-billing-usage"]; + }; + }; + }; + }; + /** + * List teams + * @description Lists all teams in an organization that are visible to the authenticated user. + */ + "teams/list": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Create a team + * @description To create a team, the authenticated user must be a member or owner of `{org}`. By default, organization members can create teams. Organization owners can limit team creation to organization owners. For more information, see "[Setting team creation permissions](https://docs.github.com/articles/setting-team-creation-permissions-in-your-organization)." + * + * When you create a new team, you automatically become a team maintainer without explicitly adding yourself to the optional array of `maintainers`. For more information, see "[About teams](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/about-teams)". + */ + "teams/create": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the team. */ + name: string; + /** @description The description of the team. */ + description?: string; + /** @description List GitHub IDs for organization members who will become team maintainers. */ + maintainers?: string[]; + /** @description The full name (e.g., "organization-name/repository-name") of repositories to add the team to. */ + repo_names?: string[]; + /** + * @description The level of privacy this team should have. The options are: + * **For a non-nested team:** + * * `secret` - only visible to organization owners and members of this team. + * * `closed` - visible to all members of this organization. + * Default: `secret` + * **For a parent or child team:** + * * `closed` - visible to all members of this organization. + * Default for child team: `closed` + * @enum {string} + */ + privacy?: "secret" | "closed"; + /** + * @description The notification setting the team has chosen. The options are: + * * `notifications_enabled` - team members receive notifications when the team is @mentioned. + * * `notifications_disabled` - no one receives notifications. + * Default: `notifications_enabled` + * @enum {string} + */ + notification_setting?: + | "notifications_enabled" + | "notifications_disabled"; + /** + * @description **Deprecated**. The permission that new repositories will be added to the team with when none is specified. + * @default pull + * @enum {string} + */ + permission?: "pull" | "push"; + /** @description The ID of a team to set as the parent team. 
*/ + parent_team_id?: number; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a team by name + * @description Gets a team using the team's `slug`. To create the `slug`, GitHub replaces special characters in the `name` string, changes all words to lowercase, and replaces spaces with a `-` separator. For example, `"My TEam Näme"` would become `my-team-name`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}`. + */ + "teams/get-by-name": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a team + * @description To delete a team, the authenticated user must be an organization owner or team maintainer. + * + * If you are an organization owner, deleting a parent team will delete all of its child teams as well. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}`. + */ + "teams/delete-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update a team + * @description To edit a team, the authenticated user must either be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}`. + */ + "teams/update-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The name of the team. */ + name?: string; + /** @description The description of the team. */ + description?: string; + /** + * @description The level of privacy this team should have. Editing teams without specifying this parameter leaves `privacy` intact. When a team is nested, the `privacy` for parent teams cannot be `secret`. The options are: + * **For a non-nested team:** + * * `secret` - only visible to organization owners and members of this team. + * * `closed` - visible to all members of this organization. + * **For a parent or child team:** + * * `closed` - visible to all members of this organization. + * @enum {string} + */ + privacy?: "secret" | "closed"; + /** + * @description The notification setting the team has chosen. Editing teams without specifying this parameter leaves `notification_setting` intact. The options are: + * * `notifications_enabled` - team members receive notifications when the team is @mentioned. + * * `notifications_disabled` - no one receives notifications. + * @enum {string} + */ + notification_setting?: + | "notifications_enabled" + | "notifications_disabled"; + /** + * @description **Deprecated**. The permission that new repositories will be added to the team with when none is specified. 
+ * @default pull + * @enum {string} + */ + permission?: "pull" | "push" | "admin"; + /** @description The ID of a team to set as the parent team. */ + parent_team_id?: number | null; + }; + }; + }; + responses: { + /** @description Response when the updated information already exists */ + 200: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List discussions + * @description List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions`. + */ + "teams/list-discussions-in-org": { + parameters: { + query?: { + direction?: components["parameters"]["direction"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + /** @description Pinned discussions only filter */ + pinned?: string; + }; + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["team-discussion"][]; + }; + }; + }; + }; + /** + * Create a discussion + * @description Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions`. + */ + "teams/create-discussion-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion post's title. */ + title: string; + /** @description The discussion post's body text. */ + body: string; + /** + * @description Private posts are only visible to team members, organization owners, and team maintainers. Public posts are visible to all members of the organization. Set to `true` to create a private post. + * @default false + */ + private?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + }; + /** + * Get a discussion + * @description Get a specific discussion on a team's page. 
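+ *
+ * **Note:** a minimal fetch sketch with placeholder values, assuming `@octokit/core` and a token with `read:discussion`.
+ *
+ * ```ts
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * const { data: discussion } = await octokit.request(
+ *   "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}",
+ *   { org: "acme", team_slug: "eng", discussion_number: 7 }
+ * );
+ * console.log(discussion.title);
+ * ```
+ *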
OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + "teams/get-discussion-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + }; + /** + * Delete a discussion + * @description Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + "teams/delete-discussion-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update a discussion + * @description Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + "teams/update-discussion-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The discussion post's title. */ + title?: string; + /** @description The discussion post's body text. */ + body?: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + }; + /** + * List discussion comments + * @description List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. 
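+ *
+ * A type-level sketch (illustrative): naming the comment type returned by this operation, assuming the surrounding interface is exported as `operations` in the usual openapi-typescript layout.
+ *
+ * @example
+ * // Hypothetical alias name; derived from the declaration below.
+ * type TeamDiscussionComment =
+ *   operations["teams/list-discussion-comments-in-org"]["responses"][200]["content"]["application/json"][number];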
+ */ + "teams/list-discussion-comments-in-org": { + parameters: { + query?: { + direction?: components["parameters"]["direction"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["team-discussion-comment"][]; + }; + }; + }; + }; + /** + * Create a discussion comment + * @description Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + "teams/create-discussion-comment-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion comment's body text. */ + body: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + }; + /** + * Get a discussion comment + * @description Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + "teams/get-discussion-comment-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + }; + /** + * Delete a discussion comment + * @description Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. 
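+ *
+ * Illustrative sketch (placeholder numbers), assuming an authenticated `@octokit/core` client with `write:discussion`:
+ *
+ * @example
+ * import { Octokit } from "@octokit/core";
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Expect a 204 response on success.
+ * await octokit.request(
+ *   "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}",
+ *   { org: "acme", team_slug: "eng", discussion_number: 7, comment_number: 2 }
+ * );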
+ */ + "teams/delete-discussion-comment-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update a discussion comment + * @description Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + "teams/update-discussion-comment-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + comment_number: components["parameters"]["comment-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion comment's body text. */ + body: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + }; + /** + * List reactions for a team discussion comment + * @description List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments/). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. + */ + "reactions/list-for-team-discussion-comment-in-org": { + parameters: { + query?: { + /** @description Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion comment. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + }; + }; + /** + * Create reaction for a team discussion comment + * @description Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. 
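+ *
+ * Illustrative sketch (placeholder values), assuming `@octokit/core` and a token with `write:discussion`:
+ *
+ * @example
+ * import { Octokit } from "@octokit/core";
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // 201 when the reaction is created; 200 if this reaction type already existed on the comment.
+ * await octokit.request(
+ *   "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions",
+ *   { org: "acme", team_slug: "eng", discussion_number: 7, comment_number: 2, content: "+1" }
+ * );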
+ */ + "reactions/create-for-team-discussion-comment-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + comment_number: components["parameters"]["comment-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion comment. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + responses: { + /** @description Response when the reaction type has already been added to this team discussion comment */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + }; + }; + /** + * Delete team discussion comment reaction + * @description **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "reactions/delete-for-team-discussion-comment": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + comment_number: components["parameters"]["comment-number"]; + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List reactions for a team discussion + * @description List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + "reactions/list-for-team-discussion-in-org": { + parameters: { + query?: { + /** @description Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + }; + }; + /** + * Create reaction for a team discussion + * @description Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). 
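+ *
+ * **Note:** the snippet below is an illustrative sketch with placeholder values, assuming `@octokit/core` and a token with `write:discussion`.
+ *
+ * ```ts
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // 201 when the reaction is created; 200 if this reaction type was already added.
+ * await octokit.request(
+ *   "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions",
+ *   { org: "acme", team_slug: "eng", discussion_number: 7, content: "hooray" }
+ * );
+ * ```
+ *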
OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + "reactions/create-for-team-discussion-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + }; + }; + /** + * Delete team discussion reaction + * @description **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "reactions/delete-for-team-discussion": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + discussion_number: components["parameters"]["discussion-number"]; + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List pending team invitations + * @description The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/invitations`. + */ + "teams/list-pending-invitations-in-org": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["organization-invitation"][]; + }; + }; + }; + }; + /** + * List team members + * @description Team members will include the members of child teams. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + "teams/list-members-in-org": { + parameters: { + query?: { + /** @description Filters members returned by their role in the team. 
*/ + role?: "member" | "maintainer" | "all"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + /** + * Get team membership for a user + * @description Team members will include the members of child teams. + * + * To get a user's membership with a team, the team must be visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/memberships/{username}`. + * + * **Note:** + * The response contains the `state` of the membership and the member's `role`. + * + * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). + */ + "teams/get-membership-for-user-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-membership"]; + }; + }; + /** @description if user has no team membership */ + 404: never; + }; + }; + /** + * Add or update team membership for a user + * @description Adds an organization member to a team. An authenticated organization owner or team maintainer can add organization members to a team. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * An organization owner can add someone who is not part of the team's organization to a team. When an organization owner adds someone to a team who is not an organization member, this endpoint will send an invitation to the person via email. This newly-created membership will be in the "pending" state until the person accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. + * + * If the user is already a member of the team, this endpoint will update the role of the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/memberships/{username}`. 
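+ *
+ * Illustrative sketch (placeholder names), assuming `@octokit/core` and a token held by an organization owner or team maintainer:
+ *
+ * @example
+ * import { Octokit } from "@octokit/core";
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Invite or update a member; the returned membership `state` may be "pending" until accepted.
+ * const { data: membership } = await octokit.request(
+ *   "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}",
+ *   { org: "acme", team_slug: "eng", username: "octocat", role: "maintainer" }
+ * );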
+ */ + "teams/add-or-update-membership-for-user-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + username: components["parameters"]["username"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description The role that this user should have in the team. + * @default member + * @enum {string} + */ + role?: "member" | "maintainer"; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-membership"]; + }; + }; + /** @description Forbidden if team synchronization is set up */ + 403: never; + /** @description Unprocessable Entity if you attempt to add an organization to a team */ + 422: never; + }; + }; + /** + * Remove team membership for a user + * @description To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + "teams/remove-membership-for-user-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + /** @description Forbidden if team synchronization is set up */ + 403: never; + }; + }; + /** + * List team projects + * @description Lists the organization projects for a team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects`. + */ + "teams/list-projects-in-org": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["team-project"][]; + }; + }; + }; + }; + /** + * Check team permissions for a project + * @description Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. 
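+ *
+ * **Note:** a minimal sketch with placeholder IDs, assuming `@octokit/core`; a 404 means the project is not managed by this team.
+ *
+ * ```ts
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * const { data: project } = await octokit.request(
+ *   "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}",
+ *   { org: "acme", team_slug: "eng", project_id: 1002604 }
+ * );
+ * ```
+ *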
+ * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + "teams/check-permissions-for-project-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-project"]; + }; + }; + /** @description Not Found if project is not managed by this team */ + 404: never; + }; + }; + /** + * Add or update team project permissions + * @description Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + "teams/add-or-update-project-permissions-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + project_id: components["parameters"]["project-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description The permission to grant to the team for this project. Default: the team's `permission` attribute will be used to determine what permission to grant the team on this project. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * @enum {string} + */ + permission?: "read" | "write" | "admin"; + } | null; + }; + }; + responses: { + /** @description Response */ + 204: never; + /** @description Forbidden if the project is not owned by the organization */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + }; + }; + /** + * Remove a project from a team + * @description Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. This endpoint removes the project from the team, but does not delete the project. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + "teams/remove-project-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List team repositories + * @description Lists a team's repositories visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos`. 
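+ *
+ * A type-level sketch (illustrative): the repository type returned by this operation, assuming the surrounding interface is exported as `operations` as in typical openapi-typescript output.
+ *
+ * @example
+ * // Hypothetical alias name; derived from the declaration below.
+ * type TeamRepo =
+ *   operations["teams/list-repos-in-org"]["responses"][200]["content"]["application/json"][number];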
+ */ + "teams/list-repos-in-org": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + /** + * Check team permissions for a repository + * @description Checks whether a team has `admin`, `push`, `maintain`, `triage`, or `pull` permission for a repository. Repositories inherited through a parent team will also be checked. + * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `application/vnd.github.v3.repository+json` accept header. + * + * If a team doesn't have permission for the repository, you will receive a `404 Not Found` response status. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + "teams/check-permissions-for-repo-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Alternative response with repository permissions */ + 200: { + content: { + "application/json": components["schemas"]["team-repository"]; + }; + }; + /** @description Response if team has permission for the repository. This is the response when the repository media type hasn't been provided in the Accept header. */ + 204: never; + /** @description Not Found if team does not have permission for the repository */ + 404: never; + }; + }; + /** + * Add or update team repository permissions + * @description To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + * + * For more information about the permission levels, see "[Repository permission levels for an organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)".
+ */ + "teams/add-or-update-repo-permissions-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description The permission to grant the team on this repository. We accept the following permissions to be set: `pull`, `triage`, `push`, `maintain`, `admin` and you can also specify a custom repository role name, if the owning organization has defined any. If no permission is specified, the team's `permission` attribute will be used to determine what permission to grant the team on this repository. + * @default push + */ + permission?: string; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Remove a repository from a team + * @description If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. This does not delete the repository, it just removes it from the team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + "teams/remove-repo-in-org": { + parameters: { + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List child teams + * @description Lists the child teams of the team specified by `{team_slug}`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/teams`. + */ + "teams/list-child-in-org": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** @description if child teams exist */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + }; + }; + /** + * Enable or disable a security feature for an organization + * @description Enables or disables the specified security feature for all eligible repositories in an organization. + * + * To use this endpoint, you must be an organization owner or be member of a team with the security manager role. + * A token with the 'write:org' scope is also required. + * + * GitHub Apps must have the `organization_administration:write` permission to use this endpoint. + * + * For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." 
+ */ + "orgs/enable-or-disable-security-product-on-all-org-repos": { + parameters: { + path: { + org: components["parameters"]["org"]; + security_product: components["parameters"]["security-product"]; + enablement: components["parameters"]["org-security-product-enablement"]; + }; + }; + responses: { + /** @description Action started */ + 204: never; + /** @description The action could not be taken due to an in progress enablement, or a policy is preventing enablement */ + 422: never; + }; + }; + /** + * Get a project card + * @description Gets information about a project card. + */ + "projects/get-card": { + parameters: { + path: { + card_id: components["parameters"]["card-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["project-card"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a project card + * @description Deletes a project card + */ + "projects/delete-card": { + parameters: { + path: { + card_id: components["parameters"]["card-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + /** @description Forbidden */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + errors?: string[]; + }; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Update an existing project card */ + "projects/update-card": { + parameters: { + path: { + card_id: components["parameters"]["card-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description The project card's note + * @example Update all gems + */ + note?: string | null; + /** + * @description Whether or not the card is archived + * @example false + */ + archived?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["project-card"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** Move a project card */ + "projects/move-card": { + parameters: { + path: { + card_id: components["parameters"]["card-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The position of the card in a column. Can be one of: `top`, `bottom`, or `after:` to place after the specified card. 
+ * @example bottom + */ + position: string; + /** + * @description The unique identifier of the column the card should be moved to + * @example 42 + */ + column_id?: number; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": Record<string, never>; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + /** @description Forbidden */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + errors?: { + code?: string; + message?: string; + resource?: string; + field?: string; + }[]; + }; + }; + }; + 422: components["responses"]["validation_failed"]; + /** @description Response */ + 503: { + content: { + "application/json": { + code?: string; + message?: string; + documentation_url?: string; + errors?: { + code?: string; + message?: string; + }[]; + }; + }; + }; + }; + }; + /** + * Get a project column + * @description Gets information about a project column. + */ + "projects/get-column": { + parameters: { + path: { + column_id: components["parameters"]["column-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["project-column"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a project column + * @description Deletes a project column. + */ + "projects/delete-column": { + parameters: { + path: { + column_id: components["parameters"]["column-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Update an existing project column */ + "projects/update-column": { + parameters: { + path: { + column_id: components["parameters"]["column-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Name of the project column + * @example Remaining tasks + */ + name: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["project-column"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * List project cards + * @description Lists the project cards in a project. + */ + "projects/list-cards": { + parameters: { + query?: { + /** @description Filters the project cards that are returned by the card's state.
*/ + archived_state?: "all" | "archived" | "not_archived"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + column_id: components["parameters"]["column-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["project-card"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Create a project card */ + "projects/create-card": { + parameters: { + path: { + column_id: components["parameters"]["column-id"]; + }; + }; + requestBody: { + content: { + "application/json": OneOf< + [ + { + /** + * @description The project card's note + * @example Update all gems + */ + note: string | null; + }, + { + /** + * @description The unique identifier of the content associated with the card + * @example 42 + */ + content_id: number; + /** + * @description The piece of content associated with the card + * @example PullRequest + */ + content_type: string; + }, + ] + >; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["project-card"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + /** @description Validation failed */ + 422: { + content: { + "application/json": + | components["schemas"]["validation-error"] + | components["schemas"]["validation-error-simple"]; + }; + }; + /** @description Response */ + 503: { + content: { + "application/json": { + code?: string; + message?: string; + documentation_url?: string; + errors?: { + code?: string; + message?: string; + }[]; + }; + }; + }; + }; + }; + /** Move a project column */ + "projects/move-column": { + parameters: { + path: { + column_id: components["parameters"]["column-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The position of the column in a project. Can be one of: `first`, `last`, or `after:` to place after the specified column. + * @example last + */ + position: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": Record<string, never>; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Get a project + * @description Gets a project by its `id`. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + "projects/get": { + parameters: { + path: { + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Delete a project + * @description Deletes a project board. Returns a `404 Not Found` status if projects are disabled.
+ */ + "projects/delete": { + parameters: { + path: { + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** @description Delete Success */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + /** @description Forbidden */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + errors?: string[]; + }; + }; + }; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** + * Update a project + * @description Updates a project board's information. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + "projects/update": { + parameters: { + path: { + project_id: components["parameters"]["project-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description Name of the project + * @example Week One Sprint + */ + name?: string; + /** + * @description Body of the project + * @example This project represents the sprint of the first week in January + */ + body?: string | null; + /** + * @description State of the project; either 'open' or 'closed' + * @example open + */ + state?: string; + /** + * @description The baseline permission that all organization members have on this project + * @enum {string} + */ + organization_permission?: "read" | "write" | "admin" | "none"; + /** @description Whether or not this project can be seen by everyone. */ + private?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + /** @description Forbidden */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + errors?: string[]; + }; + }; + }; + /** @description Not Found if the authenticated user does not have access to the project */ + 404: never; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * List project collaborators + * @description Lists the collaborators for an organization project. For a project, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. You must be an organization owner or a project `admin` to list collaborators. + */ + "projects/list-collaborators": { + parameters: { + query?: { + /** @description Filters the collaborators by their affiliation. `outside` means outside collaborators of a project that are not a member of the project's organization. `direct` means collaborators with permissions to a project, regardless of organization membership status. `all` means all collaborators the authenticated user can see. 
*/ + affiliation?: "outside" | "direct" | "all"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Add project collaborator + * @description Adds a collaborator to an organization project and sets their permission level. You must be an organization owner or a project `admin` to add a collaborator. + */ + "projects/add-collaborator": { + parameters: { + path: { + project_id: components["parameters"]["project-id"]; + username: components["parameters"]["username"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description The permission to grant the collaborator. + * @default write + * @example write + * @enum {string} + */ + permission?: "read" | "write" | "admin"; + } | null; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Remove user as a collaborator + * @description Removes a collaborator from an organization project. You must be an organization owner or a project `admin` to remove a collaborator. + */ + "projects/remove-collaborator": { + parameters: { + path: { + project_id: components["parameters"]["project-id"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get project permission for a user + * @description Returns the collaborator's permission level for an organization project. Possible values for the `permission` key: `admin`, `write`, `read`, `none`. You must be an organization owner or a project `admin` to review a user's permission level. + */ + "projects/get-permission-for-user": { + parameters: { + path: { + project_id: components["parameters"]["project-id"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["project-collaborator-permission"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List project columns + * @description Lists the project columns in a project. 
+ */ + "projects/list-columns": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["project-column"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Create a project column + * @description Creates a new project column. + */ + "projects/create-column": { + parameters: { + path: { + project_id: components["parameters"]["project-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Name of the project column + * @example Remaining tasks + */ + name: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["project-column"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Get rate limit status for the authenticated user + * @description **Note:** Accessing this endpoint does not count against your REST API rate limit. + * + * **Note:** The `rate` object is deprecated. If you're writing new API client code or updating existing code, you should use the `core` object instead of the `rate` object. The `core` object contains the same information that is present in the `rate` object. + */ + "rate-limit/get": { + responses: { + /** @description Response */ + 200: { + headers: { + "X-RateLimit-Limit": components["headers"]["x-rate-limit-limit"]; + "X-RateLimit-Remaining": components["headers"]["x-rate-limit-remaining"]; + "X-RateLimit-Reset": components["headers"]["x-rate-limit-reset"]; + }; + content: { + "application/json": components["schemas"]["rate-limit-overview"]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List repository required workflows + * @description Lists the required workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + "actions/list-repo-required-workflows": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + org: components["parameters"]["org"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + required_workflows: components["schemas"]["repo-required-workflow"][]; + }; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get a required workflow entity for a repository + * @description Gets a specific required workflow present in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. 
GitHub Apps must have the `actions:read` permission to use this endpoint. For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + "actions/get-repo-required-workflow": { + parameters: { + path: { + org: components["parameters"]["org"]; + repo: components["parameters"]["repo"]; + required_workflow_id_for_repo: components["parameters"]["repo-required-workflow-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repo-required-workflow"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get required workflow usage + * @description Gets the number of billable minutes used by a specific required workflow during the current billing cycle. + * + * Billable minutes only apply to required workflows running in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)." + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-repo-required-workflow-usage": { + parameters: { + path: { + org: components["parameters"]["org"]; + repo: components["parameters"]["repo"]; + required_workflow_id_for_repo: components["parameters"]["repo-required-workflow-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow-usage"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get a repository + * @description The `parent` and `source` objects are present when the repository is a fork. `parent` is the repository this repository was forked from, `source` is the ultimate source for the network. + * + * **Note:** In order to see the `security_and_analysis` block for a repository you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + */ + "repos/get": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["full-repository"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a repository + * @description Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required. + * + * If an organization owner has configured the organization to prevent members from deleting organization-owned + * repositories, you will get a `403 Forbidden` response. 
+ */ + "repos/delete": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 307: components["responses"]["temporary_redirect"]; + /** @description If an organization owner has configured the organization to prevent members from deleting organization-owned repositories, a member will get this response: */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Update a repository + * @description **Note**: To edit a repository's topics, use the [Replace all repository topics](https://docs.github.com/rest/reference/repos#replace-all-repository-topics) endpoint. + */ + "repos/update": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The name of the repository. */ + name?: string; + /** @description A short description of the repository. */ + description?: string; + /** @description A URL with more information about the repository. */ + homepage?: string; + /** + * @description Either `true` to make the repository private or `false` to make it public. Default: `false`. + * **Note**: You will get a `422` error if the organization restricts [changing repository visibility](https://docs.github.com/articles/repository-permission-levels-for-an-organization#changing-the-visibility-of-repositories) to organization owners and a non-owner tries to change the value of private. + * @default false + */ + private?: boolean; + /** + * @description The visibility of the repository. + * @enum {string} + */ + visibility?: "public" | "private"; + /** + * @description Specify which security and analysis features to enable or disable for the repository. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * For example, to enable GitHub Advanced Security, use this data in the body of the `PATCH` request: + * `{ "security_and_analysis": {"advanced_security": { "status": "enabled" } } }`. + * + * You can check which security and analysis features are currently enabled by using a `GET /repos/{owner}/{repo}` request. + */ + security_and_analysis?: { + /** @description Use the `status` property to enable or disable GitHub Advanced Security for this repository. For more information, see "[About GitHub Advanced Security](/github/getting-started-with-github/learning-about-github/about-github-advanced-security)." */ + advanced_security?: { + /** @description Can be `enabled` or `disabled`. */ + status?: string; + }; + /** @description Use the `status` property to enable or disable secret scanning for this repository. For more information, see "[About secret scanning](/code-security/secret-security/about-secret-scanning)." */ + secret_scanning?: { + /** @description Can be `enabled` or `disabled`. */ + status?: string; + }; + /** @description Use the `status` property to enable or disable secret scanning push protection for this repository. 
For more information, see "[Protecting pushes with secret scanning](/code-security/secret-scanning/protecting-pushes-with-secret-scanning)." */ + secret_scanning_push_protection?: { + /** @description Can be `enabled` or `disabled`. */ + status?: string; + }; + } | null; + /** + * @description Either `true` to enable issues for this repository or `false` to disable them. + * @default true + */ + has_issues?: boolean; + /** + * @description Either `true` to enable projects for this repository or `false` to disable them. **Note:** If you're creating a repository in an organization that has disabled repository projects, the default is `false`, and if you pass `true`, the API returns an error. + * @default true + */ + has_projects?: boolean; + /** + * @description Either `true` to enable the wiki for this repository or `false` to disable it. + * @default true + */ + has_wiki?: boolean; + /** + * @description Either `true` to make this repo available as a template repository or `false` to prevent it. + * @default false + */ + is_template?: boolean; + /** @description Updates the default branch for this repository. */ + default_branch?: string; + /** + * @description Either `true` to allow squash-merging pull requests, or `false` to prevent squash-merging. + * @default true + */ + allow_squash_merge?: boolean; + /** + * @description Either `true` to allow merging pull requests with a merge commit, or `false` to prevent merging pull requests with merge commits. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Either `true` to allow rebase-merging pull requests, or `false` to prevent rebase-merging. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Either `true` to allow auto-merge on pull requests, or `false` to disallow auto-merge. + * @default false + */ + allow_auto_merge?: boolean; + /** + * @description Either `true` to allow automatically deleting head branches when pull requests are merged, or `false` to prevent automatic deletion. + * @default false + */ + delete_branch_on_merge?: boolean; + /** + * @description Either `true` to always allow a pull request head branch that is behind its base branch to be updated even if it is not required to be up to date before merging, or false otherwise. + * @default false + */ + allow_update_branch?: boolean; + /** + * @deprecated + * @description Either `true` to allow squash-merge commits to use pull request title, or `false` to use commit message. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. 
+ * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description Whether to archive this repository. `false` will unarchive a previously archived repository. + * @default false + */ + archived?: boolean; + /** + * @description Either `true` to allow private forks, or `false` to prevent private forks. + * @default false + */ + allow_forking?: boolean; + /** + * @description Either `true` to require contributors to sign off on web-based commits, or `false` to not require contributors to sign off on web-based commits. + * @default false + */ + web_commit_signoff_required?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["full-repository"]; + }; + }; + 307: components["responses"]["temporary_redirect"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List artifacts for a repository + * @description Lists all artifacts for a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/list-artifacts-for-repo": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + /** @description Filters artifacts by exact match on their name field. */ + name?: string; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + artifacts: components["schemas"]["artifact"][]; + }; + }; + }; + }; + }; + /** + * Get an artifact + * @description Gets a specific artifact for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-artifact": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + artifact_id: components["parameters"]["artifact-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["artifact"]; + }; + }; + }; + }; + /** + * Delete an artifact + * @description Deletes an artifact for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. 
+ */ + "actions/delete-artifact": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + artifact_id: components["parameters"]["artifact-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Download an artifact + * @description Gets a redirect URL to download an archive for a repository. This URL expires after 1 minute. Look for `Location:` in + * the response header to find the URL for the download. The `:archive_format` must be `zip`. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/download-artifact": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + artifact_id: components["parameters"]["artifact-id"]; + archive_format: string; + }; + }; + responses: { + /** @description Response */ + 302: never; + 410: components["responses"]["gone"]; + }; + }; + /** + * Get GitHub Actions cache usage for a repository + * @description Gets GitHub Actions cache usage for a repository. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-actions-cache-usage": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-cache-usage-by-repository"]; + }; + }; + }; + }; + /** + * List GitHub Actions caches for a repository + * @description Lists the GitHub Actions caches for a repository. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-actions-cache-list": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + ref?: components["parameters"]["actions-cache-git-ref-full"]; + key?: components["parameters"]["actions-cache-key"]; + sort?: components["parameters"]["actions-cache-list-sort"]; + direction?: components["parameters"]["direction"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["actions-cache-list"]; + }; + }; + }; + }; + /** + * Delete GitHub Actions caches for a repository (using a cache key) + * @description Deletes one or more GitHub Actions caches for a repository, using a complete cache key. By default, all caches that match the provided key are deleted, but you can optionally provide a Git ref to restrict deletions to caches that match both the provided key and the Git ref. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. 
+ */ + "actions/delete-actions-cache-by-key": { + parameters: { + query: { + key: components["parameters"]["actions-cache-key-required"]; + ref?: components["parameters"]["actions-cache-git-ref-full"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-cache-list"]; + }; + }; + }; + }; + /** + * Delete a GitHub Actions cache for a repository (using a cache ID) + * @description Deletes a GitHub Actions cache for a repository, using a cache ID. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/delete-actions-cache-by-id": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + cache_id: components["parameters"]["cache-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get a job for a workflow run + * @description Gets a specific job in a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-job-for-workflow-run": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + job_id: components["parameters"]["job-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["job"]; + }; + }; + }; + }; + /** + * Download job logs for a workflow run + * @description Gets a redirect URL to download a plain text file of logs for a workflow job. This link expires after 1 minute. Look + * for `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can + * use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must + * have the `actions:read` permission to use this endpoint. + */ + "actions/download-job-logs-for-workflow-run": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + job_id: components["parameters"]["job-id"]; + }; + }; + responses: { + /** @description Response */ + 302: never; + }; + }; + /** + * Re-run a job from a workflow run + * @description Re-run a job and its dependent jobs in a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/re-run-job-for-workflow-run": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + job_id: components["parameters"]["job-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description Whether to enable debug logging for the re-run. 
+ * @default false + */ + enable_debug_logging?: boolean; + } | null; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Get the customization template for an OIDC subject claim for a repository + * @description Gets the customization template for an OpenID Connect (OIDC) subject claim. + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. GitHub Apps must have the `organization_administration:read` permission to use this endpoint. + */ + "actions/get-custom-oidc-sub-claim-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Status response */ + 200: { + content: { + "application/json": components["schemas"]["oidc-custom-sub-repo"]; + }; + }; + 400: components["responses"]["bad_request"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Set the customization template for an OIDC subject claim for a repository + * @description Sets the customization template and `opt-in` or `opt-out` flag for an OpenID Connect (OIDC) subject claim for a repository. + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/set-custom-oidc-sub-claim-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Whether to use the default template or not. If `true`, the `include_claim_keys` field is ignored. */ + use_default: boolean; + /** @description Array of unique strings. Each claim key can only contain alphanumeric characters and underscores. */ + include_claim_keys?: string[]; + }; + }; + }; + responses: { + /** @description Empty response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + 400: components["responses"]["bad_request"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * List repository organization secrets + * @description Lists all organization secrets shared with a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + "actions/list-repo-organization-secrets": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["actions-secret"][]; + }; + }; + }; + }; + }; + /** + * List repository organization variables + * @description Lists all organization variables shared with a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions_variables:read` repository permission to use this endpoint.
+ */ + "actions/list-repo-organization-variables": { + parameters: { + query?: { + per_page?: components["parameters"]["variables-per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + variables: components["schemas"]["actions-variable"][]; + }; + }; + }; + }; + }; + /** + * Get GitHub Actions permissions for a repository + * @description Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions and reusable workflows allowed to run in the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + "actions/get-github-actions-permissions-repository": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-repository-permissions"]; + }; + }; + }; + }; + /** + * Set GitHub Actions permissions for a repository + * @description Sets the GitHub Actions permissions policy for enabling GitHub Actions and allowed actions and reusable workflows in the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + "actions/set-github-actions-permissions-repository": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + enabled: components["schemas"]["actions-enabled"]; + allowed_actions?: components["schemas"]["allowed-actions"]; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get the level of access for workflows outside of the repository + * @description Gets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to private repositories. + * For more information, see "[Allowing access to components in a private repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-a-private-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. + */ + "actions/get-workflow-access-to-repository": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-workflow-access-to-repository"]; + }; + }; + }; + }; + /** + * Set the level of access for workflows outside of the repository + * @description Sets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to private repositories. 
+ * For more information, see "[Allowing access to components in a private repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-a-private-repository)". + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. + */ + "actions/set-workflow-access-to-repository": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["actions-workflow-access-to-repository"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get allowed actions and reusable workflows for a repository + * @description Gets the settings for selected actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + "actions/get-allowed-actions-repository": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + }; + /** + * Set allowed actions and reusable workflows for a repository + * @description Sets the actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + "actions/set-allowed-actions-repository": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get default workflow permissions for a repository + * @description Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, + * as well as if GitHub Actions can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. 
+ */ + "actions/get-github-actions-default-workflow-permissions-repository": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-get-default-workflow-permissions"]; + }; + }; + }; + }; + /** + * Set default workflow permissions for a repository + * @description Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, and sets if GitHub Actions + * can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + "actions/set-github-actions-default-workflow-permissions-repository": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["actions-set-default-workflow-permissions"]; + }; + }; + responses: { + /** @description Success response */ + 204: never; + /** @description Conflict response when changing a setting is prevented by the owning organization */ + 409: never; + }; + }; + /** + * List workflow runs for a required workflow + * @description List all workflow runs for a required workflow. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + "actions/list-required-workflow-runs": { + parameters: { + query?: { + actor?: components["parameters"]["actor"]; + branch?: components["parameters"]["workflow-run-branch"]; + event?: components["parameters"]["event"]; + status?: components["parameters"]["workflow-run-status"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + created?: components["parameters"]["created"]; + exclude_pull_requests?: components["parameters"]["exclude-pull-requests"]; + check_suite_id?: components["parameters"]["workflow-run-check-suite-id"]; + head_sha?: components["parameters"]["workflow-run-head-sha"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + required_workflow_id_for_repo: components["parameters"]["repo-required-workflow-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + workflow_runs: components["schemas"]["workflow-run"][]; + }; + }; + }; + }; + }; + /** + * List self-hosted runners for a repository + * @description Lists all self-hosted runners configured in a repository. 
You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + "actions/list-self-hosted-runners-for-repo": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + runners: components["schemas"]["runner"][]; + }; + }; + }; + }; + }; + /** + * List runner applications for a repository + * @description Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + "actions/list-runner-applications-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-application"][]; + }; + }; + }; + }; + /** + * Create configuration for a just-in-time runner for a repository + * @description Generates a configuration that can be passed to the runner application at startup. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + "actions/generate-runner-jitconfig-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the new runner. */ + name: string; + /** @description The ID of the runner group to register the runner to. */ + runner_group_id: number; + /** @description The names of the custom labels to add to the runner. **Minimum items**: 1. **Maximum items**: 100. */ + labels: string[]; + /** + * @description The working directory to be used for job execution, relative to the runner install directory. + * @default _work + */ + work_folder?: string; + }; + }; + }; + responses: { + 201: components["responses"]["actions_runner_jitconfig"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Create a registration token for a repository + * @description Returns a token that you can pass to the `config` script. The token expires after one hour. You must authenticate + * using an access token with the `repo` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token TOKEN + * ``` + */ + "actions/create-registration-token-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Create a remove token for a repository + * @description Returns a token that you can pass to remove a self-hosted runner from a repository. The token expires after one hour. + * You must authenticate using an access token with the `repo` scope to use this endpoint. 
+ * + * #### Example using remove token + * + * To remove your self-hosted runner from a repository, replace TOKEN with the remove token provided by this endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + "actions/create-remove-token-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Get a self-hosted runner for a repository + * @description Gets a specific self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/get-self-hosted-runner-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["runner"]; + }; + }; + }; + }; + /** + * Delete a self-hosted runner from a repository + * @description Forces the removal of a self-hosted runner from a repository. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `repo` + * scope to use this endpoint. + */ + "actions/delete-self-hosted-runner-from-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List labels for a self-hosted runner for a repository + * @description Lists all labels for a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/list-labels-for-self-hosted-runner-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Set custom labels for a self-hosted runner for a repository + * @description Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/set-custom-labels-for-self-hosted-runner-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + runner_id: components["parameters"]["runner-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to set for the runner. You can pass an empty array to remove all custom labels. */ + labels: string[]; + }; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Add custom labels to a self-hosted runner for a repository + * @description Add custom labels to a self-hosted runner configured in a repository. 
+ * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/add-custom-labels-to-self-hosted-runner-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + runner_id: components["parameters"]["runner-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to add to the runner. */ + labels: string[]; + }; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Remove all custom labels from a self-hosted runner for a repository + * @description Remove all custom labels from a self-hosted runner configured in a + * repository. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/remove-all-custom-labels-from-self-hosted-runner-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels_readonly"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove a custom label from a self-hosted runner for a repository + * @description Remove a custom label from a self-hosted runner configured + * in a repository. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/remove-custom-label-from-self-hosted-runner-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + runner_id: components["parameters"]["runner-id"]; + name: components["parameters"]["runner-label-name"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * List workflow runs for a repository + * @description Lists all workflow runs for a repository. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
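+ *
+ * **Illustrative example (sketch, not part of this spec):** consuming this operation's generated response type, assuming the file is exposed as the usual openapi-typescript `operations` interface; the import specifier below is a placeholder to adjust for your setup.
+ *
+ * ```
+ * // Assumed module path; point this at wherever the generated types are published.
+ * import type { operations } from "@octokit/openapi-types";
+ *
+ * type WorkflowRunsPage =
+ *   operations["actions/list-workflow-runs-for-repo"]["responses"][200]["content"]["application/json"];
+ *
+ * // Summarize one page of results using the `total_count` and `workflow_runs` fields defined below.
+ * function summarize(page: WorkflowRunsPage): string {
+ *   return `${page.total_count} run(s); most recent id: ${page.workflow_runs[0]?.id ?? "n/a"}`;
+ * }
+ * ```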
+ */ + "actions/list-workflow-runs-for-repo": { + parameters: { + query?: { + actor?: components["parameters"]["actor"]; + branch?: components["parameters"]["workflow-run-branch"]; + event?: components["parameters"]["event"]; + status?: components["parameters"]["workflow-run-status"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + created?: components["parameters"]["created"]; + exclude_pull_requests?: components["parameters"]["exclude-pull-requests"]; + check_suite_id?: components["parameters"]["workflow-run-check-suite-id"]; + head_sha?: components["parameters"]["workflow-run-head-sha"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + workflow_runs: components["schemas"]["workflow-run"][]; + }; + }; + }; + }; + }; + /** + * Get a workflow run + * @description Gets a specific workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-workflow-run": { + parameters: { + query?: { + exclude_pull_requests?: components["parameters"]["exclude-pull-requests"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow-run"]; + }; + }; + }; + }; + /** + * Delete a workflow run + * @description Delete a specific workflow run. Anyone with write access to the repository can use this endpoint. If the repository is + * private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:write` permission to use + * this endpoint. + */ + "actions/delete-workflow-run": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get the review history for a workflow run + * @description Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-reviews-for-run": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["environment-approvals"][]; + }; + }; + }; + }; + /** + * Approve a workflow run for a fork pull request + * @description Approves a workflow run for a pull request from a public fork of a first time contributor. For more information, see ["Approving workflow runs from public forks](https://docs.github.com/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. 
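+ *
+ * **Illustrative example (sketch, not part of this spec):** approving a queued fork run, assuming an `@octokit/core` client and the `POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve` route; the owner, repo, and `run_id` values are placeholders.
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const response = await octokit.request(
+ *   "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve",
+ *   { owner: "octo-org", repo: "octo-repo", run_id: 30433642 }
+ * );
+ * console.log(response.status); // 201 on success
+ * ```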
+ */ + "actions/approve-workflow-run": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List workflow run artifacts + * @description Lists artifacts for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/list-workflow-run-artifacts": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + artifacts: components["schemas"]["artifact"][]; + }; + }; + }; + }; + }; + /** + * Get a workflow run attempt + * @description Gets a specific workflow run attempt. Anyone with read access to the repository + * can use this endpoint. If the repository is private you must use an access token + * with the `repo` scope. GitHub Apps must have the `actions:read` permission to + * use this endpoint. + */ + "actions/get-workflow-run-attempt": { + parameters: { + query?: { + exclude_pull_requests?: components["parameters"]["exclude-pull-requests"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + attempt_number: components["parameters"]["attempt-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow-run"]; + }; + }; + }; + }; + /** + * List jobs for a workflow run attempt + * @description Lists jobs for a specific workflow run attempt. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + */ + "actions/list-jobs-for-workflow-run-attempt": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + attempt_number: components["parameters"]["attempt-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + jobs: components["schemas"]["job"][]; + }; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Download workflow run attempt logs + * @description Gets a redirect URL to download an archive of log files for a specific workflow run attempt. 
This link expires after + * 1 minute. Look for `Location:` in the response header to find the URL for the download. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/download-workflow-run-attempt-logs": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + attempt_number: components["parameters"]["attempt-number"]; + }; + }; + responses: { + /** @description Response */ + 302: never; + }; + }; + /** + * Cancel a workflow run + * @description Cancels a workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/cancel-workflow-run": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** @description Response */ + 202: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + 409: components["responses"]["conflict"]; + }; + }; + /** + * Review custom deployment protection rules for a workflow run + * @description Approve or reject custom deployment protection rules provided by a GitHub App for a workflow run. For more information, see "[Using environments for deployment](https://docs.github.com/actions/deployment/targeting-different-environments/using-environments-for-deployment)." + * + * **Note:** GitHub Apps can only review their own custom deployment protection rules. + * To approve or reject pending deployments that are waiting for review from a specific person or team, see [`POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments`](/rest/actions/workflow-runs#review-pending-deployments-for-a-workflow-run). + * + * GitHub Apps must have read and write permission for **Deployments** to use this endpoint. + */ + "actions/review-custom-gates-for-run": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + requestBody: { + content: { + "application/json": + | components["schemas"]["review-custom-gates-comment-required"] + | components["schemas"]["review-custom-gates-state-required"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List jobs for a workflow run + * @description Lists jobs for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + */ + "actions/list-jobs-for-workflow-run": { + parameters: { + query?: { + /** @description Filters jobs by their `completed_at` timestamp. `latest` returns jobs from the most recent execution of the workflow run. `all` returns all jobs for a workflow run, including from old executions of the workflow run. 
*/ + filter?: "latest" | "all"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + jobs: components["schemas"]["job"][]; + }; + }; + }; + }; + }; + /** + * Download workflow run logs + * @description Gets a redirect URL to download an archive of log files for a workflow run. This link expires after 1 minute. Look for + * `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can use + * this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have + * the `actions:read` permission to use this endpoint. + */ + "actions/download-workflow-run-logs": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** @description Response */ + 302: never; + }; + }; + /** + * Delete workflow run logs + * @description Deletes all logs for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/delete-workflow-run-logs": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Get pending deployments for a workflow run + * @description Get all deployment environments for a workflow run that are waiting for protection rules to pass. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-pending-deployments-for-run": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["pending-deployment"][]; + }; + }; + }; + }; + /** + * Review pending deployments for a workflow run + * @description Approve or reject pending deployments that are waiting on approval by a required reviewer. + * + * Required reviewers with read access to the repository contents and deployments can use this endpoint. Required reviewers must authenticate using an access token with the `repo` scope to use this endpoint. 
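+ *
+ * **Illustrative example (sketch, not part of this spec):** approving the environments that are waiting for review, assuming an `@octokit/core` client; the owner, repo, and id values are placeholders, while the body fields mirror the request schema below.
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * await octokit.request(
+ *   "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments",
+ *   {
+ *     owner: "octo-org",
+ *     repo: "octo-repo",
+ *     run_id: 30433642,
+ *     environment_ids: [161171787],
+ *     state: "approved",
+ *     comment: "Ship it!",
+ *   }
+ * );
+ * ```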
+ */ + "actions/review-pending-deployments-for-run": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The list of environment ids to approve or reject + * @example [ + * 161171787, + * 161171795 + * ] + */ + environment_ids: number[]; + /** + * @description Whether to approve or reject deployment to the specified environments. + * @example approved + * @enum {string} + */ + state: "approved" | "rejected"; + /** + * @description A comment to accompany the deployment review + * @example Ship it! + */ + comment: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment"][]; + }; + }; + }; + }; + /** + * Re-run a workflow + * @description Re-runs your workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/re-run-workflow": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description Whether to enable debug logging for the re-run. + * @default false + */ + enable_debug_logging?: boolean; + } | null; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + }; + }; + /** + * Re-run failed jobs from a workflow run + * @description Re-run all of the failed jobs and their dependent jobs in a workflow run using the `id` of the workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + "actions/re-run-workflow-failed-jobs": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description Whether to enable debug logging for the re-run. + * @default false + */ + enable_debug_logging?: boolean; + } | null; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + }; + }; + /** + * Get workflow run usage + * @description Gets the number of billable minutes and total run time for a specific workflow run. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
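+ *
+ * **Illustrative example (sketch, not part of this spec):** fetching usage for one run, assuming an `@octokit/core` client and the `GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing` route; the identifiers are placeholders and the logged field is taken from the `workflow-run-usage` schema.
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const { data: usage } = await octokit.request(
+ *   "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing",
+ *   { owner: "octo-org", repo: "octo-repo", run_id: 30433642 }
+ * );
+ * // `run_duration_ms` is optional in the schema, so guard before converting to minutes.
+ * console.log((usage.run_duration_ms ?? 0) / 60000);
+ * ```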
+ */ + "actions/get-workflow-run-usage": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow-run-usage"]; + }; + }; + }; + }; + /** + * List repository secrets + * @description Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + "actions/list-repo-secrets": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["actions-secret"][]; + }; + }; + }; + }; + }; + /** + * Get a repository public key + * @description Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + "actions/get-repo-public-key": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-public-key"]; + }; + }; + }; + }; + /** + * Get a repository secret + * @description Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + "actions/get-repo-secret": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-secret"]; + }; + }; + }; + }; + /** + * Create or update a repository secret + * @description Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. 
+ * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "actions/create-or-update-repo-secret": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get a repository public key](https://docs.github.com/rest/reference/actions#get-a-repository-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + }; + }; + }; + responses: { + /** @description Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** @description Response when updating a secret */ + 204: never; + }; + }; + /** + * Delete a repository secret + * @description Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. 
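+ *
+ * **Illustrative example (sketch, not part of this spec):** removing a secret by name, assuming an `@octokit/core` client and the `DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}` route; all three identifiers are placeholders.
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * await octokit.request("DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
+ *   owner: "octo-org",
+ *   repo: "octo-repo",
+ *   secret_name: "NPM_TOKEN",
+ * }); // 204 on success
+ * ```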
+ */ + "actions/delete-repo-secret": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List repository variables + * @description Lists all repository variables. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions_variables:read` repository permission to use this endpoint. + */ + "actions/list-repo-variables": { + parameters: { + query?: { + per_page?: components["parameters"]["variables-per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + variables: components["schemas"]["actions-variable"][]; + }; + }; + }; + }; + }; + /** + * Create a repository variable + * @description Creates a repository variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions_variables:write` repository permission to use this endpoint. + */ + "actions/create-repo-variable": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the variable. */ + name: string; + /** @description The value of the variable. */ + value: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + }; + }; + /** + * Get a repository variable + * @description Gets a specific variable in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions_variables:read` repository permission to use this endpoint. + */ + "actions/get-repo-variable": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + name: components["parameters"]["variable-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-variable"]; + }; + }; + }; + }; + /** + * Delete a repository variable + * @description Deletes a repository variable using the variable name. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions_variables:write` repository permission to use this endpoint. + */ + "actions/delete-repo-variable": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + name: components["parameters"]["variable-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update a repository variable + * @description Updates a repository variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions_variables:write` repository permission to use this endpoint. 
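+ *
+ * **Illustrative example (sketch, not part of this spec):** changing a variable's value, assuming an `@octokit/core` client and the `PATCH /repos/{owner}/{repo}/actions/variables/{name}` route; the names and value are placeholders, and the body fields match the request schema below.
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * await octokit.request("PATCH /repos/{owner}/{repo}/actions/variables/{name}", {
+ *   owner: "octo-org",
+ *   repo: "octo-repo",
+ *   name: "DEPLOY_ENV", // path parameter identifying the variable
+ *   value: "staging",   // new value sent in the request body
+ * }); // 204 on success
+ * ```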
+ */ + "actions/update-repo-variable": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + name: components["parameters"]["variable-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the variable. */ + name?: string; + /** @description The value of the variable. */ + value?: string; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List repository workflows + * @description Lists the workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/list-repo-workflows": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + workflows: components["schemas"]["workflow"][]; + }; + }; + }; + }; + }; + /** + * Get a workflow + * @description Gets a specific workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-workflow": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow"]; + }; + }; + }; + }; + /** + * Disable a workflow + * @description Disables a workflow and sets the `state` of the workflow to `disabled_manually`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/disable-workflow": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Create a workflow dispatch event + * @description You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must configure your GitHub Actions workflow to run when the [`workflow_dispatch` webhook](/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch) event occurs. The `inputs` are configured in the workflow file. For more information about how to configure the `workflow_dispatch` event in the workflow file, see "[Events that trigger workflows](/actions/reference/events-that-trigger-workflows#workflow_dispatch)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. 
GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)." + */ + "actions/create-workflow-dispatch": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The git reference for the workflow. The reference can be a branch or tag name. */ + ref: string; + /** @description Input keys and values configured in the workflow file. The maximum number of properties is 10. Any default properties configured in the workflow file will be used when `inputs` are omitted. */ + inputs?: { + [key: string]: unknown; + }; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Enable a workflow + * @description Enables a workflow and sets the `state` of the workflow to `active`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/enable-workflow": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List workflow runs for a workflow + * @description List all workflow runs for a workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + */ + "actions/list-workflow-runs": { + parameters: { + query?: { + actor?: components["parameters"]["actor"]; + branch?: components["parameters"]["workflow-run-branch"]; + event?: components["parameters"]["event"]; + status?: components["parameters"]["workflow-run-status"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + created?: components["parameters"]["created"]; + exclude_pull_requests?: components["parameters"]["exclude-pull-requests"]; + check_suite_id?: components["parameters"]["workflow-run-check-suite-id"]; + head_sha?: components["parameters"]["workflow-run-head-sha"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + workflow_runs: components["schemas"]["workflow-run"][]; + }; + }; + }; + }; + }; + /** + * Get workflow usage + * @description Gets the number of billable minutes used by a specific workflow during the current billing cycle. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. 
Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-workflow-usage": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow-usage"]; + }; + }; + }; + }; + /** + * List assignees + * @description Lists the [available assignees](https://docs.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) for issues in a repository. + */ + "issues/list-assignees": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Check if a user can be assigned + * @description Checks if a user has permission to be assigned to an issue in this repository. + * + * If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. + * + * Otherwise a `404` status code is returned. + */ + "issues/check-user-can-be-assigned": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + assignee: string; + }; + }; + responses: { + /** @description If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. */ + 204: never; + /** @description Otherwise a `404` status code is returned. */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * List all autolinks of a repository + * @description This returns a list of autolinks configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + "repos/list-autolinks": { + parameters: { + query?: { + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["autolink"][]; + }; + }; + }; + }; + /** + * Create an autolink reference for a repository + * @description Users with admin access to the repository can create an autolink. 
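+ *
+ * **Illustrative example (sketch, not part of this spec):** creating a `TICKET-` autolink, assuming an `@octokit/core` client and the `POST /repos/{owner}/{repo}/autolinks` route; the owner, repo, and template URL are placeholders, and the body fields mirror the request schema below.
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * const { data: autolink } = await octokit.request("POST /repos/{owner}/{repo}/autolinks", {
+ *   owner: "octo-org",
+ *   repo: "octo-repo",
+ *   key_prefix: "TICKET-",
+ *   url_template: "https://example.com/TICKET?query=<num>",
+ *   is_alphanumeric: false, // only match numeric references such as TICKET-123
+ * });
+ * console.log(autolink.id);
+ * ```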
+ */ + "repos/create-autolink": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description This prefix appended by certain characters will generate a link any time it is found in an issue, pull request, or commit. */ + key_prefix: string; + /** @description The URL must contain `<num>` for the reference number. `<num>` matches different characters depending on the value of `is_alphanumeric`. */ + url_template: string; + /** + * @description Whether this autolink reference matches alphanumeric characters. If true, the `<num>` parameter of the `url_template` matches alphanumeric characters `A-Z` (case insensitive), `0-9`, and `-`. If false, this autolink reference only matches numeric characters. + * @default true + */ + is_alphanumeric?: boolean; + }; + }; + }; + responses: { + /** @description response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/autolinks/1 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["autolink"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get an autolink reference of a repository + * @description This returns a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + "repos/get-autolink": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + autolink_id: components["parameters"]["autolink-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["autolink"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete an autolink reference from a repository + * @description This deletes a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + "repos/delete-autolink": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + autolink_id: components["parameters"]["autolink-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Enable automated security fixes + * @description Enables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/articles/configuring-automated-security-fixes)". + */ + "repos/enable-automated-security-fixes": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Disable automated security fixes + * @description Disables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/articles/configuring-automated-security-fixes)".
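+ *
+ * **Illustrative example (sketch, not part of this spec):** turning the feature off, assuming an `@octokit/core` client and the `DELETE /repos/{owner}/{repo}/automated-security-fixes` route; owner and repo are placeholders.
+ *
+ * ```
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * await octokit.request("DELETE /repos/{owner}/{repo}/automated-security-fixes", {
+ *   owner: "octo-org",
+ *   repo: "octo-repo",
+ * }); // 204 on success
+ * ```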
+ */ + "repos/disable-automated-security-fixes": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** List branches */ + "repos/list-branches": { + parameters: { + query?: { + /** @description Setting to `true` returns only protected branches. When set to `false`, only unprotected branches are returned. Omitting this parameter returns all branches. */ + protected?: boolean; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["short-branch"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Get a branch */ + "repos/get-branch": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["branch-with-protection"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get branch protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + "repos/get-branch-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["branch-protection"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Update branch protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Protecting a branch requires admin or owner permissions to the repository. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + * + * **Note**: The list of users, apps, and teams in total is limited to 100 items. + */ + "repos/update-branch-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Require status checks to pass before merging. Set to `null` to disable. */ + required_status_checks: { + /** @description Require branches to be up to date before merging. 
*/ + strict: boolean; + /** + * @deprecated + * @description **Deprecated**: The list of status checks to require in order to merge into this branch. If any of these checks have recently been set by a particular GitHub App, they will be required to come from that app in future for the branch to merge. Use `checks` instead of `contexts` for more fine-grained control. + */ + contexts: string[]; + /** @description The list of status checks to require in order to merge into this branch. */ + checks?: { + /** @description The name of the required check */ + context: string; + /** @description The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status. */ + app_id?: number; + }[]; + } | null; + /** @description Enforce all configured restrictions for administrators. Set to `true` to enforce required status checks for repository administrators. Set to `null` to disable. */ + enforce_admins: boolean | null; + /** @description Require at least one approving review on a pull request, before merging. Set to `null` to disable. */ + required_pull_request_reviews: { + /** @description Specify which users, teams, and apps can dismiss pull request reviews. Pass an empty `dismissal_restrictions` object to disable. User and team `dismissal_restrictions` are only available for organization-owned repositories. Omit this parameter for personal repositories. */ + dismissal_restrictions?: { + /** @description The list of user `login`s with dismissal access */ + users?: string[]; + /** @description The list of team `slug`s with dismissal access */ + teams?: string[]; + /** @description The list of app `slug`s with dismissal access */ + apps?: string[]; + }; + /** @description Set to `true` if you want to automatically dismiss approving reviews when someone pushes a new commit. */ + dismiss_stale_reviews?: boolean; + /** @description Blocks merging pull requests until [code owners](https://docs.github.com/articles/about-code-owners/) review them. */ + require_code_owner_reviews?: boolean; + /** @description Specify the number of reviewers required to approve pull requests. Use a number between 1 and 6 or 0 to not require reviewers. */ + required_approving_review_count?: number; + /** + * @description Whether the most recent push must be approved by someone other than the person who pushed it. Default: `false`. + * @default false + */ + require_last_push_approval?: boolean; + /** @description Allow specific users, teams, or apps to bypass pull request requirements. */ + bypass_pull_request_allowances?: { + /** @description The list of user `login`s allowed to bypass pull request requirements. */ + users?: string[]; + /** @description The list of team `slug`s allowed to bypass pull request requirements. */ + teams?: string[]; + /** @description The list of app `slug`s allowed to bypass pull request requirements. */ + apps?: string[]; + }; + } | null; + /** @description Restrict who can push to the protected branch. User, app, and team `restrictions` are only available for organization-owned repositories. Set to `null` to disable. 
*/ + restrictions: { + /** @description The list of user `login`s with push access */ + users: string[]; + /** @description The list of team `slug`s with push access */ + teams: string[]; + /** @description The list of app `slug`s with push access */ + apps?: string[]; + } | null; + /** @description Enforces a linear commit Git history, which prevents anyone from pushing merge commits to a branch. Set to `true` to enforce a linear commit history. Set to `false` to disable a linear commit Git history. Your repository must allow squash merging or rebase merging before you can enable a linear commit history. Default: `false`. For more information, see "[Requiring a linear commit history](https://docs.github.com/github/administering-a-repository/requiring-a-linear-commit-history)" in the GitHub Help documentation. */ + required_linear_history?: boolean; + /** @description Permits force pushes to the protected branch by anyone with write access to the repository. Set to `true` to allow force pushes. Set to `false` or `null` to block force pushes. Default: `false`. For more information, see "[Enabling force pushes to a protected branch](https://docs.github.com/github/administering-a-repository/enabling-force-pushes-to-a-protected-branch)" in the GitHub Help documentation." */ + allow_force_pushes?: boolean | null; + /** @description Allows deletion of the protected branch by anyone with write access to the repository. Set to `false` to prevent deletion of the protected branch. Default: `false`. For more information, see "[Enabling force pushes to a protected branch](https://docs.github.com/github/administering-a-repository/enabling-force-pushes-to-a-protected-branch)" in the GitHub Help documentation. */ + allow_deletions?: boolean; + /** @description If set to `true`, the `restrictions` branch protection settings which limits who can push will also block pushes which create new branches, unless the push is initiated by a user, team, or app which has the ability to push. Set to `true` to restrict new branch creation. Default: `false`. */ + block_creations?: boolean; + /** @description Requires all conversations on code to be resolved before a pull request can be merged into a branch that matches this rule. Set to `false` to disable. Default: `false`. */ + required_conversation_resolution?: boolean; + /** + * @description Whether to set the branch as read-only. If this is true, users will not be able to push to the branch. Default: `false`. + * @default false + */ + lock_branch?: boolean; + /** + * @description Whether users can pull changes from upstream when the branch is locked. Set to `true` to allow fork syncing. Set to `false` to prevent fork syncing. Default: `false`. + * @default false + */ + allow_fork_syncing?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Delete branch protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ */ + "repos/delete-branch-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Get admin branch protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + "repos/get-admin-branch-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-admin-enforced"]; + }; + }; + }; + }; + /** + * Set admin branch protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Adding admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + "repos/set-admin-branch-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-admin-enforced"]; + }; + }; + }; + }; + /** + * Delete admin branch protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removing admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + "repos/delete-admin-branch-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get pull request review protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ */ + "repos/get-pull-request-review-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-pull-request-review"]; + }; + }; + }; + }; + /** + * Delete pull request review protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + "repos/delete-pull-request-review-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Update pull request review protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating pull request review enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + */ + "repos/update-pull-request-review-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description Specify which users, teams, and apps can dismiss pull request reviews. Pass an empty `dismissal_restrictions` object to disable. User and team `dismissal_restrictions` are only available for organization-owned repositories. Omit this parameter for personal repositories. */ + dismissal_restrictions?: { + /** @description The list of user `login`s with dismissal access */ + users?: string[]; + /** @description The list of team `slug`s with dismissal access */ + teams?: string[]; + /** @description The list of app `slug`s with dismissal access */ + apps?: string[]; + }; + /** @description Set to `true` if you want to automatically dismiss approving reviews when someone pushes a new commit. */ + dismiss_stale_reviews?: boolean; + /** @description Blocks merging pull requests until [code owners](https://docs.github.com/articles/about-code-owners/) have reviewed. */ + require_code_owner_reviews?: boolean; + /** @description Specifies the number of reviewers required to approve pull requests. Use a number between 1 and 6 or 0 to not require reviewers. */ + required_approving_review_count?: number; + /** + * @description Whether the most recent push must be approved by someone other than the person who pushed it. 
Default: `false` + * @default false + */ + require_last_push_approval?: boolean; + /** @description Allow specific users, teams, or apps to bypass pull request requirements. */ + bypass_pull_request_allowances?: { + /** @description The list of user `login`s allowed to bypass pull request requirements. */ + users?: string[]; + /** @description The list of team `slug`s allowed to bypass pull request requirements. */ + teams?: string[]; + /** @description The list of app `slug`s allowed to bypass pull request requirements. */ + apps?: string[]; + }; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-pull-request-review"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get commit signature protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to check whether a branch requires signed commits. An enabled status of `true` indicates you must sign commits on this branch. For more information, see [Signing commits with GPG](https://docs.github.com/articles/signing-commits-with-gpg) in GitHub Help. + * + * **Note**: You must enable branch protection to require signed commits. + */ + "repos/get-commit-signature-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-admin-enforced"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create commit signature protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to require signed commits on a branch. You must enable branch protection to require signed commits. + */ + "repos/create-commit-signature-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-admin-enforced"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete commit signature protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to disable required signed commits on a branch. You must enable branch protection to require signed commits. + */ + "repos/delete-commit-signature-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get status checks protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + "repos/get-status-checks-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["status-check-policy"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove status check protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + "repos/remove-status-check-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update status check protection + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. + */ + "repos/update-status-check-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description Require branches to be up to date before merging. */ + strict?: boolean; + /** + * @deprecated + * @description **Deprecated**: The list of status checks to require in order to merge into this branch. If any of these checks have recently been set by a particular GitHub App, they will be required to come from that app in future for the branch to merge. 
Use `checks` instead of `contexts` for more fine-grained control. + */ + contexts?: string[]; + /** @description The list of status checks to require in order to merge into this branch. */ + checks?: { + /** @description The name of the required check */ + context: string; + /** @description The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status. */ + app_id?: number; + }[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["status-check-policy"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get all status check contexts + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + "repos/get-all-status-check-contexts": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Set status check contexts + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + "repos/set-status-check-contexts": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The name of the status checks */ + contexts: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Add status check contexts + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
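+ *
+ * A possible usage sketch (not part of the generated spec), assuming `@octokit/core`,
+ * a `GITHUB_TOKEN` environment variable, and a hypothetical `ci/build` context name:
+ * @example
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Append a required status check context to the branch protection rule.
+ * const { data: contexts } = await octokit.request(
+ *   "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+ *   { owner: "octocat", repo: "hello-world", branch: "main", contexts: ["ci/build"] }
+ * );
+ * console.log(contexts); // full list of required contexts after the update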
+ */ + "repos/add-status-check-contexts": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The name of the status checks */ + contexts: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Remove status check contexts + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + "repos/remove-status-check-contexts": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The name of the status checks */ + contexts: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists who has access to this protected branch. + * + * **Note**: Users, apps, and teams `restrictions` are only available for organization-owned repositories. + */ + "repos/get-access-restrictions": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["branch-restriction-policy"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Disables the ability to restrict who can push to this branch. 
+ */ + "repos/delete-access-restrictions": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get apps with access to the protected branch + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the GitHub Apps that have push access to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + "repos/get-apps-with-access-to-protected-branch": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Set app access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of apps that have push access to this branch. This removes all apps that previously had push access and grants push access to the new list of apps. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + "repos/set-app-access-restrictions": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The GitHub Apps that have push access to this branch. Use the slugified version of the app name. **Note**: The list of users, apps, and teams in total is limited to 100 items. */ + apps: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Add app access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified apps push access for this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. 
+ */ + "repos/add-app-access-restrictions": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The GitHub Apps that have push access to this branch. Use the slugified version of the app name. **Note**: The list of users, apps, and teams in total is limited to 100 items. */ + apps: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Remove app access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of an app to push to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + "repos/remove-app-access-restrictions": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The GitHub Apps that have push access to this branch. Use the slugified version of the app name. **Note**: The list of users, apps, and teams in total is limited to 100 items. */ + apps: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get teams with access to the protected branch + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the teams who have push access to this branch. The list includes child teams. + */ + "repos/get-teams-with-access-to-protected-branch": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Set team access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of teams that have push access to this branch. 
This removes all teams that previously had push access and grants push access to the new list of teams. Team restrictions include child teams. + */ + "repos/set-team-access-restrictions": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The slug values for teams */ + teams: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Add team access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified teams push access for this branch. You can also give push access to child teams. + */ + "repos/add-team-access-restrictions": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The slug values for teams */ + teams: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Remove team access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a team to push to this branch. You can also remove push access for child teams. + */ + "repos/remove-team-access-restrictions": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The slug values for teams */ + teams: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get users with access to the protected branch + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the people who have push access to this branch. 
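+ *
+ * A possible usage sketch (not part of the generated spec), assuming `@octokit/core`
+ * and placeholder repository values:
+ * @example
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // List the people who currently have push access to the protected branch.
+ * const { data: users } = await octokit.request(
+ *   "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
+ *   { owner: "octocat", repo: "hello-world", branch: "main" }
+ * );
+ * for (const user of users) console.log(user.login);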
+ */ + "repos/get-users-with-access-to-protected-branch": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Set user access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of people that have push access to this branch. This removes all people that previously had push access and grants push access to the new list of people. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/set-user-access-restrictions": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The username for users */ + users: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Add user access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified people push access for this branch. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. 
| + */ + "repos/add-user-access-restrictions": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The username for users */ + users: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Remove user access restrictions + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a user to push to this branch. + * + * | Type | Description | + * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames of the people who should no longer have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/remove-user-access-restrictions": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The username for users */ + users: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Rename a branch + * @description Renames a branch in a repository. + * + * **Note:** Although the API responds immediately, the branch rename process might take some extra time to complete in the background. You won't be able to push to the old branch name while the rename process is in progress. For more information, see "[Renaming a branch](https://docs.github.com/github/administering-a-repository/renaming-a-branch)". + * + * The permissions required to use this endpoint depends on whether you are renaming the default branch. + * + * To rename a non-default branch: + * + * * Users must have push access. + * * GitHub Apps must have the `contents:write` repository permission. + * + * To rename the default branch: + * + * * Users must have admin or owner permissions. + * * GitHub Apps must have the `administration:write` repository permission. + */ + "repos/rename-branch": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The new name of the branch. 
*/ + new_name: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["branch-with-protection"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Create a check run + * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Creates a new check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to create check runs. + * + * In a check suite, GitHub limits the number of check runs with the same name to 1000. Once these check runs exceed 1000, GitHub will start to automatically delete older check runs. + */ + "checks/create": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": OneOf< + [ + { + /** @enum {unknown} */ + status: "completed"; + [key: string]: unknown; + }, + { + /** @enum {unknown} */ + status?: "queued" | "in_progress"; + [key: string]: unknown; + }, + ] + >; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["check-run"]; + }; + }; + }; + }; + /** + * Get a check run + * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Gets a single check run using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + "checks/get": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + check_run_id: components["parameters"]["check-run-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["check-run"]; + }; + }; + }; + }; + /** + * Update a check run + * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Updates a check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to edit check runs. + */ + "checks/update": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + check_run_id: components["parameters"]["check-run-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the check. For example, "code-coverage". */ + name?: string; + /** @description The URL of the integrator's site that has the full details of the check. */ + details_url?: string; + /** @description A reference for the run on the integrator's system. */ + external_id?: string; + /** + * Format: date-time + * @description This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. 
+ */ + started_at?: string; + /** + * @description The current status. + * @enum {string} + */ + status?: "queued" | "in_progress" | "completed"; + /** + * @description **Required if you provide `completed_at` or a `status` of `completed`**. The final conclusion of the check. + * **Note:** Providing `conclusion` will automatically set the `status` parameter to `completed`. You cannot change a check run conclusion to `stale`, only GitHub can set this. + * @enum {string} + */ + conclusion?: + | "action_required" + | "cancelled" + | "failure" + | "neutral" + | "success" + | "skipped" + | "stale" + | "timed_out"; + /** + * Format: date-time + * @description The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + completed_at?: string; + /** @description Check runs can accept a variety of data in the `output` object, including a `title` and `summary` and can optionally provide descriptive details about the run. */ + output?: { + /** @description **Required**. */ + title?: string; + /** @description Can contain Markdown. */ + summary: string; + /** @description Can contain Markdown. */ + text?: string; + /** @description Adds information from your analysis to specific lines of code. Annotations are visible in GitHub's pull request UI. Annotations are visible in GitHub's pull request UI. The Checks API limits the number of annotations to a maximum of 50 per API request. To create more than 50 annotations, you have to make multiple requests to the [Update a check run](https://docs.github.com/rest/reference/checks#update-a-check-run) endpoint. Each time you update the check run, annotations are appended to the list of annotations that already exist for the check run. GitHub Actions are limited to 10 warning annotations and 10 error annotations per step. For details about annotations in the UI, see "[About status checks](https://docs.github.com/articles/about-status-checks#checks)". */ + annotations?: { + /** @description The path of the file to add an annotation to. For example, `assets/css/main.css`. */ + path: string; + /** @description The start line of the annotation. Line numbers start at 1. */ + start_line: number; + /** @description The end line of the annotation. */ + end_line: number; + /** @description The start column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values. Column numbers start at 1. */ + start_column?: number; + /** @description The end column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values. */ + end_column?: number; + /** + * @description The level of the annotation. + * @enum {string} + */ + annotation_level: "notice" | "warning" | "failure"; + /** @description A short description of the feedback for these lines of code. The maximum size is 64 KB. */ + message: string; + /** @description The title that represents the annotation. The maximum size is 255 characters. */ + title?: string; + /** @description Details about this annotation. The maximum size is 64 KB. */ + raw_details?: string; + }[]; + /** @description Adds images to the output displayed in the GitHub pull request UI. */ + images?: { + /** @description The alternative text for the image. */ + alt: string; + /** @description The full URL of the image. 
*/ + image_url: string; + /** @description A short image description. */ + caption?: string; + }[]; + }; + /** @description Possible further actions the integrator can perform, which a user may trigger. Each action includes a `label`, `identifier` and `description`. A maximum of three actions are accepted. See the [`actions` object](https://docs.github.com/rest/reference/checks#actions-object) description. To learn more about check runs and requested actions, see "[Check runs and requested actions](https://docs.github.com/rest/reference/checks#check-runs-and-requested-actions)." */ + actions?: { + /** @description The text to be displayed on a button in the web UI. The maximum size is 20 characters. */ + label: string; + /** @description A short explanation of what this action would do. The maximum size is 40 characters. */ + description: string; + /** @description A reference for the action on the integrator's system. The maximum size is 20 characters. */ + identifier: string; + }[]; + } & ( + | { + /** @enum {unknown} */ + status?: "completed"; + [key: string]: unknown; + } + | { + /** @enum {unknown} */ + status?: "queued" | "in_progress"; + [key: string]: unknown; + } + ); + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["check-run"]; + }; + }; + }; + }; + /** + * List check run annotations + * @description Lists annotations for a check run using the annotation `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get annotations for a check run. OAuth Apps and authenticated users must have the `repo` scope to get annotations for a check run in a private repository. + */ + "checks/list-annotations": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + check_run_id: components["parameters"]["check-run-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["check-annotation"][]; + }; + }; + }; + }; + /** + * Rerequest a check run + * @description Triggers GitHub to rerequest an existing check run, without pushing new code to a repository. This endpoint will trigger the [`check_run` webhook](https://docs.github.com/webhooks/event-payloads/#check_run) event with the action `rerequested`. When a check run is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check run, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. 
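+ *
+ * A possible usage sketch (not part of the generated spec) using the global `fetch`
+ * available in Node 18+, with placeholder values for the repository and check run id:
+ * @example
+ * // Ask GitHub to re-run an existing check run without pushing new commits.
+ * const res = await fetch(
+ *   "https://api.github.com/repos/octocat/hello-world/check-runs/42/rerequest",
+ *   {
+ *     method: "POST",
+ *     headers: {
+ *       Accept: "application/vnd.github+json",
+ *       Authorization: "Bearer " + process.env.GITHUB_TOKEN,
+ *     },
+ *   }
+ * );
+ * if (res.status !== 201) throw new Error("rerequest failed: " + res.status);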
+ */ + "checks/rerequest-run": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + check_run_id: components["parameters"]["check-run-id"]; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** @description Forbidden if the check run is not rerequestable or doesn't belong to the authenticated GitHub App */ + 403: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + 404: components["responses"]["not_found"]; + /** @description Validation error if the check run is not rerequestable */ + 422: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * Create a check suite + * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * By default, check suites are automatically created when you create a [check run](https://docs.github.com/rest/reference/checks#check-runs). You only need to use this endpoint for manually creating check suites when you've disabled automatic creation using "[Update repository preferences for check suites](https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites)". Your GitHub App must have the `checks:write` permission to create check suites. + */ + "checks/create-suite": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The sha of the head commit. */ + head_sha: string; + }; + }; + }; + responses: { + /** @description Response when the suite already exists */ + 200: { + content: { + "application/json": components["schemas"]["check-suite"]; + }; + }; + /** @description Response when the suite was created */ + 201: { + content: { + "application/json": components["schemas"]["check-suite"]; + }; + }; + }; + }; + /** + * Update repository preferences for check suites + * @description Changes the default automatic flow when creating check suites. By default, a check suite is automatically created each time code is pushed to a repository. When you disable the automatic creation of check suites, you can manually [Create a check suite](https://docs.github.com/rest/reference/checks#create-a-check-suite). You must have admin permissions in the repository to set preferences for check suites. + */ + "checks/set-suites-preferences": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Enables or disables automatic creation of CheckSuite events upon pushes to the repository. Enabled by default. */ + auto_trigger_checks?: { + /** @description The `id` of the GitHub App. */ + app_id: number; + /** + * @description Set to `true` to enable automatic creation of CheckSuite events upon pushes to the repository, or `false` to disable them. 
+ * @default true + */ + setting: boolean; + }[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["check-suite-preference"]; + }; + }; + }; + }; + /** + * Get a check suite + * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Gets a single check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + "checks/get-suite": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + check_suite_id: components["parameters"]["check-suite-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["check-suite"]; + }; + }; + }; + }; + /** + * List check runs in a check suite + * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + "checks/list-for-suite": { + parameters: { + query?: { + check_name?: components["parameters"]["check-name"]; + status?: components["parameters"]["status"]; + /** @description Filters check runs by their `completed_at` timestamp. `latest` returns the most recent check runs. */ + filter?: "latest" | "all"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + check_suite_id: components["parameters"]["check-suite-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + check_runs: components["schemas"]["check-run"][]; + }; + }; + }; + }; + }; + /** + * Rerequest a check suite + * @description Triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. This endpoint will trigger the [`check_suite` webhook](https://docs.github.com/webhooks/event-payloads/#check_suite) event with the action `rerequested`. When a check suite is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check suite, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. 
+ */ + "checks/rerequest-suite": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + check_suite_id: components["parameters"]["check-suite-id"]; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + }; + }; + /** + * List code scanning alerts for a repository + * @description Lists all open code scanning alerts for the default branch (usually `main` + * or `master`). You must use an access token with the `security_events` scope to use + * this endpoint with private repos, the `public_repo` scope also grants permission to read + * security events on public repos only. GitHub Apps must have the `security_events` read + * permission to use this endpoint. + * + * The response includes a `most_recent_instance` object. + * This provides details of the most recent instance of this alert + * for the default branch or for the specified Git reference + * (if you used `ref` in the request). + */ + "code-scanning/list-alerts-for-repo": { + parameters: { + query?: { + tool_name?: components["parameters"]["tool-name"]; + tool_guid?: components["parameters"]["tool-guid"]; + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + ref?: components["parameters"]["git-ref"]; + direction?: components["parameters"]["direction"]; + /** @description The property by which to sort the results. . `number` is deprecated - we recommend that you use `created` instead. */ + sort?: "created" | "number" | "updated"; + /** @description Set to `open`, `closed, `fixed`, or `dismissed` to list code scanning alerts in a specific state. */ + state?: components["schemas"]["code-scanning-alert-state"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-alert-items"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Get a code scanning alert + * @description Gets a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + "code-scanning/get-alert": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + alert_number: components["parameters"]["alert-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-alert"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Update a code scanning alert + * @description Updates the status of a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repositories. You can also use tokens with the `public_repo` scope for public repositories only. 
GitHub Apps must have the `security_events` write permission to use this endpoint. + */ + "code-scanning/update-alert": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + alert_number: components["parameters"]["alert-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + state: components["schemas"]["code-scanning-alert-set-state"]; + dismissed_reason?: components["schemas"]["code-scanning-alert-dismissed-reason"]; + dismissed_comment?: components["schemas"]["code-scanning-alert-dismissed-comment"]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-alert"]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_write"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List instances of a code scanning alert + * @description Lists all instances of the specified code scanning alert. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + "code-scanning/list-alert-instances": { + parameters: { + query?: { + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + ref?: components["parameters"]["git-ref"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + alert_number: components["parameters"]["alert-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-alert-instance"][]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List code scanning analyses for a repository + * @description Lists the details of all code scanning analyses for a repository, + * starting with the most recent. + * The response is paginated and you can use the `page` and `per_page` parameters + * to list the analyses you're interested in. + * By default 30 analyses are listed per page. + * + * The `rules_count` field in the response give the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * **Deprecation notice**: + * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The tool name can now be found inside the `tool` field. + */ + "code-scanning/list-recent-analyses": { + parameters: { + query?: { + tool_name?: components["parameters"]["tool-name"]; + tool_guid?: components["parameters"]["tool-guid"]; + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + /** @description The Git reference for the analyses you want to list. 
The `ref` for a branch can be formatted either as `refs/heads/<branch name>` or simply `<branch name>`. To reference a pull request use `refs/pull/<number>/merge`. */ + ref?: components["schemas"]["code-scanning-ref"]; + /** @description Filter analyses belonging to the same SARIF upload. */ + sarif_id?: components["schemas"]["code-scanning-analysis-sarif-id"]; + direction?: components["parameters"]["direction"]; + /** @description The property by which to sort the results. */ + sort?: "created"; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-analysis"][]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Get a code scanning analysis for a repository + * @description Gets a specified code scanning analysis for a repository. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * The default JSON response contains fields that describe the analysis. + * This includes the Git reference and commit SHA to which the analysis relates, + * the datetime of the analysis, the name of the code scanning tool, + * and the number of alerts. + * + * The `rules_count` field in the default response gives the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * If you use the Accept header `application/sarif+json`, + * the response contains the analysis data that was uploaded. + * This is formatted as + * [SARIF version 2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html). + */ + "code-scanning/get-analysis": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description The ID of the analysis, as returned from the `GET /repos/{owner}/{repo}/code-scanning/analyses` operation. */ + analysis_id: number; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-analysis"]; + "application/json+sarif": { + [key: string]: unknown; + }; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Delete a code scanning analysis from a repository + * @description Deletes a specified code scanning analysis from a repository. For + * private repositories, you must use an access token with the `repo` scope. For public repositories, + * you must use an access token with `public_repo` scope. + * GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * You can delete one analysis at a time. + * To delete a series of analyses, start with the most recent analysis and work backwards. + * Conceptually, the process is similar to the undo function in a text editor.
+ * + * When you list the analyses for a repository, + * one or more will be identified as deletable in the response: + * + * ``` + * "deletable": true + * ``` + * + * An analysis is deletable when it's the most recent in a set of analyses. + * Typically, a repository will have multiple sets of analyses + * for each enabled code scanning tool, + * where a set is determined by a unique combination of analysis values: + * + * * `ref` + * * `tool` + * * `category` + * + * If you attempt to delete an analysis that is not the most recent in a set, + * you'll get a 400 response with the message: + * + * ``` + * Analysis specified is not deletable. + * ``` + * + * The response from a successful `DELETE` operation provides you with + * two alternative URLs for deleting the next analysis in the set: + * `next_analysis_url` and `confirm_delete_url`. + * Use the `next_analysis_url` URL if you want to avoid accidentally deleting the final analysis + * in a set. This is a useful option if you want to preserve at least one analysis + * for the specified tool in your repository. + * Use the `confirm_delete_url` URL if you are content to remove all analyses for a tool. + * When you delete the last analysis in a set, the value of `next_analysis_url` and `confirm_delete_url` + * in the 200 response is `null`. + * + * As an example of the deletion process, + * let's imagine that you added a workflow that configured a particular code scanning tool + * to analyze the code in a repository. This tool has added 15 analyses: + * 10 on the default branch, and another 5 on a topic branch. + * You therefore have two separate sets of analyses for this tool. + * You've now decided that you want to remove all of the analyses for the tool. + * To do this you must make 15 separate deletion requests. + * To start, you must find an analysis that's identified as deletable. + * Each set of analyses always has one that's identified as deletable. + * Having found the deletable analysis for one of the two sets, + * delete this analysis and then continue deleting the next analysis in the set until they're all deleted. + * Then repeat the process for the second set. + * The procedure therefore consists of a nested loop: + * + * **Outer loop**: + * * List the analyses for the repository, filtered by tool. + * * Parse this list to find a deletable analysis. If found: + * + * **Inner loop**: + * * Delete the identified analysis. + * * Parse the response for the value of `confirm_delete_url` and, if found, use this in the next iteration. + * + * The above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the `confirm_delete_url` value. Alternatively, you could use the `next_analysis_url` value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely. + */ + "code-scanning/delete-analysis": { + parameters: { + query?: { + /** @description Allow deletion if the specified analysis is the last in a set. If you attempt to delete the final analysis in a set without setting this parameter to `true`, you'll get a 400 response with the message: `Analysis is last of its type and deletion may result in the loss of historical alert data. 
Please specify confirm_delete.` */ + confirm_delete?: string | null; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description The ID of the analysis, as returned from the `GET /repos/{owner}/{repo}/code-scanning/analyses` operation. */ + analysis_id: number; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-analysis-deletion"]; + }; + }; + 400: components["responses"]["bad_request"]; + 403: components["responses"]["code_scanning_forbidden_write"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List CodeQL databases for a repository + * @description Lists the CodeQL databases that are available in a repository. + * + * For private repositories, you must use an access token with the `security_events` scope. + * For public repositories, you can use tokens with the `security_events` or `public_repo` scope. + * GitHub Apps must have the `contents` read permission to use this endpoint. + */ + "code-scanning/list-codeql-databases": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-codeql-database"][]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Get a CodeQL database for a repository + * @description Gets a CodeQL database for a language in a repository. + * + * By default this endpoint returns JSON metadata about the CodeQL database. To + * download the CodeQL database binary content, set the `Accept` header of the request + * to [`application/zip`](https://docs.github.com/rest/overview/media-types), and make sure + * your HTTP client is configured to follow redirects or use the `Location` header + * to make a second request to get the redirect URL. + * + * For private repositories, you must use an access token with the `security_events` scope. + * For public repositories, you can use tokens with the `security_events` or `public_repo` scope. + * GitHub Apps must have the `contents` read permission to use this endpoint. + */ + "code-scanning/get-codeql-database": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description The language of the CodeQL database. */ + language: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-codeql-database"]; + }; + }; + 302: components["responses"]["found"]; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Get a code scanning default setup configuration + * @description Gets a code scanning default setup configuration. + * You must use an access token with the `repo` scope to use this endpoint with private repos or the `public_repo` + * scope for public repos. GitHub Apps must have the `repo` write permission to use this endpoint. 
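+ *
+ * For illustration, a small `fetch`-based sketch (an assumption, not part of the upstream description), using the documented `GET /repos/{owner}/{repo}/code-scanning/default-setup` route; `token` is a placeholder credential.
+ *
+ * ```typescript
+ * // Read the current default setup configuration; the response shape is described by the
+ * // "code-scanning-default-setup" schema in this file.
+ * async function getDefaultSetup(token: string, owner: string, repo: string): Promise<unknown> {
+ *   const res = await fetch(
+ *     `https://api.github.com/repos/${owner}/${repo}/code-scanning/default-setup`,
+ *     { headers: { Authorization: `Bearer ${token}`, Accept: "application/vnd.github+json" } },
+ *   );
+ *   if (!res.ok) throw new Error(`Unexpected status: ${res.status}`);
+ *   return res.json();
+ * }
+ * ```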
+ */ + "code-scanning/get-default-setup": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-default-setup"]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Update a code scanning default setup configuration + * @description Updates a code scanning default setup configuration. + * You must use an access token with the `repo` scope to use this endpoint with private repos or the `public_repo` + * scope for public repos. GitHub Apps must have the `repo` write permission to use this endpoint. + */ + "code-scanning/update-default-setup": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["code-scanning-default-setup-update"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** @description Response */ + 202: { + content: { + "application/json": components["schemas"]["code-scanning-default-setup-update-response"]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["code_scanning_conflict"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Upload an analysis as SARIF data + * @description Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint for private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * There are two places where you can upload code scanning results. + * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests)." + * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." + * + * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example: + * + * ``` + * gzip -c analysis-data.sarif | base64 -w0 + * ``` + *
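+ * The same preparation can be scripted. A minimal Node.js sketch (illustrative assumption, not part of the upstream description) that gzips a SARIF file, Base64-encodes it, and posts it to the documented `POST /repos/{owner}/{repo}/code-scanning/sarifs` route; `token`, `owner`, `repo`, `commitSha`, `ref`, and `sarifPath` are placeholders, and Node 18+'s global `fetch` is assumed.
+ *
+ * ```typescript
+ * import { readFileSync } from "node:fs";
+ * import { gzipSync } from "node:zlib";
+ *
+ * async function uploadSarif(
+ *   token: string,
+ *   owner: string,
+ *   repo: string,
+ *   commitSha: string,
+ *   ref: string,
+ *   sarifPath: string,
+ * ): Promise<{ id?: string; url?: string }> {
+ *   // gzip the SARIF document and encode it as a Base64 string, as required above.
+ *   const sarif = gzipSync(readFileSync(sarifPath)).toString("base64");
+ *   const res = await fetch(`https://api.github.com/repos/${owner}/${repo}/code-scanning/sarifs`, {
+ *     method: "POST",
+ *     headers: { Authorization: `Bearer ${token}`, Accept: "application/vnd.github+json" },
+ *     body: JSON.stringify({ commit_sha: commitSha, ref, sarif }),
+ *   });
+ *   if (res.status !== 202) throw new Error(`Unexpected status: ${res.status}`);
+ *   return res.json(); // "code-scanning-sarifs-receipt": id to poll via the sarifs/{sarif_id} endpoint
+ * }
+ * ```
+ *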
+ * SARIF upload supports a maximum number of entries per the following data objects, and an analysis will be rejected if any of these objects is above its maximum value. For some objects, there are additional values over which the entries will be ignored while keeping the most important entries whenever applicable. + * To get the most out of your analysis when it includes data above the supported limits, try to optimize the analysis configuration. For example, for the CodeQL tool, identify and remove the most noisy queries. + * + * + * | **SARIF data** | **Maximum values** | **Additional limits** | + * |----------------------------------|:------------------:|----------------------------------------------------------------------------------| + * | Runs per file | 20 | | + * | Results per run | 25,000 | Only the top 5,000 results will be included, prioritized by severity. | + * | Rules per run | 25,000 | | + * | Tool extensions per run | 100 | | + * | Thread Flow Locations per result | 10,000 | Only the top 1,000 Thread Flow Locations will be included, using prioritization. | + * | Location per result | 1,000 | Only 100 locations will be included. | + * | Tags per rule | 20 | Only 10 tags will be included. | + * + * + * The `202 Accepted` response includes an `id` value. + * You can use this ID to check the status of the upload by using it in the `/sarifs/{sarif_id}` endpoint. + * For more information, see "[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload)." + */ + "code-scanning/upload-sarif": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + commit_sha: components["schemas"]["code-scanning-analysis-commit-sha"]; + ref: components["schemas"]["code-scanning-ref"]; + sarif: components["schemas"]["code-scanning-analysis-sarif-file"]; + /** + * Format: uri + * @description The base directory used in the analysis, as it appears in the SARIF file. + * This property is used to convert file paths from absolute to relative, so that alerts can be mapped to their correct location in the repository. + * @example file:///github/workspace/ + */ + checkout_uri?: string; + /** + * Format: date-time + * @description The time that the analysis run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at?: string; + /** @description The name of the tool used to generate the code scanning analysis. If this parameter is not used, the tool name defaults to "API". If the uploaded SARIF contains a tool GUID, this will be available for filtering using the `tool_guid` parameter of operations such as `GET /repos/{owner}/{repo}/code-scanning/alerts`. */ + tool_name?: string; + /** + * @description Whether the SARIF file will be validated according to the code scanning specifications. + * This parameter is intended to help integrators ensure that the uploaded SARIF files are correctly rendered by code scanning. 
+ */ + validate?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 202: { + content: { + "application/json": components["schemas"]["code-scanning-sarifs-receipt"]; + }; + }; + /** @description Bad Request if the sarif field is invalid */ + 400: never; + 403: components["responses"]["code_scanning_forbidden_write"]; + 404: components["responses"]["not_found"]; + /** @description Payload Too Large if the sarif field is too large */ + 413: never; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Get information about a SARIF upload + * @description Gets information about a SARIF upload, including the status and the URL of the analysis that was uploaded so that you can retrieve details of the analysis. For more information, see "[Get a code scanning analysis for a repository](/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository)." You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + "code-scanning/get-sarif": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description The SARIF ID obtained after uploading. */ + sarif_id: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-sarifs-status"]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + /** @description Not Found if the sarif id does not match any upload */ + 404: never; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List CODEOWNERS errors + * @description List any syntax errors that are detected in the CODEOWNERS + * file. + * + * For more information about the correct CODEOWNERS syntax, + * see "[About code owners](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners)." + */ + "repos/codeowners-errors": { + parameters: { + query?: { + /** @description A branch, tag or commit name used to determine which version of the CODEOWNERS file to use. Default: the repository's default branch (e.g. `main`) */ + ref?: string; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["codeowners-errors"]; + }; + }; + /** @description Resource not found */ + 404: never; + }; + }; + /** + * List codespaces in a repository for the authenticated user + * @description Lists the codespaces associated to a specified repository and the authenticated user. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. 
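+ *
+ * As a non-normative sketch (an assumption for illustration), the documented `GET /repos/{owner}/{repo}/codespaces` route can be called with `fetch`; `token` is a placeholder credential with the `codespace` scope.
+ *
+ * ```typescript
+ * // List the authenticated user's codespaces for one repository and return their names.
+ * async function listRepoCodespaces(token: string, owner: string, repo: string): Promise<string[]> {
+ *   const res = await fetch(`https://api.github.com/repos/${owner}/${repo}/codespaces?per_page=30`, {
+ *     headers: { Authorization: `Bearer ${token}`, Accept: "application/vnd.github+json" },
+ *   });
+ *   if (!res.ok) throw new Error(`Unexpected status: ${res.status}`);
+ *   const body = (await res.json()) as { total_count: number; codespaces: Array<{ name: string }> };
+ *   return body.codespaces.map((c) => c.name);
+ * }
+ * ```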
+ */ + "codespaces/list-in-repository-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + codespaces: components["schemas"]["codespace"][]; + }; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Create a codespace in a repository + * @description Creates a codespace owned by the authenticated user in the specified repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/create-with-repo-for-authenticated-user": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Git ref (typically a branch name) for this codespace */ + ref?: string; + /** @description The requested location for a new codespace. Best efforts are made to respect this upon creation. Assigned by IP if not provided. */ + location?: string; + /** + * @description The geographic area for this codespace. If not specified, the value is assigned by IP. This property replaces `location`, which is being deprecated. + * @enum {string} + */ + geo?: "EuropeWest" | "SoutheastAsia" | "UsEast" | "UsWest"; + /** @description IP for location auto-detection when proxying a request */ + client_ip?: string; + /** @description Machine type to use for this codespace */ + machine?: string; + /** @description Path to devcontainer.json config to use for this codespace */ + devcontainer_path?: string; + /** @description Whether to authorize requested permissions from devcontainer.json */ + multi_repo_permissions_opt_out?: boolean; + /** @description Working directory for this codespace */ + working_directory?: string; + /** @description Time in minutes before codespace stops from inactivity */ + idle_timeout_minutes?: number; + /** @description Display name for this codespace */ + display_name?: string; + /** @description Duration in minutes after codespace has gone idle in which it will be deleted. Must be integer minutes between 0 and 43200 (30 days). 
*/ + retention_period_minutes?: number; + } | null; + }; + }; + responses: { + /** @description Response when the codespace was successfully created */ + 201: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + /** @description Response when the codespace creation partially failed but is being retried in the background */ + 202: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 400: components["responses"]["bad_request"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List devcontainer configurations in a repository for the authenticated user + * @description Lists the devcontainer.json files associated with a specified repository and the authenticated user. These files + * specify launchpoint configurations for codespaces created within the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + "codespaces/list-devcontainers-in-repository-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + devcontainers: { + path: string; + name?: string; + display_name?: string; + }[]; + }; + }; + }; + 400: components["responses"]["bad_request"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * List available machine types for a repository + * @description List the machine types available for a given repository based on its configuration. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_metadata` repository permission to use this endpoint. + */ + "codespaces/repo-machines-for-authenticated-user": { + parameters: { + query?: { + /** @description The location to check for available machines. Assigned by IP if not provided. */ + location?: string; + /** @description IP for location auto-detection when proxying a request */ + client_ip?: string; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + machines: components["schemas"]["codespace-machine"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Get default attributes for a codespace + * @description Gets the default attributes for codespaces created by the user with the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. 
+ * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/pre-flight-with-repo-for-authenticated-user": { + parameters: { + query?: { + /** @description The branch or commit to check for a default devcontainer path. If not specified, the default branch will be checked. */ + ref?: string; + /** @description An alternative IP for default location auto-detection, such as when proxying a request. */ + client_ip?: string; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response when a user is able to create codespaces from the repository. */ + 200: { + content: { + "application/json": { + billable_owner?: components["schemas"]["simple-user"]; + defaults?: { + location: string; + devcontainer_path: string | null; + }; + }; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List repository secrets + * @description Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have write access to the `codespaces_secrets` repository permission to use this endpoint. + */ + "codespaces/list-repo-secrets": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["repo-codespaces-secret"][]; + }; + }; + }; + }; + }; + /** + * Get a repository public key + * @description Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have write access to the `codespaces_secrets` repository permission to use this endpoint. + */ + "codespaces/get-repo-public-key": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["codespaces-public-key"]; + }; + }; + }; + }; + /** + * Get a repository secret + * @description Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have write access to the `codespaces_secrets` repository permission to use this endpoint. + */ + "codespaces/get-repo-secret": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repo-codespaces-secret"]; + }; + }; + }; + }; + /** + * Create or update a repository secret + * @description Creates or updates a repository secret with an encrypted value. 
Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have write access to the `codespaces_secrets` + * repository permission to use this endpoint. + * + * #### Example of encrypting a secret using Node.js + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * #### Example of encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example of encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example of encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "codespaces/create-or-update-repo-secret": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get a repository public key](https://docs.github.com/rest/reference/codespaces#get-a-repository-public-key) endpoint. 
*/ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + }; + }; + }; + responses: { + /** @description Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** @description Response when updating a secret */ + 204: never; + }; + }; + /** + * Delete a repository secret + * @description Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have write access to the `codespaces_secrets` repository permission to use this endpoint. + */ + "codespaces/delete-repo-secret": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List repository collaborators + * @description For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * Organization members with write, maintain, or admin privileges on the organization-owned repository can use this endpoint. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + "repos/list-collaborators": { + parameters: { + query?: { + /** @description Filter collaborators returned by their affiliation. `outside` means all outside collaborators of an organization-owned repository. `direct` means all collaborators with permissions to an organization-owned repository, regardless of organization membership status. `all` means all collaborators the authenticated user can see. */ + affiliation?: "outside" | "direct" | "all"; + /** @description Filter collaborators by the permissions they have on the repository. If not specified, all collaborators will be returned. */ + permission?: "pull" | "triage" | "push" | "maintain" | "admin"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["collaborator"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Check if a user is a repository collaborator + * @description For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. 
GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + "repos/check-collaborator": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response if user is a collaborator */ + 204: never; + /** @description Not Found if user is not a collaborator */ + 404: never; + }; + }; + /** + * Add a repository collaborator + * @description This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Adding an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." + * + * For more information on permission levels, see "[Repository permission levels for an organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". There are restrictions on which permissions can be granted to organization members when an organization base role is in place. In this case, the permission being given must be equal to or higher than the org base permission. Otherwise, the request will fail with: + * + * ``` + * Cannot assign {member} permission of {role name} + * ``` + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * The invitee will receive a notification that they have been invited to the repository, which they must accept or decline. They may do this via the notifications page, the email they receive, or by using the [repository invitations API endpoints](https://docs.github.com/rest/reference/repos#invitations). + * + * **Updating an existing collaborator's permission level** + * + * The endpoint can also be used to change the permissions of an existing collaborator without first removing and re-adding the collaborator. To change the permissions, use the same endpoint and pass a different `permission` parameter. The response will be a `204`, with no other indication that the permission level changed. + * + * **Rate limits** + * + * You are limited to sending 50 invitations to a repository per 24 hour period. Note there is no limit if you are inviting organization members to an organization repository. 
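+ *
+ * A minimal `fetch`-based sketch (illustrative assumption, not part of the upstream description), using the documented `PUT /repos/{owner}/{repo}/collaborators/{username}` route; `token` is a placeholder credential.
+ *
+ * ```typescript
+ * // Invite (or update) a collaborator; 201 returns the invitation, 204 means the permission was applied directly.
+ * async function addCollaborator(
+ *   token: string,
+ *   owner: string,
+ *   repo: string,
+ *   username: string,
+ *   permission: "pull" | "triage" | "push" | "maintain" | "admin" = "push",
+ * ): Promise<void> {
+ *   const res = await fetch(
+ *     `https://api.github.com/repos/${owner}/${repo}/collaborators/${username}`,
+ *     {
+ *       method: "PUT",
+ *       headers: {
+ *         Authorization: `Bearer ${token}`,
+ *         Accept: "application/vnd.github+json",
+ *         "Content-Type": "application/json",
+ *       },
+ *       body: JSON.stringify({ permission }),
+ *     },
+ *   );
+ *   if (res.status !== 201 && res.status !== 204) throw new Error(`Unexpected status: ${res.status}`);
+ * }
+ * ```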
+ */ + "repos/add-collaborator": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + username: components["parameters"]["username"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description The permission to grant the collaborator. **Only valid on organization-owned repositories.** We accept the following permissions to be set: `pull`, `triage`, `push`, `maintain`, `admin` and you can also specify a custom repository role name, if the owning organization has defined any. + * @default push + */ + permission?: string; + }; + }; + }; + responses: { + /** @description Response when a new invitation is created */ + 201: { + content: { + "application/json": components["schemas"]["repository-invitation"]; + }; + }; + /** + * @description Response when: + * - an existing collaborator is added as a collaborator + * - an organization member is added as an individual collaborator + * - an existing team member (whose team is also a repository collaborator) is added as an individual collaborator + */ + 204: never; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Remove a repository collaborator + * @description Removes a collaborator from a repository. + * + * To use this endpoint, the authenticated user must either be an administrator of the repository or target themselves for removal. + * + * This endpoint also: + * - Cancels any outstanding invitations + * - Unasigns the user from any issues + * - Removes access to organization projects if the user is not an organization member and is not a collaborator on any other organization repositories. + * - Unstars the repository + * - Updates access permissions to packages + * + * Removing a user as a collaborator has the following effects on forks: + * - If the user had access to a fork through their membership to this repository, the user will also be removed from the fork. + * - If the user had their own fork of the repository, the fork will be deleted. + * - If the user still has read access to the repository, open pull requests by this user from a fork will be denied. + * + * **Note**: A user can still have access to the repository through organization permissions like base repository permissions. + * + * Although the API responds immediately, the additional permission updates might take some extra time to complete in the background. + * + * For more information on fork permissions, see "[About permissions and visibility of forks](https://docs.github.com/pull-requests/collaborating-with-pull-requests/working-with-forks/about-permissions-and-visibility-of-forks)". + */ + "repos/remove-collaborator": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description No Content when collaborator was removed from the repository. */ + 204: never; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get repository permissions for a user + * @description Checks the repository permission of a collaborator. The possible repository permissions are `admin`, `write`, `read`, and `none`. 
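+ *
+ * A short illustrative sketch (an assumption, not part of the upstream description), using the documented `GET /repos/{owner}/{repo}/collaborators/{username}/permission` route; all arguments are placeholders.
+ *
+ * ```typescript
+ * // Returns "admin", "write", "read", or "none" for the given collaborator.
+ * const getCollaboratorPermission = async (
+ *   token: string,
+ *   owner: string,
+ *   repo: string,
+ *   username: string,
+ * ) => {
+ *   const res = await fetch(
+ *     `https://api.github.com/repos/${owner}/${repo}/collaborators/${username}/permission`,
+ *     { headers: { Authorization: `Bearer ${token}`, Accept: "application/vnd.github+json" } },
+ *   );
+ *   if (!res.ok) throw new Error(`Unexpected status: ${res.status}`);
+ *   return ((await res.json()) as { permission: "admin" | "write" | "read" | "none" }).permission;
+ * };
+ * ```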
+ */ + "repos/get-collaborator-permission-level": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description if user has admin permissions */ + 200: { + content: { + "application/json": components["schemas"]["repository-collaborator-permission"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List commit comments for a repository + * @description Commit Comments use [these custom media types](https://docs.github.com/rest/reference/repos#custom-media-types). You can read more about the use of media types in the API [here](https://docs.github.com/rest/overview/media-types/). + * + * Comments are ordered by ascending ID. + */ + "repos/list-commit-comments-for-repo": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["commit-comment"][]; + }; + }; + }; + }; + /** Get a commit comment */ + "repos/get-commit-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Delete a commit comment */ + "repos/delete-commit-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Update a commit comment */ + "repos/update-commit-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The contents of the comment */ + body: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List reactions for a commit comment + * @description List the reactions to a [commit comment](https://docs.github.com/rest/reference/repos#comments). + */ + "reactions/list-for-commit-comment": { + parameters: { + query?: { + /** @description Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a commit comment. 
*/ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create reaction for a commit comment + * @description Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with an HTTP `200` status means that you already added the reaction type to this commit comment. + */ + "reactions/create-for-commit-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the commit comment. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + responses: { + /** @description Reaction exists */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** @description Reaction created */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Delete a commit comment reaction + * @description **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). + */ + "reactions/delete-for-commit-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List commits + * @description **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. 
| + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "repos/list-commits": { + parameters: { + query?: { + /** @description SHA or branch to start listing commits from. Default: the repository’s default branch (usually `main`). */ + sha?: string; + /** @description Only commits containing this file path will be returned. */ + path?: string; + /** @description GitHub username or email address to use to filter by commit author. */ + author?: string; + /** @description GitHub username or email address to use to filter by commit committer. */ + committer?: string; + since?: components["parameters"]["since"]; + /** @description Only commits before this date will be returned. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + until?: string; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["commit"][]; + }; + }; + 400: components["responses"]["bad_request"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * List branches for HEAD commit + * @description Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Returns all branches where the given commit SHA is the HEAD, or latest commit for the branch. 
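+ *
+ * A minimal `fetch`-based sketch (illustrative assumption, not part of the upstream description), using the documented `GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head` route; `token` is a placeholder credential.
+ *
+ * ```typescript
+ * // Names of all branches whose HEAD is the given commit SHA.
+ * async function branchesWhereHead(
+ *   token: string,
+ *   owner: string,
+ *   repo: string,
+ *   commitSha: string,
+ * ): Promise<string[]> {
+ *   const res = await fetch(
+ *     `https://api.github.com/repos/${owner}/${repo}/commits/${commitSha}/branches-where-head`,
+ *     { headers: { Authorization: `Bearer ${token}`, Accept: "application/vnd.github+json" } },
+ *   );
+ *   if (!res.ok) throw new Error(`Unexpected status: ${res.status}`);
+ *   const branches = (await res.json()) as Array<{ name: string }>;
+ *   return branches.map((b) => b.name);
+ * }
+ * ```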
+ */ + "repos/list-branches-for-head-commit": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + commit_sha: components["parameters"]["commit-sha"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["branch-short"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List commit comments + * @description Use the `:commit_sha` to specify the commit that will have its comments listed. + */ + "repos/list-comments-for-commit": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + commit_sha: components["parameters"]["commit-sha"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["commit-comment"][]; + }; + }; + }; + }; + /** + * Create a commit comment + * @description Create a comment for a commit using its `:commit_sha`. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "repos/create-commit-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + commit_sha: components["parameters"]["commit-sha"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The contents of the comment. */ + body: string; + /** @description Relative path of the file to comment on. */ + path?: string; + /** @description Line index in the diff to comment on. */ + position?: number; + /** @description **Deprecated**. Use **position** parameter instead. Line number in the file to comment on. */ + line?: number; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/comments/1 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["commit-comment"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List pull requests associated with a commit + * @description Lists the merged pull request that introduced the commit to the repository. If the commit is not present in the default branch, will only return open pull requests associated with the commit. 
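+ *
+ * As a non-normative sketch (an assumption for illustration), the documented `GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls` route can be called with `fetch`; `token` is a placeholder credential.
+ *
+ * ```typescript
+ * // Pull request numbers associated with a commit (merged PRs, or open PRs if the commit
+ * // is not on the default branch).
+ * async function pullsForCommit(
+ *   token: string,
+ *   owner: string,
+ *   repo: string,
+ *   commitSha: string,
+ * ): Promise<number[]> {
+ *   const res = await fetch(
+ *     `https://api.github.com/repos/${owner}/${repo}/commits/${commitSha}/pulls`,
+ *     { headers: { Authorization: `Bearer ${token}`, Accept: "application/vnd.github+json" } },
+ *   );
+ *   if (!res.ok) throw new Error(`Unexpected status: ${res.status}`);
+ *   const pulls = (await res.json()) as Array<{ number: number }>;
+ *   return pulls.map((p) => p.number);
+ * }
+ * ```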
+ */ + "repos/list-pull-requests-associated-with-commit": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + commit_sha: components["parameters"]["commit-sha"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["pull-request-simple"][]; + }; + }; + }; + }; + /** + * Get a commit + * @description Returns the contents of a single commit reference. You must have `read` access for the repository to use this endpoint. + * + * **Note:** If there are more than 300 files in the commit diff, the response will include pagination link headers for the remaining files, up to a limit of 3000 files. Each page contains the static commit information, and the only changes are to the file listing. + * + * You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch `diff` and `patch` formats. Diffs with binary data will have no `patch` property. + * + * To return only the SHA-1 hash of the commit reference, you can provide the `sha` custom [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) in the `Accept` header. You can use this endpoint to check if a remote reference's SHA-1 hash is the same as your local reference's SHA-1 hash by providing the local SHA-1 reference as the ETag. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. 
| + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "repos/get-commit": { + parameters: { + query?: { + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description ref parameter */ + ref: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["commit"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List check runs for a Git reference + * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a commit ref. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + "checks/list-for-ref": { + parameters: { + query?: { + check_name?: components["parameters"]["check-name"]; + status?: components["parameters"]["status"]; + /** @description Filters check runs by their `completed_at` timestamp. `latest` returns the most recent check runs. */ + filter?: "latest" | "all"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + app_id?: number; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description ref parameter */ + ref: string; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + check_runs: components["schemas"]["check-run"][]; + }; + }; + }; + }; + }; + /** + * List check suites for a Git reference + * @description **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Lists check suites for a commit `ref`. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to list check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + "checks/list-suites-for-ref": { + parameters: { + query?: { + /** + * @description Filters check suites by GitHub App `id`. 
+ * @example 1 + */ + app_id?: number; + check_name?: components["parameters"]["check-name"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description ref parameter */ + ref: string; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + check_suites: components["schemas"]["check-suite"][]; + }; + }; + }; + }; + }; + /** + * Get the combined status for a specific reference + * @description Users with pull access in a repository can access a combined view of commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. + * + * + * Additionally, a combined `state` is returned. The `state` is one of: + * + * * **failure** if any of the contexts report as `error` or `failure` + * * **pending** if there are no statuses or a context is `pending` + * * **success** if the latest status for all contexts is `success` + */ + "repos/get-combined-status-for-ref": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description ref parameter */ + ref: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["combined-commit-status"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List commit statuses for a reference + * @description Users with pull access in a repository can view commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. Statuses are returned in reverse chronological order. The first status in the list will be the latest one. + * + * This resource is also available via a legacy route: `GET /repos/:owner/:repo/statuses/:ref`. + */ + "repos/list-commit-statuses-for-ref": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description ref parameter */ + ref: string; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["status"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + }; + }; + /** + * Get community profile metrics + * @description Returns all community profile metrics for a repository. The repository cannot be a fork. + * + * The returned metrics include an overall health score, the repository description, the presence of documentation, the + * detected code of conduct, the detected license, and the presence of ISSUE\_TEMPLATE, PULL\_REQUEST\_TEMPLATE, + * README, and CONTRIBUTING files. + * + * The `health_percentage` score is defined as a percentage of how many of + * these four documents are present: README, CONTRIBUTING, LICENSE, and + * CODE_OF_CONDUCT. For example, if all four documents are present, then + * the `health_percentage` is `100`. If only one is present, then the + * `health_percentage` is `25`. + * + * `content_reports_enabled` is only returned for organization-owned repositories. 
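+   *
+   * The `health_percentage` rule above can be illustrated with a small TypeScript sketch (illustrative only;
+   * the boolean flags are hypothetical and do not reflect the real response shape):
+   *
+   * ```
+   * // One quarter of the score per document that is present: README, CONTRIBUTING, LICENSE, CODE_OF_CONDUCT.
+   * const present = { readme: true, contributing: true, license: false, codeOfConduct: true };
+   * const healthPercentage = (Object.values(present).filter(Boolean).length / 4) * 100; // 75
+   * ```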
+ */ + "repos/get-community-profile-metrics": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["community-profile"]; + }; + }; + }; + }; + /** + * Compare two commits + * @description Compares two commits against one another. You can compare branches in the same repository, or you can compare branches that exist in different repositories within the same repository network, including fork branches. For more information about how to view a repository's network, see "[Understanding connections between repositories](https://docs.github.com/repositories/viewing-activity-and-data-for-your-repository/understanding-connections-between-repositories)." + * + * This endpoint is equivalent to running the `git log BASE..HEAD` command, but it returns commits in a different order. The `git log BASE..HEAD` command returns commits in reverse chronological order, whereas the API returns commits in chronological order. You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The API response includes details about the files that were changed between the two commits. This includes the status of the change (if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * When calling this endpoint without any paging parameter (`per_page` or `page`), the returned list is limited to 250 commits, and the last commit in the list is the most recent of the entire comparison. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, use a query parameter (`per_page` or `page`) to paginate the results. When using pagination: + * + * - The list of changed files is only shown on the first page of results, but it includes all changed files for the entire comparison. + * - The results are returned in chronological order, but the last commit in the returned list may not be the most recent one in the entire set if there are more pages of results. + * + * For more information on working with pagination, see "[Using pagination in the REST API](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api)." + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The `verification` object includes the following fields: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. 
| + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "repos/compare-commits-with-basehead": { + parameters: { + query?: { + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description The base branch and head branch to compare. This parameter expects the format `BASE...HEAD`. Both must be branch names in `repo`. To compare with a branch that exists in a different repository in the same network as `repo`, the `basehead` parameter expects the format `USERNAME:BASE...USERNAME:HEAD`. */ + basehead: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-comparison"]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Get repository content + * @description Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. If you omit + * `:path`, you will receive the contents of the repository's root directory. See the description below regarding what the API response includes for directories. + * + * Files and symlinks support [a custom media type](https://docs.github.com/rest/reference/repos#custom-media-types) for + * retrieving the raw content or rendered HTML (when supported). All content types support [a custom media + * type](https://docs.github.com/rest/reference/repos#custom-media-types) to ensure the content is returned in a consistent + * object format. + * + * **Notes**: + * * To get a repository's contents recursively, you can [recursively get the tree](https://docs.github.com/rest/reference/git#trees). + * * This API has an upper limit of 1,000 files for a directory. If you need to retrieve more files, use the [Git Trees + * API](https://docs.github.com/rest/reference/git#get-a-tree). + * * Download URLs expire and are meant to be used just once. To ensure the download URL does not expire, please use the contents API to obtain a fresh download URL for each download. + * #### Size limits + * If the requested file's size is: + * * 1 MB or smaller: All features of this endpoint are supported. 
+ * * Between 1-100 MB: Only the `raw` or `object` [custom media types](https://docs.github.com/rest/repos/contents#custom-media-types-for-repository-contents) are supported. Both will work as normal, except that when using the `object` media type, the `content` field will be an empty string and the `encoding` field will be `"none"`. To get the contents of these larger files, use the `raw` media type. + * * Greater than 100 MB: This endpoint is not supported. + * + * #### If the content is a directory + * The response will be an array of objects, one object for each item in the directory. + * When listing the contents of a directory, submodules have their "type" specified as "file". Logically, the value + * _should_ be "submodule". This behavior exists in API v3 [for backwards compatibility purposes](https://git.io/v1YCW). + * In the next major version of the API, the type will be returned as "submodule". + * + * #### If the content is a symlink + * If the requested `:path` points to a symlink, and the symlink's target is a normal file in the repository, then the + * API responds with the content of the file (in the format shown in the example. Otherwise, the API responds with an object + * describing the symlink itself. + * + * #### If the content is a submodule + * The `submodule_git_url` identifies the location of the submodule repository, and the `sha` identifies a specific + * commit within the submodule repository. Git uses the given URL when cloning the submodule repository, and checks out + * the submodule at that specific commit. + * + * If the submodule repository is not hosted on github.com, the Git URLs (`git_url` and `_links["git"]`) and the + * github.com URLs (`html_url` and `_links["html"]`) will have null values. + */ + "repos/get-content": { + parameters: { + query?: { + /** @description The name of the commit/branch/tag. Default: the repository’s default branch. */ + ref?: string; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description path parameter */ + path: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/vnd.github.object": components["schemas"]["content-tree"]; + "application/json": + | components["schemas"]["content-directory"] + | components["schemas"]["content-file"] + | components["schemas"]["content-symlink"] + | components["schemas"]["content-submodule"]; + }; + }; + 302: components["responses"]["found"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create or update file contents + * @description Creates a new file or replaces an existing file in a repository. You must authenticate using an access token with the `workflow` scope to use this endpoint. + * + * **Note:** If you use this endpoint and the "[Delete a file](https://docs.github.com/rest/reference/repos/#delete-file)" endpoint in parallel, the concurrent requests will conflict and you will receive errors. You must use these endpoints serially instead. + */ + "repos/create-or-update-file-contents": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description path parameter */ + path: string; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The commit message. */ + message: string; + /** @description The new file content, using Base64 encoding. 
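+           * For example (illustrative), in Node.js `Buffer.from("Hello, world!").toString("base64")` produces `"SGVsbG8sIHdvcmxkIQ=="`.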
*/ + content: string; + /** @description **Required if you are updating a file**. The blob SHA of the file being replaced. */ + sha?: string; + /** @description The branch name. Default: the repository’s default branch. */ + branch?: string; + /** @description The person that committed the file. Default: the authenticated user. */ + committer?: { + /** @description The name of the author or committer of the commit. You'll receive a `422` status code if `name` is omitted. */ + name: string; + /** @description The email of the author or committer of the commit. You'll receive a `422` status code if `email` is omitted. */ + email: string; + /** @example "2013-01-05T13:13:22+05:00" */ + date?: string; + }; + /** @description The author of the file. Default: The `committer` or the authenticated user if you omit `committer`. */ + author?: { + /** @description The name of the author or committer of the commit. You'll receive a `422` status code if `name` is omitted. */ + name: string; + /** @description The email of the author or committer of the commit. You'll receive a `422` status code if `email` is omitted. */ + email: string; + /** @example "2013-01-15T17:13:22+05:00" */ + date?: string; + }; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["file-commit"]; + }; + }; + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["file-commit"]; + }; + }; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Delete a file + * @description Deletes a file in a repository. + * + * You can provide an additional `committer` parameter, which is an object containing information about the committer. Or, you can provide an `author` parameter, which is an object containing information about the author. + * + * The `author` section is optional and is filled in with the `committer` information if omitted. If the `committer` information is omitted, the authenticated user's information is used. + * + * You must provide values for both `name` and `email`, whether you choose to use `author` or `committer`. Otherwise, you'll receive a `422` status code. + * + * **Note:** If you use this endpoint and the "[Create or update file contents](https://docs.github.com/rest/reference/repos/#create-or-update-file-contents)" endpoint in parallel, the concurrent requests will conflict and you will receive errors. You must use these endpoints serially instead. + */ + "repos/delete-file": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description path parameter */ + path: string; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The commit message. */ + message: string; + /** @description The blob SHA of the file being deleted. */ + sha: string; + /** @description The branch name. Default: the repository’s default branch */ + branch?: string; + /** @description object containing information about the committer. */ + committer?: { + /** @description The name of the author (or committer) of the commit */ + name?: string; + /** @description The email of the author (or committer) of the commit */ + email?: string; + }; + /** @description object containing information about the author. 
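+           * If omitted, this is filled in with the `committer` information, per the endpoint description above.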
*/ + author?: { + /** @description The name of the author (or committer) of the commit */ + name?: string; + /** @description The email of the author (or committer) of the commit */ + email?: string; + }; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["file-commit"]; + }; + }; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List repository contributors + * @description Lists contributors to the specified repository and sorts them by the number of commits per contributor in descending order. This endpoint may return information that is a few hours old because the GitHub REST API caches contributor data to improve performance. + * + * GitHub identifies contributors by author email address. This endpoint groups contribution counts by GitHub user, which includes all associated email addresses. To improve performance, only the first 500 author email addresses in the repository link to GitHub users. The rest will appear as anonymous contributors without associated GitHub user information. + */ + "repos/list-contributors": { + parameters: { + query?: { + /** @description Set to `1` or `true` to include anonymous contributors in results. */ + anon?: string; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description if repository contains content */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["contributor"][]; + }; + }; + /** @description Response if repository is empty */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List Dependabot alerts for a repository + * @description You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** read permission to use this endpoint. + */ + "dependabot/list-alerts-for-repo": { + parameters: { + query?: { + state?: components["parameters"]["dependabot-alert-comma-separated-states"]; + severity?: components["parameters"]["dependabot-alert-comma-separated-severities"]; + ecosystem?: components["parameters"]["dependabot-alert-comma-separated-ecosystems"]; + package?: components["parameters"]["dependabot-alert-comma-separated-packages"]; + manifest?: components["parameters"]["dependabot-alert-comma-separated-manifests"]; + scope?: components["parameters"]["dependabot-alert-scope"]; + sort?: components["parameters"]["dependabot-alert-sort"]; + direction?: components["parameters"]["direction"]; + /** + * @deprecated + * @description **Deprecated**. Page number of the results to fetch. Use cursor-based pagination with `before` or `after` instead. + */ + page?: number; + /** + * @deprecated + * @description The number of results per page (max 100). 
+ */ + per_page?: number; + before?: components["parameters"]["pagination-before"]; + after?: components["parameters"]["pagination-after"]; + first?: components["parameters"]["pagination-first"]; + last?: components["parameters"]["pagination-last"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-alert"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 400: components["responses"]["bad_request"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Get a Dependabot alert + * @description You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** read permission to use this endpoint. + */ + "dependabot/get-alert": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + alert_number: components["parameters"]["dependabot-alert-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-alert"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Update a Dependabot alert + * @description You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** write permission to use this endpoint. + * + * To use this endpoint, you must have access to security alerts for the repository. For more information, see "[Granting access to security alerts](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-security-and-analysis-settings-for-your-repository#granting-access-to-security-alerts)." + */ + "dependabot/update-alert": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + alert_number: components["parameters"]["dependabot-alert-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The state of the Dependabot alert. + * A `dismissed_reason` must be provided when setting the state to `dismissed`. + * @enum {string} + */ + state: "dismissed" | "open"; + /** + * @description **Required when `state` is `dismissed`.** A reason for dismissing the alert. + * @enum {string} + */ + dismissed_reason?: + | "fix_started" + | "inaccurate" + | "no_bandwidth" + | "not_used" + | "tolerable_risk"; + /** @description An optional comment associated with dismissing the alert. 
*/ + dismissed_comment?: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-alert"]; + }; + }; + 400: components["responses"]["bad_request"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * List repository secrets + * @description Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + "dependabot/list-repo-secrets": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["dependabot-secret"][]; + }; + }; + }; + }; + }; + /** + * Get a repository public key + * @description Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + "dependabot/get-repo-public-key": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-public-key"]; + }; + }; + }; + }; + /** + * Get a repository secret + * @description Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + "dependabot/get-repo-secret": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-secret"]; + }; + }; + }; + }; + /** + * Create or update a repository secret + * @description Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository + * permission to use this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. 
+ * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "dependabot/create-or-update-repo-secret": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get a repository public key](https://docs.github.com/rest/reference/dependabot#get-a-repository-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + }; + }; + }; + responses: { + /** @description Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** @description Response when updating a secret */ + 204: never; + }; + }; + /** + * Delete a repository secret + * @description Deletes a secret in a repository using the secret name. 
You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + "dependabot/delete-repo-secret": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get a diff of the dependencies between commits + * @description Gets the diff of the dependency changes between two commits of a repository, based on the changes to the dependency manifests made in those commits. + */ + "dependency-graph/diff-range": { + parameters: { + query?: { + name?: components["parameters"]["manifest-path"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description The base and head Git revisions to compare. The Git revisions will be resolved to commit SHAs. Named revisions will be resolved to their corresponding HEAD commits, and an appropriate merge base will be determined. This parameter expects the format `{base}...{head}`. */ + basehead: string; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["dependency-graph-diff"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Export a software bill of materials (SBOM) for a repository. + * @description Exports the software bill of materials (SBOM) for a repository in SPDX JSON format. + */ + "dependency-graph/export-sbom": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["dependency-graph-spdx-sbom"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a snapshot of dependencies for a repository + * @description Create a new snapshot of a repository's dependencies. You must authenticate using an access token with the `repo` scope to use this endpoint for a repository that the requesting user has access to. + */ + "dependency-graph/create-repository-snapshot": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["snapshot"]; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": { + /** @description ID of the created snapshot. */ + id: number; + /** @description The time at which the snapshot was created. */ + created_at: string; + /** @description Either "SUCCESS", "ACCEPTED", or "INVALID". "SUCCESS" indicates that the snapshot was successfully created and the repository's dependencies were updated. "ACCEPTED" indicates that the snapshot was successfully created, but the repository's dependencies were not updated. "INVALID" indicates that the snapshot was malformed. */ + result: string; + /** @description A message providing further details about the result, such as why the dependencies were not updated. 
*/ + message: string; + }; + }; + }; + }; + }; + /** + * List deployments + * @description Simple filtering of deployments is available via query parameters: + */ + "repos/list-deployments": { + parameters: { + query?: { + /** @description The SHA recorded at creation time. */ + sha?: string; + /** @description The name of the ref. This can be a branch, tag, or SHA. */ + ref?: string; + /** @description The name of the task for the deployment (e.g., `deploy` or `deploy:migrations`). */ + task?: string; + /** @description The name of the environment that was deployed to (e.g., `staging` or `production`). */ + environment?: string | null; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["deployment"][]; + }; + }; + }; + }; + /** + * Create a deployment + * @description Deployments offer a few configurable parameters with certain defaults. + * + * The `ref` parameter can be any named branch, tag, or SHA. At GitHub we often deploy branches and verify them + * before we merge a pull request. + * + * The `environment` parameter allows deployments to be issued to different runtime environments. Teams often have + * multiple environments for verifying their applications, such as `production`, `staging`, and `qa`. This parameter + * makes it easier to track which environments have requested deployments. The default environment is `production`. + * + * The `auto_merge` parameter is used to ensure that the requested ref is not behind the repository's default branch. If + * the ref _is_ behind the default branch for the repository, we will attempt to merge it for you. If the merge succeeds, + * the API will return a successful merge commit. If merge conflicts prevent the merge from succeeding, the API will + * return a failure response. + * + * By default, [commit statuses](https://docs.github.com/rest/commits/statuses) for every submitted context must be in a `success` + * state. The `required_contexts` parameter allows you to specify a subset of contexts that must be `success`, or to + * specify contexts that have not yet been submitted. You are not required to use commit statuses to deploy. If you do + * not require any contexts or create any commit statuses, the deployment will always succeed. + * + * The `payload` parameter is available for any extra information that a deployment system might need. It is a JSON text + * field that will be passed on when a deployment event is dispatched. + * + * The `task` parameter is used by the deployment system to allow different execution paths. In the web world this might + * be `deploy:migrations` to run schema changes on the system. In the compiled world this could be a flag to compile an + * application with debugging enabled. + * + * Users with `repo` or `repo_deployment` scopes can create a deployment for a given ref. + * + * #### Merged branch response + * You will see this response when GitHub automatically merges the base branch into the topic branch instead of creating + * a deployment. 
This auto-merge happens when: + * * Auto-merge option is enabled in the repository + * * Topic branch does not include the latest changes on the base branch, which is `master` in the response example + * * There are no merge conflicts + * + * If there are no new commits in the base branch, a new request to create a deployment should give a successful + * response. + * + * #### Merge conflict response + * This error happens when the `auto_merge` option is enabled and when the default branch (in this case `master`), can't + * be merged into the branch that's being deployed (in this case `topic-branch`), due to merge conflicts. + * + * #### Failed commit status checks + * This error happens when the `required_contexts` parameter indicates that one or more contexts need to have a `success` + * status for the commit to be deployed, but one or more of the required contexts do not have a state of `success`. + */ + "repos/create-deployment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The ref to deploy. This can be a branch, tag, or SHA. */ + ref: string; + /** + * @description Specifies a task to execute (e.g., `deploy` or `deploy:migrations`). + * @default deploy + */ + task?: string; + /** + * @description Attempts to automatically merge the default branch into the requested ref, if it's behind the default branch. + * @default true + */ + auto_merge?: boolean; + /** @description The [status](https://docs.github.com/rest/commits/statuses) contexts to verify against commit status checks. If you omit this parameter, GitHub verifies all unique contexts before creating a deployment. To bypass checking entirely, pass an empty array. Defaults to all unique contexts. */ + required_contexts?: string[]; + payload?: OneOf< + [ + { + [key: string]: unknown; + }, + string, + ] + >; + /** + * @description Name for the target deployment environment (e.g., `production`, `staging`, `qa`). + * @default production + */ + environment?: string; + /** + * @description Short description of the deployment. + * @default + */ + description?: string | null; + /** + * @description Specifies if the given environment is specific to the deployment and will no longer exist at some point in the future. Default: `false` + * @default false + */ + transient_environment?: boolean; + /** @description Specifies if the given environment is one that end-users directly interact with. Default: `true` when `environment` is `production` and `false` otherwise. 
*/ + production_environment?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["deployment"]; + }; + }; + /** @description Merged branch response */ + 202: { + content: { + "application/json": { + message?: string; + }; + }; + }; + /** @description Conflict when there is a merge conflict or the commit's status checks failed */ + 409: never; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Get a deployment */ + "repos/get-deployment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + deployment_id: components["parameters"]["deployment-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a deployment + * @description If the repository only has one deployment, you can delete the deployment regardless of its status. If the repository has more than one deployment, you can only delete inactive deployments. This ensures that repositories with multiple deployments will always have an active deployment. Anyone with `repo` or `repo_deployment` scopes can delete a deployment. + * + * To set a deployment as inactive, you must: + * + * * Create a new deployment that is active so that the system has a record of the current state, then delete the previously active deployment. + * * Mark the active deployment as inactive by adding any non-successful deployment status. + * + * For more information, see "[Create a deployment](https://docs.github.com/rest/deployments/deployments/#create-a-deployment)" and "[Create a deployment status](https://docs.github.com/rest/deployments/deployment-statuses#create-a-deployment-status)." + */ + "repos/delete-deployment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + deployment_id: components["parameters"]["deployment-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * List deployment statuses + * @description Users with pull access can view deployment statuses for a deployment: + */ + "repos/list-deployment-statuses": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + deployment_id: components["parameters"]["deployment-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["deployment-status"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a deployment status + * @description Users with `push` access can create deployment statuses for a given deployment. + * + * GitHub Apps require `read & write` access to "Deployments" and `read-only` access to "Repo contents" (for private repos). OAuth Apps require the `repo_deployment` scope. 
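+   *
+   * A minimal TypeScript sketch of creating a deployment status with `fetch` (illustrative only, not part of
+   * the generated spec; the owner, repo, deployment ID, and URLs are placeholders, and `GITHUB_TOKEN` is
+   * assumed to hold a token with the access described above):
+   *
+   * ```
+   * const owner = "octocat", repo = "example", deploymentId = 42; // placeholder values
+   * const res = await fetch(
+   *   `https://api.github.com/repos/${owner}/${repo}/deployments/${deploymentId}/statuses`,
+   *   {
+   *     method: "POST",
+   *     headers: {
+   *       Accept: "application/vnd.github+json",
+   *       Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+   *       "Content-Type": "application/json",
+   *     },
+   *     body: JSON.stringify({
+   *       state: "success",
+   *       log_url: "https://example.com/deploy/42/output", // placeholder
+   *       description: "Deployment finished successfully.",
+   *       environment: "production",
+   *     }),
+   *   },
+   * );
+   * if (res.status !== 201) throw new Error(`Unexpected status: ${res.status}`);
+   * ```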
+ */ + "repos/create-deployment-status": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + deployment_id: components["parameters"]["deployment-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The state of the status. When you set a transient deployment to `inactive`, the deployment will be shown as `destroyed` in GitHub. + * @enum {string} + */ + state: + | "error" + | "failure" + | "inactive" + | "in_progress" + | "queued" + | "pending" + | "success"; + /** + * @description The target URL to associate with this status. This URL should contain output to keep the user updated while the task is running or serve as historical information for what happened in the deployment. **Note:** It's recommended to use the `log_url` parameter, which replaces `target_url`. + * @default + */ + target_url?: string; + /** + * @description The full URL of the deployment's output. This parameter replaces `target_url`. We will continue to accept `target_url` to support legacy uses, but we recommend replacing `target_url` with `log_url`. Setting `log_url` will automatically set `target_url` to the same value. Default: `""` + * @default + */ + log_url?: string; + /** + * @description A short description of the status. The maximum description length is 140 characters. + * @default + */ + description?: string; + /** + * @description Name for the target deployment environment, which can be changed when setting a deploy status. For example, `production`, `staging`, or `qa`. + * @enum {string} + */ + environment?: "production" | "staging" | "qa"; + /** + * @description Sets the URL for accessing your environment. Default: `""` + * @default + */ + environment_url?: string; + /** @description Adds a new `inactive` status to all prior non-transient, non-production environment deployments with the same repository and `environment` name as the created status's deployment. An `inactive` status is only added to deployments that had a `success` state. Default: `true` */ + auto_inactive?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/example/deployments/42/statuses/1 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["deployment-status"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a deployment status + * @description Users with pull access can view a deployment status for a deployment: + */ + "repos/get-deployment-status": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + deployment_id: components["parameters"]["deployment-id"]; + status_id: number; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment-status"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a repository dispatch event + * @description You can use this endpoint to trigger a webhook event called `repository_dispatch` when you want activity that happens outside of GitHub to trigger a GitHub Actions workflow or GitHub App webhook. You must configure your GitHub Actions workflow or GitHub App to run when the `repository_dispatch` event occurs. For an example `repository_dispatch` webhook payload, see "[RepositoryDispatchEvent](https://docs.github.com/webhooks/event-payloads/#repository_dispatch)." 
+ * + * The `client_payload` parameter is available for any extra information that your workflow might need. This parameter is a JSON payload that will be passed on when the webhook event is dispatched. For example, the `client_payload` can include a message that a user would like to send using a GitHub Actions workflow. Or the `client_payload` can be used as a test to debug your workflow. + * + * This endpoint requires write access to the repository by providing either: + * + * - Personal access tokens with `repo` scope. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)" in the GitHub Help documentation. + * - GitHub Apps with both `metadata:read` and `contents:read&write` permissions. + * + * This input example shows how you can use the `client_payload` as a test to debug your workflow. + */ + "repos/create-dispatch-event": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description A custom webhook event name. Must be 100 characters or fewer. */ + event_type: string; + /** @description JSON payload with extra information about the webhook event that your action or workflow may use. The maximum number of top-level properties is 10. */ + client_payload?: { + [key: string]: unknown; + }; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List environments + * @description Lists the environments for a repository. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "repos/get-all-environments": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + /** + * @description The number of environments in this repository + * @example 5 + */ + total_count?: number; + environments?: components["schemas"]["environment"][]; + }; + }; + }; + }; + }; + /** + * Get an environment + * @description **Note:** To get information about name patterns that branches must match in order to deploy to this environment, see "[Get a deployment branch policy](/rest/deployments/branch-policies#get-a-deployment-branch-policy)." + * + * Anyone with read access to the repository can use this endpoint. If the + * repository is private, you must use an access token with the `repo` scope. GitHub + * Apps must have the `actions:read` permission to use this endpoint. + */ + "repos/get-environment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["environment"]; + }; + }; + }; + }; + /** + * Create or update an environment + * @description Create or update an environment with protection rules, such as required reviewers. 
For more information about environment protection rules, see "[Environments](/actions/reference/environments#environment-protection-rules)." + * + * **Note:** To create or update name patterns that branches must match in order to deploy to this environment, see "[Deployment branch policies](/rest/deployments/branch-policies)." + * + * **Note:** To create or update secrets for an environment, see "[Secrets](/rest/reference/actions#secrets)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + "repos/create-or-update-environment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + environment_name: components["parameters"]["environment-name"]; + }; + }; + requestBody?: { + content: { + "application/json": { + wait_timer?: components["schemas"]["wait-timer"]; + /** @description The people or teams that may review jobs that reference the environment. You can list up to six users or teams as reviewers. The reviewers must have at least read access to the repository. Only one of the required reviewers needs to approve the job for it to proceed. */ + reviewers?: + | { + type?: components["schemas"]["deployment-reviewer-type"]; + /** + * @description The id of the user or team who can review the deployment + * @example 4532992 + */ + id?: number; + }[] + | null; + deployment_branch_policy?: components["schemas"]["deployment-branch-policy-settings"]; + } | null; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["environment"]; + }; + }; + /** @description Validation error when the environment name is invalid or when `protected_branches` and `custom_branch_policies` in `deployment_branch_policy` are set to the same value */ + 422: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * Delete an environment + * @description You must authenticate using an access token with the repo scope to use this endpoint. + */ + "repos/delete-an-environment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** @description Default response */ + 204: never; + }; + }; + /** + * List deployment branch policies + * @description Lists the deployment branch policies for an environment. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "repos/list-deployment-branch-policies": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + /** + * @description The number of deployment branch policies for the environment. 
+ * @example 2 + */ + total_count: number; + branch_policies: components["schemas"]["deployment-branch-policy"][]; + }; + }; + }; + }; + }; + /** + * Create a deployment branch policy + * @description Creates a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + "repos/create-deployment-branch-policy": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + environment_name: components["parameters"]["environment-name"]; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["deployment-branch-policy-name-pattern"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment-branch-policy"]; + }; + }; + /** @description Response if the same branch name pattern already exists */ + 303: never; + /** @description Not Found or `deployment_branch_policy.custom_branch_policies` property for the environment is set to false */ + 404: never; + }; + }; + /** + * Get a deployment branch policy + * @description Gets a deployment branch policy for an environment. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "repos/get-deployment-branch-policy": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + environment_name: components["parameters"]["environment-name"]; + branch_policy_id: components["parameters"]["branch-policy-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment-branch-policy"]; + }; + }; + }; + }; + /** + * Update a deployment branch policy + * @description Updates a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + "repos/update-deployment-branch-policy": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + environment_name: components["parameters"]["environment-name"]; + branch_policy_id: components["parameters"]["branch-policy-id"]; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["deployment-branch-policy-name-pattern"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment-branch-policy"]; + }; + }; + }; + }; + /** + * Delete a deployment branch policy + * @description Deletes a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. 
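Editor's note: a minimal usage sketch, not part of the generated file, showing how the deployment branch policy operations typed above can be called with @octokit/core. The token, owner, repo, environment name, and branch name pattern are placeholder values.

import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Restrict deployments to the "production" environment to branches matching "release/*".
const { data: policy } = await octokit.request(
  "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies",
  { owner: "octocat", repo: "hello-world", environment_name: "production", name: "release/*" },
);
console.log(policy.id, policy.name);

Updating or deleting a policy uses the same route with the `branch_policy_id` path parameter appended, matching the get/update/delete operations typed above.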
+ */ + "repos/delete-deployment-branch-policy": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + environment_name: components["parameters"]["environment-name"]; + branch_policy_id: components["parameters"]["branch-policy-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get all deployment protection rules for an environment + * @description Gets all custom deployment protection rules that are enabled for an environment. Anyone with read access to the repository can use this endpoint. If the repository is private and you want to use a personal access token (classic), you must use an access token with the `repo` scope. GitHub Apps and fine-grained personal access tokens must have the `actions:read` permission to use this endpoint. For more information about environments, see "[Using environments for deployment](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment)." + * + * For more information about the app that is providing this custom deployment rule, see the [documentation for the `GET /apps/{app_slug}` endpoint](https://docs.github.com/rest/apps/apps#get-an-app). + */ + "repos/get-all-deployment-protection-rules": { + parameters: { + path: { + environment_name: components["parameters"]["environment-name"]; + repo: components["parameters"]["repo"]; + owner: components["parameters"]["owner"]; + }; + }; + responses: { + /** @description List of deployment protection rules */ + 200: { + content: { + "application/json": { + /** + * @description The number of enabled custom deployment protection rules for this environment + * @example 10 + */ + total_count?: number; + custom_deployment_protection_rules?: components["schemas"]["deployment-protection-rule"][]; + }; + }; + }; + }; + }; + /** + * Create a custom deployment protection rule on an environment + * @description Enable a custom deployment protection rule for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. Enabling a custom protection rule requires admin or owner permissions to the repository. GitHub Apps must have the `actions:write` permission to use this endpoint. + * + * For more information about the app that is providing this custom deployment rule, see the [documentation for the `GET /apps/{app_slug}` endpoint](https://docs.github.com/rest/apps/apps#get-an-app). + */ + "repos/create-deployment-protection-rule": { + parameters: { + path: { + environment_name: components["parameters"]["environment-name"]; + repo: components["parameters"]["repo"]; + owner: components["parameters"]["owner"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The ID of the custom app that will be enabled on the environment. */ + integration_id?: number; + }; + }; + }; + responses: { + /** @description The enabled custom deployment protection rule */ + 201: { + content: { + "application/json": components["schemas"]["deployment-protection-rule"]; + }; + }; + }; + }; + /** + * List custom deployment rule integrations available for an environment + * @description Gets all custom deployment protection rule integrations that are available for an environment. Anyone with read access to the repository can use this endpoint. If the repository is private and you want to use a personal access token (classic), you must use an access token with the `repo` scope. 
GitHub Apps and fine-grained personal access tokens must have the `actions:read` permission to use this endpoint. + * + * For more information about environments, see "[Using environments for deployment](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment)." + * + * For more information about the app that is providing this custom deployment rule, see "[GET an app](https://docs.github.com/rest/apps/apps#get-an-app)". + */ + "repos/list-custom-deployment-rule-integrations": { + parameters: { + query?: { + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + }; + path: { + environment_name: components["parameters"]["environment-name"]; + repo: components["parameters"]["repo"]; + owner: components["parameters"]["owner"]; + }; + }; + responses: { + /** @description A list of custom deployment rule integrations available for this environment. */ + 200: { + content: { + "application/json": { + /** + * @description The total number of custom deployment protection rule integrations available for this environment. + * @example 35 + */ + total_count?: number; + available_custom_deployment_protection_rule_integrations?: components["schemas"]["custom-deployment-rule-app"][]; + }; + }; + }; + }; + }; + /** + * Get a custom deployment protection rule + * @description Gets an enabled custom deployment protection rule for an environment. Anyone with read access to the repository can use this endpoint. If the repository is private and you want to use a personal access token (classic), you must use an access token with the `repo` scope. GitHub Apps and fine-grained personal access tokens must have the `actions:read` permission to use this endpoint. For more information about environments, see "[Using environments for deployment](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment)." + * + * For more information about the app that is providing this custom deployment rule, see [`GET /apps/{app_slug}`](https://docs.github.com/rest/apps/apps#get-an-app). + */ + "repos/get-custom-deployment-protection-rule": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + environment_name: components["parameters"]["environment-name"]; + protection_rule_id: components["parameters"]["protection-rule-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment-protection-rule"]; + }; + }; + }; + }; + /** + * Disable a custom protection rule for an environment + * @description Disables a custom deployment protection rule for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. Removing a custom protection rule requires admin or owner permissions to the repository. GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Get an app](https://docs.github.com/rest/apps/apps#get-an-app)". 
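Editor's note: an illustrative sketch, not part of the generated file, for enabling a custom deployment protection rule as typed above. It reuses the `octokit` instance from the earlier sketch; the route string is recalled from the GitHub REST docs and, like the `integration_id`, should be treated as an assumption to verify.

// Assumes the `octokit` instance from the earlier sketch.
// The route below is quoted from memory (assumption to verify); `integration_id` is the id of
// the GitHub App that provides the protection rule (placeholder value).
await octokit.request(
  "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules",
  { owner: "octocat", repo: "hello-world", environment_name: "production", integration_id: 123 },
);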
+ */ + "repos/disable-deployment-protection-rule": { + parameters: { + path: { + environment_name: components["parameters"]["environment-name"]; + repo: components["parameters"]["repo"]; + owner: components["parameters"]["owner"]; + protection_rule_id: components["parameters"]["protection-rule-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List repository events + * @description **Note**: This API is not built to serve real-time use cases. Depending on the time of day, event latency can be anywhere from 30s to 6h. + */ + "activity/list-repo-events": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** List forks */ + "repos/list-forks": { + parameters: { + query?: { + /** @description The sort order. `stargazers` will sort by star count. */ + sort?: "newest" | "oldest" | "stargazers" | "watchers"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 400: components["responses"]["bad_request"]; + }; + }; + /** + * Create a fork + * @description Create a fork for the authenticated user. + * + * **Note**: Forking a Repository happens asynchronously. You may have to wait a short period of time before you can access the git objects. If this takes longer than 5 minutes, be sure to contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + * + * **Note**: Although this endpoint works with GitHub Apps, the GitHub App must be installed on the destination account with access to all repositories and on the source account with access to the source repository. + */ + "repos/create-fork": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description Optional parameter to specify the organization name if forking into an organization. */ + organization?: string; + /** @description When forking from an existing repository, a new name for the fork. */ + name?: string; + /** @description When forking from an existing repository, fork with only the default branch. */ + default_branch_only?: boolean; + } | null; + }; + }; + responses: { + /** @description Response */ + 202: { + content: { + "application/json": components["schemas"]["full-repository"]; + }; + }; + 400: components["responses"]["bad_request"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Create a blob */ + "git/create-blob": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The new blob's content. */ + content: string; + /** + * @description The encoding used for `content`. 
Currently, `"utf-8"` and `"base64"` are supported. + * @default utf-8 + */ + encoding?: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/example/git/blobs/3a0f86fb8db8eea7ccbb9a95f325ddbedfb25e15 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["short-blob"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a blob + * @description The `content` in the response will always be Base64 encoded. + * + * _Note_: This API supports blobs up to 100 megabytes in size. + */ + "git/get-blob": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + file_sha: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["blob"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Create a commit + * @description Creates a new Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. 
| + */ + "git/create-commit": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The commit message */ + message: string; + /** @description The SHA of the tree object this commit points to */ + tree: string; + /** @description The SHAs of the commits that were the parents of this commit. If omitted or empty, the commit will be written as a root commit. For a single parent, an array of one SHA should be provided; for a merge commit, an array of more than one should be provided. */ + parents?: string[]; + /** @description Information about the author of the commit. By default, the `author` will be the authenticated user and the current date. See the `author` and `committer` object below for details. */ + author?: { + /** @description The name of the author (or committer) of the commit */ + name: string; + /** @description The email of the author (or committer) of the commit */ + email: string; + /** + * Format: date-time + * @description Indicates when this commit was authored (or committed). This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + date?: string; + }; + /** @description Information about the person who is making the commit. By default, `committer` will use the information set in `author`. See the `author` and `committer` object below for details. */ + committer?: { + /** @description The name of the author (or committer) of the commit */ + name?: string; + /** @description The email of the author (or committer) of the commit */ + email?: string; + /** + * Format: date-time + * @description Indicates when this commit was authored (or committed). This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + date?: string; + }; + /** @description The [PGP signature](https://en.wikipedia.org/wiki/Pretty_Good_Privacy) of the commit. GitHub adds the signature to the `gpgsig` header of the created commit. For a commit signature to be verifiable by Git or GitHub, it must be an ASCII-armored detached PGP signature over the string commit as it would be written to the object database. To pass a `signature` parameter, you need to first manually create a valid PGP signature, which can be complicated. You may find it easier to [use the command line](https://git-scm.com/book/id/v2/Git-Tools-Signing-Your-Work) to create signed commits. */ + signature?: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/git/commits/7638417db6d59f3c431d3e1f261cc637155684cd */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["git-commit"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a commit + * @description Gets a Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. 
| + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "git/get-commit": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + commit_sha: components["parameters"]["commit-sha"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["git-commit"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List matching references + * @description Returns an array of references from your Git database that match the supplied name. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't exist in the repository, but existing refs start with `:ref`, they will be returned as an array. + * + * When you use this endpoint without providing a `:ref`, it will return an array of all the references from your Git database, including notes and stashes if they exist on the server. Anything in the namespace is returned, not just `heads` and `tags`. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * If you request matching references for a branch named `feature` but the branch `feature` doesn't exist, the response can still include other matching head refs that start with the word `feature`, such as `featureA` and `featureB`. 
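Editor's note: a short sketch, not part of the generated file, of the matching-references lookup described above, reusing the `octokit` instance from the earlier sketch; owner and repo are placeholders.

// Assumes the `octokit` instance from the earlier sketch.
// Returns every ref whose name starts with "heads/feature", e.g. featureA and featureB.
const { data: refs } = await octokit.request(
  "GET /repos/{owner}/{repo}/git/matching-refs/{ref}",
  { owner: "octocat", repo: "hello-world", ref: "heads/feature" },
);
for (const r of refs) console.log(r.ref, r.object.sha);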
+ */ + "git/list-matching-refs": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description ref parameter */ + ref: string; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["git-ref"][]; + }; + }; + }; + }; + /** + * Get a reference + * @description Returns a single reference from your Git database. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't match an existing ref, a `404` is returned. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + */ + "git/get-ref": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description ref parameter */ + ref: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["git-ref"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a reference + * @description Creates a reference for your repository. You are unable to create new references for empty repositories, even if the commit SHA-1 hash used exists. Empty repositories are repositories without branches. + */ + "git/create-ref": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the fully qualified reference (ie: `refs/heads/master`). If it doesn't start with 'refs' and have at least two slashes, it will be rejected. */ + ref: string; + /** @description The SHA1 value for this reference. */ + sha: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/git/refs/heads/featureA */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["git-ref"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Delete a reference */ + "git/delete-ref": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description ref parameter */ + ref: string; + }; + }; + responses: { + /** @description Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Update a reference */ + "git/update-ref": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** + * @description The name of the fully qualified reference to update. For example, `refs/heads/master`. If the value doesn't start with `refs` and have at least two slashes, it will be rejected. + * @example refs/head/master + */ + ref: string; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The SHA1 value to set this reference to */ + sha: string; + /** + * @description Indicates whether to force the update or to make sure the update is a fast-forward update. 
Leaving this out or setting it to `false` will make sure you're not overwriting work. + * @default false + */ + force?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["git-ref"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Create a tag object + * @description Note that creating a tag object does not create the reference that makes a tag in Git. If you want to create an annotated tag in Git, you have to do this call to create the tag object, and then [create](https://docs.github.com/rest/reference/git#create-a-reference) the `refs/tags/[tag]` reference. If you want to create a lightweight tag, you only have to [create](https://docs.github.com/rest/reference/git#create-a-reference) the tag reference - this call would be unnecessary. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "git/create-tag": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The tag's name. This is typically a version (e.g., "v0.0.1"). */ + tag: string; + /** @description The tag message. */ + message: string; + /** @description The SHA of the git object this is tagging. */ + object: string; + /** + * @description The type of the object we're tagging. 
Normally this is a `commit` but it can also be a `tree` or a `blob`. + * @enum {string} + */ + type: "commit" | "tree" | "blob"; + /** @description An object with information about the individual creating the tag. */ + tagger?: { + /** @description The name of the author of the tag */ + name: string; + /** @description The email of the author of the tag */ + email: string; + /** + * Format: date-time + * @description When this object was tagged. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + date?: string; + }; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/git/tags/940bd336248efae0f9ee5bc7b2d5c985887b16ac */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["git-tag"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a tag + * @description **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. 
| + */ + "git/get-tag": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + tag_sha: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["git-tag"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a tree + * @description The tree creation API accepts nested entries. If you specify both a tree and a nested path modifying that tree, this endpoint will overwrite the contents of the tree with the new path contents, and create a new tree structure. + * + * If you use this endpoint to add, delete, or modify the file contents in a tree, you will need to commit the tree and then update a branch to point to the commit. For more information see "[Create a commit](https://docs.github.com/rest/reference/git#create-a-commit)" and "[Update a reference](https://docs.github.com/rest/reference/git#update-a-reference)." + * + * Returns an error if you try to delete a file that does not exist. + */ + "git/create-tree": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Objects (of `path`, `mode`, `type`, and `sha`) specifying a tree structure. */ + tree: { + /** @description The file referenced in the tree. */ + path?: string; + /** + * @description The file mode; one of `100644` for file (blob), `100755` for executable (blob), `040000` for subdirectory (tree), `160000` for submodule (commit), or `120000` for a blob that specifies the path of a symlink. + * @enum {string} + */ + mode?: "100644" | "100755" | "040000" | "160000" | "120000"; + /** + * @description Either `blob`, `tree`, or `commit`. + * @enum {string} + */ + type?: "blob" | "tree" | "commit"; + /** + * @description The SHA1 checksum ID of the object in the tree. Also called `tree.sha`. If the value is `null` then the file will be deleted. + * + * **Note:** Use either `tree.sha` or `content` to specify the contents of the entry. Using both `tree.sha` and `content` will return an error. + */ + sha?: string | null; + /** + * @description The content you want this file to have. GitHub will write this blob out and use that SHA for this entry. Use either this, or `tree.sha`. + * + * **Note:** Use either `tree.sha` or `content` to specify the contents of the entry. Using both `tree.sha` and `content` will return an error. + */ + content?: string; + }[]; + /** + * @description The SHA1 of an existing Git tree object which will be used as the base for the new tree. If provided, a new Git tree object will be created from entries in the Git tree object pointed to by `base_tree` and entries defined in the `tree` parameter. Entries defined in the `tree` parameter will overwrite items from `base_tree` with the same `path`. If you're creating new changes on a branch, then normally you'd set `base_tree` to the SHA1 of the Git tree object of the current latest commit on the branch you're working on. + * If not provided, GitHub will create a new Git tree object from only the entries defined in the `tree` parameter. If you create a new commit pointing to such a tree, then all files which were a part of the parent commit's tree and were not defined in the `tree` parameter will be listed as deleted by the new commit. 
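Editor's note: a sketch, not part of the generated file, of the Git data flow the descriptions above outline: read the branch ref, create a tree on top of the current one, create a commit, then fast-forward the ref. It reuses the `octokit` instance from the earlier sketch; owner, repo, branch, and file contents are placeholders.

// Assumes the `octokit` instance from the earlier sketch.
const base = { owner: "octocat", repo: "hello-world" };

// 1. Read the current tip of the branch and the tree it points to.
const { data: branchRef } = await octokit.request(
  "GET /repos/{owner}/{repo}/git/ref/{ref}",
  { ...base, ref: "heads/main" },
);
const { data: tipCommit } = await octokit.request(
  "GET /repos/{owner}/{repo}/git/commits/{commit_sha}",
  { ...base, commit_sha: branchRef.object.sha },
);

// 2. Create a tree based on the current one, a commit pointing at it, then update the ref.
const { data: newTree } = await octokit.request("POST /repos/{owner}/{repo}/git/trees", {
  ...base,
  base_tree: tipCommit.tree.sha,
  tree: [{ path: "docs/CHANGELOG.md", mode: "100644", type: "blob", content: "## v1.0.0\n" }],
});
const { data: newCommit } = await octokit.request("POST /repos/{owner}/{repo}/git/commits", {
  ...base,
  message: "Add changelog",
  tree: newTree.sha,
  parents: [branchRef.object.sha],
});
await octokit.request("PATCH /repos/{owner}/{repo}/git/refs/{ref}", {
  ...base,
  ref: "heads/main",
  sha: newCommit.sha,
});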
+ */ + base_tree?: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/trees/cd8274d15fa3ae2ab983129fb037999f264ba9a7 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["git-tree"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a tree + * @description Returns a single tree using the SHA1 value for that tree. + * + * If `truncated` is `true` in the response then the number of items in the `tree` array exceeded our maximum limit. If you need to fetch more items, use the non-recursive method of fetching trees, and fetch one sub-tree at a time. + * + * + * **Note**: The limit for the `tree` array is 100,000 entries with a maximum size of 7 MB when using the `recursive` parameter. + */ + "git/get-tree": { + parameters: { + query?: { + /** @description Setting this parameter to any value returns the objects or subtrees referenced by the tree specified in `:tree_sha`. For example, setting `recursive` to any of the following will enable returning objects or subtrees: `0`, `1`, `"true"`, and `"false"`. Omit this parameter to prevent recursively returning objects or subtrees. */ + recursive?: string; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + tree_sha: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["git-tree"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List repository webhooks + * @description Lists webhooks for a repository. `last response` may return null if there have not been any deliveries within 30 days. + */ + "repos/list-webhooks": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["hook"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a repository webhook + * @description Repositories can have multiple webhooks installed. Each webhook should have a unique `config`. Multiple webhooks can + * share the same `config` as long as those webhooks do not have any `events` that overlap. + */ + "repos/create-webhook": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description Use `web` to create a webhook. Default: `web`. This parameter only accepts the value `web`. */ + name?: string; + /** @description Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/repos#create-hook-config-params). 
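Editor's note: a minimal sketch, not part of the generated file, of creating a repository webhook with the `config` shape typed below. The payload URL and secret are placeholders, and the `octokit` instance comes from the earlier sketch.

// Assumes the `octokit` instance from the earlier sketch; URL and secret are placeholders.
await octokit.request("POST /repos/{owner}/{repo}/hooks", {
  owner: "octocat",
  repo: "hello-world",
  name: "web",
  config: {
    url: "https://example.com/github-webhooks",
    content_type: "json",
    secret: "replace-with-a-random-secret",
    insecure_ssl: "0",
  },
  events: ["push", "pull_request"],
  active: true,
});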
*/ + config?: { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + /** @example "abc" */ + token?: string; + /** @example "sha256" */ + digest?: string; + }; + /** + * @description Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. + * @default [ + * "push" + * ] + */ + events?: string[]; + /** + * @description Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. + * @default true + */ + active?: boolean; + } | null; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/hooks/12345678 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["hook"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a repository webhook + * @description Returns a webhook configured in a repository. To get only the webhook `config` properties, see "[Get a webhook configuration for a repository](/rest/reference/repos#get-a-webhook-configuration-for-a-repository)." + */ + "repos/get-webhook": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["hook"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Delete a repository webhook */ + "repos/delete-webhook": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Update a repository webhook + * @description Updates a webhook configured in a repository. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for a repository](/rest/reference/repos#update-a-webhook-configuration-for-a-repository)." + */ + "repos/update-webhook": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/repos#create-hook-config-params). */ + config?: { + url: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + /** @example "bar@example.com" */ + address?: string; + /** @example "The Serious Room" */ + room?: string; + }; + /** + * @description Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. This replaces the entire array of events. 
+ * @default [ + * "push" + * ] + */ + events?: string[]; + /** @description Determines a list of events to be added to the list of events that the Hook triggers for. */ + add_events?: string[]; + /** @description Determines a list of events to be removed from the list of events that the Hook triggers for. */ + remove_events?: string[]; + /** + * @description Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. + * @default true + */ + active?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["hook"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a webhook configuration for a repository + * @description Returns the webhook configuration for a repository. To get more information about the webhook, including the `active` state and `events`, use "[Get a repository webhook](/rest/reference/orgs#get-a-repository-webhook)." + * + * Access tokens must have the `read:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:read` permission. + */ + "repos/get-webhook-config-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + }; + /** + * Update a webhook configuration for a repository + * @description Updates the webhook configuration for a repository. To update more information about the webhook, including the `active` state and `events`, use "[Update a repository webhook](/rest/reference/orgs#update-a-repository-webhook)." + * + * Access tokens must have the `write:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:write` permission. + */ + "repos/update-webhook-config-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + }; + /** + * List deliveries for a repository webhook + * @description Returns a list of webhook deliveries for a webhook configured in a repository. 
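Editor's note: a small sketch, not part of the generated file, of listing webhook deliveries as described above, reusing the `octokit` instance from the earlier sketch; the hook id is a placeholder.

// Assumes the `octokit` instance from the earlier sketch.
const { data: deliveries } = await octokit.request(
  "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries",
  { owner: "octocat", repo: "hello-world", hook_id: 12345678, per_page: 50 },
);
for (const d of deliveries) console.log(d.id, d.event, d.status_code, d.delivered_at);

A failed delivery can then be retried with the redeliver operation typed further below.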
+ */ + "repos/list-webhook-deliveries": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + cursor?: components["parameters"]["cursor"]; + redelivery?: boolean; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery-item"][]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a delivery for a repository webhook + * @description Returns a delivery for a webhook configured in a repository. + */ + "repos/get-webhook-delivery": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + hook_id: components["parameters"]["hook-id"]; + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery"]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Redeliver a delivery for a repository webhook + * @description Redeliver a webhook delivery for a webhook configured in a repository. + */ + "repos/redeliver-webhook-delivery": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + hook_id: components["parameters"]["hook-id"]; + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Ping a repository webhook + * @description This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. + */ + "repos/ping-webhook": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Test the push repository webhook + * @description This will trigger the hook with the latest push to the current repository if the hook is subscribed to `push` events. If the hook is not subscribed to `push` events, the server will respond with 204 but no test POST will be generated. + * + * **Note**: Previously `/repos/:owner/:repo/hooks/:hook_id/test` + */ + "repos/test-push-webhook": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get an import status + * @description View the progress of an import. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. 
+ * + * **Import status** + * + * This section includes details about the possible values of the `status` field of the Import Progress response. + * + * An import that does not have errors will progress through these steps: + * + * * `detecting` - the "detection" step of the import is in progress because the request did not include a `vcs` parameter. The import is identifying the type of source control present at the URL. + * * `importing` - the "raw" step of the import is in progress. This is where commit data is fetched from the original repository. The import progress response will include `commit_count` (the total number of raw commits that will be imported) and `percent` (0 - 100, the current progress through the import). + * * `mapping` - the "rewrite" step of the import is in progress. This is where SVN branches are converted to Git branches, and where author updates are applied. The import progress response does not include progress information. + * * `pushing` - the "push" step of the import is in progress. This is where the importer updates the repository on GitHub. The import progress response will include `push_percent`, which is the percent value reported by `git push` when it is "Writing objects". + * * `complete` - the import is complete, and the repository is ready on GitHub. + * + * If there are problems, you will see one of these in the `status` field: + * + * * `auth_failed` - the import requires authentication in order to connect to the original repository. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/migrations/source-imports#update-an-import) section. + * * `error` - the import encountered an error. The import progress response will include the `failed_step` and an error message. Contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api) for more information. + * * `detection_needs_auth` - the importer requires authentication for the originating repository to continue detection. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/migrations/source-imports#update-an-import) section. + * * `detection_found_nothing` - the importer didn't recognize any source control at the URL. To resolve, [Cancel the import](https://docs.github.com/rest/migrations/source-imports#cancel-an-import) and [retry](https://docs.github.com/rest/migrations/source-imports#start-an-import) with the correct URL. + * * `detection_found_multiple` - the importer found several projects or repositories at the provided URL. When this is the case, the Import Progress response will also include a `project_choices` field with the possible project choices as values. To update project choice, please see the [Update an import](https://docs.github.com/rest/migrations/source-imports#update-an-import) section. + * + * **The project_choices field** + * + * When multiple projects are found at the provided URL, the response hash will include a `project_choices` field, the value of which is an array of hashes each representing a project choice. The exact key/value pairs of the project hashes will differ depending on the version control type. + * + * **Git LFS related fields** + * + * This section includes details about Git LFS related fields that may be present in the Import Progress response. + * + * * `use_lfs` - describes whether the import has been opted in or out of using Git LFS. The value can be `opt_in`, `opt_out`, or `undecided` if no action has been taken. 
+ * * `has_large_files` - the boolean value describing whether files larger than 100MB were found during the `importing` step. + * * `large_files_size` - the total size in gigabytes of files larger than 100MB found in the originating repository. + * * `large_files_count` - the total number of files larger than 100MB found in the originating repository. To see a list of these files, make a "Get Large Files" request. + */ + "migrations/get-import-status": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["import"]; + }; + }; + 404: components["responses"]["not_found"]; + 503: components["responses"]["porter_maintenance"]; + }; + }; + /** + * Start an import + * @description Start a source import to a GitHub repository using GitHub Importer. Importing into a GitHub repository with GitHub Actions enabled is not supported and will return a status `422 Unprocessable Entity` response. + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + "migrations/start-import": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The URL of the originating repository. */ + vcs_url: string; + /** + * @description The originating VCS type. Without this parameter, the import job will take additional time to detect the VCS type before beginning the import. This detection step will be reflected in the response. + * @enum {string} + */ + vcs?: "subversion" | "git" | "mercurial" | "tfvc"; + /** @description If authentication is required, the username to provide to `vcs_url`. */ + vcs_username?: string; + /** @description If authentication is required, the password to provide to `vcs_url`. */ + vcs_password?: string; + /** @description For a tfvc import, the name of the project that is being imported. */ + tfvc_project?: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/spraints/socm/import */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["import"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["porter_maintenance"]; + }; + }; + /** + * Cancel an import + * @description Stop an import for a repository. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. 
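+   *
+   * Illustrative only, not part of the generated spec: with an authenticated Octokit
+   * client (for example `new Octokit({ auth })` from `@octokit/core`), cancelling an
+   * in-progress import could look roughly like this, with placeholder owner/repo values:
+   *
+   *     await octokit.request("DELETE /repos/{owner}/{repo}/import", {
+   *       owner: "octocat",
+   *       repo: "hello-world",
+   *     });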
+ */ + "migrations/cancel-import": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 503: components["responses"]["porter_maintenance"]; + }; + }; + /** + * Update an import + * @description An import can be updated with credentials or a project choice by passing in the appropriate parameters in this API + * request. If no parameters are provided, the import will be restarted. + * + * Some servers (e.g. TFS servers) can have several projects at a single URL. In those cases the import progress will + * have the status `detection_found_multiple` and the Import Progress response will include a `project_choices` array. + * You can select the project to import by providing one of the objects in the `project_choices` array in the update request. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + "migrations/update-import": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The username to provide to the originating repository. */ + vcs_username?: string; + /** @description The password to provide to the originating repository. */ + vcs_password?: string; + /** + * @description The type of version control system you are migrating from. + * @example "git" + * @enum {string} + */ + vcs?: "subversion" | "tfvc" | "git" | "mercurial"; + /** + * @description For a tfvc import, the name of the project that is being imported. + * @example "project1" + */ + tfvc_project?: string; + } | null; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["import"]; + }; + }; + 503: components["responses"]["porter_maintenance"]; + }; + }; + /** + * Get commit authors + * @description Each type of source control system represents authors in a different way. For example, a Git commit author has a display name and an email address, but a Subversion commit author just has a username. The GitHub Importer will make the author information valid, but the author might not be correct. For example, it will change the bare Subversion username `hubot` into something like `hubot `. + * + * This endpoint and the [Map a commit author](https://docs.github.com/rest/migrations/source-imports#map-a-commit-author) endpoint allow you to provide correct Git author information. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. 
+ */ + "migrations/get-commit-authors": { + parameters: { + query?: { + since?: components["parameters"]["since-user"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["porter-author"][]; + }; + }; + 404: components["responses"]["not_found"]; + 503: components["responses"]["porter_maintenance"]; + }; + }; + /** + * Map a commit author + * @description Update an author's identity for the import. Your application can continue updating authors any time before you push + * new commits to the repository. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + "migrations/map-commit-author": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + author_id: number; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The new Git author email. */ + email?: string; + /** @description The new Git author name. */ + name?: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["porter-author"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["porter_maintenance"]; + }; + }; + /** + * Get large files + * @description List files larger than 100MB found during the import + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + "migrations/get-large-files": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["porter-large-file"][]; + }; + }; + 503: components["responses"]["porter_maintenance"]; + }; + }; + /** + * Update Git LFS preference + * @description You can import repositories from Subversion, Mercurial, and TFS that include files larger than 100MB. This ability + * is powered by [Git LFS](https://git-lfs.com). + * + * You can learn more about our LFS feature and working with large files [on our help + * site](https://docs.github.com/repositories/working-with-files/managing-large-files). + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. 
+ */ + "migrations/set-lfs-preference": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Whether to store large files during the import. `opt_in` means large files will be stored using Git LFS. `opt_out` means large files will be removed during the import. + * @enum {string} + */ + use_lfs: "opt_in" | "opt_out"; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["import"]; + }; + }; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["porter_maintenance"]; + }; + }; + /** + * Get a repository installation for the authenticated app + * @description Enables an authenticated GitHub App to find the repository's installation information. The installation's account type will be either an organization or a user account, depending which account the repository belongs to. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-repo-installation": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["installation"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get interaction restrictions for a repository + * @description Shows which type of GitHub user can interact with this repository and when the restriction expires. If there are no restrictions, you will see an empty response. + */ + "interactions/get-restrictions-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": + | components["schemas"]["interaction-limit-response"] + | Record; + }; + }; + }; + }; + /** + * Set interaction restrictions for a repository + * @description Temporarily restricts interactions to a certain type of GitHub user within the given repository. You must have owner or admin access to set these restrictions. If an interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. + */ + "interactions/set-restrictions-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["interaction-limit"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["interaction-limit-response"]; + }; + }; + /** @description Response */ + 409: never; + }; + }; + /** + * Remove interaction restrictions for a repository + * @description Removes all interaction restrictions from the given repository. You must have owner or admin access to remove restrictions. 
If the interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. + */ + "interactions/remove-restrictions-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + /** @description Response */ + 409: never; + }; + }; + /** + * List repository invitations + * @description When authenticating as a user with admin rights to a repository, this endpoint will list all currently open repository invitations. + */ + "repos/list-invitations": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["repository-invitation"][]; + }; + }; + }; + }; + /** Delete a repository invitation */ + "repos/delete-invitation": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** Update a repository invitation */ + "repos/update-invitation": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description The permissions that the associated user will have on the repository. Valid values are `read`, `write`, `maintain`, `triage`, and `admin`. + * @enum {string} + */ + permissions?: "read" | "write" | "maintain" | "triage" | "admin"; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-invitation"]; + }; + }; + }; + }; + /** + * List repository issues + * @description List issues in a repository. Only open issues will be listed. + * + * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/list-for-repo": { + parameters: { + query?: { + /** @description If an `integer` is passed, it should refer to a milestone by its `number` field. If the string `*` is passed, issues with any milestone are accepted. If the string `none` is passed, issues without milestones are returned. */ + milestone?: string; + /** @description Indicates the state of the issues to return. */ + state?: "open" | "closed" | "all"; + /** @description Can be the name of a user. Pass in `none` for issues with no assigned user, and `*` for issues assigned to any user. */ + assignee?: string; + /** @description The user that created the issue. 
*/ + creator?: string; + /** @description A user that's mentioned in the issue. */ + mentioned?: string; + labels?: components["parameters"]["labels"]; + /** @description What to sort results by. */ + sort?: "created" | "updated" | "comments"; + direction?: components["parameters"]["direction"]; + since?: components["parameters"]["since"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["issue"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Create an issue + * @description Any user with pull access to a repository can create an issue. If [issues are disabled in the repository](https://docs.github.com/articles/disabling-issues/), the API returns a `410 Gone` status. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "issues/create": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the issue. */ + title: string | number; + /** @description The contents of the issue. */ + body?: string; + /** @description Login for the user that this issue should be assigned to. _NOTE: Only users with push access can set the assignee for new issues. The assignee is silently dropped otherwise. **This field is deprecated.**_ */ + assignee?: string | null; + milestone?: string | number; + /** @description Labels to associate with this issue. _NOTE: Only users with push access can set labels for new issues. Labels are silently dropped otherwise._ */ + labels?: OneOf< + [ + string, + { + id?: number; + name?: string; + description?: string | null; + color?: string | null; + }, + ] + >[]; + /** @description Logins for Users to assign to this issue. _NOTE: Only users with push access can set assignees for new issues. Assignees are silently dropped otherwise._ */ + assignees?: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/issues/1347 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + 400: components["responses"]["bad_request"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List issue comments for a repository + * @description You can use the REST API to list comments on issues and pull requests for a repository. 
Every pull request is an issue, but not every issue is a pull request. + * + * By default, issue comments are ordered by ascending ID. + */ + "issues/list-comments-for-repo": { + parameters: { + query?: { + sort?: components["parameters"]["sort"]; + /** @description Either `asc` or `desc`. Ignored without the `sort` parameter. */ + direction?: "asc" | "desc"; + since?: components["parameters"]["since"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["issue-comment"][]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get an issue comment + * @description You can use the REST API to get comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request. + */ + "issues/get-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["issue-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete an issue comment + * @description You can use the REST API to delete comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request. + */ + "issues/delete-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update an issue comment + * @description You can use the REST API to update comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request. + */ + "issues/update-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The contents of the comment. */ + body: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["issue-comment"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List reactions for an issue comment + * @description List the reactions to an [issue comment](https://docs.github.com/rest/reference/issues#comments). + */ + "reactions/list-for-issue-comment": { + parameters: { + query?: { + /** @description Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to an issue comment. 
*/ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create reaction for an issue comment + * @description Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). A response with an HTTP `200` status means that you already added the reaction type to this issue comment. + */ + "reactions/create-for-issue-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the issue comment. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + responses: { + /** @description Reaction exists */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** @description Reaction created */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Delete an issue comment reaction + * @description **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/issues/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). + */ + "reactions/delete-for-issue-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List issue events for a repository + * @description Lists events for a repository. + */ + "issues/list-events-for-repo": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["issue-event"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get an issue event + * @description Gets a single event by the event id. 
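+   *
+   * A minimal sketch, assuming an authenticated `octokit` client and a known event id
+   * (values below are placeholders, not taken from this spec):
+   *
+   *     await octokit.request("GET /repos/{owner}/{repo}/issues/events/{event_id}", {
+   *       owner: "octocat",
+   *       repo: "hello-world",
+   *       event_id: 1,
+   *     });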
+ */ + "issues/get-event": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + event_id: number; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["issue-event"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** + * Get an issue + * @description The API returns a [`301 Moved Permanently` status](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-redirects-redirects) if the issue was + * [transferred](https://docs.github.com/articles/transferring-an-issue-to-another-repository/) to another repository. If + * the issue was transferred to or deleted from a repository where the authenticated user lacks read access, the API + * returns a `404 Not Found` status. If the issue was deleted from a repository where the authenticated user has read + * access, the API returns a `410 Gone` status. To receive webhook events for transferred and deleted issues, subscribe + * to the [`issues`](https://docs.github.com/webhooks/event-payloads/#issues) webhook. + * + * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/get": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** + * Update an issue + * @description Issue owners and users with push access can edit an issue. + */ + "issues/update": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The title of the issue. */ + title?: string | number; + /** @description The contents of the issue. */ + body?: string | null; + /** @description Username to assign to this issue. **This field is deprecated.** */ + assignee?: string | null; + /** + * @description The open or closed state of the issue. + * @enum {string} + */ + state?: "open" | "closed"; + /** + * @description The reason for the state change. Ignored unless `state` is changed. + * @example not_planned + * @enum {string|null} + */ + state_reason?: "completed" | "not_planned" | "reopened" | null; + milestone?: string | number; + /** @description Labels to associate with this issue. Pass one or more labels to _replace_ the set of labels on this issue. Send an empty array (`[]`) to clear all labels from the issue. Only users with push access can set labels for issues. 
Without push access to the repository, label changes are silently dropped. */ + labels?: OneOf< + [ + string, + { + id?: number; + name?: string; + description?: string | null; + color?: string | null; + }, + ] + >[]; + /** @description Usernames to assign to this issue. Pass one or more user logins to _replace_ the set of assignees on this issue. Send an empty array (`[]`) to clear all assignees from the issue. Only users with push access can set assignees for new issues. Without push access to the repository, assignee changes are silently dropped. */ + assignees?: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Add assignees to an issue + * @description Adds up to 10 assignees to an issue. Users already assigned to an issue are not replaced. + */ + "issues/add-assignees": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description Usernames of people to assign this issue to. _NOTE: Only users with push access can add assignees to an issue. Assignees are silently ignored otherwise._ */ + assignees?: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + }; + }; + /** + * Remove assignees from an issue + * @description Removes one or more assignees from an issue. + */ + "issues/remove-assignees": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Usernames of assignees to remove from an issue. _NOTE: Only users with push access can remove assignees from an issue. Assignees are silently ignored otherwise._ */ + assignees: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + }; + }; + /** + * Check if a user can be assigned to a issue + * @description Checks if a user has permission to be assigned to a specific issue. + * + * If the `assignee` can be assigned to this issue, a `204` status code with no content is returned. + * + * Otherwise a `404` status code is returned. + */ + "issues/check-user-can-be-assigned-to-issue": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + assignee: string; + }; + }; + responses: { + /** @description Response if `assignee` can be assigned to `issue_number` */ + 204: never; + /** @description Response if `assignee` can not be assigned to `issue_number` */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * List issue comments + * @description You can use the REST API to list comments on issues and pull requests. 
Every pull request is an issue, but not every issue is a pull request. + * + * Issue comments are ordered by ascending ID. + */ + "issues/list-comments": { + parameters: { + query?: { + since?: components["parameters"]["since"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["issue-comment"][]; + }; + }; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** + * Create an issue comment + * @description + * You can use the REST API to create comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). + * Creating content too quickly using this endpoint may result in secondary rate limiting. + * See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" + * and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" + * for details. + */ + "issues/create-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The contents of the comment. */ + body: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/issues/comments/1 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["issue-comment"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List issue events + * @description Lists all events for an issue. + */ + "issues/list-events": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["issue-event-for-issue"][]; + }; + }; + 410: components["responses"]["gone"]; + }; + }; + /** + * List labels for an issue + * @description Lists all labels for an issue. 
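+   *
+   * One possible call shape (hedged; assumes an authenticated `octokit` instance and
+   * placeholder values):
+   *
+   *     const { data: labels } = await octokit.request(
+   *       "GET /repos/{owner}/{repo}/issues/{issue_number}/labels",
+   *       { owner: "octocat", repo: "hello-world", issue_number: 1 },
+   *     );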
+ */ + "issues/list-labels-on-issue": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** + * Set labels for an issue + * @description Removes any previous labels and sets the new labels for an issue. + */ + "issues/set-labels": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + requestBody?: { + content: { + "application/json": OneOf< + [ + { + /** @description The names of the labels to set for the issue. The labels you set replace any existing labels. You can pass an empty array to remove all labels. Alternatively, you can pass a single label as a `string` or an `array` of labels directly, but GitHub recommends passing an object with the `labels` key. You can also add labels to the existing labels for an issue. For more information, see "[Add labels to an issue](https://docs.github.com/rest/reference/issues#add-labels-to-an-issue)." */ + labels?: string[]; + }, + { + labels?: { + name: string; + }[]; + }, + ] + >; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Add labels to an issue + * @description Adds labels to an issue. If you provide an empty array of labels, all labels are removed from the issue. + */ + "issues/add-labels": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + requestBody?: { + content: { + "application/json": OneOf< + [ + { + /** @description The names of the labels to add to the issue's existing labels. You can pass an empty array to remove all labels. Alternatively, you can pass a single label as a `string` or an `array` of labels directly, but GitHub recommends passing an object with the `labels` key. You can also replace all of the labels for an issue. For more information, see "[Set labels for an issue](https://docs.github.com/rest/reference/issues#set-labels-for-an-issue)." */ + labels?: string[]; + }, + { + labels?: { + name: string; + }[]; + }, + ] + >; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Remove all labels from an issue + * @description Removes all labels from an issue. 
+ */ + "issues/remove-all-labels": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** + * Remove a label from an issue + * @description Removes the specified label from the issue, and returns the remaining labels on the issue. This endpoint returns a `404 Not Found` status if the label does not exist. + */ + "issues/remove-label": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + name: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** + * Lock an issue + * @description Users with push access can lock an issue or pull request's conversation. + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + "issues/lock": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description The reason for locking the issue or pull request conversation. Lock will fail if you don't use one of these reasons: + * * `off-topic` + * * `too heated` + * * `resolved` + * * `spam` + * @enum {string} + */ + lock_reason?: "off-topic" | "too heated" | "resolved" | "spam"; + } | null; + }; + }; + responses: { + /** @description Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Unlock an issue + * @description Users with push access can unlock an issue's conversation. + */ + "issues/unlock": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List reactions for an issue + * @description List the reactions to an [issue](https://docs.github.com/rest/reference/issues). + */ + "reactions/list-for-issue": { + parameters: { + query?: { + /** @description Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to an issue. 
*/ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** + * Create reaction for an issue + * @description Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with an HTTP `200` status means that you already added the reaction type to this issue. + */ + "reactions/create-for-issue": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the issue. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Delete an issue reaction + * @description **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/:issue_number/reactions/:reaction_id`. + * + * Delete a reaction to an [issue](https://docs.github.com/rest/reference/issues/). + */ + "reactions/delete-for-issue": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List timeline events for an issue + * @description List all timeline events for an issue. 
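+   *
+   * A hedged example with an assumed authenticated `octokit` client (placeholder
+   * values; `per_page` and `page` are the query parameters declared below):
+   *
+   *     await octokit.request("GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", {
+   *       owner: "octocat",
+   *       repo: "hello-world",
+   *       issue_number: 1,
+   *       per_page: 100,
+   *     });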
+ */ + "issues/list-events-for-timeline": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["timeline-issue-events"][]; + }; + }; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** List deploy keys */ + "repos/list-deploy-keys": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["deploy-key"][]; + }; + }; + }; + }; + /** + * Create a deploy key + * @description You can create a read-only deploy key. + */ + "repos/create-deploy-key": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description A name for the key. */ + title?: string; + /** @description The contents of the key. */ + key: string; + /** + * @description If `true`, the key will only be able to read repository contents. Otherwise, the key will be able to read and write. + * + * Deploy keys with write access can perform the same actions as an organization member with admin access, or a collaborator on a personal repository. For more information, see "[Repository permission levels for an organization](https://docs.github.com/articles/repository-permission-levels-for-an-organization/)" and "[Permission levels for a user account repository](https://docs.github.com/articles/permission-levels-for-a-user-account-repository/)." + */ + read_only?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/keys/1 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["deploy-key"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Get a deploy key */ + "repos/get-deploy-key": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + key_id: components["parameters"]["key-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["deploy-key"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a deploy key + * @description Deploy keys are immutable. If you need to update a key, remove the key and create a new one instead. + */ + "repos/delete-deploy-key": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + key_id: components["parameters"]["key-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List labels for a repository + * @description Lists all labels for a repository. 
+ */ + "issues/list-labels-for-repo": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a label + * @description Creates a label for the specified repository with the given name and color. The name and color parameters are required. The color must be a valid [hexadecimal color code](http://www.color-hex.com/). + */ + "issues/create-label": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the label. Emoji can be added to label names, using either native emoji or colon-style markup. For example, typing `:strawberry:` will render the emoji ![:strawberry:](https://github.githubassets.com/images/icons/emoji/unicode/1f353.png ":strawberry:"). For a full list of available emoji and codes, see "[Emoji cheat sheet](https://github.com/ikatyang/emoji-cheat-sheet)." */ + name: string; + /** @description The [hexadecimal color code](http://www.color-hex.com/) for the label, without the leading `#`. */ + color?: string; + /** @description A short description of the label. Must be 100 characters or fewer. */ + description?: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/labels/bug */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["label"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a label + * @description Gets a label using the given name. + */ + "issues/get-label": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + name: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["label"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a label + * @description Deletes a label using the given label name. + */ + "issues/delete-label": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + name: string; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update a label + * @description Updates a label using the given label name. + */ + "issues/update-label": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + name: string; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The new name of the label. Emoji can be added to label names, using either native emoji or colon-style markup. For example, typing `:strawberry:` will render the emoji ![:strawberry:](https://github.githubassets.com/images/icons/emoji/unicode/1f353.png ":strawberry:"). For a full list of available emoji and codes, see "[Emoji cheat sheet](https://github.com/ikatyang/emoji-cheat-sheet)." 
*/ + new_name?: string; + /** @description The [hexadecimal color code](http://www.color-hex.com/) for the label, without the leading `#`. */ + color?: string; + /** @description A short description of the label. Must be 100 characters or fewer. */ + description?: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["label"]; + }; + }; + }; + }; + /** + * List repository languages + * @description Lists languages for the specified repository. The value shown for each language is the number of bytes of code written in that language. + */ + "repos/list-languages": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["language"]; + }; + }; + }; + }; + /** + * Enable Git LFS for a repository + * @description Enables Git LFS for a repository. Access tokens must have the `admin:enterprise` scope. + */ + "repos/enable-lfs-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + /** + * @description We will return a 403 with one of the following messages: + * + * - Git LFS support not enabled because Git LFS is globally disabled. + * - Git LFS support not enabled because Git LFS is disabled for the root repository in the network. + * - Git LFS support not enabled because Git LFS is disabled for . + */ + 403: never; + }; + }; + /** + * Disable Git LFS for a repository + * @description Disables Git LFS for a repository. Access tokens must have the `admin:enterprise` scope. + */ + "repos/disable-lfs-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get the license for a repository + * @description This method returns the contents of the repository's license file, if one is detected. + * + * Similar to [Get repository content](https://docs.github.com/rest/reference/repos#get-repository-content), this method also supports [custom media types](https://docs.github.com/rest/overview/media-types) for retrieving the raw license content or rendered license HTML. + */ + "licenses/get-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["license-content"]; + }; + }; + }; + }; + /** + * Sync a fork branch with the upstream repository + * @description Sync a branch of a forked repository to keep it up-to-date with the upstream repository. + */ + "repos/merge-upstream": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the branch which should be updated to match upstream. 
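+           *
+           * For illustration only (assumed authenticated `octokit` client, placeholder
+           * values); the branch named here is the branch of the fork being updated:
+           *
+           *     await octokit.request("POST /repos/{owner}/{repo}/merge-upstream", {
+           *       owner: "octocat",
+           *       repo: "hello-world-fork",
+           *       branch: "main",
+           *     });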
*/ + branch: string; + }; + }; + }; + responses: { + /** @description The branch has been successfully synced with the upstream repository */ + 200: { + content: { + "application/json": components["schemas"]["merged-upstream"]; + }; + }; + /** @description The branch could not be synced because of a merge conflict */ + 409: never; + /** @description The branch could not be synced for some other reason */ + 422: never; + }; + }; + /** Merge a branch */ + "repos/merge": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the base branch that the head will be merged into. */ + base: string; + /** @description The head to merge. This can be a branch name or a commit SHA1. */ + head: string; + /** @description Commit message to use for the merge commit. If omitted, a default message will be used. */ + commit_message?: string; + }; + }; + }; + responses: { + /** @description Successful Response (The resulting merge commit) */ + 201: { + content: { + "application/json": components["schemas"]["commit"]; + }; + }; + /** @description Response when already merged */ + 204: never; + 403: components["responses"]["forbidden"]; + /** @description Not Found when the base or head does not exist */ + 404: never; + /** @description Conflict when there is a merge conflict */ + 409: never; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List milestones + * @description Lists milestones for a repository. + */ + "issues/list-milestones": { + parameters: { + query?: { + /** @description The state of the milestone. Either `open`, `closed`, or `all`. */ + state?: "open" | "closed" | "all"; + /** @description What to sort results by. Either `due_on` or `completeness`. */ + sort?: "due_on" | "completeness"; + /** @description The direction of the sort. Either `asc` or `desc`. */ + direction?: "asc" | "desc"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["milestone"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a milestone + * @description Creates a milestone. + */ + "issues/create-milestone": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the milestone. */ + title: string; + /** + * @description The state of the milestone. Either `open` or `closed`. + * @default open + * @enum {string} + */ + state?: "open" | "closed"; + /** @description A description of the milestone. */ + description?: string; + /** + * Format: date-time + * @description The milestone due date. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. 
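+             *
+             * For example (an illustrative value, not taken from the spec), a due date of
+             * 31 December 2023 at midnight UTC would be sent as `"2023-12-31T00:00:00Z"`.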
+ */ + due_on?: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/milestones/1 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["milestone"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a milestone + * @description Gets a milestone using the given milestone number. + */ + "issues/get-milestone": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + milestone_number: components["parameters"]["milestone-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["milestone"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a milestone + * @description Deletes a milestone using the given milestone number. + */ + "issues/delete-milestone": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + milestone_number: components["parameters"]["milestone-number"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Update a milestone */ + "issues/update-milestone": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + milestone_number: components["parameters"]["milestone-number"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The title of the milestone. */ + title?: string; + /** + * @description The state of the milestone. Either `open` or `closed`. + * @default open + * @enum {string} + */ + state?: "open" | "closed"; + /** @description A description of the milestone. */ + description?: string; + /** + * Format: date-time + * @description The milestone due date. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + due_on?: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["milestone"]; + }; + }; + }; + }; + /** + * List labels for issues in a milestone + * @description Lists labels for issues in a milestone. + */ + "issues/list-labels-for-milestone": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + milestone_number: components["parameters"]["milestone-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + }; + }; + /** + * List repository notifications for the authenticated user + * @description Lists all notifications for the current user in the specified repository. 
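As a concrete reading of the "issues/create-milestone" request body defined above (only `title` is required; `state` defaults to `open`; `due_on` is ISO 8601), here is a hedged sketch. The `/repos/{owner}/{repo}/milestones` path is assumed rather than taken from this file.

// Sketch: create a milestone. Path and token handling are assumptions.
const res = await fetch("https://api.github.com/repos/octocat/Hello-World/milestones", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    Accept: "application/vnd.github+json",
    "Content-Type": "application/json",
  },
  body: JSON.stringify({
    title: "v1.0",
    state: "open",                  // optional, defaults to "open"
    description: "First stable release",
    due_on: "2024-06-30T00:00:00Z", // ISO 8601 timestamp, as documented above
  }),
});
if (res.status === 201) {
  console.log("Milestone URL:", res.headers.get("Location")); // Location header per the 201 response above
}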
+ */ + "activity/list-repo-notifications-for-authenticated-user": { + parameters: { + query?: { + all?: components["parameters"]["all"]; + participating?: components["parameters"]["participating"]; + since?: components["parameters"]["since"]; + before?: components["parameters"]["before"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["thread"][]; + }; + }; + }; + }; + /** + * Mark repository notifications as read + * @description Marks all notifications in a repository as "read" for the current user. If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List repository notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. + */ + "activity/mark-repo-notifications-as-read": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * Format: date-time + * @description Describes the last point that notifications were checked. Anything updated since this time will not be marked as read. If you omit this parameter, all notifications are marked as read. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. Default: The current timestamp. + */ + last_read_at?: string; + }; + }; + }; + responses: { + /** @description Response */ + 202: { + content: { + "application/json": { + message?: string; + url?: string; + }; + }; + }; + /** @description Reset Content */ + 205: never; + }; + }; + /** + * Get a GitHub Pages site + * @description Gets information about a GitHub Pages site. + * + * A token with the `repo` scope is required. GitHub Apps must have the `pages:read` permission. + */ + "repos/get-pages": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["page"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Update information about a GitHub Pages site + * @description Updates information for a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages). + * + * To use this endpoint, you must be a repository administrator, maintainer, or have the 'manage GitHub Pages settings' permission. A token with the `repo` scope or Pages write permission is required. GitHub Apps must have the `administration:write` and `pages:write` permissions. + */ + "repos/update-information-about-pages-site": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Specify a custom domain for the repository. Sending a `null` value will remove the custom domain. 
For more about custom domains, see "[Using a custom domain with GitHub Pages](https://docs.github.com/articles/using-a-custom-domain-with-github-pages/)." */ + cname?: string | null; + /** @description Specify whether HTTPS should be enforced for the repository. */ + https_enforced?: boolean; + /** + * @description The process by which the GitHub Pages site will be built. `workflow` means that the site is built by a custom GitHub Actions workflow. `legacy` means that the site is built by GitHub when changes are pushed to a specific branch. + * @enum {string} + */ + build_type?: "legacy" | "workflow"; + source?: + | ("gh-pages" | "master" | "master /docs") + | { + /** @description The repository branch used to publish your site's source files. */ + branch: string; + /** + * @description The repository directory that includes the source files for the Pages site. Allowed paths are `/` or `/docs`. + * @enum {string} + */ + path: "/" | "/docs"; + }; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + 400: components["responses"]["bad_request"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Create a GitHub Pages site + * @description Configures a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)." + * + * To use this endpoint, you must be a repository administrator, maintainer, or have the 'manage GitHub Pages settings' permission. A token with the `repo` scope or Pages write permission is required. GitHub Apps must have the `administration:write` and `pages:write` permissions. + */ + "repos/create-pages-site": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The process in which the Page will be built. Possible values are `"legacy"` and `"workflow"`. + * @enum {string} + */ + build_type?: "legacy" | "workflow"; + /** @description The source branch and directory used to publish your Pages site. */ + source?: { + /** @description The repository branch used to publish your site's source files. */ + branch: string; + /** + * @description The repository directory that includes the source files for the Pages site. Allowed paths are `/` or `/docs`. Default: `/` + * @default / + * @enum {string} + */ + path?: "/" | "/docs"; + }; + } | null; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["page"]; + }; + }; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Delete a GitHub Pages site + * @description Deletes a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages). + * + * To use this endpoint, you must be a repository administrator, maintainer, or have the 'manage GitHub Pages settings' permission. A token with the `repo` scope or Pages write permission is required. GitHub Apps must have the `administration:write` and `pages:write` permissions. 
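The Pages types above show that `source` is a union: the update operation accepts either a shorthand string or a `{ branch, path }` object, while the create operation takes only the object form. A sketch of both shapes, assuming the usual `/repos/{owner}/{repo}/pages` path and a token variable not defined in this file:

// Sketch: configure, then update, a GitHub Pages site. Paths and token are assumptions.
const pagesHeaders = {
  Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
  Accept: "application/vnd.github+json",
  "Content-Type": "application/json",
};
// "repos/create-pages-site" (POST): object form of `source`, as typed above.
await fetch("https://api.github.com/repos/octocat/Hello-World/pages", {
  method: "POST",
  headers: pagesHeaders,
  body: JSON.stringify({ build_type: "legacy", source: { branch: "main", path: "/docs" } }),
});
// "repos/update-information-about-pages-site" (PUT): the union also accepts the
// shorthand strings "gh-pages" | "master" | "master /docs".
await fetch("https://api.github.com/repos/octocat/Hello-World/pages", {
  method: "PUT",
  headers: pagesHeaders,
  body: JSON.stringify({ cname: "www.example.com", https_enforced: true, source: "gh-pages" }),
});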
+ */ + "repos/delete-pages-site": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List GitHub Pages builds + * @description Lists builts of a GitHub Pages site. + * + * A token with the `repo` scope is required. GitHub Apps must have the `pages:read` permission. + */ + "repos/list-pages-builds": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["page-build"][]; + }; + }; + }; + }; + /** + * Request a GitHub Pages build + * @description You can request that your site be built from the latest revision on the default branch. This has the same effect as pushing a commit to your default branch, but does not require an additional commit. Manually triggering page builds can be helpful when diagnosing build warnings and failures. + * + * Build requests are limited to one concurrent build per repository and one concurrent build per requester. If you request a build while another is still in progress, the second request will be queued until the first completes. + */ + "repos/request-pages-build": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["page-build-status"]; + }; + }; + }; + }; + /** + * Get latest Pages build + * @description Gets information about the single most recent build of a GitHub Pages site. + * + * A token with the `repo` scope is required. GitHub Apps must have the `pages:read` permission. + */ + "repos/get-latest-pages-build": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["page-build"]; + }; + }; + }; + }; + /** + * Get GitHub Pages build + * @description Gets information about a GitHub Pages build. + * + * A token with the `repo` scope is required. GitHub Apps must have the `pages:read` permission. + */ + "repos/get-pages-build": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + build_id: number; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["page-build"]; + }; + }; + }; + }; + /** + * Create a GitHub Pages deployment + * @description Create a GitHub Pages deployment for a repository. + * + * Users must have write permissions. GitHub Apps must have the `pages:write` permission to use this endpoint. + */ + "repos/create-pages-deployment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The URL of an artifact that contains the .zip or .tar of static assets to deploy. 
The artifact belongs to the repository. */ + artifact_url: string; + /** + * @description The target environment for this GitHub Pages deployment. + * @default github-pages + */ + environment?: string; + /** + * @description A unique string that represents the version of the build for this deployment. + * @default GITHUB_SHA + */ + pages_build_version: string; + /** @description The OIDC token issued by GitHub Actions certifying the origin of the deployment. */ + oidc_token: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["page-deployment"]; + }; + }; + 400: components["responses"]["bad_request"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a DNS health check for GitHub Pages + * @description Gets a health check of the DNS settings for the `CNAME` record configured for a repository's GitHub Pages. + * + * The first request to this endpoint returns a `202 Accepted` status and starts an asynchronous background task to get the results for the domain. After the background task completes, subsequent requests to this endpoint return a `200 OK` status with the health check results in the response. + * + * To use this endpoint, you must be a repository administrator, maintainer, or have the 'manage GitHub Pages settings' permission. A token with the `repo` scope or Pages write permission is required. GitHub Apps must have the `administrative:write` and `pages:write` permissions. + */ + "repos/get-pages-health-check": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["pages-health-check"]; + }; + }; + /** @description Empty response */ + 202: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** @description Custom domains are not available for GitHub Pages */ + 400: never; + 404: components["responses"]["not_found"]; + /** @description There isn't a CNAME for this page */ + 422: never; + }; + }; + /** + * List repository projects + * @description Lists the projects in a repository. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + "projects/list-for-repo": { + parameters: { + query?: { + /** @description Indicates the state of the projects to return. */ + state?: "open" | "closed" | "all"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["project"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Create a repository project + * @description Creates a repository project board. Returns a `410 Gone` status if projects are disabled in the repository or if the repository does not have existing classic projects. 
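The "repos/create-pages-deployment" body above is normally assembled inside a GitHub Actions run. A hedged sketch follows; the endpoint URL, the artifact URL, and the OIDC token value are placeholders, not taken from this file (only GITHUB_SHA is a standard Actions variable).

// Sketch: create a Pages deployment. All placeholder values are marked below.
const DEPLOYMENT_URL =
  "https://api.github.com/repos/octocat/Hello-World/pages/deployment"; // assumed path
const artifactUrl = "<URL of the uploaded .zip/.tar artifact>";        // placeholder
const oidcToken = "<OIDC token issued by the Actions runtime>";        // placeholder
await fetch(DEPLOYMENT_URL, {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    Accept: "application/vnd.github+json",
    "Content-Type": "application/json",
  },
  body: JSON.stringify({
    artifact_url: artifactUrl,
    pages_build_version: process.env.GITHUB_SHA, // schema default is GITHUB_SHA
    environment: "github-pages",                 // schema default
    oidc_token: oidcToken,
  }),
});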
If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + "projects/create-for-repo": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the project. */ + name: string; + /** @description The description of the project. */ + body?: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * List pull requests + * @description Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + "pulls/list": { + parameters: { + query?: { + /** @description Either `open`, `closed`, or `all` to filter by state. */ + state?: "open" | "closed" | "all"; + /** @description Filter pulls by head user or head organization and branch name in the format of `user:ref-name` or `organization:ref-name`. For example: `github:new-script-format` or `octocat:test-branch`. */ + head?: string; + /** @description Filter pulls by base branch name. Example: `gh-pages`. */ + base?: string; + /** @description What to sort results by. `popularity` will sort by the number of comments. `long-running` will sort by date created and will limit the results to pull requests that have been open for more than a month and have had activity within the past month. */ + sort?: "created" | "updated" | "popularity" | "long-running"; + /** @description The direction of the sort. Default: `desc` when sort is `created` or sort is not specified, otherwise `asc`. */ + direction?: "asc" | "desc"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["pull-request-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Create a pull request + * @description Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. 
+ * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. + */ + "pulls/create": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the new pull request. Required unless `issue` is specified. */ + title?: string; + /** @description The name of the branch where your changes are implemented. For cross-repository pull requests in the same network, namespace `head` with a user like this: `username:branch`. */ + head: string; + /** + * Format: repo.nwo + * @description The name of the repository where the changes in the pull request were made. This field is required for cross-repository pull requests if both repositories are owned by the same organization. + * @example octo-org/octo-repo + */ + head_repo?: string; + /** @description The name of the branch you want the changes pulled into. This should be an existing branch on the current repository. You cannot submit a pull request to one repository that requests a merge to a base of another repository. */ + base: string; + /** @description The contents of the pull request. */ + body?: string; + /** @description Indicates whether [maintainers can modify](https://docs.github.com/articles/allowing-changes-to-a-pull-request-branch-created-from-a-fork/) the pull request. */ + maintainer_can_modify?: boolean; + /** @description Indicates whether the pull request is a draft. See "[Draft Pull Requests](https://docs.github.com/articles/about-pull-requests#draft-pull-requests)" in the GitHub Help documentation to learn more. */ + draft?: boolean; + /** + * Format: int64 + * @description An issue in the repository to convert to a pull request. The issue title, body, and comments will become the title, body, and comments on the new pull request. Required unless `title` is specified. + * @example 1 + */ + issue?: number; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/pulls/1347 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["pull-request"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List review comments in a repository + * @description Lists review comments for all pull requests in a repository. By default, review comments are in ascending order by ID. + */ + "pulls/list-review-comments-for-repo": { + parameters: { + query?: { + sort?: "created" | "updated" | "created_at"; + /** @description The direction to sort results. Ignored without `sort` parameter. 
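Reading the "pulls/create" body above: `head` and `base` are required, and `title` is required unless an existing `issue` is converted. A sketch with fetch, assuming the conventional `/repos/{owner}/{repo}/pulls` path and a token variable that this file does not define:

// Sketch: open a draft pull request. Path, token, and names are placeholders.
const res = await fetch("https://api.github.com/repos/octocat/Hello-World/pulls", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    Accept: "application/vnd.github+json",
    "Content-Type": "application/json",
  },
  body: JSON.stringify({
    title: "Amazing new feature",
    head: "octocat:new-feature", // cross-repository PRs use the user:branch form documented above
    base: "main",
    body: "Please pull these awesome changes in!",
    draft: true,
    maintainer_can_modify: true,
  }),
});
const pr = await res.json(); // pull-request schema on 201
console.log(pr.number, res.headers.get("Location"));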
*/ + direction?: "asc" | "desc"; + since?: components["parameters"]["since"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["pull-request-review-comment"][]; + }; + }; + }; + }; + /** + * Get a review comment for a pull request + * @description Provides details for a review comment. + */ + "pulls/get-review-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a review comment for a pull request + * @description Deletes a review comment. + */ + "pulls/delete-review-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Update a review comment for a pull request + * @description Enables you to edit a review comment. + */ + "pulls/update-review-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The text of the reply to the review comment. */ + body: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review-comment"]; + }; + }; + }; + }; + /** + * List reactions for a pull request review comment + * @description List the reactions to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). + */ + "reactions/list-for-pull-request-review-comment": { + parameters: { + query?: { + /** @description Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a pull request review comment. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create reaction for a pull request review comment + * @description Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). A response with an HTTP `200` status means that you already added the reaction type to this pull request review comment. 
+ */ + "reactions/create-for-pull-request-review-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the pull request review comment. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + responses: { + /** @description Reaction exists */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** @description Reaction created */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Delete a pull request comment reaction + * @description **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/pulls/comments/:comment_id/reactions/:reaction_id.` + * + * Delete a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). + */ + "reactions/delete-for-pull-request-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + comment_id: components["parameters"]["comment-id"]; + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get a pull request + * @description Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists details of a pull request by providing its number. + * + * When you get, [create](https://docs.github.com/rest/reference/pulls/#create-a-pull-request), or [edit](https://docs.github.com/rest/reference/pulls#update-a-pull-request) a pull request, GitHub creates a merge commit to test whether the pull request can be automatically merged into the base branch. This test commit is not added to the base branch or the head branch. You can review the status of the test commit using the `mergeable` key. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * The value of the `mergeable` attribute can be `true`, `false`, or `null`. If the value is `null`, then GitHub has started a background job to compute the mergeability. After giving the job time to complete, resubmit the request. When the job finishes, you will see a non-`null` value for the `mergeable` attribute in the response. If `mergeable` is `true`, then `merge_commit_sha` will be the SHA of the _test_ merge commit. + * + * The value of the `merge_commit_sha` attribute changes depending on the state of the pull request. Before merging a pull request, the `merge_commit_sha` attribute holds the SHA of the _test_ merge commit. 
After merging a pull request, the `merge_commit_sha` attribute changes depending on how you merged the pull request: + * + * * If merged as a [merge commit](https://docs.github.com/articles/about-merge-methods-on-github/), `merge_commit_sha` represents the SHA of the merge commit. + * * If merged via a [squash](https://docs.github.com/articles/about-merge-methods-on-github/#squashing-your-merge-commits), `merge_commit_sha` represents the SHA of the squashed commit on the base branch. + * * If [rebased](https://docs.github.com/articles/about-merge-methods-on-github/#rebasing-and-merging-your-commits), `merge_commit_sha` represents the commit that the base branch was updated to. + * + * Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + */ + "pulls/get": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** @description Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. */ + 200: { + content: { + "application/json": components["schemas"]["pull-request"]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Update a pull request + * @description Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + */ + "pulls/update": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The title of the pull request. */ + title?: string; + /** @description The contents of the pull request. */ + body?: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state?: "open" | "closed"; + /** @description The name of the branch you want your changes pulled into. This should be an existing branch on the current repository. You cannot update the base branch on a pull request to point to another repository. */ + base?: string; + /** @description Indicates whether [maintainers can modify](https://docs.github.com/articles/allowing-changes-to-a-pull-request-branch-created-from-a-fork/) the pull request. 
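The "pulls/get" description above prescribes a polling pattern: `mergeable` stays `null` while GitHub computes the test merge, so you re-fetch until it resolves. A sketch of that loop, with the `/repos/{owner}/{repo}/pulls/{pull_number}` path and token handling assumed:

// Sketch of the polling pattern described above. Path/token are assumptions.
async function waitForMergeability(owner: string, repo: string, pull: number) {
  const url = `https://api.github.com/repos/${owner}/${repo}/pulls/${pull}`;
  const headers = {
    Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    Accept: "application/vnd.github+json",
  };
  for (let attempt = 0; attempt < 5; attempt++) {
    const pr = await fetch(url, { headers }).then((r) => r.json());
    if (pr.mergeable !== null) {
      // When true, merge_commit_sha is the SHA of the test merge commit (see above).
      return { mergeable: pr.mergeable as boolean, sha: pr.merge_commit_sha as string | null };
    }
    await new Promise((resolve) => setTimeout(resolve, 2000)); // give the background job time
  }
  throw new Error("mergeability still unknown after polling");
}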
*/ + maintainer_can_modify?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Create a codespace from a pull request + * @description Creates a codespace owned by the authenticated user for the specified pull request. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/create-with-pr-for-authenticated-user": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The requested location for a new codespace. Best efforts are made to respect this upon creation. Assigned by IP if not provided. */ + location?: string; + /** + * @description The geographic area for this codespace. If not specified, the value is assigned by IP. This property replaces `location`, which is being deprecated. + * @enum {string} + */ + geo?: "EuropeWest" | "SoutheastAsia" | "UsEast" | "UsWest"; + /** @description IP for location auto-detection when proxying a request */ + client_ip?: string; + /** @description Machine type to use for this codespace */ + machine?: string; + /** @description Path to devcontainer.json config to use for this codespace */ + devcontainer_path?: string; + /** @description Whether to authorize requested permissions from devcontainer.json */ + multi_repo_permissions_opt_out?: boolean; + /** @description Working directory for this codespace */ + working_directory?: string; + /** @description Time in minutes before codespace stops from inactivity */ + idle_timeout_minutes?: number; + /** @description Display name for this codespace */ + display_name?: string; + /** @description Duration in minutes after codespace has gone idle in which it will be deleted. Must be integer minutes between 0 and 43200 (30 days). */ + retention_period_minutes?: number; + } | null; + }; + }; + responses: { + /** @description Response when the codespace was successfully created */ + 201: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + /** @description Response when the codespace creation partially failed but is being retried in the background */ + 202: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List review comments on a pull request + * @description Lists all review comments for a pull request. By default, review comments are in ascending order by ID. + */ + "pulls/list-review-comments": { + parameters: { + query?: { + sort?: components["parameters"]["sort"]; + /** @description The direction to sort results. Ignored without `sort` parameter. 
*/ + direction?: "asc" | "desc"; + since?: components["parameters"]["since"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["pull-request-review-comment"][]; + }; + }; + }; + }; + /** + * Create a review comment for a pull request + * @description + * Creates a review comment in the pull request diff. To add a regular comment to a pull request timeline, see "[Create an issue comment](https://docs.github.com/rest/reference/issues#create-an-issue-comment)." We recommend creating a review comment using `line`, `side`, and optionally `start_line` and `start_side` if your comment applies to more than one line in the pull request diff. + * + * The `position` parameter is deprecated. If you use `position`, the `line`, `side`, `start_line`, and `start_side` parameters are not required. + * + * **Note:** The position value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "pulls/create-review-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The text of the review comment. */ + body: string; + /** @description The SHA of the commit needing a comment. Not using the latest commit SHA may render your comment outdated if a subsequent commit modifies the line you specify as the `position`. */ + commit_id: string; + /** @description The relative path to the file that necessitates a comment. */ + path: string; + /** + * @deprecated + * @description **This parameter is deprecated. Use `line` instead**. The position in the diff where you want to add a review comment. Note this value is not the same as the line number in the file. For help finding the position value, read the note above. + */ + position?: number; + /** + * @description In a split diff view, the side of the diff that the pull request's changes appear on. Can be `LEFT` or `RIGHT`. Use `LEFT` for deletions that appear in red. Use `RIGHT` for additions that appear in green or unchanged lines that appear in white and are shown for context. For a multi-line comment, side represents whether the last line of the comment range is a deletion or addition. 
For more information, see "[Diff view options](https://docs.github.com/articles/about-comparing-branches-in-pull-requests#diff-view-options)" in the GitHub Help documentation. + * @enum {string} + */ + side?: "LEFT" | "RIGHT"; + /** @description **Required unless using `subject_type:file`**. The line of the blob in the pull request diff that the comment applies to. For a multi-line comment, the last line of the range that your comment applies to. */ + line?: number; + /** @description **Required when using multi-line comments unless using `in_reply_to`**. The `start_line` is the first line in the pull request diff that your multi-line comment applies to. To learn more about multi-line comments, see "[Commenting on a pull request](https://docs.github.com/articles/commenting-on-a-pull-request#adding-line-comments-to-a-pull-request)" in the GitHub Help documentation. */ + start_line?: number; + /** + * @description **Required when using multi-line comments unless using `in_reply_to`**. The `start_side` is the starting side of the diff that the comment applies to. Can be `LEFT` or `RIGHT`. To learn more about multi-line comments, see "[Commenting on a pull request](https://docs.github.com/articles/commenting-on-a-pull-request#adding-line-comments-to-a-pull-request)" in the GitHub Help documentation. See `side` in this table for additional context. + * @enum {string} + */ + start_side?: "LEFT" | "RIGHT" | "side"; + /** + * @description The ID of the review comment to reply to. To find the ID of a review comment with ["List review comments on a pull request"](#list-review-comments-on-a-pull-request). When specified, all parameters other than `body` in the request body are ignored. + * @example 2 + */ + in_reply_to?: number; + /** + * @description The level at which the comment is targeted. + * @enum {string} + */ + subject_type?: "LINE" | "FILE"; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/pulls/comments/1 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["pull-request-review-comment"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Create a reply for a review comment + * @description Creates a reply to a review comment for a pull request. For the `comment_id`, provide the ID of the review comment you are replying to. This must be the ID of a _top-level review comment_, not a reply to that comment. Replies to replies are not supported. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "pulls/create-reply-for-review-comment": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + comment_id: components["parameters"]["comment-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The text of the review comment. 
*/ + body: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/pulls/comments/1 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["pull-request-review-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List commits on a pull request + * @description Lists a maximum of 250 commits for a pull request. To receive a complete commit list for pull requests with more than 250 commits, use the [List commits](https://docs.github.com/rest/reference/repos#list-commits) endpoint. + */ + "pulls/list-commits": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["commit"][]; + }; + }; + }; + }; + /** + * List pull requests files + * @description **Note:** Responses include a maximum of 3000 files. The paginated response returns 30 files per page by default. + */ + "pulls/list-files": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["diff-entry"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Check if a pull request has been merged + * @description Checks if a pull request has been merged into the base branch. The HTTP status of the response indicates whether or not the pull request has been merged; the response body is empty. + */ + "pulls/check-if-merged": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** @description Response if pull request has been merged */ + 204: never; + /** @description Not Found if pull request has not been merged */ + 404: never; + }; + }; + /** + * Merge a pull request + * @description Merges a pull request into the base branch. + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. 
+ */ + "pulls/merge": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description Title for the automatic commit message. */ + commit_title?: string; + /** @description Extra detail to append to automatic commit message. */ + commit_message?: string; + /** @description SHA that pull request head must match to allow merge. */ + sha?: string; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method?: "merge" | "squash" | "rebase"; + } | null; + }; + }; + responses: { + /** @description if merge was successful */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-merge-result"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + /** @description Method Not Allowed if merge cannot be performed */ + 405: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + /** @description Conflict if sha was provided and pull request head did not match */ + 409: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get all requested reviewers for a pull request + * @description Gets the users or teams whose review is requested for a pull request. Once a requested reviewer submits a review, they are no longer considered a requested reviewer. Their review will instead be returned by the [List reviews for a pull request](https://docs.github.com/rest/pulls/reviews#list-reviews-for-a-pull-request) operation. + */ + "pulls/list-requested-reviewers": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["pull-request-review-request"]; + }; + }; + }; + }; + /** + * Request reviewers for a pull request + * @description This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "pulls/request-reviewers": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description An array of user `login`s that will be requested. */ + reviewers?: string[]; + /** @description An array of team `slug`s that will be requested. 
*/ + team_reviewers?: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["pull-request-simple"]; + }; + }; + 403: components["responses"]["forbidden"]; + /** @description Unprocessable Entity if user is not a collaborator */ + 422: never; + }; + }; + /** + * Remove requested reviewers from a pull request + * @description Removes review requests from a pull request for a given set of users and/or teams. + */ + "pulls/remove-requested-reviewers": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of user `login`s that will be removed. */ + reviewers: string[]; + /** @description An array of team `slug`s that will be removed. */ + team_reviewers?: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-simple"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List reviews for a pull request + * @description The list of reviews returns in chronological order. + */ + "pulls/list-reviews": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** @description The list of reviews returns in chronological order. */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["pull-request-review"][]; + }; + }; + }; + }; + /** + * Create a review for a pull request + * @description This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Pull request reviews created in the `PENDING` state are not submitted and therefore do not include the `submitted_at` property in the response. To create a pending review for a pull request, leave the `event` parameter blank. For more information about submitting a `PENDING` review, see "[Submit a review for a pull request](https://docs.github.com/rest/pulls#submit-a-review-for-a-pull-request)." + * + * **Note:** To comment on a specific line in a file, you need to first determine the _position_ of that line in the diff. The GitHub REST API offers the `application/vnd.github.v3.diff` [media type](https://docs.github.com/rest/overview/media-types#commits-commit-comparison-and-pull-requests). To see a pull request diff, add this media type to the `Accept` header of a call to the [single pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) endpoint. + * + * The `position` value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. 
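For "pulls/request-reviewers" above, `reviewers` takes user logins and `team_reviewers` takes team slugs, and a 422 means a requested user is not a collaborator. A sketch with placeholder names and an assumed `/requested_reviewers` path:

// Sketch: request user and team reviews on a PR. All identifiers are placeholders.
const res = await fetch(
  "https://api.github.com/repos/octocat/Hello-World/pulls/1347/requested_reviewers",
  {
    method: "POST",
    headers: {
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
      Accept: "application/vnd.github+json",
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      reviewers: ["hubot", "other_user"], // user logins
      team_reviewers: ["justice-league"], // team slugs
    }),
  },
);
// 201 returns pull-request-simple; 422 if a requested user is not a collaborator.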
The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + */ + "pulls/create-review": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The SHA of the commit that needs a review. Not using the latest commit SHA may render your review comment outdated if a subsequent commit modifies the line you specify as the `position`. Defaults to the most recent commit in the pull request when you do not specify a value. */ + commit_id?: string; + /** @description **Required** when using `REQUEST_CHANGES` or `COMMENT` for the `event` parameter. The body text of the pull request review. */ + body?: string; + /** + * @description The review action you want to perform. The review actions include: `APPROVE`, `REQUEST_CHANGES`, or `COMMENT`. By leaving this blank, you set the review action state to `PENDING`, which means you will need to [submit the pull request review](https://docs.github.com/rest/pulls#submit-a-review-for-a-pull-request) when you are ready. + * @enum {string} + */ + event?: "APPROVE" | "REQUEST_CHANGES" | "COMMENT"; + /** @description Use the following table to specify the location, destination, and contents of the draft review comment. */ + comments?: { + /** @description The relative path to the file that necessitates a review comment. */ + path: string; + /** @description The position in the diff where you want to add a review comment. Note this value is not the same as the line number in the file. For help finding the position value, read the note below. */ + position?: number; + /** @description Text of the review comment. */ + body: string; + /** @example 28 */ + line?: number; + /** @example RIGHT */ + side?: string; + /** @example 26 */ + start_line?: number; + /** @example LEFT */ + start_side?: string; + }[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Get a review for a pull request + * @description Retrieves a pull request review by its ID. + */ + "pulls/get-review": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + review_id: components["parameters"]["review-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Update a review for a pull request + * @description Update the review summary comment with new text. + */ + "pulls/update-review": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + review_id: components["parameters"]["review-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The body text of the pull request review. 
*/ + body: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Delete a pending review for a pull request + * @description Deletes a pull request review that has not been submitted. Submitted reviews cannot be deleted. + */ + "pulls/delete-pending-review": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + review_id: components["parameters"]["review-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * List comments for a pull request review + * @description List comments for a specific pull request review. + */ + "pulls/list-comments-for-review": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + review_id: components["parameters"]["review-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["review-comment"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Dismiss a review for a pull request + * @description **Note:** To dismiss a pull request review on a [protected branch](https://docs.github.com/rest/reference/repos#branches), you must be a repository administrator or be included in the list of people or teams who can dismiss pull request reviews. + */ + "pulls/dismiss-review": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + review_id: components["parameters"]["review-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The message for the pull request review dismissal */ + message: string; + /** + * @example "DISMISS" + * @enum {string} + */ + event?: "DISMISS"; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Submit a review for a pull request + * @description Submits a pending review for a pull request. For more information about creating a pending review for a pull request, see "[Create a review for a pull request](https://docs.github.com/rest/pulls#create-a-review-for-a-pull-request)." + */ + "pulls/submit-review": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + review_id: components["parameters"]["review-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The body text of the pull request review */ + body?: string; + /** + * @description The review action you want to perform. 
The review actions include: `APPROVE`, `REQUEST_CHANGES`, or `COMMENT`. When you leave this blank, the API returns _HTTP 422 (Unrecognizable entity)_ and sets the review action state to `PENDING`, which means you will need to re-submit the pull request review using a review action. + * @enum {string} + */ + event: "APPROVE" | "REQUEST_CHANGES" | "COMMENT"; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Update a pull request branch + * @description Updates the pull request branch with the latest upstream changes by merging HEAD from the base branch into the pull request branch. + */ + "pulls/update-branch": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + pull_number: components["parameters"]["pull-number"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The expected SHA of the pull request's HEAD ref. This is the most recent commit on the pull request's branch. If the expected SHA does not match the pull request's HEAD, you will receive a `422 Unprocessable Entity` status. You can use the "[List commits](https://docs.github.com/rest/reference/repos#list-commits)" endpoint to find the most recent commit SHA. Default: SHA of the pull request's current HEAD ref. */ + expected_head_sha?: string; + } | null; + }; + }; + responses: { + /** @description Response */ + 202: { + content: { + "application/json": { + message?: string; + url?: string; + }; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a repository README + * @description Gets the preferred README for a repository. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + "repos/get-readme": { + parameters: { + query?: { + /** @description The name of the commit/branch/tag. Default: the repository’s default branch. */ + ref?: string; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["content-file"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a repository README for a directory + * @description Gets the README from a repository directory. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + "repos/get-readme-in-directory": { + parameters: { + query?: { + /** @description The name of the commit/branch/tag. Default: the repository’s default branch. 
*/ + ref?: string; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description The alternate path to look for a README file */ + dir: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["content-file"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List releases + * @description This returns a list of releases, which does not include regular Git tags that have not been associated with a release. To get a list of Git tags, use the [Repository Tags API](https://docs.github.com/rest/reference/repos#list-repository-tags). + * + * Information about published releases are available to everyone. Only users with push access will receive listings for draft releases. + */ + "repos/list-releases": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["release"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a release + * @description Users with push access to the repository can create a release. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "repos/create-release": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the tag. */ + tag_name: string; + /** @description Specifies the commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. Unused if the Git tag already exists. Default: the repository's default branch. */ + target_commitish?: string; + /** @description The name of the release. */ + name?: string; + /** @description Text describing the contents of the tag. */ + body?: string; + /** + * @description `true` to create a draft (unpublished) release, `false` to create a published one. + * @default false + */ + draft?: boolean; + /** + * @description `true` to identify the release as a prerelease. `false` to identify the release as a full release. + * @default false + */ + prerelease?: boolean; + /** @description If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. For more information, see "[Managing categories for discussions in your repository](https://docs.github.com/discussions/managing-discussions-for-your-community/managing-categories-for-discussions-in-your-repository)." */ + discussion_category_name?: string; + /** + * @description Whether to automatically generate the name and body for this release. 
If `name` is specified, the specified name will be used; otherwise, a name will be automatically generated. If `body` is specified, the body will be pre-pended to the automatically generated notes. + * @default false + */ + generate_release_notes?: boolean; + /** + * @description Specifies whether this release should be set as the latest release for the repository. Drafts and prereleases cannot be set as latest. Defaults to `true` for newly published releases. `legacy` specifies that the latest release should be determined based on the release creation date and higher semantic version. + * @default true + * @enum {string} + */ + make_latest?: "true" | "false" | "legacy"; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/releases/1 */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["release"]; + }; + }; + /** @description Not Found if the discussion category name is invalid */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a release asset + * @description To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response. + */ + "repos/get-release-asset": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + asset_id: components["parameters"]["asset-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["release-asset"]; + }; + }; + 302: components["responses"]["found"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Delete a release asset */ + "repos/delete-release-asset": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + asset_id: components["parameters"]["asset-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update a release asset + * @description Users with push access to the repository can edit a release asset. + */ + "repos/update-release-asset": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + asset_id: components["parameters"]["asset-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The file name of the asset. */ + name?: string; + /** @description An alternate short description of the asset. Used in place of the filename. */ + label?: string; + /** @example "uploaded" */ + state?: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["release-asset"]; + }; + }; + }; + }; + /** + * Generate release notes content for a release + * @description Generate a name and body describing a [release](https://docs.github.com/rest/reference/repos#releases). The body content will be markdown formatted and contain information like the changes since last release and users who contributed. The generated release notes are not saved anywhere. They are intended to be generated and used when creating a new release. 
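+   *
+   * A minimal usage sketch, assuming an authenticated `@octokit/core` client and the
+   * `POST /repos/{owner}/{repo}/releases/generate-notes` route from the public REST docs
+   * ("octocat"/"Hello-World" and the tag names are placeholder values):
+   *
+   *     import { Octokit } from "@octokit/core";
+   *     const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+   *     // Preview the generated name/body without creating a release.
+   *     const { data: notes } = await octokit.request(
+   *       "POST /repos/{owner}/{repo}/releases/generate-notes",
+   *       { owner: "octocat", repo: "Hello-World", tag_name: "v1.1.0", previous_tag_name: "v1.0.0" }
+   *     );
+   *     console.log(notes.name, notes.body);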
+ */ + "repos/generate-release-notes": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The tag name for the release. This can be an existing tag or a new one. */ + tag_name: string; + /** @description Specifies the commitish value that will be the target for the release's tag. Required if the supplied tag_name does not reference an existing tag. Ignored if the tag_name already exists. */ + target_commitish?: string; + /** @description The name of the previous tag to use as the starting point for the release notes. Use to manually specify the range for the set of changes considered as part this release. */ + previous_tag_name?: string; + /** @description Specifies a path to a file in the repository containing configuration settings used for generating the release notes. If unspecified, the configuration file located in the repository at '.github/release.yml' or '.github/release.yaml' will be used. If that is not present, the default configuration will be used. */ + configuration_file_path?: string; + }; + }; + }; + responses: { + /** @description Name and body of generated release notes */ + 200: { + content: { + "application/json": components["schemas"]["release-notes-content"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get the latest release + * @description View the latest published full release for the repository. + * + * The latest release is the most recent non-prerelease, non-draft release, sorted by the `created_at` attribute. The `created_at` attribute is the date of the commit used for the release, and not the date when the release was drafted or published. + */ + "repos/get-latest-release": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["release"]; + }; + }; + }; + }; + /** + * Get a release by tag name + * @description Get a published release with the specified tag. + */ + "repos/get-release-by-tag": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description tag parameter */ + tag: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["release"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get a release + * @description **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). + */ + "repos/get-release": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + release_id: components["parameters"]["release-id"]; + }; + }; + responses: { + /** @description **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). */ + 200: { + content: { + "application/json": components["schemas"]["release"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a release + * @description Users with push access to the repository can delete a release. 
+ */ + "repos/delete-release": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + release_id: components["parameters"]["release-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update a release + * @description Users with push access to the repository can edit a release. + */ + "repos/update-release": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + release_id: components["parameters"]["release-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The name of the tag. */ + tag_name?: string; + /** @description Specifies the commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. Unused if the Git tag already exists. Default: the repository's default branch. */ + target_commitish?: string; + /** @description The name of the release. */ + name?: string; + /** @description Text describing the contents of the tag. */ + body?: string; + /** @description `true` makes the release a draft, and `false` publishes the release. */ + draft?: boolean; + /** @description `true` to identify the release as a prerelease, `false` to identify the release as a full release. */ + prerelease?: boolean; + /** + * @description Specifies whether this release should be set as the latest release for the repository. Drafts and prereleases cannot be set as latest. Defaults to `true` for newly published releases. `legacy` specifies that the latest release should be determined based on the release creation date and higher semantic version. + * @default true + * @enum {string} + */ + make_latest?: "true" | "false" | "legacy"; + /** @description If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. If there is already a discussion linked to the release, this parameter is ignored. For more information, see "[Managing categories for discussions in your repository](https://docs.github.com/discussions/managing-discussions-for-your-community/managing-categories-for-discussions-in-your-repository)." */ + discussion_category_name?: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["release"]; + }; + }; + /** @description Not Found if the discussion category name is invalid */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** List release assets */ + "repos/list-release-assets": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + release_id: components["parameters"]["release-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["release-asset"][]; + }; + }; + }; + }; + /** + * Upload a release asset + * @description This endpoint makes use of [a Hypermedia relation](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) to determine which URL to access. The endpoint you call to upload release assets is specific to your release. 
Use the `upload_url` returned in + * the response of the [Create a release endpoint](https://docs.github.com/rest/releases/releases#create-a-release) to upload a release asset. + * + * You need to use an HTTP client which supports [SNI](http://en.wikipedia.org/wiki/Server_Name_Indication) to make calls to this endpoint. + * + * Most libraries will set the required `Content-Length` header automatically. Use the required `Content-Type` header to provide the media type of the asset. For a list of media types, see [Media Types](https://www.iana.org/assignments/media-types/media-types.xhtml). For example: + * + * `application/zip` + * + * GitHub expects the asset data in its raw binary form, rather than JSON. You will send the raw binary content of the asset as the request body. Everything else about the endpoint is the same as the rest of the API. For example, + * you'll still need to pass your authentication to be able to upload an asset. + * + * When an upstream failure occurs, you will receive a `502 Bad Gateway` status. This may leave an empty asset with a state of `starter`. It can be safely deleted. + * + * **Notes:** + * * GitHub renames asset filenames that have special characters, non-alphanumeric characters, and leading or trailing periods. The "[List assets for a release](https://docs.github.com/rest/reference/repos#list-assets-for-a-release)" + * endpoint lists the renamed filenames. For more information and help, contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + * * To find the `release_id` query the [`GET /repos/{owner}/{repo}/releases/latest` endpoint](https://docs.github.com/rest/releases/releases#get-the-latest-release). + * * If you upload an asset with the same filename as another uploaded asset, you'll receive an error and must delete the old file before you can re-upload the new asset. + */ + "repos/upload-release-asset": { + parameters: { + query: { + name: string; + label?: string; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + release_id: components["parameters"]["release-id"]; + }; + }; + requestBody?: { + content: { + "application/octet-stream": string; + }; + }; + responses: { + /** @description Response for successful upload */ + 201: { + content: { + "application/json": components["schemas"]["release-asset"]; + }; + }; + /** @description Response if you upload an asset with the same filename as another uploaded asset */ + 422: never; + }; + }; + /** + * List reactions for a release + * @description List the reactions to a [release](https://docs.github.com/rest/reference/repos#releases). + */ + "reactions/list-for-release": { + parameters: { + query?: { + /** @description Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a release. 
*/ + content?: "+1" | "laugh" | "heart" | "hooray" | "rocket" | "eyes"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + release_id: components["parameters"]["release-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create reaction for a release + * @description Create a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). A response with a `Status: 200 OK` means that you already added the reaction type to this release. + */ + "reactions/create-for-release": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + release_id: components["parameters"]["release-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the release. + * @enum {string} + */ + content: "+1" | "laugh" | "heart" | "hooray" | "rocket" | "eyes"; + }; + }; + }; + responses: { + /** @description Reaction exists */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** @description Reaction created */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Delete a release reaction + * @description **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/releases/:release_id/reactions/:reaction_id`. + * + * Delete a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). + */ + "reactions/delete-for-release": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + release_id: components["parameters"]["release-id"]; + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Get rules for a branch + * @description Returns all rules that apply to the specified branch. + */ + "repos/get-branch-rules": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-rule"][]; + }; + }; + }; + }; + /** + * Get all repository rulesets + * @description Get all the rulesets for a repository. 
+ */ + "repos/get-repo-rulesets": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + /** @description Include rulesets configured at higher levels that apply to this repository */ + includes_parents?: boolean; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-ruleset"][]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Create a repository ruleset + * @description Create a ruleset for a repository. + */ + "repos/create-repo-ruleset": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + /** @description Request body */ + requestBody: { + content: { + "application/json": { + /** @description The name of the ruleset. */ + name: string; + /** + * @description The target of the ruleset. + * @enum {string} + */ + target?: "branch" | "tag"; + enforcement: components["schemas"]["repository-rule-enforcement"]; + /** + * @description The permission level required to bypass this ruleset. "repository" allows those with bypass permission at the repository level to bypass. "organization" allows those with bypass permission at the organization level to bypass. "none" prevents anyone from bypassing. + * @enum {string} + */ + bypass_mode?: "none" | "repository" | "organization"; + /** @description The actors that can bypass the rules in this ruleset */ + bypass_actors?: components["schemas"]["repository-ruleset-bypass-actor"][]; + conditions?: components["schemas"]["repository-ruleset-conditions"]; + /** @description An array of rules within the ruleset. */ + rules?: components["schemas"]["repository-rule"][]; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["repository-ruleset"]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Get a repository ruleset + * @description Get a ruleset for a repository. + */ + "repos/get-repo-ruleset": { + parameters: { + query?: { + /** @description Include rulesets configured at higher levels that apply to this repository */ + includes_parents?: boolean; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description The ID of the ruleset. */ + ruleset_id: number; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-ruleset"]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Update a repository ruleset + * @description Update a ruleset for a repository. + */ + "repos/update-repo-ruleset": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description The ID of the ruleset. */ + ruleset_id: number; + }; + }; + /** @description Request body */ + requestBody?: { + content: { + "application/json": { + /** @description The name of the ruleset. */ + name?: string; + /** + * @description The target of the ruleset. 
+ * @enum {string} + */ + target?: "branch" | "tag"; + enforcement?: components["schemas"]["repository-rule-enforcement"]; + /** + * @description The permission level required to bypass this ruleset. "repository" allows those with bypass permission at the repository level to bypass. "organization" allows those with bypass permission at the organization level to bypass. "none" prevents anyone from bypassing. + * @enum {string} + */ + bypass_mode?: "none" | "repository" | "organization"; + /** @description The actors that can bypass the rules in this ruleset */ + bypass_actors?: components["schemas"]["repository-ruleset-bypass-actor"][]; + conditions?: components["schemas"]["repository-ruleset-conditions"]; + /** @description An array of rules within the ruleset. */ + rules?: components["schemas"]["repository-rule"][]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-ruleset"]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Delete a repository ruleset + * @description Delete a ruleset for a repository. + */ + "repos/delete-repo-ruleset": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + /** @description The ID of the ruleset. */ + ruleset_id: number; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * List secret scanning alerts for a repository + * @description Lists secret scanning alerts for an eligible repository, from newest to oldest. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + "secret-scanning/list-alerts-for-repo": { + parameters: { + query?: { + state?: components["parameters"]["secret-scanning-alert-state"]; + secret_type?: components["parameters"]["secret-scanning-alert-secret-type"]; + resolution?: components["parameters"]["secret-scanning-alert-resolution"]; + sort?: components["parameters"]["secret-scanning-alert-sort"]; + direction?: components["parameters"]["direction"]; + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + before?: components["parameters"]["secret-scanning-pagination-before-org-repo"]; + after?: components["parameters"]["secret-scanning-pagination-after-org-repo"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["secret-scanning-alert"][]; + }; + }; + /** @description Repository is public or secret scanning is disabled for the repository */ + 404: never; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Get a secret scanning alert + * @description Gets a single secret scanning alert detected in an eligible repository. 
+ * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + "secret-scanning/get-alert": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + alert_number: components["parameters"]["alert-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["secret-scanning-alert"]; + }; + }; + 304: components["responses"]["not_modified"]; + /** @description Repository is public, or secret scanning is disabled for the repository, or the resource is not found */ + 404: never; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Update a secret scanning alert + * @description Updates the status of a secret scanning alert in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` write permission to use this endpoint. + */ + "secret-scanning/update-alert": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + alert_number: components["parameters"]["alert-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + state: components["schemas"]["secret-scanning-alert-state"]; + resolution?: components["schemas"]["secret-scanning-alert-resolution"]; + resolution_comment?: components["schemas"]["secret-scanning-alert-resolution-comment"]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["secret-scanning-alert"]; + }; + }; + /** @description Bad request, resolution comment is invalid or the resolution was not changed. */ + 400: never; + /** @description Repository is public, or secret scanning is disabled for the repository, or the resource is not found */ + 404: never; + /** @description State does not match the resolution or resolution comment */ + 422: never; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List locations for a secret scanning alert + * @description Lists all locations for a given secret scanning alert for an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. 
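+   *
+   * A minimal sketch, assuming an `@octokit/core`-style client and the
+   * `GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations` route
+   * (owner, repo, and the alert number are placeholder values):
+   *
+   *     import { Octokit } from "@octokit/core";
+   *     const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+   *     const { data: locations } = await octokit.request(
+   *       "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations",
+   *       { owner: "octocat", repo: "Hello-World", alert_number: 42, per_page: 100 }
+   *     );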
+ */ + "secret-scanning/list-locations-for-alert": { + parameters: { + query?: { + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + alert_number: components["parameters"]["alert-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["secret-scanning-location"][]; + }; + }; + /** @description Repository is public, or secret scanning is disabled for the repository, or the resource is not found */ + 404: never; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List repository security advisories + * @description Lists security advisories in a repository. + * You must authenticate using an access token with the `repo` scope or `repository_advisories:read` permission + * in order to get published security advisories in a private repository, or any unpublished security advisories that you have access to. + * + * You can access unpublished security advisories from a repository if you are a security manager or administrator of that repository, or if you are a collaborator on any security advisory. + */ + "security-advisories/list-repository-advisories": { + parameters: { + query?: { + direction?: components["parameters"]["direction"]; + /** @description The property to sort the results by. */ + sort?: "created" | "updated" | "published"; + before?: components["parameters"]["pagination-before"]; + after?: components["parameters"]["pagination-after"]; + /** @description Number of advisories to return per page. */ + per_page?: number; + /** @description Filter by state of the repository advisories. Only advisories of this state will be returned. */ + state?: "triage" | "draft" | "published" | "closed"; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-advisory"][]; + }; + }; + 400: components["responses"]["bad_request"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a repository security advisory + * @description Creates a new repository security advisory. + * You must authenticate using an access token with the `repo` scope or `repository_advisories:write` permission to use this endpoint. + * + * In order to create a draft repository security advisory, you must be a security manager or administrator of that repository. + */ + "security-advisories/create-repository-advisory": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["repository-advisory-create"]; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["repository-advisory"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Privately report a security vulnerability + * @description Report a security vulnerability to the maintainers of the repository. 
+ * See "[Privately reporting a security vulnerability](https://docs.github.com/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability)" for more information about private vulnerability reporting. + */ + "security-advisories/create-private-vulnerability-report": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["private-vulnerability-report-create"]; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["repository-advisory"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a repository security advisory + * @description Get a repository security advisory using its GitHub Security Advisory (GHSA) identifier. + * You can access any published security advisory on a public repository. + * You must authenticate using an access token with the `repo` scope or `repository_advisories:read` permission + * in order to get a published security advisory in a private repository, or any unpublished security advisory that you have access to. + * + * You can access an unpublished security advisory from a repository if you are a security manager or administrator of that repository, or if you are a + * collaborator on the security advisory. + */ + "security-advisories/get-repository-advisory": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + ghsa_id: components["parameters"]["ghsa_id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-advisory"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Update a repository security advisory + * @description Update a repository security advisory using its GitHub Security Advisory (GHSA) identifier. + * You must authenticate using an access token with the `repo` scope or `repository_advisories:write` permission to use this endpoint. + * + * In order to update any security advisory, you must be a security manager or administrator of that repository, + * or a collaborator on the repository security advisory. + */ + "security-advisories/update-repository-advisory": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + ghsa_id: components["parameters"]["ghsa_id"]; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["repository-advisory-update"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-advisory"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + /** @description Validation failed, or the endpoint has been spammed. */ + 422: { + content: { + "application/json": components["schemas"]["validation-error"]; + }; + }; + }; + }; + /** + * List stargazers + * @description Lists the people that have starred the repository. 
+ * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + "activity/list-stargazers-for-repo": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": + | components["schemas"]["simple-user"][] + | components["schemas"]["stargazer"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get the weekly commit activity + * @description Returns a weekly aggregate of the number of additions and deletions pushed to a repository. + */ + "repos/get-code-frequency-stats": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Returns a weekly aggregate of the number of additions and deletions pushed to a repository. */ + 200: { + content: { + "application/json": components["schemas"]["code-frequency-stat"][]; + }; + }; + 202: components["responses"]["accepted"]; + 204: components["responses"]["no_content"]; + }; + }; + /** + * Get the last year of commit activity + * @description Returns the last year of commit activity grouped by week. The `days` array is a group of commits per day, starting on `Sunday`. + */ + "repos/get-commit-activity-stats": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-activity"][]; + }; + }; + 202: components["responses"]["accepted"]; + 204: components["responses"]["no_content"]; + }; + }; + /** + * Get all contributor commit activity + * @description + * Returns the `total` number of commits authored by the contributor. In addition, the response includes a Weekly Hash (`weeks` array) with the following information: + * + * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). + * * `a` - Number of additions + * * `d` - Number of deletions + * * `c` - Number of commits + */ + "repos/get-contributors-stats": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["contributor-activity"][]; + }; + }; + 202: components["responses"]["accepted"]; + 204: components["responses"]["no_content"]; + }; + }; + /** + * Get the weekly commit count + * @description Returns the total commit counts for the `owner` and total commit counts in `all`. `all` is everyone combined, including the `owner` in the last 52 weeks. If you'd like to get the commit counts for non-owners, you can subtract `owner` from `all`. + * + * The array order is oldest week (index 0) to most recent week. + * + * The most recent week is seven days ago at UTC midnight to today at UTC midnight. 
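+   *
+   * A minimal sketch, assuming an `@octokit/core`-style client and the
+   * `GET /repos/{owner}/{repo}/stats/participation` route (repository values are placeholders):
+   *
+   *     import { Octokit } from "@octokit/core";
+   *     const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+   *     const { data } = await octokit.request("GET /repos/{owner}/{repo}/stats/participation", {
+   *       owner: "octocat",
+   *       repo: "Hello-World",
+   *     });
+   *     // Weekly commit counts by non-owners, per the subtraction described above.
+   *     const others = data.all.map((total, week) => total - data.owner[week]);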
+ */ + "repos/get-participation-stats": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description The array order is oldest week (index 0) to most recent week. */ + 200: { + content: { + "application/json": components["schemas"]["participation-stats"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get the hourly commit count for each day + * @description Each array contains the day number, hour number, and number of commits: + * + * * `0-6`: Sunday - Saturday + * * `0-23`: Hour of day + * * Number of commits + * + * For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. + */ + "repos/get-punch-card-stats": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. */ + 200: { + content: { + "application/json": components["schemas"]["code-frequency-stat"][]; + }; + }; + 204: components["responses"]["no_content"]; + }; + }; + /** + * Create a commit status + * @description Users with push access in a repository can create commit statuses for a given SHA. + * + * Note: there is a limit of 1000 statuses per `sha` and `context` within a repository. Attempts to create more than 1000 statuses will result in a validation error. + */ + "repos/create-commit-status": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + sha: string; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The state of the status. + * @enum {string} + */ + state: "error" | "failure" | "pending" | "success"; + /** + * @description The target URL to associate with this status. This URL will be linked from the GitHub UI to allow users to easily see the source of the status. + * For example, if your continuous integration system is posting build status, you would want to provide the deep link for the build output for this specific SHA: + * `http://ci.example.com/user/repo/build/sha` + */ + target_url?: string | null; + /** @description A short description of the status. */ + description?: string | null; + /** + * @description A string label to differentiate this status from the status of other systems. This field is case-insensitive. + * @default default + */ + context?: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["status"]; + }; + }; + }; + }; + /** + * List watchers + * @description Lists the people watching the specified repository. 
+ */ + "activity/list-watchers-for-repo": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + /** Get a repository subscription */ + "activity/get-repo-subscription": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description if you subscribe to the repository */ + 200: { + content: { + "application/json": components["schemas"]["repository-subscription"]; + }; + }; + 403: components["responses"]["forbidden"]; + /** @description Not Found if you don't subscribe to the repository */ + 404: never; + }; + }; + /** + * Set a repository subscription + * @description If you would like to watch a repository, set `subscribed` to `true`. If you would like to ignore notifications made within a repository, set `ignored` to `true`. If you would like to stop watching a repository, [delete the repository's subscription](https://docs.github.com/rest/reference/activity#delete-a-repository-subscription) completely. + */ + "activity/set-repo-subscription": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description Determines if notifications should be received from this repository. */ + subscribed?: boolean; + /** @description Determines if all notifications should be blocked from this repository. */ + ignored?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-subscription"]; + }; + }; + }; + }; + /** + * Delete a repository subscription + * @description This endpoint should only be used to stop watching a repository. To control whether or not you wish to receive notifications from a repository, [set the repository's subscription manually](https://docs.github.com/rest/reference/activity#set-a-repository-subscription). + */ + "activity/delete-repo-subscription": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** List repository tags */ + "repos/list-tags": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["tag"][]; + }; + }; + }; + }; + /** + * List tag protection states for a repository + * @description This returns the tag protection states of a repository. + * + * This information is only available to repository administrators. 
+ */ + "repos/list-tag-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["tag-protection"][]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a tag protection state for a repository + * @description This creates a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + "repos/create-tag-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description An optional glob pattern to match against when enforcing tag protection. */ + pattern: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["tag-protection"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a tag protection state for a repository + * @description This deletes a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + "repos/delete-tag-protection": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + tag_protection_id: components["parameters"]["tag-protection-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Download a repository archive (tar) + * @description Gets a redirect URL to download a tar archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `main`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * **Note**: For private repositories, these links are temporary and expire after five minutes. + */ + "repos/download-tarball-archive": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + ref: string; + }; + }; + responses: { + /** @description Response */ + 302: never; + }; + }; + /** + * List repository teams + * @description Lists the teams that have access to the specified repository and that are also visible to the authenticated user. + * + * For a public repository, a team is listed only if that team added the public repository explicitly. + * + * Personal access tokens require the following scopes: + * * `public_repo` to call this endpoint on a public repository + * * `repo` to call this endpoint on a private repository (this scope also includes public repositories) + * + * This endpoint is not compatible with fine-grained personal access tokens. 
+ */ + "repos/list-teams": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Get all repository topics */ + "repos/get-all-topics": { + parameters: { + query?: { + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["topic"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Replace all repository topics */ + "repos/replace-all-topics": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of topics to add to the repository. Pass one or more topics to _replace_ the set of existing topics. Send an empty array (`[]`) to clear all topics from the repository. **Note:** Topic `names` cannot contain uppercase letters. */ + names: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["topic"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Get repository clones + * @description Get the total number of clones and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. + */ + "repos/get-clones": { + parameters: { + query?: { + per?: components["parameters"]["per"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["clone-traffic"]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Get top referral paths + * @description Get the top 10 popular contents over the last 14 days. + */ + "repos/get-top-paths": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["content-traffic"][]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Get top referral sources + * @description Get the top 10 referrers over the last 14 days. + */ + "repos/get-top-referrers": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["referrer-traffic"][]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Get page views + * @description Get the total number of views and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. 
+ */ + "repos/get-views": { + parameters: { + query?: { + per?: components["parameters"]["per"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["view-traffic"]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Transfer a repository + * @description A transfer request will need to be accepted by the new owner when transferring a personal repository to another user. The response will contain the original `owner`, and the transfer will continue asynchronously. For more details on the requirements to transfer personal and organization-owned repositories, see [about repository transfers](https://docs.github.com/articles/about-repository-transfers/). + */ + "repos/transfer": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The username or organization name the repository will be transferred to. */ + new_owner: string; + /** @description The new name to be given to the repository. */ + new_name?: string; + /** @description ID of the team or teams to add to the repository. Teams can only be added to organization-owned repositories. */ + team_ids?: number[]; + }; + }; + }; + responses: { + /** @description Response */ + 202: { + content: { + "application/json": components["schemas"]["minimal-repository"]; + }; + }; + }; + }; + /** + * Check if vulnerability alerts are enabled for a repository + * @description Shows whether dependency alerts are enabled or disabled for a repository. The authenticated user must have admin read access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/articles/about-security-alerts-for-vulnerable-dependencies)". + */ + "repos/check-vulnerability-alerts": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response if repository is enabled with vulnerability alerts */ + 204: never; + /** @description Not Found if repository is not enabled with vulnerability alerts */ + 404: never; + }; + }; + /** + * Enable vulnerability alerts + * @description Enables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/articles/about-security-alerts-for-vulnerable-dependencies)". + */ + "repos/enable-vulnerability-alerts": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Disable vulnerability alerts + * @description Disables dependency alerts and the dependency graph for a repository. + * The authenticated user must have admin access to the repository. For more information, + * see "[About security alerts for vulnerable dependencies](https://docs.github.com/articles/about-security-alerts-for-vulnerable-dependencies)". 
+ */ + "repos/disable-vulnerability-alerts": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Download a repository archive (zip) + * @description Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `main`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * + * **Note**: For private repositories, these links are temporary and expire after five minutes. If the repository is empty, you will receive a 404 when you follow the redirect. + */ + "repos/download-zipball-archive": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + ref: string; + }; + }; + responses: { + /** @description Response */ + 302: never; + }; + }; + /** + * Create a repository using a template + * @description Creates a new repository using a repository template. Use the `template_owner` and `template_repo` route parameters to specify the repository to use as the template. If the repository is not public, the authenticated user must own or be a member of an organization that owns the repository. To check if a repository is available to use as a template, get the repository's information using the [Get a repository](https://docs.github.com/rest/reference/repos#get-a-repository) endpoint and check that the `is_template` key is `true`. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository + */ + "repos/create-using-template": { + parameters: { + path: { + template_owner: string; + template_repo: string; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The organization or person who will own the new repository. To create a new repository in an organization, the authenticated user must be a member of the specified organization. */ + owner?: string; + /** @description The name of the new repository. */ + name: string; + /** @description A short description of the new repository. */ + description?: string; + /** + * @description Set to `true` to include the directory structure and files from all branches in the template repository, and not just the default branch. Default: `false`. + * @default false + */ + include_all_branches?: boolean; + /** + * @description Either `true` to create a new private repository or `false` to create a new public one. + * @default false + */ + private?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["repository"]; + }; + }; + }; + }; + /** + * List public repositories + * @description Lists all public repositories in the order that they were created. + * + * Note: + * - For GitHub Enterprise Server, this endpoint will only list repositories available to all users on the enterprise. + * - Pagination is powered exclusively by the `since` parameter. 
Use the [Link header](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api#using-link-headers) to get the URL for the next page of repositories. + */ + "repos/list-public": { + parameters: { + query?: { + since?: components["parameters"]["since-repo"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + /** @example ; rel="next" */ + Link?: string; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List environment secrets + * @description Lists all secrets available in an environment without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + "actions/list-environment-secrets": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + repository_id: components["parameters"]["repository-id"]; + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["actions-secret"][]; + }; + }; + }; + }; + }; + /** + * Get an environment public key + * @description Get the public key for an environment, which you need to encrypt environment secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + "actions/get-environment-public-key": { + parameters: { + path: { + repository_id: components["parameters"]["repository-id"]; + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-public-key"]; + }; + }; + }; + }; + /** + * Get an environment secret + * @description Gets a single environment secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + "actions/get-environment-secret": { + parameters: { + path: { + repository_id: components["parameters"]["repository-id"]; + environment_name: components["parameters"]["environment-name"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-secret"]; + }; + }; + }; + }; + /** + * Create or update an environment secret + * @description Creates or updates an environment secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. 
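Creating or updating an environment secret starts with `actions/get-environment-public-key`, defined just above. A sketch of that lookup, assuming the route `/repositories/{repository_id}/environments/{environment_name}/secrets/public-key` and a `./types` import path; the returned key and `key_id` feed the encryption step shown further below.

```ts
// Sketch: fetch the public key needed to encrypt environment secrets.
// Route, token and "./types" import path are assumptions for illustration.
import type { components } from "./types";

type ActionsPublicKey = components["schemas"]["actions-public-key"];

async function getEnvironmentPublicKey(
  repositoryId: number,
  environmentName: string,
): Promise<ActionsPublicKey> {
  const res = await fetch(
    `https://api.github.com/repositories/${repositoryId}/environments/${environmentName}/secrets/public-key`,
    {
      headers: {
        Accept: "application/vnd.github+json",
        Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
      },
    },
  );
  if (!res.ok) throw new Error(`Public key lookup failed: ${res.status}`);
  return (await res.json()) as ActionsPublicKey;
}
```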
+ * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "actions/create-or-update-environment-secret": { + parameters: { + path: { + repository_id: components["parameters"]["repository-id"]; + environment_name: components["parameters"]["environment-name"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an environment public key](https://docs.github.com/rest/reference/actions#get-an-environment-public-key) endpoint. */ + encrypted_value: string; + /** @description ID of the key you used to encrypt the secret. 
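The Node.js example above stops at printing the encrypted value; the request body for this operation additionally needs the `key_id` of the public key used. A TypeScript sketch that ties the two steps together, assuming the route `PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}`, a `libsodium-wrappers` dependency, and a token in `GITHUB_TOKEN`.

```ts
// Sketch: encrypt a secret as in the Node.js example above, then upload it.
// Route and token are assumptions; key/keyId come from the environment
// public-key endpoint (see the earlier sketch).
import sodium from "libsodium-wrappers";

async function putEnvironmentSecret(
  repositoryId: number,
  environmentName: string,
  secretName: string,
  plaintext: string,
  key: string,   // Base64 public key from actions/get-environment-public-key
  keyId: string, // key_id returned alongside that key
): Promise<void> {
  await sodium.ready;
  const binKey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL);
  const sealed = sodium.crypto_box_seal(sodium.from_string(plaintext), binKey);
  const encrypted_value = sodium.to_base64(sealed, sodium.base64_variants.ORIGINAL);

  const res = await fetch(
    `https://api.github.com/repositories/${repositoryId}/environments/${environmentName}/secrets/${secretName}`,
    {
      method: "PUT",
      headers: {
        Accept: "application/vnd.github+json",
        Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
        "Content-Type": "application/json",
      },
      body: JSON.stringify({ encrypted_value, key_id: keyId }),
    },
  );
  // 201 = secret created, 204 = existing secret updated (see the responses below)
  if (res.status !== 201 && res.status !== 204) {
    throw new Error(`Secret upload failed: ${res.status}`);
  }
}
```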
*/ + key_id: string; + }; + }; + }; + responses: { + /** @description Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** @description Response when updating a secret */ + 204: never; + }; + }; + /** + * Delete an environment secret + * @description Deletes a secret in an environment using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + "actions/delete-environment-secret": { + parameters: { + path: { + repository_id: components["parameters"]["repository-id"]; + environment_name: components["parameters"]["environment-name"]; + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Default response */ + 204: never; + }; + }; + /** + * List environment variables + * @description Lists all environment variables. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `environments:read` repository permission to use this endpoint. + */ + "actions/list-environment-variables": { + parameters: { + query?: { + per_page?: components["parameters"]["variables-per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + repository_id: components["parameters"]["repository-id"]; + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + variables: components["schemas"]["actions-variable"][]; + }; + }; + }; + }; + }; + /** + * Create an environment variable + * @description Create an environment variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `environment:write` repository permission to use this endpoint. + */ + "actions/create-environment-variable": { + parameters: { + path: { + repository_id: components["parameters"]["repository-id"]; + environment_name: components["parameters"]["environment-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the variable. */ + name: string; + /** @description The value of the variable. */ + value: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + }; + }; + /** + * Get an environment variable + * @description Gets a specific variable in an environment. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `environments:read` repository permission to use this endpoint. + */ + "actions/get-environment-variable": { + parameters: { + path: { + repository_id: components["parameters"]["repository-id"]; + environment_name: components["parameters"]["environment-name"]; + name: components["parameters"]["variable-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-variable"]; + }; + }; + }; + }; + /** + * Delete an environment variable + * @description Deletes an environment variable using the variable name. + * You must authenticate using an access token with the `repo` scope to use this endpoint. 
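Environment variables, unlike secrets, are sent in plain text with the `{ name, value }` body shown in `actions/create-environment-variable` above. A sketch assuming the route `POST /repositories/{repository_id}/environments/{environment_name}/variables`.

```ts
// Sketch: create an environment variable with the { name, value } body shown
// above. Route and token are assumptions for illustration.
async function createEnvironmentVariable(
  repositoryId: number,
  environmentName: string,
  name: string,
  value: string,
): Promise<void> {
  const res = await fetch(
    `https://api.github.com/repositories/${repositoryId}/environments/${environmentName}/variables`,
    {
      method: "POST",
      headers: {
        Accept: "application/vnd.github+json",
        Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
        "Content-Type": "application/json",
      },
      body: JSON.stringify({ name, value }),
    },
  );
  if (res.status !== 201) throw new Error(`Variable creation failed: ${res.status}`);
}
```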
+ * GitHub Apps must have the `environment:write` repository permission to use this endpoint. + */ + "actions/delete-environment-variable": { + parameters: { + path: { + repository_id: components["parameters"]["repository-id"]; + name: components["parameters"]["variable-name"]; + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update an environment variable + * @description Updates an environment variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `environment:write` repository permission to use this endpoint. + */ + "actions/update-environment-variable": { + parameters: { + path: { + repository_id: components["parameters"]["repository-id"]; + name: components["parameters"]["variable-name"]; + environment_name: components["parameters"]["environment-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the variable. */ + name?: string; + /** @description The value of the variable. */ + value?: string; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Search code + * @description Searches for query terms inside of a file. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for code, you can get text match metadata for the file **content** and file **path** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the definition of the `addClass` function inside [jQuery](https://github.com/jquery/jquery) repository, your query would look something like this: + * + * `q=addClass+in:file+language:js+repo:jquery/jquery` + * + * This query searches for the keyword `addClass` within a file's contents. The query limits the search to files where the language is JavaScript in the `jquery/jquery` repository. + * + * #### Considerations for code search + * + * Due to the complexity of searching code, there are a few restrictions on how searches are performed: + * + * * Only the _default branch_ is considered. In most cases, this will be the `master` branch. + * * Only files smaller than 384 KB are searchable. + * * You must always include at least one search term when searching source code. For example, searching for [`language:go`](https://github.com/search?utf8=%E2%9C%93&q=language%3Ago&type=Code) is not valid, while [`amazing + * language:go`](https://github.com/search?utf8=%E2%9C%93&q=amazing+language%3Ago&type=Code) is. + * + * This endpoint requires you to authenticate and limits you to 10 requests per minute. + */ + "search/code": { + parameters: { + query: { + /** @description The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching code](https://docs.github.com/search-github/searching-on-github/searching-code)" for a detailed list of qualifiers. 
*/ + q: string; + /** + * @deprecated + * @description **This field is deprecated.** Sorts the results of your query. Can only be `indexed`, which indicates how recently a file has been indexed by the GitHub search infrastructure. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) + */ + sort?: "indexed"; + /** + * @deprecated + * @description **This field is deprecated.** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. + */ + order?: "desc" | "asc"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["code-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Search commits + * @description Find commits via various criteria on the default branch (usually `main`). This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for commits, you can get text match metadata for the **message** field when you provide the `text-match` media type. For more details about how to receive highlighted search results, see [Text match + * metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find commits related to CSS in the [octocat/Spoon-Knife](https://github.com/octocat/Spoon-Knife) repository. Your query would look something like this: + * + * `q=repo:octocat/Spoon-Knife+css` + */ + "search/commits": { + parameters: { + query: { + /** @description The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching commits](https://docs.github.com/search-github/searching-on-github/searching-commits)" for a detailed list of qualifiers. */ + q: string; + /** @description Sorts the results of your query by `author-date` or `committer-date`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "author-date" | "committer-date"; + order?: components["parameters"]["order"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["commit-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Search issues and pull requests + * @description Find issues by state and keyword. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). 
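The `search/code` description above builds its example query from a keyword plus qualifiers and notes that the endpoint requires authentication. A sketch of that exact query using `URLSearchParams` for encoding; the `/search/code` route, the token variable and the `./types` import path are assumptions.

```ts
// Sketch: run the addClass example query from the description above.
// Route, token and "./types" import path are assumptions for illustration.
import type { components } from "./types";

type CodeSearchItem = components["schemas"]["code-search-result-item"];

async function searchCode(q: string): Promise<CodeSearchItem[]> {
  const params = new URLSearchParams({ q, per_page: "30" });
  const res = await fetch(`https://api.github.com/search/code?${params}`, {
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`, // this endpoint requires auth
    },
  });
  if (!res.ok) throw new Error(`search/code failed: ${res.status}`);
  const body = (await res.json()) as {
    total_count: number;
    incomplete_results: boolean;
    items: CodeSearchItem[];
  };
  return body.items;
}

// Usage sketch: searchCode("addClass in:file language:js repo:jquery/jquery");
```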
+ * + * When searching for issues, you can get text match metadata for the issue **title**, issue **body**, and issue **comment body** fields when you pass the `text-match` media type. For more details about how to receive highlighted + * search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the oldest unresolved Python bugs on Windows. Your query might look something like this. + * + * `q=windows+label:bug+language:python+state:open&sort=created&order=asc` + * + * This query searches for the keyword `windows`, within any open issue that is labeled as `bug`. The search runs across repositories whose primary language is Python. The results are sorted by creation date in ascending order, which means the oldest issues appear first in the search results. + * + * **Note:** For [user-to-server](https://docs.github.com/developers/apps/identifying-and-authorizing-users-for-github-apps#user-to-server-requests) GitHub App requests, you can't retrieve a combination of issues and pull requests in a single query. Requests that don't include the `is:issue` or `is:pull-request` qualifier will receive an HTTP `422 Unprocessable Entity` response. To get results for both issues and pull requests, you must send separate queries for issues and pull requests. For more information about the `is` qualifier, see "[Searching only issues or pull requests](https://docs.github.com/github/searching-for-information-on-github/searching-issues-and-pull-requests#search-only-issues-or-pull-requests)." + */ + "search/issues-and-pull-requests": { + parameters: { + query: { + /** @description The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching issues and pull requests](https://docs.github.com/search-github/searching-on-github/searching-issues-and-pull-requests)" for a detailed list of qualifiers. */ + q: string; + /** @description Sorts the results of your query by the number of `comments`, `reactions`, `reactions-+1`, `reactions--1`, `reactions-smile`, `reactions-thinking_face`, `reactions-heart`, `reactions-tada`, or `interactions`. 
You can also sort results by how recently the items were `created` or `updated`, Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: + | "comments" + | "reactions" + | "reactions-+1" + | "reactions--1" + | "reactions-smile" + | "reactions-thinking_face" + | "reactions-heart" + | "reactions-tada" + | "interactions" + | "created" + | "updated"; + order?: components["parameters"]["order"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["issue-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Search labels + * @description Find labels in a repository with names or descriptions that match search keywords. Returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for labels, you can get text match metadata for the label **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find labels in the `linguist` repository that match `bug`, `defect`, or `enhancement`. Your query might look like this: + * + * `q=bug+defect+enhancement&repository_id=64778136` + * + * The labels that best match the query appear first in the search results. + */ + "search/labels": { + parameters: { + query: { + /** @description The id of the repository. */ + repository_id: number; + /** @description The search keywords. This endpoint does not accept qualifiers in the query. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). */ + q: string; + /** @description Sorts the results of your query by when the label was `created` or `updated`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "created" | "updated"; + order?: components["parameters"]["order"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["label-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Search repositories + * @description Find repositories via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for repositories, you can get text match metadata for the **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). 
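The `search/issues-and-pull-requests` description above walks through the "oldest open Python bugs on Windows" query and warns that user-to-server GitHub App requests must include `is:issue` or `is:pull-request`. A sketch of that query with the qualifier added; the `/search/issues` route, token and import path are assumptions.

```ts
// Sketch: the example query from the description above, with an explicit
// is:issue qualifier. Route, token and import path are assumptions.
import type { components } from "./types";

type IssueSearchItem = components["schemas"]["issue-search-result-item"];

async function searchOldestOpenBugs(): Promise<IssueSearchItem[]> {
  const params = new URLSearchParams({
    q: "windows label:bug language:python state:open is:issue",
    sort: "created",
    order: "asc",
  });
  const res = await fetch(`https://api.github.com/search/issues?${params}`, {
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    },
  });
  if (!res.ok) throw new Error(`search/issues failed: ${res.status}`);
  return ((await res.json()) as { items: IssueSearchItem[] }).items;
}
```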
+ * + * For example, if you want to search for popular Tetris repositories written in assembly code, your query might look like this: + * + * `q=tetris+language:assembly&sort=stars&order=desc` + * + * This query searches for repositories with the word `tetris` in the name, the description, or the README. The results are limited to repositories where the primary language is assembly. The results are sorted by stars in descending order, so that the most popular repositories appear first in the search results. + */ + "search/repos": { + parameters: { + query: { + /** @description The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching for repositories](https://docs.github.com/articles/searching-for-repositories/)" for a detailed list of qualifiers. */ + q: string; + /** @description Sorts the results of your query by number of `stars`, `forks`, or `help-wanted-issues` or how recently the items were `updated`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "stars" | "forks" | "help-wanted-issues" | "updated"; + order?: components["parameters"]["order"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["repo-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Search topics + * @description Find topics via various criteria. Results are sorted by best match. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). See "[Searching topics](https://docs.github.com/articles/searching-topics/)" for a detailed list of qualifiers. + * + * When searching for topics, you can get text match metadata for the topic's **short\_description**, **description**, **name**, or **display\_name** field when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for topics related to Ruby that are featured on https://github.com/topics. Your query might look like this: + * + * `q=ruby+is:featured` + * + * This query searches for topics with the keyword `ruby` and limits the results to find only topics that are featured. The topics that are the best match for the query appear first in the search results. + */ + "search/topics": { + parameters: { + query: { + /** @description The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). 
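The generated parameter shapes can also be used on the request side. A sketch that builds the Tetris example query from the `search/repos` description above, typed against its query parameters; the exported `operations` interface name, the `/search/repositories` route and the import path are assumptions.

```ts
// Sketch: the tetris+language:assembly query from the description above,
// typed against the generated query-parameter shape. Names are assumptions.
import type { operations } from "./types";

type RepoSearchQuery = operations["search/repos"]["parameters"]["query"];

async function searchRepositories(): Promise<unknown> {
  const query: RepoSearchQuery = {
    q: "tetris language:assembly",
    sort: "stars",
    order: "desc",
  };
  const params = new URLSearchParams(
    Object.entries(query).map(([k, v]) => [k, String(v)]),
  );
  const res = await fetch(`https://api.github.com/search/repositories?${params}`, {
    headers: { Accept: "application/vnd.github+json" },
  });
  if (!res.ok) throw new Error(`search/repos failed: ${res.status}`);
  return res.json();
}
```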
*/ + q: string; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["topic-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Search users + * @description Find users via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for users, you can get text match metadata for the issue **login**, public **email**, and **name** fields when you pass the `text-match` media type. For more details about highlighting search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you're looking for a list of popular users, you might try this query: + * + * `q=tom+repos:%3E42+followers:%3E1000` + * + * This query searches for users with the name `tom`. The results are restricted to users with more than 42 repositories and over 1,000 followers. + */ + "search/users": { + parameters: { + query: { + /** @description The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching users](https://docs.github.com/search-github/searching-on-github/searching-users)" for a detailed list of qualifiers. */ + q: string; + /** @description Sorts the results of your query by number of `followers` or `repositories`, or when the person `joined` GitHub. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "followers" | "repositories" | "joined"; + order?: components["parameters"]["order"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["user-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Get a team (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the [Get a team by name](https://docs.github.com/rest/reference/teams#get-a-team-by-name) endpoint. + */ + "teams/get-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a team (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
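`teams/get-legacy` above is deprecated in favour of "Get a team by name", but its response is still fully typed here as `team-full`. A sketch of the legacy call, assuming the route `GET /teams/{team_id}` and a `./types` import path.

```ts
// Sketch: call the deprecated teams/get-legacy operation. The route, token
// and import path are assumptions; prefer "Get a team by name" in new code.
import type { components } from "./types";

type TeamFull = components["schemas"]["team-full"];

async function getTeamLegacy(teamId: number): Promise<TeamFull> {
  const res = await fetch(`https://api.github.com/teams/${teamId}`, {
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    },
  });
  if (!res.ok) throw new Error(`teams/get-legacy failed: ${res.status}`);
  return (await res.json()) as TeamFull;
}
```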
We recommend migrating your existing code to use the new [Delete a team](https://docs.github.com/rest/reference/teams#delete-a-team) endpoint. + * + * To delete a team, the authenticated user must be an organization owner or team maintainer. + * + * If you are an organization owner, deleting a parent team will delete all of its child teams as well. + */ + "teams/delete-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Update a team (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a team](https://docs.github.com/rest/reference/teams#update-a-team) endpoint. + * + * To edit a team, the authenticated user must either be an organization owner or a team maintainer. + * + * **Note:** With nested teams, the `privacy` for parent teams cannot be `secret`. + */ + "teams/update-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the team. */ + name: string; + /** @description The description of the team. */ + description?: string; + /** + * @description The level of privacy this team should have. Editing teams without specifying this parameter leaves `privacy` intact. The options are: + * **For a non-nested team:** + * * `secret` - only visible to organization owners and members of this team. + * * `closed` - visible to all members of this organization. + * **For a parent or child team:** + * * `closed` - visible to all members of this organization. + * @enum {string} + */ + privacy?: "secret" | "closed"; + /** + * @description The notification setting the team has chosen. Editing teams without specifying this parameter leaves `notification_setting` intact. The options are: + * * `notifications_enabled` - team members receive notifications when the team is @mentioned. + * * `notifications_disabled` - no one receives notifications. + * @enum {string} + */ + notification_setting?: + | "notifications_enabled" + | "notifications_disabled"; + /** + * @description **Deprecated**. The permission that new repositories will be added to the team with when none is specified. + * @default pull + * @enum {string} + */ + permission?: "pull" | "push" | "admin"; + /** @description The ID of a team to set as the parent team. */ + parent_team_id?: number | null; + }; + }; + }; + responses: { + /** @description Response when the updated information already exists */ + 200: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List discussions (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List discussions`](https://docs.github.com/rest/reference/teams#list-discussions) endpoint. + * + * List all discussions on a team's page. 
OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/list-discussions-legacy": { + parameters: { + query?: { + direction?: components["parameters"]["direction"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["team-discussion"][]; + }; + }; + }; + }; + /** + * Create a discussion (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create a discussion`](https://docs.github.com/rest/reference/teams#create-a-discussion) endpoint. + * + * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "teams/create-discussion-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion post's title. */ + title: string; + /** @description The discussion post's body text. */ + body: string; + /** + * @description Private posts are only visible to team members, organization owners, and team maintainers. Public posts are visible to all members of the organization. Set to `true` to create a private post. + * @default false + */ + private?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + }; + /** + * Get a discussion (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion](https://docs.github.com/rest/reference/teams#get-a-discussion) endpoint. + * + * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/get-discussion-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + }; + /** + * Delete a discussion (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
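`teams/create-discussion-legacy` above takes a `{ title, body, private? }` body and returns a `team-discussion`. A sketch of that call, assuming the legacy route `POST /teams/{team_id}/discussions` and a token with the `write:discussion` scope.

```ts
// Sketch: create a team discussion through the deprecated legacy route.
// Route, token and import path are assumptions; the body shape comes from
// the request body shown above.
import type { components } from "./types";

type TeamDiscussion = components["schemas"]["team-discussion"];

async function createDiscussionLegacy(
  teamId: number,
  title: string,
  body: string,
  isPrivate = false, // private posts are visible only to team members
): Promise<TeamDiscussion> {
  const res = await fetch(`https://api.github.com/teams/${teamId}/discussions`, {
    method: "POST",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`, // needs write:discussion
      "Content-Type": "application/json",
    },
    body: JSON.stringify({ title, body, private: isPrivate }),
  });
  if (res.status !== 201) throw new Error(`Discussion creation failed: ${res.status}`);
  return (await res.json()) as TeamDiscussion;
}
```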
We recommend migrating your existing code to use the new [`Delete a discussion`](https://docs.github.com/rest/reference/teams#delete-a-discussion) endpoint. + * + * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/delete-discussion-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update a discussion (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion](https://docs.github.com/rest/reference/teams#update-a-discussion) endpoint. + * + * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/update-discussion-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description The discussion post's title. */ + title?: string; + /** @description The discussion post's body text. */ + body?: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + }; + /** + * List discussion comments (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List discussion comments](https://docs.github.com/rest/reference/teams#list-discussion-comments) endpoint. + * + * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/list-discussion-comments-legacy": { + parameters: { + query?: { + direction?: components["parameters"]["direction"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + team_id: components["parameters"]["team-id"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["team-discussion-comment"][]; + }; + }; + }; + }; + /** + * Create a discussion comment (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Create a discussion comment](https://docs.github.com/rest/reference/teams#create-a-discussion-comment) endpoint. + * + * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "teams/create-discussion-comment-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion comment's body text. */ + body: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + }; + /** + * Get a discussion comment (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion comment](https://docs.github.com/rest/reference/teams#get-a-discussion-comment) endpoint. + * + * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/get-discussion-comment-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + discussion_number: components["parameters"]["discussion-number"]; + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + }; + /** + * Delete a discussion comment (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a discussion comment](https://docs.github.com/rest/reference/teams#delete-a-discussion-comment) endpoint. + * + * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/delete-discussion-comment-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + discussion_number: components["parameters"]["discussion-number"]; + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * Update a discussion comment (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion comment](https://docs.github.com/rest/reference/teams#update-a-discussion-comment) endpoint. + * + * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ */ + "teams/update-discussion-comment-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + discussion_number: components["parameters"]["discussion-number"]; + comment_number: components["parameters"]["comment-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion comment's body text. */ + body: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + }; + /** + * List reactions for a team discussion comment (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion comment`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment) endpoint. + * + * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "reactions/list-for-team-discussion-comment-legacy": { + parameters: { + query?: { + /** @description Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion comment. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + team_id: components["parameters"]["team-id"]; + discussion_number: components["parameters"]["discussion-number"]; + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + }; + }; + /** + * Create reaction for a team discussion comment (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Create reaction for a team discussion comment](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion-comment)" endpoint. + * + * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + */ + "reactions/create-for-team-discussion-comment-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + discussion_number: components["parameters"]["discussion-number"]; + comment_number: components["parameters"]["comment-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion comment. 
+ * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + }; + }; + /** + * List reactions for a team discussion (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion) endpoint. + * + * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "reactions/list-for-team-discussion-legacy": { + parameters: { + query?: { + /** @description Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + team_id: components["parameters"]["team-id"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + }; + }; + /** + * Create reaction for a team discussion (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create reaction for a team discussion`](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion) endpoint. + * + * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. + */ + "reactions/create-for-team-discussion-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + }; + }; + /** + * List pending team invitations (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List pending team invitations`](https://docs.github.com/rest/reference/teams#list-pending-team-invitations) endpoint. 
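The reaction operations above constrain `content` to a fixed set of values, which can be reused at the type level instead of retyping the union. A sketch for `reactions/create-for-team-discussion-legacy`, assuming the route `POST /teams/{team_id}/discussions/{discussion_number}/reactions` and an exported `operations` interface.

```ts
// Sketch: add a reaction to a legacy team discussion. Route, token and the
// `operations` export name are assumptions; the content union is reused from
// the generated request body type above.
import type { operations } from "./types";

type ReactionContent =
  operations["reactions/create-for-team-discussion-legacy"]["requestBody"]["content"]["application/json"]["content"];

async function reactToDiscussionLegacy(
  teamId: number,
  discussionNumber: number,
  content: ReactionContent,
): Promise<void> {
  const res = await fetch(
    `https://api.github.com/teams/${teamId}/discussions/${discussionNumber}/reactions`,
    {
      method: "POST",
      headers: {
        Accept: "application/vnd.github+json",
        Authorization: `Bearer ${process.env.GITHUB_TOKEN}`, // needs write:discussion
        "Content-Type": "application/json",
      },
      body: JSON.stringify({ content }),
    },
  );
  // Per the description above, 201 creates the reaction, while a plain 200
  // means this reaction type was already present on the discussion.
  if (res.status !== 201 && res.status !== 200) {
    throw new Error(`Reaction failed: ${res.status}`);
  }
}
```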
+ * + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + */ + "teams/list-pending-invitations-legacy": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["organization-invitation"][]; + }; + }; + }; + }; + /** + * List team members (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team members`](https://docs.github.com/rest/reference/teams#list-team-members) endpoint. + * + * Team members will include the members of child teams. + */ + "teams/list-members-legacy": { + parameters: { + query?: { + /** @description Filters members returned by their role in the team. */ + role?: "member" | "maintainer" | "all"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get team member (Legacy) + * @deprecated + * @description The "Get team member" endpoint (described below) is deprecated. + * + * We recommend using the [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint instead. It allows you to get both active and pending memberships. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + "teams/get-member-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description if user is a member */ + 204: never; + /** @description if user is not a member */ + 404: never; + }; + }; + /** + * Add team member (Legacy) + * @deprecated + * @description The "Add team member" endpoint (described below) is deprecated. + * + * We recommend using the [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint instead. It allows you to invite new organization members to your teams. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To add someone to a team, the authenticated user must be an organization owner or a team maintainer in the team they're changing. The person being added to the team must be a member of the team's organization. 
+ *
+ * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)."
+ *
+ * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)."
+ */
+ "teams/add-member-legacy": {
+ parameters: {
+ path: {
+ team_id: components["parameters"]["team-id"];
+ username: components["parameters"]["username"];
+ };
+ };
+ responses: {
+ /** @description Response */
+ 204: never;
+ 403: components["responses"]["forbidden"];
+ /** @description Not Found if team synchronization is set up */
+ 404: never;
+ /** @description Unprocessable Entity if you attempt to add an organization to a team or you attempt to add a user to a team when they are not a member of at least one other team in the same organization */
+ 422: never;
+ };
+ };
+ /**
+ * Remove team member (Legacy)
+ * @deprecated
+ * @description The "Remove team member" endpoint (described below) is deprecated.
+ *
+ * We recommend using the [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint instead. It allows you to remove both active and pending memberships.
+ *
+ * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
+ *
+ * To remove a team member, the authenticated user must have 'admin' permissions to the team or be an owner of the org that the team is associated with. Removing a team member does not delete the user, it just removes them from the team.
+ *
+ * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)."
+ */
+ "teams/remove-member-legacy": {
+ parameters: {
+ path: {
+ team_id: components["parameters"]["team-id"];
+ username: components["parameters"]["username"];
+ };
+ };
+ responses: {
+ /** @description Response */
+ 204: never;
+ /** @description Not Found if team synchronization is set up */
+ 404: never;
+ };
+ };
+ /**
+ * Get team membership for a user (Legacy)
+ * @deprecated
+ * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint.
+ *
+ * Team members will include the members of child teams.
+ *
+ * To get a user's membership with a team, the team must be visible to the authenticated user.
+ *
+ * **Note:**
+ * The response contains the `state` of the membership and the member's `role`.
+ *
+ * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team).
+ */
+ "teams/get-membership-for-user-legacy": {
+ parameters: {
+ path: {
+ team_id: components["parameters"]["team-id"];
+ username: components["parameters"]["username"];
+ };
+ };
+ responses: {
+ /** @description Response */
+ 200: {
+ content: {
+ "application/json": components["schemas"]["team-membership"];
+ };
+ };
+ 404: components["responses"]["not_found"];
+ };
+ };
+ /**
+ * Add or update team membership for a user (Legacy)
+ * @deprecated
+ * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint.
+ *
+ * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation.
+ *
+ * If the user is already a member of the team's organization, this endpoint will add the user to the team. To add a membership between an organization member and a team, the authenticated user must be an organization owner or a team maintainer.
+ *
+ * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)."
+ *
+ * If the user is unaffiliated with the team's organization, this endpoint will send an invitation to the user via email. This newly-created membership will be in the "pending" state until the user accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. To add a membership between an unaffiliated user and a team, the authenticated user must be an organization owner.
+ *
+ * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer.
+ */
+ "teams/add-or-update-membership-for-user-legacy": {
+ parameters: {
+ path: {
+ team_id: components["parameters"]["team-id"];
+ username: components["parameters"]["username"];
+ };
+ };
+ requestBody?: {
+ content: {
+ "application/json": {
+ /**
+ * @description The role that this user should have in the team.
+ * @default member + * @enum {string} + */ + role?: "member" | "maintainer"; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-membership"]; + }; + }; + /** @description Forbidden if team synchronization is set up */ + 403: never; + 404: components["responses"]["not_found"]; + /** @description Unprocessable Entity if you attempt to add an organization to a team */ + 422: never; + }; + }; + /** + * Remove team membership for a user (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + */ + "teams/remove-membership-for-user-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + /** @description if team synchronization is set up */ + 403: never; + }; + }; + /** + * List team projects (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team projects`](https://docs.github.com/rest/reference/teams#list-team-projects) endpoint. + * + * Lists the organization projects for a team. + */ + "teams/list-projects-legacy": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["team-project"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Check team permissions for a project (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a project](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-project) endpoint. 
+ * + * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. + */ + "teams/check-permissions-for-project-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["team-project"]; + }; + }; + /** @description Not Found if project is not managed by this team */ + 404: never; + }; + }; + /** + * Add or update team project permissions (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team project permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-project-permissions) endpoint. + * + * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. + */ + "teams/add-or-update-project-permissions-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + project_id: components["parameters"]["project-id"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description The permission to grant to the team for this project. Default: the team's `permission` attribute will be used to determine what permission to grant the team on this project. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * @enum {string} + */ + permission?: "read" | "write" | "admin"; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + /** @description Forbidden if the project is not owned by the organization */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Remove a project from a team (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a project from a team](https://docs.github.com/rest/reference/teams#remove-a-project-from-a-team) endpoint. + * + * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. **Note:** This endpoint removes the project from the team, but does not delete it. 
+ */ + "teams/remove-project-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List team repositories (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List team repositories](https://docs.github.com/rest/reference/teams#list-team-repositories) endpoint. + */ + "teams/list-repos-legacy": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Check team permissions for a repository (Legacy) + * @deprecated + * @description **Note**: Repositories inherited through a parent team will also be checked. + * + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a repository](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-repository) endpoint. + * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + "teams/check-permissions-for-repo-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Alternative response with extra repository information */ + 200: { + content: { + "application/json": components["schemas"]["team-repository"]; + }; + }; + /** @description Response if repository is managed by this team */ + 204: never; + /** @description Not Found if repository is not managed by this team */ + 404: never; + }; + }; + /** + * Add or update team repository permissions (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Add or update team repository permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-repository-permissions)" endpoint. + * + * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." 
+ */ + "teams/add-or-update-repo-permissions-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description The permission to grant the team on this repository. If no permission is specified, the team's `permission` attribute will be used to determine what permission to grant the team on this repository. + * @enum {string} + */ + permission?: "pull" | "push" | "admin"; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Remove a repository from a team (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a repository from a team](https://docs.github.com/rest/reference/teams#remove-a-repository-from-a-team) endpoint. + * + * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. NOTE: This does not delete the repository, it just removes it from the team. + */ + "teams/remove-repo-legacy": { + parameters: { + path: { + team_id: components["parameters"]["team-id"]; + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List child teams (Legacy) + * @deprecated + * @description **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List child teams`](https://docs.github.com/rest/reference/teams#list-child-teams) endpoint. + */ + "teams/list-child-legacy": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** @description if child teams exist */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get the authenticated user + * @description If the authenticated user is authenticated with an OAuth token with the `user` scope, then the response lists public and private profile information. + * + * If the authenticated user is authenticated through OAuth without the `user` scope, then the response lists only public profile information. 
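+ *
+ * A minimal sketch of calling this operation, assuming Node.js 18+ `fetch` in an ES module and a token supplied through a `GITHUB_TOKEN` environment variable (both assumptions of this example, not requirements of the API):
+ *
+ * ```
+ * const token = process.env.GITHUB_TOKEN; // assumed to be set by the caller
+ *
+ * const response = await fetch("https://api.github.com/user", {
+ *   headers: {
+ *     Accept: "application/vnd.github+json",
+ *     Authorization: `Bearer ${token}`,
+ *   },
+ * });
+ * if (!response.ok) throw new Error(`GET /user failed: ${response.status}`);
+ *
+ * // With the `user` scope the payload corresponds to "private-user";
+ * // without it, only the "public-user" fields are returned.
+ * const profile = await response.json();
+ * console.log(profile.login);
+ * ```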
+ */ + "users/get-authenticated": { + responses: { + /** @description Response */ + 200: { + content: { + "application/json": + | components["schemas"]["private-user"] + | components["schemas"]["public-user"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Update the authenticated user + * @description **Note:** If your email is set to private and you send an `email` parameter as part of this request to update your profile, your privacy settings are still enforced: the email address will not be displayed on your public profile or via the API. + */ + "users/update-authenticated": { + requestBody?: { + content: { + "application/json": { + /** + * @description The new name of the user. + * @example Omar Jahandar + */ + name?: string; + /** + * @description The publicly visible email address of the user. + * @example omar@example.com + */ + email?: string; + /** + * @description The new blog URL of the user. + * @example blog.example.com + */ + blog?: string; + /** + * @description The new Twitter username of the user. + * @example therealomarj + */ + twitter_username?: string | null; + /** + * @description The new company of the user. + * @example Acme corporation + */ + company?: string; + /** + * @description The new location of the user. + * @example Berlin, Germany + */ + location?: string; + /** @description The new hiring availability of the user. */ + hireable?: boolean; + /** @description The new short biography of the user. */ + bio?: string; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["private-user"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List users blocked by the authenticated user + * @description List the users you've blocked on your personal account. 
+ */ + "users/list-blocked-by-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Check if a user is blocked by the authenticated user */ + "users/check-blocked": { + parameters: { + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description If the user is blocked */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + /** @description If the user is not blocked */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** Block a user */ + "users/block": { + parameters: { + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Unblock a user */ + "users/unblock": { + parameters: { + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List codespaces for the authenticated user + * @description Lists the authenticated user's codespaces. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/list-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + repository_id?: components["parameters"]["repository-id-in-query"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + codespaces: components["schemas"]["codespace"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Create a codespace for the authenticated user + * @description Creates a new codespace, owned by the authenticated user. + * + * This endpoint requires either a `repository_id` OR a `pull_request` but not both. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. 
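+ *
+ * A minimal sketch of the `repository_id` variant of the request body (the alternative `pull_request` variant is the second member of the `OneOf` below), assuming Node.js 18+ `fetch` in an ES module, a `GITHUB_TOKEN` environment variable, and placeholder values for the repository id and branch:
+ *
+ * ```
+ * const token = process.env.GITHUB_TOKEN; // assumed to be available
+ *
+ * const response = await fetch("https://api.github.com/user/codespaces", {
+ *   method: "POST",
+ *   headers: {
+ *     Accept: "application/vnd.github+json",
+ *     Authorization: `Bearer ${token}`,
+ *     "Content-Type": "application/json",
+ *   },
+ *   // Exactly one of `repository_id` or `pull_request` may be supplied.
+ *   body: JSON.stringify({
+ *     repository_id: 1296269,   // placeholder repository id
+ *     ref: "main",              // optional branch
+ *     idle_timeout_minutes: 30, // optional inactivity timeout
+ *   }),
+ * });
+ *
+ * // 201: codespace created; 202: creation partially failed and is being retried.
+ * const codespace = await response.json();
+ * console.log(codespace.name);
+ * ```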
+ */ + "codespaces/create-for-authenticated-user": { + requestBody: { + content: { + "application/json": OneOf< + [ + { + /** @description Repository id for this codespace */ + repository_id: number; + /** @description Git ref (typically a branch name) for this codespace */ + ref?: string; + /** @description The requested location for a new codespace. Best efforts are made to respect this upon creation. Assigned by IP if not provided. */ + location?: string; + /** + * @description The geographic area for this codespace. If not specified, the value is assigned by IP. This property replaces `location`, which is being deprecated. + * @enum {string} + */ + geo?: "EuropeWest" | "SoutheastAsia" | "UsEast" | "UsWest"; + /** @description IP for location auto-detection when proxying a request */ + client_ip?: string; + /** @description Machine type to use for this codespace */ + machine?: string; + /** @description Path to devcontainer.json config to use for this codespace */ + devcontainer_path?: string; + /** @description Whether to authorize requested permissions from devcontainer.json */ + multi_repo_permissions_opt_out?: boolean; + /** @description Working directory for this codespace */ + working_directory?: string; + /** @description Time in minutes before codespace stops from inactivity */ + idle_timeout_minutes?: number; + /** @description Display name for this codespace */ + display_name?: string; + /** @description Duration in minutes after codespace has gone idle in which it will be deleted. Must be integer minutes between 0 and 43200 (30 days). */ + retention_period_minutes?: number; + }, + { + /** @description Pull request number for this codespace */ + pull_request: { + /** @description Pull request number */ + pull_request_number: number; + /** @description Repository id for this codespace */ + repository_id: number; + }; + /** @description The requested location for a new codespace. Best efforts are made to respect this upon creation. Assigned by IP if not provided. */ + location?: string; + /** + * @description The geographic area for this codespace. If not specified, the value is assigned by IP. This property replaces `location`, which is being deprecated. + * @enum {string} + */ + geo?: "EuropeWest" | "SoutheastAsia" | "UsEast" | "UsWest"; + /** @description Machine type to use for this codespace */ + machine?: string; + /** @description Path to devcontainer.json config to use for this codespace */ + devcontainer_path?: string; + /** @description Working directory for this codespace */ + working_directory?: string; + /** @description Time in minutes before codespace stops from inactivity */ + idle_timeout_minutes?: number; + }, + ] + >; + }; + }; + responses: { + /** @description Response when the codespace was successfully created */ + 201: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + /** @description Response when the codespace creation partially failed but is being retried in the background */ + 202: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List secrets for the authenticated user + * @description Lists all secrets available for a user's Codespaces without revealing their + * encrypted values. 
+ * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/list-secrets-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["codespaces-secret"][]; + }; + }; + }; + }; + }; + /** + * Get public key for the authenticated user + * @description Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/get-public-key-for-authenticated-user": { + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["codespaces-user-public-key"]; + }; + }; + }; + }; + /** + * Get a secret for the authenticated user + * @description Gets a secret available to a user's codespaces without revealing its encrypted value. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/get-secret-for-authenticated-user": { + parameters: { + path: { + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["codespaces-secret"]; + }; + }; + }; + }; + /** + * Create or update a secret for the authenticated user + * @description Creates or updates a secret for a user's codespace with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must also have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. 
+ * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "codespaces/create-or-update-secret-for-authenticated-user": { + parameters: { + path: { + secret_name: components["parameters"]["secret-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get the public key for the authenticated user](https://docs.github.com/rest/reference/codespaces#get-the-public-key-for-the-authenticated-user) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id: string; + /** @description An array of repository ids that can access the user secret. You can manage the list of selected repositories using the [List selected repositories for a user secret](https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-a-user-secret), [Set selected repositories for a user secret](https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-a-user-secret), and [Remove a selected repository from a user secret](https://docs.github.com/rest/reference/codespaces#remove-a-selected-repository-from-a-user-secret) endpoints. 
*/ + selected_repository_ids?: (number | string)[]; + }; + }; + }; + responses: { + /** @description Response after successfully creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** @description Response after successfully updating a secret */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Delete a secret for the authenticated user + * @description Deletes a secret from a user's codespaces using the secret name. Deleting the secret will remove access from all codespaces that were allowed to access the secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/delete-secret-for-authenticated-user": { + parameters: { + path: { + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + }; + }; + /** + * List selected repositories for a user secret + * @description List the repositories that have been granted the ability to use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + "codespaces/list-repositories-for-secret-for-authenticated-user": { + parameters: { + path: { + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Set selected repositories for a user secret + * @description Select the repositories that will use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + "codespaces/set-repositories-for-secret-for-authenticated-user": { + parameters: { + path: { + secret_name: components["parameters"]["secret-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of repository ids for which a codespace can access the secret. 
You can manage the list of selected repositories using the [List selected repositories for a user secret](https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-a-user-secret), [Add a selected repository to a user secret](https://docs.github.com/rest/reference/codespaces#add-a-selected-repository-to-a-user-secret), and [Remove a selected repository from a user secret](https://docs.github.com/rest/reference/codespaces#remove-a-selected-repository-from-a-user-secret) endpoints. */ + selected_repository_ids: number[]; + }; + }; + }; + responses: { + /** @description No Content when repositories were added to the selected list */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Add a selected repository to a user secret + * @description Adds a repository to the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on the referenced repository to use this endpoint. + */ + "codespaces/add-repository-for-secret-for-authenticated-user": { + parameters: { + path: { + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** @description No Content when repository was added to the selected list */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Remove a selected repository from a user secret + * @description Removes a repository from the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/remove-repository-for-secret-for-authenticated-user": { + parameters: { + path: { + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** @description No Content when repository was removed from the selected list */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Get a codespace for the authenticated user + * @description Gets information about a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. 
+ */ + "codespaces/get-for-authenticated-user": { + parameters: { + path: { + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Delete a codespace for the authenticated user + * @description Deletes a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/delete-for-authenticated-user": { + parameters: { + path: { + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Update a codespace for the authenticated user + * @description Updates a codespace owned by the authenticated user. Currently only the codespace's machine type and recent folders can be modified using this endpoint. + * + * If you specify a new machine type it will be applied the next time your codespace is started. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/update-for-authenticated-user": { + parameters: { + path: { + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description A valid machine to transition this codespace to. */ + machine?: string; + /** @description Display name for this codespace */ + display_name?: string; + /** @description Recently opened folders inside the codespace. It is currently used by the clients to determine the folder path to load the codespace in. */ + recent_folders?: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Export a codespace for the authenticated user + * @description Triggers an export of the specified codespace and returns a URL and ID where the status of the export can be monitored. + * + * If changes cannot be pushed to the codespace's repository, they will be pushed to a new or previously-existing fork instead. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. 
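+ *
+ * A minimal sketch of the export flow, assuming Node.js 18+ `fetch` in an ES module, a `GITHUB_TOKEN` environment variable, and a placeholder codespace name; the id returned by the export is then used with the "Get details about a codespace export" operation that follows:
+ *
+ * ```
+ * const token = process.env.GITHUB_TOKEN; // assumed to be available
+ * const codespaceName = "my-codespace";   // placeholder codespace_name
+ * const headers = {
+ *   Accept: "application/vnd.github+json",
+ *   Authorization: `Bearer ${token}`,
+ * };
+ *
+ * // Trigger the export; the 202 response describes the export, including its id.
+ * const started = await fetch(
+ *   `https://api.github.com/user/codespaces/${codespaceName}/exports`,
+ *   { method: "POST", headers },
+ * );
+ * const exportDetails = await started.json();
+ *
+ * // Later, check on the export through the companion details endpoint.
+ * const poll = await fetch(
+ *   `https://api.github.com/user/codespaces/${codespaceName}/exports/${exportDetails.id}`,
+ *   { headers },
+ * );
+ * console.log(await poll.json());
+ * ```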
+ */ + "codespaces/export-for-authenticated-user": { + parameters: { + path: { + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** @description Response */ + 202: { + content: { + "application/json": components["schemas"]["codespace-export-details"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Get details about a codespace export + * @description Gets information about an export of a codespace. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + "codespaces/get-export-details-for-authenticated-user": { + parameters: { + path: { + codespace_name: components["parameters"]["codespace-name"]; + export_id: components["parameters"]["export-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace-export-details"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List machine types for a codespace + * @description List the machine types a codespace can transition to use. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + "codespaces/codespace-machines-for-authenticated-user": { + parameters: { + path: { + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": { + total_count: number; + machines: components["schemas"]["codespace-machine"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Create a repository from an unpublished codespace + * @description Publishes an unpublished codespace, creating a new repository and assigning it to the codespace. + * + * The codespace's token is granted write permissions to the repository, allowing the user to push their changes. + * + * This will fail for a codespace that is already published, meaning it has an associated repository. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/publish-for-authenticated-user": { + parameters: { + path: { + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description A name for the new repository. */ + name?: string; + /** + * @description Whether the new repository should be private. 
+ * @default false + */ + private?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["codespace-with-full-repository"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Start a codespace for the authenticated user + * @description Starts a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + "codespaces/start-for-authenticated-user": { + parameters: { + path: { + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 304: components["responses"]["not_modified"]; + 400: components["responses"]["bad_request"]; + 401: components["responses"]["requires_authentication"]; + /** @description Payment required */ + 402: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Stop a codespace for the authenticated user + * @description Stops a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + "codespaces/stop-for-authenticated-user": { + parameters: { + path: { + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Get list of conflicting packages during Docker migration for authenticated-user + * @description Lists all packages that are owned by the authenticated user within the user's namespace, and that encountered a conflict during a Docker migration. + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. + */ + "packages/list-docker-migration-conflicting-packages-for-authenticated-user": { + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package"][]; + }; + }; + }; + }; + /** + * Set primary email visibility for the authenticated user + * @description Sets the visibility for your primary email addresses. + */ + "users/set-primary-email-visibility-for-authenticated-user": { + requestBody: { + content: { + "application/json": { + /** + * @description Denotes whether an email is publicly visible. 
+ * @enum {string}
+ */
+ visibility: "public" | "private";
+ };
+ };
+ };
+ responses: {
+ /** @description Response */
+ 200: {
+ content: {
+ "application/json": components["schemas"]["email"][];
+ };
+ };
+ 304: components["responses"]["not_modified"];
+ 401: components["responses"]["requires_authentication"];
+ 403: components["responses"]["forbidden"];
+ 404: components["responses"]["not_found"];
+ 422: components["responses"]["validation_failed"];
+ };
+ };
+ /**
+ * List email addresses for the authenticated user
+ * @description Lists all of your email addresses, and specifies which one is visible to the public. This endpoint is accessible with the `user:email` scope.
+ */
+ "users/list-emails-for-authenticated-user": {
+ parameters: {
+ query?: {
+ per_page?: components["parameters"]["per-page"];
+ page?: components["parameters"]["page"];
+ };
+ };
+ responses: {
+ /** @description Response */
+ 200: {
+ headers: {
+ Link: components["headers"]["link"];
+ };
+ content: {
+ "application/json": components["schemas"]["email"][];
+ };
+ };
+ 304: components["responses"]["not_modified"];
+ 401: components["responses"]["requires_authentication"];
+ 403: components["responses"]["forbidden"];
+ 404: components["responses"]["not_found"];
+ };
+ };
+ /**
+ * Add an email address for the authenticated user
+ * @description This endpoint is accessible with the `user` scope.
+ */
+ "users/add-email-for-authenticated-user": {
+ requestBody?: {
+ content: {
+ "application/json": {
+ /**
+ * @description Adds one or more email addresses to your GitHub account. Must contain at least one email address. **Note:** Alternatively, you can pass a single email address or an `array` of email addresses directly, but we recommend that you pass an object using the `emails` key.
+ * @example []
+ */
+ emails: string[];
+ };
+ };
+ };
+ responses: {
+ /** @description Response */
+ 201: {
+ content: {
+ "application/json": components["schemas"]["email"][];
+ };
+ };
+ 304: components["responses"]["not_modified"];
+ 401: components["responses"]["requires_authentication"];
+ 403: components["responses"]["forbidden"];
+ 404: components["responses"]["not_found"];
+ 422: components["responses"]["validation_failed"];
+ };
+ };
+ /**
+ * Delete an email address for the authenticated user
+ * @description This endpoint is accessible with the `user` scope.
+ */
+ "users/delete-email-for-authenticated-user": {
+ requestBody?: {
+ content: {
+ "application/json": {
+ /** @description Email addresses associated with the GitHub user account. */
+ emails: string[];
+ };
+ };
+ };
+ responses: {
+ /** @description Response */
+ 204: never;
+ 304: components["responses"]["not_modified"];
+ 401: components["responses"]["requires_authentication"];
+ 403: components["responses"]["forbidden"];
+ 404: components["responses"]["not_found"];
+ 422: components["responses"]["validation_failed"];
+ };
+ };
+ /**
+ * List followers of the authenticated user
+ * @description Lists the people following the authenticated user.
+ */ + "users/list-followers-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * List the people the authenticated user follows + * @description Lists the people who the authenticated user follows. + */ + "users/list-followed-by-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Check if a person is followed by the authenticated user */ + "users/check-person-is-followed-by-authenticated": { + parameters: { + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description if the person is followed by the authenticated user */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + /** @description if the person is not followed by the authenticated user */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * Follow a user + * @description Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * Following a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. + */ + "users/follow": { + parameters: { + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Unfollow a user + * @description Unfollowing a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. + */ + "users/unfollow": { + parameters: { + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List GPG keys for the authenticated user + * @description Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ */ + "users/list-gpg-keys-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["gpg-key"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a GPG key for the authenticated user + * @description Adds a GPG key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "users/create-gpg-key-for-authenticated-user": { + requestBody: { + content: { + "application/json": { + /** @description A descriptive name for the new key. */ + name?: string; + /** @description A GPG key in ASCII-armored format. */ + armored_public_key: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["gpg-key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a GPG key for the authenticated user + * @description View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "users/get-gpg-key-for-authenticated-user": { + parameters: { + path: { + gpg_key_id: components["parameters"]["gpg-key-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["gpg-key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a GPG key for the authenticated user + * @description Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "users/delete-gpg-key-for-authenticated-user": { + parameters: { + path: { + gpg_key_id: components["parameters"]["gpg-key-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List app installations accessible to the user access token + * @description Lists installations of your GitHub App that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. 
+ * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You can find the permissions for the installation under the `permissions` key. + */ + "apps/list-installations-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description You can find the permissions for the installation under the `permissions` key. */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + installations: components["schemas"]["installation"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * List repositories accessible to the user access token + * @description List repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access for an installation. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The access the user has to each repository is included in the hash under the `permissions` key. + */ + "apps/list-installation-repos-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** @description The access the user has to each repository is included in the hash under the `permissions` key. */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": { + total_count: number; + repository_selection?: string; + repositories: components["schemas"]["repository"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Add a repository to an app installation + * @description Add a single repository to an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. 
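A minimal type-level sketch of how the operation entries above can be consumed. The exported `operations` interface name and the `@octokit/openapi-types` import path are assumptions (only `components` is visible in this diff); the operation keys and response shapes are taken from the hunk above.

// Sketch under the assumptions stated above; not part of the generated file.
import type { operations } from "@octokit/openapi-types";

// 200 body of "apps/list-installations-for-authenticated-user":
// { total_count: number; installations: components["schemas"]["installation"][] }
type InstallationList =
  operations["apps/list-installations-for-authenticated-user"]["responses"][200]["content"]["application/json"];

// Element type of the `installations` array.
type Installation = InstallationList["installations"][number];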
+ */
+ "apps/add-repo-to-installation-for-authenticated-user": {
+ parameters: {
+ path: {
+ installation_id: components["parameters"]["installation-id"];
+ repository_id: components["parameters"]["repository-id"];
+ };
+ };
+ responses: {
+ /** @description Response */
+ 204: never;
+ 304: components["responses"]["not_modified"];
+ 403: components["responses"]["forbidden"];
+ 404: components["responses"]["not_found"];
+ };
+ };
+ /**
+ * Remove a repository from an app installation
+ * @description Remove a single repository from an installation. The authenticated user must have admin access to the repository. The installation must have the `repository_selection` of `selected`.
+ *
+ * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint.
+ */
+ "apps/remove-repo-from-installation-for-authenticated-user": {
+ parameters: {
+ path: {
+ installation_id: components["parameters"]["installation-id"];
+ repository_id: components["parameters"]["repository-id"];
+ };
+ };
+ responses: {
+ /** @description Response */
+ 204: never;
+ 304: components["responses"]["not_modified"];
+ 403: components["responses"]["forbidden"];
+ 404: components["responses"]["not_found"];
+ /** @description Returned when the application is installed on `all` repositories in the organization, or if this request would remove the last repository that the application has access to in the organization. */
+ 422: never;
+ };
+ };
+ /**
+ * Get interaction restrictions for your public repositories
+ * @description Shows which type of GitHub user can interact with your public repositories and when the restriction expires.
+ */
+ "interactions/get-restrictions-for-authenticated-user": {
+ responses: {
+ /** @description Default response */
+ 200: {
+ content: {
+ "application/json":
+ | components["schemas"]["interaction-limit-response"]
+ | Record<string, never>;
+ };
+ };
+ /** @description Response when there are no restrictions */
+ 204: never;
+ };
+ };
+ /**
+ * Set interaction restrictions for your public repositories
+ * @description Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user.
+ */
+ "interactions/set-restrictions-for-authenticated-user": {
+ requestBody: {
+ content: {
+ "application/json": components["schemas"]["interaction-limit"];
+ };
+ };
+ responses: {
+ /** @description Response */
+ 200: {
+ content: {
+ "application/json": components["schemas"]["interaction-limit-response"];
+ };
+ };
+ 422: components["responses"]["validation_failed"];
+ };
+ };
+ /**
+ * Remove interaction restrictions from your public repositories
+ * @description Removes any interaction restrictions from your public repositories.
+ */
+ "interactions/remove-restrictions-for-authenticated-user": {
+ responses: {
+ /** @description Response */
+ 204: never;
+ };
+ };
+ /**
+ * List user account issues assigned to the authenticated user
+ * @description List issues across owned and member repositories assigned to the authenticated user.
+ *
+ * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request.
For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/list-for-authenticated-user": { + parameters: { + query?: { + /** @description Indicates which sorts of issues to return. `assigned` means issues assigned to you. `created` means issues created by you. `mentioned` means issues mentioning you. `subscribed` means issues you're subscribed to updates for. `all` or `repos` means all issues you can see, regardless of participation or creation. */ + filter?: + | "assigned" + | "created" + | "mentioned" + | "subscribed" + | "repos" + | "all"; + /** @description Indicates the state of the issues to return. */ + state?: "open" | "closed" | "all"; + labels?: components["parameters"]["labels"]; + /** @description What to sort results by. */ + sort?: "created" | "updated" | "comments"; + direction?: components["parameters"]["direction"]; + since?: components["parameters"]["since"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["issue"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List public SSH keys for the authenticated user + * @description Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "users/list-public-ssh-keys-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["key"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a public SSH key for the authenticated user + * @description Adds a public SSH key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "users/create-public-ssh-key-for-authenticated-user": { + requestBody: { + content: { + "application/json": { + /** + * @description A descriptive name for the new key. + * @example Personal MacBook Air + */ + title?: string; + /** @description The public SSH key to add to your GitHub account. 
*/ + key: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a public SSH key for the authenticated user + * @description View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "users/get-public-ssh-key-for-authenticated-user": { + parameters: { + path: { + key_id: components["parameters"]["key-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete a public SSH key for the authenticated user + * @description Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "users/delete-public-ssh-key-for-authenticated-user": { + parameters: { + path: { + key_id: components["parameters"]["key-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List subscriptions for the authenticated user + * @description Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. . OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). + */ + "apps/list-subscriptions-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["user-marketplace-purchase"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List subscriptions for the authenticated user (stubbed) + * @description Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. . 
OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). + */ + "apps/list-subscriptions-for-authenticated-user-stubbed": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["user-marketplace-purchase"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + }; + }; + /** + * List organization memberships for the authenticated user + * @description Lists all of the authenticated user's organization memberships. + */ + "orgs/list-memberships-for-authenticated-user": { + parameters: { + query?: { + /** @description Indicates the state of the memberships to return. If not specified, the API returns both active and pending memberships. */ + state?: "active" | "pending"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["org-membership"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get an organization membership for the authenticated user + * @description If the authenticated user is an active or pending member of the organization, this endpoint will return the user's membership. If the authenticated user is not affiliated with the organization, a `404` is returned. This endpoint will return a `403` if the request is made by a GitHub App that is blocked by the organization. + */ + "orgs/get-membership-for-authenticated-user": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["org-membership"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Update an organization membership for the authenticated user + * @description Converts the authenticated user to an active member of the organization, if that user has a pending invitation from the organization. + */ + "orgs/update-membership-for-authenticated-user": { + parameters: { + path: { + org: components["parameters"]["org"]; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The state that the membership should be in. Only `"active"` will be accepted. + * @enum {string} + */ + state: "active"; + }; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["org-membership"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List user migrations + * @description Lists all migrations a user has started. 
+ */ + "migrations/list-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["migration"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Start a user migration + * @description Initiates the generation of a user migration archive. + */ + "migrations/start-for-authenticated-user": { + requestBody: { + content: { + "application/json": { + /** + * @description Lock the repositories being migrated at the start of the migration + * @example true + */ + lock_repositories?: boolean; + /** + * @description Indicates whether metadata should be excluded and only git source should be included for the migration. + * @example true + */ + exclude_metadata?: boolean; + /** + * @description Indicates whether the repository git data should be excluded from the migration. + * @example true + */ + exclude_git_data?: boolean; + /** + * @description Do not include attachments in the migration + * @example true + */ + exclude_attachments?: boolean; + /** + * @description Do not include releases in the migration + * @example true + */ + exclude_releases?: boolean; + /** + * @description Indicates whether projects owned by the organization or users should be excluded. + * @example true + */ + exclude_owner_projects?: boolean; + /** + * @description Indicates whether this should only include organization metadata (repositories array should be empty and will ignore other flags). + * @default false + * @example true + */ + org_metadata_only?: boolean; + /** + * @description Exclude attributes from the API response to improve performance + * @example [ + * "repositories" + * ] + */ + exclude?: "repositories"[]; + repositories: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["migration"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a user migration status + * @description Fetches a single user migration. The response includes the `state` of the migration, which can be one of the following values: + * + * * `pending` - the migration hasn't started yet. + * * `exporting` - the migration is in progress. + * * `exported` - the migration finished successfully. + * * `failed` - the migration failed. + * + * Once the migration has been `exported` you can [download the migration archive](https://docs.github.com/rest/migrations/users#download-a-user-migration-archive). 
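Assuming the same `operations` interface and import path as in the earlier sketch, the request body for "migrations/start-for-authenticated-user" can be typed straight from these definitions; of the fields shown here, only `repositories` is required.

// Sketch only; interface name and import path are assumed.
import type { operations } from "@octokit/openapi-types";

type StartMigrationBody =
  operations["migrations/start-for-authenticated-user"]["requestBody"]["content"]["application/json"];

// `repositories` is the only required field; the lock/exclude flags are optional booleans.
export const migration: StartMigrationBody = {
  repositories: ["octocat/Hello-World"],
  lock_repositories: true,
  exclude_releases: true,
};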
+ */ + "migrations/get-status-for-authenticated-user": { + parameters: { + query?: { + exclude?: string[]; + }; + path: { + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["migration"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Download a user migration archive + * @description Fetches the URL to download the migration archive as a `tar.gz` file. Depending on the resources your repository uses, the migration archive can contain JSON files with data for these objects: + * + * * attachments + * * bases + * * commit\_comments + * * issue\_comments + * * issue\_events + * * issues + * * milestones + * * organizations + * * projects + * * protected\_branches + * * pull\_request\_reviews + * * pull\_requests + * * releases + * * repositories + * * review\_comments + * * schema + * * users + * + * The archive will also contain an `attachments` directory that includes all attachment files uploaded to GitHub.com and a `repositories` directory that contains the repository's Git data. + */ + "migrations/get-archive-for-authenticated-user": { + parameters: { + path: { + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** @description Response */ + 302: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Delete a user migration archive + * @description Deletes a previous migration archive. Downloadable migration archives are automatically deleted after seven days. Migration metadata, which is returned in the [List user migrations](https://docs.github.com/rest/migrations/users#list-user-migrations) and [Get a user migration status](https://docs.github.com/rest/migrations/users#get-a-user-migration-status) endpoints, will continue to be available even after an archive is deleted. + */ + "migrations/delete-archive-for-authenticated-user": { + parameters: { + path: { + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Unlock a user repository + * @description Unlocks a repository. You can lock repositories when you [start a user migration](https://docs.github.com/rest/migrations/users#start-a-user-migration). Once the migration is complete you can unlock each repository to begin using it again or [delete the repository](https://docs.github.com/rest/repos/repos#delete-a-repository) if you no longer need the source data. Returns a status of `404 Not Found` if the repository is not locked. 
+ */ + "migrations/unlock-repo-for-authenticated-user": { + parameters: { + path: { + migration_id: components["parameters"]["migration-id"]; + repo_name: components["parameters"]["repo-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List repositories for a user migration + * @description Lists all the repositories for this user migration. + */ + "migrations/list-repos-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List organizations for the authenticated user + * @description List organizations for the authenticated user. + * + * **OAuth scope requirements** + * + * This only lists organizations that your authorization allows you to operate on in some way (e.g., you can list teams with `read:org` scope, you can publicize your organization membership with `user` scope, etc.). Therefore, this API requires at least `user` or `read:org` scope. OAuth requests with insufficient scope receive a `403 Forbidden` response. + */ + "orgs/list-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["organization-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * List packages for the authenticated user's namespace + * @description Lists packages owned by the authenticated user within the user's namespace. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + "packages/list-packages-for-authenticated-user": { + parameters: { + query: { + /** @description The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. 
*/ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + visibility?: components["parameters"]["package-visibility"]; + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package"][]; + }; + }; + 400: components["responses"]["package_es_list_error"]; + }; + }; + /** + * Get a package for the authenticated user + * @description Gets a specific package for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + "packages/get-package-for-authenticated-user": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package"]; + }; + }; + }; + }; + /** + * Delete a package for the authenticated user + * @description Deletes a package owned by the authenticated user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `delete:packages` scopes. + * If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + "packages/delete-package-for-authenticated-user": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restore a package for the authenticated user + * @description Restores a package owned by the authenticated user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `write:packages` scopes. 
If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + "packages/restore-package-for-authenticated-user": { + parameters: { + query?: { + /** @description package token */ + token?: string; + }; + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List package versions for a package owned by the authenticated user + * @description Lists package versions for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + "packages/get-all-package-versions-for-package-owned-by-authenticated-user": { + parameters: { + query?: { + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + /** @description The state of the package, either active or deleted. */ + state?: "active" | "deleted"; + }; + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get a package version for the authenticated user + * @description Gets a specific package version for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." 
+ */ + "packages/get-package-version-for-authenticated-user": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"]; + }; + }; + }; + }; + /** + * Delete a package version for the authenticated user + * @description Deletes a specific package version for a package owned by the authenticated user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `delete:packages` scopes. + * If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + "packages/delete-package-version-for-authenticated-user": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restore a package version for the authenticated user + * @description Restores a package version owned by the authenticated user. + * + * You can restore a deleted package version under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `write:packages` scopes. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." 
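Under the same assumptions, the package-version entries above expose both the 200 response element type and the optional `state` query filter.

// Sketch only; interface name and import path are assumed.
import type { operations } from "@octokit/openapi-types";

type ListVersionsOp =
  operations["packages/get-all-package-versions-for-package-owned-by-authenticated-user"];

// Element of the 200 response, i.e. components["schemas"]["package-version"].
type PackageVersion =
  ListVersionsOp["responses"][200]["content"]["application/json"][number];

// The optional query filter declared above ("active" | "deleted", or undefined when omitted).
type VersionState = NonNullable<ListVersionsOp["parameters"]["query"]>["state"];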
+ */ + "packages/restore-package-version-for-authenticated-user": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a user project + * @description Creates a user project board. Returns a `410 Gone` status if the user does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + "projects/create-for-authenticated-user": { + requestBody: { + content: { + "application/json": { + /** + * @description Name of the project + * @example Week One Sprint + */ + name: string; + /** + * @description Body of the project + * @example This project represents the sprint of the first week in January + */ + body?: string | null; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * List public email addresses for the authenticated user + * @description Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. This endpoint is accessible with the `user:email` scope. + */ + "users/list-public-emails-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["email"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List repositories for the authenticated user + * @description Lists repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + */ + "repos/list-for-authenticated-user": { + parameters: { + query?: { + /** @description Limit results to repositories with the specified visibility. */ + visibility?: "all" | "public" | "private"; + /** + * @description Comma-separated list of values. Can include: + * * `owner`: Repositories that are owned by the authenticated user. + * * `collaborator`: Repositories that the user has been added to as a collaborator. + * * `organization_member`: Repositories that the user has access to through being a member of an organization. This includes every repository on every team that the user is on. 
+ */ + affiliation?: string; + /** @description Limit results to repositories of the specified type. Will cause a `422` error if used in the same request as **visibility** or **affiliation**. */ + type?: "all" | "owner" | "public" | "private" | "member"; + /** @description The property to sort the results by. */ + sort?: "created" | "updated" | "pushed" | "full_name"; + /** @description The order to sort by. Default: `asc` when using `full_name`, otherwise `desc`. */ + direction?: "asc" | "desc"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + since?: components["parameters"]["since-repo-date"]; + before?: components["parameters"]["before-repo-date"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Create a repository for the authenticated user + * @description Creates a new repository for the authenticated user. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository. + */ + "repos/create-for-authenticated-user": { + requestBody: { + content: { + "application/json": { + /** + * @description The name of the repository. + * @example Team Environment + */ + name: string; + /** @description A short description of the repository. */ + description?: string; + /** @description A URL with more information about the repository. */ + homepage?: string; + /** + * @description Whether the repository is private. + * @default false + */ + private?: boolean; + /** + * @description Whether issues are enabled. + * @default true + * @example true + */ + has_issues?: boolean; + /** + * @description Whether projects are enabled. + * @default true + * @example true + */ + has_projects?: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + * @example true + */ + has_wiki?: boolean; + /** + * @description Whether discussions are enabled. + * @default false + * @example true + */ + has_discussions?: boolean; + /** @description The id of the team that will be granted access to this repository. This is only valid when creating a repository in an organization. */ + team_id?: number; + /** + * @description Whether the repository is initialized with a minimal README. + * @default false + */ + auto_init?: boolean; + /** + * @description The desired language or platform to apply to the .gitignore. + * @example Haskell + */ + gitignore_template?: string; + /** + * @description The license keyword of the open source license for this repository. + * @example mit + */ + license_template?: string; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + * @example true + */ + allow_squash_merge?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + * @example true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. 
+ * @default true + * @example true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow Auto-merge to be used on pull requests. + * @default false + * @example false + */ + allow_auto_merge?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + * @example false + */ + delete_branch_on_merge?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description Whether downloads are enabled. + * @default true + * @example true + */ + has_downloads?: boolean; + /** + * @description Whether this repository acts as a template that can be used to generate new repositories. + * @default false + * @example true + */ + is_template?: boolean; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + headers: { + /** @example https://api.github.com/repos/octocat/Hello-World */ + Location?: string; + }; + content: { + "application/json": components["schemas"]["repository"]; + }; + }; + 304: components["responses"]["not_modified"]; + 400: components["responses"]["bad_request"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List repository invitations for the authenticated user + * @description When authenticating as a user, this endpoint will list all currently open repository invitations for that user. 
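One more sketch under the same assumptions: the "repos/create-for-authenticated-user" request body shows how the merge-commit enums above constrain a payload; only `name` is required.

// Sketch only; interface name and import path are assumed.
import type { operations } from "@octokit/openapi-types";

type CreateRepoBody =
  operations["repos/create-for-authenticated-user"]["requestBody"]["content"]["application/json"];

// Only `name` is required; the enum fields accept exactly the literal values listed above.
export const newRepo: CreateRepoBody = {
  name: "hello-world",
  private: true,
  auto_init: true,
  squash_merge_commit_title: "PR_TITLE",
  squash_merge_commit_message: "COMMIT_MESSAGES",
};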
+ */ + "repos/list-invitations-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["repository-invitation"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Decline a repository invitation */ + "repos/decline-invitation-for-authenticated-user": { + parameters: { + path: { + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + }; + }; + /** Accept a repository invitation */ + "repos/accept-invitation-for-authenticated-user": { + parameters: { + path: { + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + }; + }; + /** + * List social accounts for the authenticated user + * @description Lists all of your social accounts. + */ + "users/list-social-accounts-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["social-account"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Add social accounts for the authenticated user + * @description Add one or more social accounts to the authenticated user's profile. This endpoint is accessible with the `user` scope. + */ + "users/add-social-account-for-authenticated-user": { + requestBody: { + content: { + "application/json": { + /** + * @description Full URLs for the social media profiles to add. + * @example [] + */ + account_urls: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["social-account"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Delete social accounts for the authenticated user + * @description Deletes one or more social accounts from the authenticated user's profile. This endpoint is accessible with the `user` scope. + */ + "users/delete-social-account-for-authenticated-user": { + requestBody: { + content: { + "application/json": { + /** + * @description Full URLs for the social media profiles to delete. 
+ * @example [] + */ + account_urls: string[]; + }; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List SSH signing keys for the authenticated user + * @description Lists the SSH signing keys for the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `read:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." + */ + "users/list-ssh-signing-keys-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["ssh-signing-key"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Create a SSH signing key for the authenticated user + * @description Creates an SSH signing key for the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `write:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." + */ + "users/create-ssh-signing-key-for-authenticated-user": { + requestBody: { + content: { + "application/json": { + /** + * @description A descriptive name for the new key. + * @example Personal MacBook Air + */ + title?: string; + /** @description The public SSH key to add to your GitHub account. For more information, see "[Checking for existing SSH keys](https://docs.github.com/authentication/connecting-to-github-with-ssh/checking-for-existing-ssh-keys)." */ + key: string; + }; + }; + }; + responses: { + /** @description Response */ + 201: { + content: { + "application/json": components["schemas"]["ssh-signing-key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get an SSH signing key for the authenticated user + * @description Gets extended details for an SSH signing key. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `read:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." 
+ */ + "users/get-ssh-signing-key-for-authenticated-user": { + parameters: { + path: { + ssh_signing_key_id: components["parameters"]["ssh-signing-key-id"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["ssh-signing-key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Delete an SSH signing key for the authenticated user + * @description Deletes an SSH signing key from the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `admin:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." + */ + "users/delete-ssh-signing-key-for-authenticated-user": { + parameters: { + path: { + ssh_signing_key_id: components["parameters"]["ssh-signing-key-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List repositories starred by the authenticated user + * @description Lists repositories the authenticated user has starred. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + "activity/list-repos-starred-by-authenticated-user": { + parameters: { + query?: { + sort?: components["parameters"]["sort-starred"]; + direction?: components["parameters"]["direction"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["repository"][]; + "application/vnd.github.v3.star+json": components["schemas"]["starred-repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Check if a repository is starred by the authenticated user */ + "activity/check-repo-is-starred-by-authenticated-user": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response if this repository is starred by you */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + /** @description Not Found if this repository is not starred by you */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * Star a repository for the authenticated user + * @description Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." 
+ */ + "activity/star-repo-for-authenticated-user": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Unstar a repository for the authenticated user */ + "activity/unstar-repo-for-authenticated-user": { + parameters: { + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List repositories watched by the authenticated user + * @description Lists repositories the authenticated user is watching. + */ + "activity/list-watched-repos-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * List teams for the authenticated user + * @description List all of the teams across all of the organizations to which the authenticated user belongs. This method requires `user`, `repo`, or `read:org` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/) when authenticating via [OAuth](https://docs.github.com/apps/building-oauth-apps/). When using a fine-grained personal access token, the resource owner of the token [must be a single organization](https://docs.github.com/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#fine-grained-personal-access-tokens), and have at least read-only member organization permissions. The response payload only contains the teams from a single organization when using a fine-grained personal access token. + */ + "teams/list-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["team-full"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List users + * @description Lists all users, in the order that they signed up on GitHub. This list includes personal user accounts and organization accounts. + * + * Note: Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api#using-link-headers) to get the URL for the next page of users. 
+ */ + "users/list": { + parameters: { + query?: { + since?: components["parameters"]["since-user"]; + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + /** @example ; rel="next" */ + Link?: string; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Get a user + * @description Provides publicly available information about someone with a GitHub account. + * + * GitHub Apps with the `Plan` user permission can use this endpoint to retrieve information about a user's GitHub plan. The GitHub App must be authenticated as a user. See "[Identifying and authorizing users for GitHub Apps](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/)" for details about authentication. For an example response, see 'Response with GitHub plan information' below" + * + * The `email` key in the following response is the publicly visible email address from your GitHub [profile page](https://github.com/settings/profile). When setting up your profile, you can select a primary email address to be “public” which provides an email entry for this endpoint. If you do not set a public email address for `email`, then it will have a value of `null`. You only see publicly visible email addresses when authenticated with GitHub. For more information, see [Authentication](https://docs.github.com/rest/overview/resources-in-the-rest-api#authentication). + * + * The Emails API enables you to list all of your email addresses, and toggle a primary email to be visible publicly. For more information, see "[Emails API](https://docs.github.com/rest/reference/users#emails)". + */ + "users/get-by-username": { + parameters: { + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": + | components["schemas"]["private-user"] + | components["schemas"]["public-user"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get list of conflicting packages during Docker migration for user + * @description Lists all packages that are in a specific user's namespace, that the requesting user has access to, and that encountered a conflict during Docker migration. + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. + */ + "packages/list-docker-migration-conflicting-packages-for-user": { + parameters: { + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * List events for the authenticated user + * @description If you are authenticated as the given user, you will see your private events. Otherwise, you'll only see public events. 
+ */ + "activity/list-events-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** + * List organization events for the authenticated user + * @description This is the user's organization dashboard. You must be authenticated as the user to view this. + */ + "activity/list-org-events-for-authenticated-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + org: components["parameters"]["org"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** List public events for a user */ + "activity/list-public-events-for-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** + * List followers of a user + * @description Lists the people following the specified user. + */ + "users/list-followers-for-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + /** + * List the people a user follows + * @description Lists the people who the specified user follows. 
+ */ + "users/list-following-for-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + /** Check if a user follows another user */ + "users/check-following-for-user": { + parameters: { + path: { + username: components["parameters"]["username"]; + target_user: string; + }; + }; + responses: { + /** @description if the user follows the target user */ + 204: never; + /** @description if the user does not follow the target user */ + 404: never; + }; + }; + /** + * List gists for a user + * @description Lists public gists for the specified user: + */ + "gists/list-for-user": { + parameters: { + query?: { + since?: components["parameters"]["since"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["base-gist"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List GPG keys for a user + * @description Lists the GPG keys for a user. This information is accessible by anyone. + */ + "users/list-gpg-keys-for-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["gpg-key"][]; + }; + }; + }; + }; + /** + * Get contextual information for a user + * @description Provides hovercard information when authenticated through basic auth or OAuth with the `repo` scope. You can find out more about someone in relation to their pull requests, issues, repositories, and organizations. + * + * The `subject_type` and `subject_id` parameters provide context for the person's hovercard, which returns more information than without the parameters. For example, if you wanted to find out more about `octocat` who owns the `Spoon-Knife` repository via cURL, it would look like this: + * + * ```shell + * curl -u username:token + * https://api.github.com/users/octocat/hovercard?subject_type=repository&subject_id=1300192 + * ``` + */ + "users/get-context-for-user": { + parameters: { + query?: { + /** @description Identifies which additional information you'd like to receive about the person's hovercard. Can be `organization`, `repository`, `issue`, `pull_request`. **Required** when using `subject_id`. */ + subject_type?: "organization" | "repository" | "issue" | "pull_request"; + /** @description Uses the ID for the `subject_type` you specified. **Required** when using `subject_type`. 
*/ + subject_id?: string; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["hovercard"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Get a user installation for the authenticated app + * @description Enables an authenticated GitHub App to find the user’s installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-user-installation": { + parameters: { + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["installation"]; + }; + }; + }; + }; + /** + * List public keys for a user + * @description Lists the _verified_ public SSH keys for a user. This is accessible by anyone. + */ + "users/list-public-keys-for-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["key-simple"][]; + }; + }; + }; + }; + /** + * List organizations for a user + * @description List [public organization memberships](https://docs.github.com/articles/publicizing-or-concealing-organization-membership) for the specified user. + * + * This method only lists _public_ memberships, regardless of authentication. If you need to fetch all of the organization memberships (public and private) for the authenticated user, use the [List organizations for the authenticated user](https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user) API instead. + */ + "orgs/list-for-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["organization-simple"][]; + }; + }; + }; + }; + /** + * List packages for a user + * @description Lists all packages in a user's namespace for which the requesting user has access. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + "packages/list-packages-for-user": { + parameters: { + query: { + /** @description The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. 
You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + visibility?: components["parameters"]["package-visibility"]; + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per-page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package"][]; + }; + }; + 400: components["responses"]["package_es_list_error"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Get a package for a user + * @description Gets a specific package metadata for a public package owned by a user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + "packages/get-package-for-user": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package"]; + }; + }; + }; + }; + /** + * Delete a package for a user + * @description Deletes an entire package for a user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `delete:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package you want to delete. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." 
+ */ + "packages/delete-package-for-user": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restore a package for a user + * @description Restores an entire package for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `write:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package you want to restore. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + "packages/restore-package-for-user": { + parameters: { + query?: { + /** @description package token */ + token?: string; + }; + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List package versions for a package owned by a user + * @description Lists package versions for a public package owned by a specified user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." 
+ */ + "packages/get-all-package-versions-for-package-owned-by-user": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get a package version for a user + * @description Gets a specific package version for a public package owned by a specified user. + * + * At this time, to use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + "packages/get-package-version-for-user": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + package_version_id: components["parameters"]["package-version-id"]; + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"]; + }; + }; + }; + }; + /** + * Delete package version for a user + * @description Deletes a specific package version for a user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `delete:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package whose version you want to delete. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." 
+ */ + "packages/delete-package-version-for-user": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + username: components["parameters"]["username"]; + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restore package version for a user + * @description Restores a specific package version for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `write:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package whose version you want to restore. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + "packages/restore-package-version-for-user": { + parameters: { + path: { + package_type: components["parameters"]["package-type"]; + package_name: components["parameters"]["package-name"]; + username: components["parameters"]["username"]; + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** @description Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List user projects + * @description Lists projects for a user. + */ + "projects/list-for-user": { + parameters: { + query?: { + /** @description Indicates the state of the projects to return. */ + state?: "open" | "closed" | "all"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["project"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * List events received by the authenticated user + * @description These are events that you've received by watching repos and following users. If you are authenticated as the given user, you will see private events. Otherwise, you'll only see public events. 
+ */ + "activity/list-received-events-for-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** List public events received by a user */ + "activity/list-received-public-events-for-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** + * List repositories for a user + * @description Lists public repositories for the specified user. Note: For GitHub AE, this endpoint will list internal repositories for the specified user. + */ + "repos/list-for-user": { + parameters: { + query?: { + /** @description Limit results to repositories of the specified type. */ + type?: "all" | "owner" | "member"; + /** @description The property to sort the results by. */ + sort?: "created" | "updated" | "pushed" | "full_name"; + /** @description The order to sort by. Default: `asc` when using `full_name`, otherwise `desc`. */ + direction?: "asc" | "desc"; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + /** + * Get GitHub Actions billing for a user + * @description Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `user` scope. + */ + "billing/get-github-actions-billing-user": { + parameters: { + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-billing-usage"]; + }; + }; + }; + }; + /** + * Get GitHub Packages billing for a user + * @description Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. 
+ */ + "billing/get-github-packages-billing-user": { + parameters: { + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["packages-billing-usage"]; + }; + }; + }; + }; + /** + * Get shared storage billing for a user + * @description Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. + */ + "billing/get-shared-storage-billing-user": { + parameters: { + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["combined-billing-usage"]; + }; + }; + }; + }; + /** + * List social accounts for a user + * @description Lists social media accounts for a user. This endpoint is accessible by anyone. + */ + "users/list-social-accounts-for-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["social-account"][]; + }; + }; + }; + }; + /** + * List SSH signing keys for a user + * @description Lists the SSH signing keys for a user. This operation is accessible by anyone. + */ + "users/list-ssh-signing-keys-for-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["ssh-signing-key"][]; + }; + }; + }; + }; + /** + * List repositories starred by a user + * @description Lists repositories a user has starred. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + "activity/list-repos-starred-by-user": { + parameters: { + query?: { + sort?: components["parameters"]["sort-starred"]; + direction?: components["parameters"]["direction"]; + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": + | components["schemas"]["starred-repository"][] + | components["schemas"]["repository"][]; + }; + }; + }; + }; + /** + * List repositories watched by a user + * @description Lists repositories a user is watching. 
+ */ + "activity/list-repos-watched-by-user": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + username: components["parameters"]["username"]; + }; + }; + responses: { + /** @description Response */ + 200: { + headers: { + Link: components["headers"]["link"]; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + /** + * Get all API versions + * @description Get all supported GitHub API versions. + */ + "meta/get-all-versions": { + responses: { + /** @description Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Get the Zen of GitHub + * @description Get a random sentence from the Zen of GitHub + */ + "meta/get-zen": { + responses: { + /** @description Response */ + 200: { + content: { + "text/plain": string; + }; + }; + }; + }; + /** + * Compare two commits + * @description **Deprecated**: Use `repos.compareCommitsWithBasehead()` (`GET /repos/{owner}/{repo}/compare/{basehead}`) instead. Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. 
| + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "repos/compare-commits": { + parameters: { + query?: { + per_page?: components["parameters"]["per-page"]; + page?: components["parameters"]["page"]; + }; + path: { + owner: components["parameters"]["owner"]; + repo: components["parameters"]["repo"]; + base: string; + head: string; + }; + }; + responses: { + /** @description Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-comparison"]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; +} diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/LICENSE b/dist/node_modules/@octokit/plugin-paginate-rest/LICENSE new file mode 100644 index 0000000..57bee5f --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/LICENSE @@ -0,0 +1,7 @@ +MIT License Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/README.md b/dist/node_modules/@octokit/plugin-paginate-rest/README.md new file mode 100644 index 0000000..1e3c0ba --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/README.md @@ -0,0 +1,269 @@ +# plugin-paginate-rest.js + +> Octokit plugin to paginate REST API endpoint responses + +[![@latest](https://img.shields.io/npm/v/@octokit/plugin-paginate-rest.svg)](https://www.npmjs.com/package/@octokit/plugin-paginate-rest) +[![Build Status](https://github.com/octokit/plugin-paginate-rest.js/workflows/Test/badge.svg)](https://github.com/octokit/plugin-paginate-rest.js/actions?workflow=Test) + +## Usage + + + + + + +
+Browsers
+
+
+Load `@octokit/plugin-paginate-rest` and [`@octokit/core`](https://github.com/octokit/core.js) (or core-compatible module) directly from [cdn.skypack.dev](https://cdn.skypack.dev)
+
+```html
+<script type="module">
+import { Octokit } from "https://cdn.skypack.dev/@octokit/core";
+import {
+  paginateRest,
+  composePaginateRest,
+} from "https://cdn.skypack.dev/@octokit/plugin-paginate-rest";
+</script>
+```
+
+
+Node + + +Install with `npm install @octokit/core @octokit/plugin-paginate-rest`. Optionally replace `@octokit/core` with a core-compatible module + +```js +const { Octokit } = require("@octokit/core"); +const { + paginateRest, + composePaginateRest, +} = require("@octokit/plugin-paginate-rest"); +``` + +
+
+```js
+const MyOctokit = Octokit.plugin(paginateRest);
+const octokit = new MyOctokit({ auth: "secret123" });
+
+// See https://developer.github.com/v3/issues/#list-issues-for-a-repository
+const issues = await octokit.paginate("GET /repos/{owner}/{repo}/issues", {
+  owner: "octocat",
+  repo: "hello-world",
+  since: "2010-10-01",
+  per_page: 100,
+});
+```
+
+If you want to utilize the pagination methods in another plugin, use `composePaginateRest`.
+
+```js
+function myPlugin(octokit, options) {
+  return {
+    allStars: ({ owner, repo }) => {
+      return composePaginateRest(
+        octokit,
+        "GET /repos/{owner}/{repo}/stargazers",
+        { owner, repo }
+      );
+    },
+  };
+}
+```
+
+## `octokit.paginate()`
+
+The `paginateRest` plugin adds a new `octokit.paginate()` method which accepts the same parameters as [`octokit.request`](https://github.com/octokit/request.js#request). Only "List ..." endpoints such as [List issues for a repository](https://developer.github.com/v3/issues/#list-issues-for-a-repository) support pagination. Their [response includes a Link header](https://developer.github.com/v3/issues/#response-1). For other endpoints, `octokit.paginate()` behaves the same as `octokit.request()`.
+
+The `per_page` parameter usually defaults to `30` and can be set to up to `100`, which helps retrieve a large amount of data without hitting the rate limits too soon.
+
+An optional `mapFunction` can be passed to map each page response to a new value, usually an array with only the data you need. This can help reduce memory usage, as only the relevant data has to be kept in memory until the pagination is complete.
+
+```js
+const issueTitles = await octokit.paginate(
+  "GET /repos/{owner}/{repo}/issues",
+  {
+    owner: "octocat",
+    repo: "hello-world",
+    since: "2010-10-01",
+    per_page: 100,
+  },
+  (response) => response.data.map((issue) => issue.title)
+);
+```
+
+The `mapFunction` gets a second argument, `done`, which can be called to end the pagination early.
+
+```js
+const issues = await octokit.paginate(
+  "GET /repos/{owner}/{repo}/issues",
+  {
+    owner: "octocat",
+    repo: "hello-world",
+    since: "2010-10-01",
+    per_page: 100,
+  },
+  (response, done) => {
+    if (response.data.find((issue) => issue.title.includes("something"))) {
+      done();
+    }
+    return response.data;
+  }
+);
+```
+
+Alternatively, you can pass a `request` method as the first argument. This is great when used in combination with [`@octokit/plugin-rest-endpoint-methods`](https://github.com/octokit/plugin-rest-endpoint-methods.js/):
+
+```js
+const issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
+  owner: "octocat",
+  repo: "hello-world",
+  since: "2010-10-01",
+  per_page: 100,
+});
+```
+
+## `octokit.paginate.iterator()`
+
+If your target runtime environment supports async iterators (such as most modern browsers and Node 10+), you can iterate through each response:
+
+```js
+const parameters = {
+  owner: "octocat",
+  repo: "hello-world",
+  since: "2010-10-01",
+  per_page: 100,
+};
+for await (const response of octokit.paginate.iterator(
+  "GET /repos/{owner}/{repo}/issues",
+  parameters
+)) {
+  // do whatever you want with each response, break out of the loop, etc.
+  const issues = response.data;
+  console.log("%d issues found", issues.length);
+}
+```
+
+Alternatively, you can pass a `request` method as the first argument. This is great when used in combination with [`@octokit/plugin-rest-endpoint-methods`](https://github.com/octokit/plugin-rest-endpoint-methods.js/):
+
+```js
+const parameters = {
+  owner: "octocat",
+  repo: "hello-world",
+  since: "2010-10-01",
+  per_page: 100,
+};
+for await (const response of octokit.paginate.iterator(
+  octokit.rest.issues.listForRepo,
+  parameters
+)) {
+  // do whatever you want with each response, break out of the loop, etc.
+  const issues = response.data;
+  console.log("%d issues found", issues.length);
+}
+```
+
+## `composePaginateRest` and `composePaginateRest.iterator`
+
+The `compose*` methods work just like their `octokit.*` counterparts described above, with the difference that both methods require an `octokit` instance to be passed as the first argument.
+
+## How it works
+
+`octokit.paginate()` wraps `octokit.request()`. As long as a `rel="next"` link value is present in the response's `Link` header, it sends another request for that URL, and so on.
+
+Most of GitHub's paginating REST API endpoints return an array, but there are a few exceptions that return an object with a key containing the items array. For example:
+
+- [Search repositories](https://developer.github.com/v3/search/#example) (key: `items`)
+- [List check runs for a specific ref](https://developer.github.com/v3/checks/runs/#response-3) (key: `check_runs`)
+- [List check suites for a specific ref](https://developer.github.com/v3/checks/suites/#response-1) (key: `check_suites`)
+- [List repositories](https://developer.github.com/v3/apps/installations/#list-repositories) for an installation (key: `repositories`)
+- [List installations for a user](https://developer.github.com/v3/apps/installations/#response-1) (key: `installations`)
+
+`octokit.paginate()` works around these inconsistencies so you don't have to worry about them.
+
+If a response lacks the `Link` header, `octokit.paginate()` still resolves with an array, even if the response returns a single object.
+
+## Types
+
+The plugin also exposes some types and runtime type guards for TypeScript projects.
+
+
+
+
+
+
+Types + + +```typescript +import { + PaginateInterface, + PaginatingEndpoints, +} from "@octokit/plugin-paginate-rest"; +``` + +
+Guards + + +```typescript +import { isPaginatingEndpoint } from "@octokit/plugin-paginate-rest"; +``` + +
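+
+As a quick, illustrative sketch (assuming a TypeScript project; it simply combines the exports listed above), the `PaginateInterface` type described below can annotate a plugin-enhanced client so that `.paginate` calls are fully typed:
+
+```typescript
+import { Octokit } from "@octokit/core";
+import { paginateRest } from "@octokit/plugin-paginate-rest";
+import type { PaginateInterface } from "@octokit/plugin-paginate-rest";
+
+// Octokit.plugin() merges the plugin's return type, so instances expose `.paginate`.
+const MyOctokit = Octokit.plugin(paginateRest);
+const octokit: Octokit & { paginate: PaginateInterface } = new MyOctokit({
+  auth: "secret123",
+});
+```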
+
+### PaginateInterface
+
+An `interface` that declares all the overloads of the `.paginate` method.
+
+### PaginatingEndpoints
+
+An `interface` which describes all API endpoints supported by the plugin. Some overloads of the `.paginate()` method and the `composePaginateRest()` function depend on `PaginatingEndpoints`, using `keyof PaginatingEndpoints` as a type for one of their arguments.
+
+```typescript
+import { Octokit } from "@octokit/core";
+import {
+  PaginatingEndpoints,
+  composePaginateRest,
+} from "@octokit/plugin-paginate-rest";
+
+type DataType<T> = "data" extends keyof T ? T["data"] : unknown;
+
+async function myPaginatePlugin<E extends keyof PaginatingEndpoints>(
+  octokit: Octokit,
+  endpoint: E,
+  parameters?: PaginatingEndpoints[E]["parameters"]
+): Promise<DataType<PaginatingEndpoints[E]["response"]>> {
+  return await composePaginateRest(octokit, endpoint, parameters);
+}
+```
+
+### isPaginatingEndpoint
+
+A type guard: `isPaginatingEndpoint(arg)` returns `true` if `arg` is one of the keys in `PaginatingEndpoints` (i.e. `keyof PaginatingEndpoints`).
+
+```typescript
+import { Octokit } from "@octokit/core";
+import {
+  isPaginatingEndpoint,
+  composePaginateRest,
+} from "@octokit/plugin-paginate-rest";
+
+async function myPlugin(octokit: Octokit, arg: unknown) {
+  if (isPaginatingEndpoint(arg)) {
+    return await composePaginateRest(octokit, arg);
+  }
+  // ...
+}
+```
+
+## Contributing
+
+See [CONTRIBUTING.md](CONTRIBUTING.md)
+
+## License
+
+[MIT](LICENSE)
diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js b/dist/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js
new file mode 100644
index 0000000..99297c3
--- /dev/null
+++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js
@@ -0,0 +1,386 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+  composePaginateRest: () => composePaginateRest,
+  isPaginatingEndpoint: () => isPaginatingEndpoint,
+  paginateRest: () => paginateRest,
+  paginatingEndpoints: () => paginatingEndpoints
+});
+module.exports = __toCommonJS(dist_src_exports);
+
+// pkg/dist-src/version.js
+var VERSION = "6.1.2";
+
+// pkg/dist-src/normalize-paginated-list-response.js
+function normalizePaginatedListResponse(response) {
+  if (!response.data) {
+    return {
+      ...response,
+      data: []
+    };
+  }
+  const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
+  if (!responseNeedsNormalization)
+    return response;
+  const incompleteResults = response.data.incomplete_results;
+  const repositorySelection = response.data.repository_selection;
+  const totalCount = response.data.total_count;
+  delete response.data.incomplete_results;
+  delete response.data.repository_selection;
+  delete response.data.total_count;
+  const namespaceKey = 
Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + response.data.total_count = totalCount; + return response; +} + +// pkg/dist-src/iterator.js +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; + } catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } + }) + }; +} + +// pkg/dist-src/paginate.js +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = void 0; + } + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); +} +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { + if (result.done) { + return results; + } + let earlyExit = false; + function done() { + earlyExit = true; + } + results = results.concat( + mapFn ? 
mapFn(result.value, done) : result.value.data + ); + if (earlyExit) { + return results; + } + return gather(octokit, results, iterator2, mapFn); + }); +} + +// pkg/dist-src/compose-paginate.js +var composePaginateRest = Object.assign(paginate, { + iterator +}); + +// pkg/dist-src/generated/paginating-endpoints.js +var paginatingEndpoints = [ + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /organizations/{org}/personal-access-token-requests", + "GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /organizations/{org}/personal-access-tokens", + "GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/required_workflows", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET 
/projects/{project_id}/columns", + "GET /repos/{org}/{repo}/actions/required_workflows", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET 
/repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET /users/{username}/starred", + "GET /users/{username}/subscriptions" +]; + +// pkg/dist-src/paginating-endpoints.js +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } +} + +// pkg/dist-src/index.js +function paginateRest(octokit) { 
+ return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + composePaginateRest, + isPaginatingEndpoint, + paginateRest, + paginatingEndpoints +}); diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js.map b/dist/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js.map new file mode 100644 index 0000000..ff0f82a --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js", "../dist-src/normalize-paginated-list-response.js", "../dist-src/iterator.js", "../dist-src/paginate.js", "../dist-src/compose-paginate.js", "../dist-src/generated/paginating-endpoints.js", "../dist-src/paginating-endpoints.js"], + "sourcesContent": ["import { VERSION } from \"./version\";\nimport { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nimport { composePaginateRest } from \"./compose-paginate\";\nimport {\n isPaginatingEndpoint,\n paginatingEndpoints\n} from \"./paginating-endpoints\";\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\nexport {\n composePaginateRest,\n isPaginatingEndpoint,\n paginateRest,\n paginatingEndpoints\n};\n", "const VERSION = \"6.1.2\";\nexport {\n VERSION\n};\n", "function normalizePaginatedListResponse(response) {\n if (!response.data) {\n return {\n ...response,\n data: []\n };\n }\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization)\n return response;\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\nexport {\n normalizePaginatedListResponse\n};\n", "import { normalizePaginatedListResponse } from \"./normalize-paginated-list-response\";\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? 
route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url)\n return { done: true };\n try {\n const response = await requestMethod({ method, url, headers });\n const normalizedResponse = normalizePaginatedListResponse(response);\n url = ((normalizedResponse.headers.link || \"\").match(\n /<([^>]+)>;\\s*rel=\"next\"/\n ) || [])[1];\n return { value: normalizedResponse };\n } catch (error) {\n if (error.status !== 409)\n throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n })\n };\n}\nexport {\n iterator\n};\n", "import { iterator } from \"./iterator\";\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = void 0;\n }\n return gather(\n octokit,\n [],\n iterator(octokit, route, parameters)[Symbol.asyncIterator](),\n mapFn\n );\n}\nfunction gather(octokit, results, iterator2, mapFn) {\n return iterator2.next().then((result) => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(\n mapFn ? mapFn(result.value, done) : result.value.data\n );\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator2, mapFn);\n });\n}\nexport {\n paginate\n};\n", "import { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\nexport {\n composePaginateRest\n};\n", "const paginatingEndpoints = [\n \"GET /app/hook/deliveries\",\n \"GET /app/installation-requests\",\n \"GET /app/installations\",\n \"GET /enterprises/{enterprise}/dependabot/alerts\",\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\",\n \"GET /events\",\n \"GET /gists\",\n \"GET /gists/public\",\n \"GET /gists/starred\",\n \"GET /gists/{gist_id}/comments\",\n \"GET /gists/{gist_id}/commits\",\n \"GET /gists/{gist_id}/forks\",\n \"GET /installation/repositories\",\n \"GET /issues\",\n \"GET /licenses\",\n \"GET /marketplace_listing/plans\",\n \"GET /marketplace_listing/plans/{plan_id}/accounts\",\n \"GET /marketplace_listing/stubbed/plans\",\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"GET /networks/{owner}/{repo}/events\",\n \"GET /notifications\",\n \"GET /organizations\",\n \"GET /organizations/{org}/personal-access-token-requests\",\n \"GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories\",\n \"GET /organizations/{org}/personal-access-tokens\",\n \"GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories\",\n \"GET /orgs/{org}/actions/cache/usage-by-repository\",\n \"GET /orgs/{org}/actions/permissions/repositories\",\n \"GET /orgs/{org}/actions/required_workflows\",\n \"GET /orgs/{org}/actions/runners\",\n \"GET /orgs/{org}/actions/secrets\",\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/actions/variables\",\n \"GET /orgs/{org}/actions/variables/{name}/repositories\",\n \"GET /orgs/{org}/blocks\",\n \"GET /orgs/{org}/code-scanning/alerts\",\n \"GET /orgs/{org}/codespaces\",\n \"GET /orgs/{org}/codespaces/secrets\",\n \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/dependabot/alerts\",\n \"GET /orgs/{org}/dependabot/secrets\",\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\",\n \"GET 
/orgs/{org}/events\",\n \"GET /orgs/{org}/failed_invitations\",\n \"GET /orgs/{org}/hooks\",\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries\",\n \"GET /orgs/{org}/installations\",\n \"GET /orgs/{org}/invitations\",\n \"GET /orgs/{org}/invitations/{invitation_id}/teams\",\n \"GET /orgs/{org}/issues\",\n \"GET /orgs/{org}/members\",\n \"GET /orgs/{org}/members/{username}/codespaces\",\n \"GET /orgs/{org}/migrations\",\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n \"GET /orgs/{org}/outside_collaborators\",\n \"GET /orgs/{org}/packages\",\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n \"GET /orgs/{org}/projects\",\n \"GET /orgs/{org}/public_members\",\n \"GET /orgs/{org}/repos\",\n \"GET /orgs/{org}/secret-scanning/alerts\",\n \"GET /orgs/{org}/teams\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n \"GET /orgs/{org}/teams/{team_slug}/members\",\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n \"GET /orgs/{org}/teams/{team_slug}/repos\",\n \"GET /orgs/{org}/teams/{team_slug}/teams\",\n \"GET /projects/columns/{column_id}/cards\",\n \"GET /projects/{project_id}/collaborators\",\n \"GET /projects/{project_id}/columns\",\n \"GET /repos/{org}/{repo}/actions/required_workflows\",\n \"GET /repos/{owner}/{repo}/actions/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/caches\",\n \"GET /repos/{owner}/{repo}/actions/organization-secrets\",\n \"GET /repos/{owner}/{repo}/actions/organization-variables\",\n \"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs\",\n \"GET /repos/{owner}/{repo}/actions/runners\",\n \"GET /repos/{owner}/{repo}/actions/runs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/secrets\",\n \"GET /repos/{owner}/{repo}/actions/variables\",\n \"GET /repos/{owner}/{repo}/actions/workflows\",\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n \"GET /repos/{owner}/{repo}/assignees\",\n \"GET /repos/{owner}/{repo}/branches\",\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n \"GET /repos/{owner}/{repo}/code-scanning/analyses\",\n \"GET /repos/{owner}/{repo}/codespaces\",\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\",\n \"GET /repos/{owner}/{repo}/codespaces/secrets\",\n \"GET /repos/{owner}/{repo}/collaborators\",\n \"GET /repos/{owner}/{repo}/comments\",\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/commits\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/status\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n \"GET 
/repos/{owner}/{repo}/contributors\",\n \"GET /repos/{owner}/{repo}/dependabot/alerts\",\n \"GET /repos/{owner}/{repo}/dependabot/secrets\",\n \"GET /repos/{owner}/{repo}/deployments\",\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n \"GET /repos/{owner}/{repo}/environments\",\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\",\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps\",\n \"GET /repos/{owner}/{repo}/events\",\n \"GET /repos/{owner}/{repo}/forks\",\n \"GET /repos/{owner}/{repo}/hooks\",\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\",\n \"GET /repos/{owner}/{repo}/invitations\",\n \"GET /repos/{owner}/{repo}/issues\",\n \"GET /repos/{owner}/{repo}/issues/comments\",\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n \"GET /repos/{owner}/{repo}/keys\",\n \"GET /repos/{owner}/{repo}/labels\",\n \"GET /repos/{owner}/{repo}/milestones\",\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n \"GET /repos/{owner}/{repo}/notifications\",\n \"GET /repos/{owner}/{repo}/pages/builds\",\n \"GET /repos/{owner}/{repo}/projects\",\n \"GET /repos/{owner}/{repo}/pulls\",\n \"GET /repos/{owner}/{repo}/pulls/comments\",\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n \"GET /repos/{owner}/{repo}/releases\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\",\n \"GET /repos/{owner}/{repo}/security-advisories\",\n \"GET /repos/{owner}/{repo}/stargazers\",\n \"GET /repos/{owner}/{repo}/subscribers\",\n \"GET /repos/{owner}/{repo}/tags\",\n \"GET /repos/{owner}/{repo}/teams\",\n \"GET /repos/{owner}/{repo}/topics\",\n \"GET /repositories\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables\",\n \"GET /search/code\",\n \"GET /search/commits\",\n \"GET /search/issues\",\n \"GET /search/labels\",\n \"GET /search/repositories\",\n \"GET /search/topics\",\n \"GET /search/users\",\n \"GET /teams/{team_id}/discussions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\",\n \"GET /teams/{team_id}/invitations\",\n \"GET /teams/{team_id}/members\",\n \"GET /teams/{team_id}/projects\",\n \"GET /teams/{team_id}/repos\",\n \"GET /teams/{team_id}/teams\",\n \"GET /user/blocks\",\n \"GET /user/codespaces\",\n \"GET /user/codespaces/secrets\",\n \"GET 
/user/emails\",\n \"GET /user/followers\",\n \"GET /user/following\",\n \"GET /user/gpg_keys\",\n \"GET /user/installations\",\n \"GET /user/installations/{installation_id}/repositories\",\n \"GET /user/issues\",\n \"GET /user/keys\",\n \"GET /user/marketplace_purchases\",\n \"GET /user/marketplace_purchases/stubbed\",\n \"GET /user/memberships/orgs\",\n \"GET /user/migrations\",\n \"GET /user/migrations/{migration_id}/repositories\",\n \"GET /user/orgs\",\n \"GET /user/packages\",\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n \"GET /user/public_emails\",\n \"GET /user/repos\",\n \"GET /user/repository_invitations\",\n \"GET /user/social_accounts\",\n \"GET /user/ssh_signing_keys\",\n \"GET /user/starred\",\n \"GET /user/subscriptions\",\n \"GET /user/teams\",\n \"GET /users\",\n \"GET /users/{username}/events\",\n \"GET /users/{username}/events/orgs/{org}\",\n \"GET /users/{username}/events/public\",\n \"GET /users/{username}/followers\",\n \"GET /users/{username}/following\",\n \"GET /users/{username}/gists\",\n \"GET /users/{username}/gpg_keys\",\n \"GET /users/{username}/keys\",\n \"GET /users/{username}/orgs\",\n \"GET /users/{username}/packages\",\n \"GET /users/{username}/projects\",\n \"GET /users/{username}/received_events\",\n \"GET /users/{username}/received_events/public\",\n \"GET /users/{username}/repos\",\n \"GET /users/{username}/social_accounts\",\n \"GET /users/{username}/ssh_signing_keys\",\n \"GET /users/{username}/starred\",\n \"GET /users/{username}/subscriptions\"\n];\nexport {\n paginatingEndpoints\n};\n", "import {\n paginatingEndpoints\n} from \"./generated/paginating-endpoints\";\nimport { paginatingEndpoints as paginatingEndpoints2 } from \"./generated/paginating-endpoints\";\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\nexport {\n isPaginatingEndpoint,\n paginatingEndpoints2 as paginatingEndpoints\n};\n"], + "mappings": 
";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAM,UAAU;;;ACAhB,SAAS,+BAA+B,UAAU;AAChD,MAAI,CAAC,SAAS,MAAM;AAClB,WAAO;AAAA,MACL,GAAG;AAAA,MACH,MAAM,CAAC;AAAA,IACT;AAAA,EACF;AACA,QAAM,6BAA6B,iBAAiB,SAAS,QAAQ,EAAE,SAAS,SAAS;AACzF,MAAI,CAAC;AACH,WAAO;AACT,QAAM,oBAAoB,SAAS,KAAK;AACxC,QAAM,sBAAsB,SAAS,KAAK;AAC1C,QAAM,aAAa,SAAS,KAAK;AACjC,SAAO,SAAS,KAAK;AACrB,SAAO,SAAS,KAAK;AACrB,SAAO,SAAS,KAAK;AACrB,QAAM,eAAe,OAAO,KAAK,SAAS,IAAI,EAAE,CAAC;AACjD,QAAM,OAAO,SAAS,KAAK,YAAY;AACvC,WAAS,OAAO;AAChB,MAAI,OAAO,sBAAsB,aAAa;AAC5C,aAAS,KAAK,qBAAqB;AAAA,EACrC;AACA,MAAI,OAAO,wBAAwB,aAAa;AAC9C,aAAS,KAAK,uBAAuB;AAAA,EACvC;AACA,WAAS,KAAK,cAAc;AAC5B,SAAO;AACT;;;AC1BA,SAAS,SAAS,SAAS,OAAO,YAAY;AAC5C,QAAM,UAAU,OAAO,UAAU,aAAa,MAAM,SAAS,UAAU,IAAI,QAAQ,QAAQ,SAAS,OAAO,UAAU;AACrH,QAAM,gBAAgB,OAAO,UAAU,aAAa,QAAQ,QAAQ;AACpE,QAAM,SAAS,QAAQ;AACvB,QAAM,UAAU,QAAQ;AACxB,MAAI,MAAM,QAAQ;AAClB,SAAO;AAAA,IACL,CAAC,OAAO,aAAa,GAAG,OAAO;AAAA,MAC7B,MAAM,OAAO;AACX,YAAI,CAAC;AACH,iBAAO,EAAE,MAAM,KAAK;AACtB,YAAI;AACF,gBAAM,WAAW,MAAM,cAAc,EAAE,QAAQ,KAAK,QAAQ,CAAC;AAC7D,gBAAM,qBAAqB,+BAA+B,QAAQ;AAClE,kBAAQ,mBAAmB,QAAQ,QAAQ,IAAI;AAAA,YAC7C;AAAA,UACF,KAAK,CAAC,GAAG,CAAC;AACV,iBAAO,EAAE,OAAO,mBAAmB;AAAA,QACrC,SAAS,OAAP;AACA,cAAI,MAAM,WAAW;AACnB,kBAAM;AACR,gBAAM;AACN,iBAAO;AAAA,YACL,OAAO;AAAA,cACL,QAAQ;AAAA,cACR,SAAS,CAAC;AAAA,cACV,MAAM,CAAC;AAAA,YACT;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;ACjCA,SAAS,SAAS,SAAS,OAAO,YAAY,OAAO;AACnD,MAAI,OAAO,eAAe,YAAY;AACpC,YAAQ;AACR,iBAAa;AAAA,EACf;AACA,SAAO;AAAA,IACL;AAAA,IACA,CAAC;AAAA,IACD,SAAS,SAAS,OAAO,UAAU,EAAE,OAAO,aAAa,EAAE;AAAA,IAC3D;AAAA,EACF;AACF;AACA,SAAS,OAAO,SAAS,SAAS,WAAW,OAAO;AAClD,SAAO,UAAU,KAAK,EAAE,KAAK,CAAC,WAAW;AACvC,QAAI,OAAO,MAAM;AACf,aAAO;AAAA,IACT;AACA,QAAI,YAAY;AAChB,aAAS,OAAO;AACd,kBAAY;AAAA,IACd;AACA,cAAU,QAAQ;AAAA,MAChB,QAAQ,MAAM,OAAO,OAAO,IAAI,IAAI,OAAO,MAAM;AAAA,IACnD;AACA,QAAI,WAAW;AACb,aAAO;AAAA,IACT;AACA,WAAO,OAAO,SAAS,SAAS,WAAW,KAAK;AAAA,EAClD,CAAC;AACH;;;AC5BA,IAAM,sBAAsB,OAAO,OAAO,UAAU;AAAA,EAClD;AACF,CAAC;;;ACJD,IAAM,sBAAsB;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;
AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;AC3NA,SAAS,qBAAqB,KAAK;AACjC,MAAI,OAAO,QAAQ,UAAU;AAC3B,WAAO,oBAAoB,SAAS,GAAG;AAAA,EACzC,OAAO;AACL,WAAO;AAAA,EACT;AACF;;;APFA,SAAS,aAAa,SAAS;AAC7B,SAAO;AAAA,IACL,UAAU,OAAO,OAAO,SAAS,KAAK,MAAM,OAAO,GAAG;AAAA,MACpD,UAAU,SAAS,KAAK,MAAM,OAAO;AAAA,IACvC,CAAC;AAAA,EACH;AACF;AACA,aAAa,UAAU;", + "names": [] +} diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/compose-paginate.js b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/compose-paginate.js new file mode 100644 index 0000000..9ea093a --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/compose-paginate.js @@ -0,0 +1,8 @@ +import { paginate } from "./paginate"; +import { iterator } from "./iterator"; +const composePaginateRest = Object.assign(paginate, { + iterator +}); +export { + composePaginateRest +}; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/generated/paginating-endpoints.js b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/generated/paginating-endpoints.js new file mode 100644 index 0000000..c981898 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/generated/paginating-endpoints.js @@ -0,0 +1,227 @@ +const paginatingEndpoints = [ + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /organizations/{org}/personal-access-token-requests", + "GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /organizations/{org}/personal-access-tokens", + "GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/required_workflows", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET 
/orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{org}/{repo}/actions/required_workflows", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + 
"GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET 
/user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET /users/{username}/starred", + "GET /users/{username}/subscriptions" +]; +export { + paginatingEndpoints +}; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/index.js b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/index.js new file mode 100644 index 0000000..6066dec --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/index.js @@ -0,0 +1,22 @@ +import { VERSION } from "./version"; +import { paginate } from "./paginate"; +import { iterator } from "./iterator"; +import { composePaginateRest } from "./compose-paginate"; +import { + isPaginatingEndpoint, + paginatingEndpoints +} from "./paginating-endpoints"; +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; +export { + composePaginateRest, + isPaginatingEndpoint, + paginateRest, + paginatingEndpoints +}; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/iterator.js b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/iterator.js new file mode 100644 index 0000000..2bd96f8 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/iterator.js @@ -0,0 +1,38 @@ +import { normalizePaginatedListResponse } from "./normalize-paginated-list-response"; +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? 
route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; + } catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } + }) + }; +} +export { + iterator +}; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/normalize-paginated-list-response.js b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/normalize-paginated-list-response.js new file mode 100644 index 0000000..c1dd744 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/normalize-paginated-list-response.js @@ -0,0 +1,31 @@ +function normalizePaginatedListResponse(response) { + if (!response.data) { + return { + ...response, + data: [] + }; + } + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) + return response; + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + response.data.total_count = totalCount; + return response; +} +export { + normalizePaginatedListResponse +}; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/paginate.js b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/paginate.js new file mode 100644 index 0000000..49d93c1 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/paginate.js @@ -0,0 +1,34 @@ +import { iterator } from "./iterator"; +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = void 0; + } + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); +} +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { + if (result.done) { + return results; + } + let earlyExit = false; + function done() { + earlyExit = true; + } + results = results.concat( + mapFn ? 
mapFn(result.value, done) : result.value.data + ); + if (earlyExit) { + return results; + } + return gather(octokit, results, iterator2, mapFn); + }); +} +export { + paginate +}; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/paginating-endpoints.js b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/paginating-endpoints.js new file mode 100644 index 0000000..312b32c --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/paginating-endpoints.js @@ -0,0 +1,15 @@ +import { + paginatingEndpoints +} from "./generated/paginating-endpoints"; +import { paginatingEndpoints as paginatingEndpoints2 } from "./generated/paginating-endpoints"; +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } +} +export { + isPaginatingEndpoint, + paginatingEndpoints2 as paginatingEndpoints +}; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/version.js b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/version.js new file mode 100644 index 0000000..e33ac3a --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "6.1.2"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/compose-paginate.d.ts b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/compose-paginate.d.ts new file mode 100644 index 0000000..6eaca43 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/compose-paginate.d.ts @@ -0,0 +1,2 @@ +import type { ComposePaginateInterface } from "./types"; +export declare const composePaginateRest: ComposePaginateInterface; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/generated/paginating-endpoints.d.ts b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/generated/paginating-endpoints.d.ts new file mode 100644 index 0000000..95f7ec1 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/generated/paginating-endpoints.d.ts @@ -0,0 +1,1670 @@ +import { Endpoints } from "@octokit/types"; +export interface PaginatingEndpoints { + /** + * @see https://docs.github.com/rest/reference/apps#list-deliveries-for-an-app-webhook + */ + "GET /app/hook/deliveries": { + parameters: Endpoints["GET /app/hook/deliveries"]["parameters"]; + response: Endpoints["GET /app/hook/deliveries"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-installation-requests-for-the-authenticated-app + */ + "GET /app/installation-requests": { + parameters: Endpoints["GET /app/installation-requests"]["parameters"]; + response: Endpoints["GET /app/installation-requests"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app + */ + "GET /app/installations": { + parameters: Endpoints["GET /app/installations"]["parameters"]; + response: Endpoints["GET /app/installations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/dependabot/alerts#list-dependabot-alerts-for-an-enterprise + */ + "GET /enterprises/{enterprise}/dependabot/alerts": { + parameters: Endpoints["GET /enterprises/{enterprise}/dependabot/alerts"]["parameters"]; + response: Endpoints["GET /enterprises/{enterprise}/dependabot/alerts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-an-enterprise + */ + "GET 
/enterprises/{enterprise}/secret-scanning/alerts": { + parameters: Endpoints["GET /enterprises/{enterprise}/secret-scanning/alerts"]["parameters"]; + response: Endpoints["GET /enterprises/{enterprise}/secret-scanning/alerts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events + */ + "GET /events": { + parameters: Endpoints["GET /events"]["parameters"]; + response: Endpoints["GET /events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-gists-for-the-authenticated-user + */ + "GET /gists": { + parameters: Endpoints["GET /gists"]["parameters"]; + response: Endpoints["GET /gists"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-public-gists + */ + "GET /gists/public": { + parameters: Endpoints["GET /gists/public"]["parameters"]; + response: Endpoints["GET /gists/public"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-starred-gists + */ + "GET /gists/starred": { + parameters: Endpoints["GET /gists/starred"]["parameters"]; + response: Endpoints["GET /gists/starred"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-comments + */ + "GET /gists/{gist_id}/comments": { + parameters: Endpoints["GET /gists/{gist_id}/comments"]["parameters"]; + response: Endpoints["GET /gists/{gist_id}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-commits + */ + "GET /gists/{gist_id}/commits": { + parameters: Endpoints["GET /gists/{gist_id}/commits"]["parameters"]; + response: Endpoints["GET /gists/{gist_id}/commits"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-forks + */ + "GET /gists/{gist_id}/forks": { + parameters: Endpoints["GET /gists/{gist_id}/forks"]["parameters"]; + response: Endpoints["GET /gists/{gist_id}/forks"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-repositories-accessible-to-the-app-installation + */ + "GET /installation/repositories": { + parameters: Endpoints["GET /installation/repositories"]["parameters"]; + response: Endpoints["GET /installation/repositories"]["response"] & { + data: Endpoints["GET /installation/repositories"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-issues-assigned-to-the-authenticated-user + */ + "GET /issues": { + parameters: Endpoints["GET /issues"]["parameters"]; + response: Endpoints["GET /issues"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/licenses#get-all-commonly-used-licenses + */ + "GET /licenses": { + parameters: Endpoints["GET /licenses"]["parameters"]; + response: Endpoints["GET /licenses"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-plans + */ + "GET /marketplace_listing/plans": { + parameters: Endpoints["GET /marketplace_listing/plans"]["parameters"]; + response: Endpoints["GET /marketplace_listing/plans"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-accounts-for-a-plan + */ + "GET /marketplace_listing/plans/{plan_id}/accounts": { + parameters: Endpoints["GET /marketplace_listing/plans/{plan_id}/accounts"]["parameters"]; + response: Endpoints["GET /marketplace_listing/plans/{plan_id}/accounts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-plans-stubbed + */ + "GET /marketplace_listing/stubbed/plans": { + parameters: Endpoints["GET 
/marketplace_listing/stubbed/plans"]["parameters"]; + response: Endpoints["GET /marketplace_listing/stubbed/plans"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-accounts-for-a-plan-stubbed + */ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts": { + parameters: Endpoints["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"]["parameters"]; + response: Endpoints["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-for-a-network-of-repositories + */ + "GET /networks/{owner}/{repo}/events": { + parameters: Endpoints["GET /networks/{owner}/{repo}/events"]["parameters"]; + response: Endpoints["GET /networks/{owner}/{repo}/events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user + */ + "GET /notifications": { + parameters: Endpoints["GET /notifications"]["parameters"]; + response: Endpoints["GET /notifications"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations + */ + "GET /organizations": { + parameters: Endpoints["GET /organizations"]["parameters"]; + response: Endpoints["GET /organizations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/orgs/orgs#list-requests-to-access-organization-resources-with-fine-grained-personal-access-tokens + */ + "GET /organizations/{org}/personal-access-token-requests": { + parameters: Endpoints["GET /organizations/{org}/personal-access-token-requests"]["parameters"]; + response: Endpoints["GET /organizations/{org}/personal-access-token-requests"]["response"]; + }; + /** + * @see https://docs.github.com/rest/orgs/orgs#list-repositories-requested-to-be-accessed-by-a-fine-grained-personal-access-token + */ + "GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories": { + parameters: Endpoints["GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories"]["parameters"]; + response: Endpoints["GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories"]["response"]; + }; + /** + * @see https://docs.github.com/rest/orgs/orgs#list-fine-grained-personal-access-tokens-with-access-to-organization-resources + */ + "GET /organizations/{org}/personal-access-tokens": { + parameters: Endpoints["GET /organizations/{org}/personal-access-tokens"]["parameters"]; + response: Endpoints["GET /organizations/{org}/personal-access-tokens"]["response"]; + }; + /** + * @see https://docs.github.com/rest/orgs/orgs#list-repositories-a-fine-grained-personal-access-token-has-access-to + */ + "GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories": { + parameters: Endpoints["GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories"]["parameters"]; + response: Endpoints["GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-repositories-with-github-actions-cache-usage-for-an-organization + */ + "GET /orgs/{org}/actions/cache/usage-by-repository": { + parameters: Endpoints["GET /orgs/{org}/actions/cache/usage-by-repository"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/cache/usage-by-repository"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/cache/usage-by-repository"]["response"]["data"]["repository_cache_usages"]; + }; + }; + /** + * @see 
https://docs.github.com/rest/reference/actions#list-selected-repositories-enabled-for-github-actions-in-an-organization + */ + "GET /orgs/{org}/actions/permissions/repositories": { + parameters: Endpoints["GET /orgs/{org}/actions/permissions/repositories"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/permissions/repositories"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/permissions/repositories"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-required-workflows + */ + "GET /orgs/{org}/actions/required_workflows": { + parameters: Endpoints["GET /orgs/{org}/actions/required_workflows"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/required_workflows"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/required_workflows"]["response"]["data"]["required_workflows"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-an-organization + */ + "GET /orgs/{org}/actions/runners": { + parameters: Endpoints["GET /orgs/{org}/actions/runners"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/runners"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/runners"]["response"]["data"]["runners"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-organization-secrets + */ + "GET /orgs/{org}/actions/secrets": { + parameters: Endpoints["GET /orgs/{org}/actions/secrets"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/secrets"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories": { + parameters: Endpoints["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/actions/variables#list-organization-variables + */ + "GET /orgs/{org}/actions/variables": { + parameters: Endpoints["GET /orgs/{org}/actions/variables"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/variables"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/variables"]["response"]["data"]["variables"]; + }; + }; + /** + * @see https://docs.github.com/rest/actions/variables#list-selected-repositories-for-an-organization-variable + */ + "GET /orgs/{org}/actions/variables/{name}/repositories": { + parameters: Endpoints["GET /orgs/{org}/actions/variables/{name}/repositories"]["parameters"]; + response: Endpoints["GET /orgs/{org}/actions/variables/{name}/repositories"]["response"] & { + data: Endpoints["GET /orgs/{org}/actions/variables/{name}/repositories"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-users-blocked-by-an-organization + */ + "GET /orgs/{org}/blocks": { + parameters: Endpoints["GET /orgs/{org}/blocks"]["parameters"]; + response: Endpoints["GET /orgs/{org}/blocks"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-for-an-organization + */ + "GET /orgs/{org}/code-scanning/alerts": { + parameters: Endpoints["GET 
/orgs/{org}/code-scanning/alerts"]["parameters"]; + response: Endpoints["GET /orgs/{org}/code-scanning/alerts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-in-organization + */ + "GET /orgs/{org}/codespaces": { + parameters: Endpoints["GET /orgs/{org}/codespaces"]["parameters"]; + response: Endpoints["GET /orgs/{org}/codespaces"]["response"] & { + data: Endpoints["GET /orgs/{org}/codespaces"]["response"]["data"]["codespaces"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-organization-secrets + */ + "GET /orgs/{org}/codespaces/secrets": { + parameters: Endpoints["GET /orgs/{org}/codespaces/secrets"]["parameters"]; + response: Endpoints["GET /orgs/{org}/codespaces/secrets"]["response"] & { + data: Endpoints["GET /orgs/{org}/codespaces/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories": { + parameters: Endpoints["GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories"]["parameters"]; + response: Endpoints["GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories"]["response"] & { + data: Endpoints["GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/dependabot/alerts#list-dependabot-alerts-for-an-organization + */ + "GET /orgs/{org}/dependabot/alerts": { + parameters: Endpoints["GET /orgs/{org}/dependabot/alerts"]["parameters"]; + response: Endpoints["GET /orgs/{org}/dependabot/alerts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-organization-secrets + */ + "GET /orgs/{org}/dependabot/secrets": { + parameters: Endpoints["GET /orgs/{org}/dependabot/secrets"]["parameters"]; + response: Endpoints["GET /orgs/{org}/dependabot/secrets"]["response"] & { + data: Endpoints["GET /orgs/{org}/dependabot/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories": { + parameters: Endpoints["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]["parameters"]; + response: Endpoints["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]["response"] & { + data: Endpoints["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-organization-events + */ + "GET /orgs/{org}/events": { + parameters: Endpoints["GET /orgs/{org}/events"]["parameters"]; + response: Endpoints["GET /orgs/{org}/events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-failed-organization-invitations + */ + "GET /orgs/{org}/failed_invitations": { + parameters: Endpoints["GET /orgs/{org}/failed_invitations"]["parameters"]; + response: Endpoints["GET /orgs/{org}/failed_invitations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-webhooks + */ + "GET /orgs/{org}/hooks": { + parameters: Endpoints["GET /orgs/{org}/hooks"]["parameters"]; + response: Endpoints["GET /orgs/{org}/hooks"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-deliveries-for-an-organization-webhook + */ + "GET 
/orgs/{org}/hooks/{hook_id}/deliveries": { + parameters: Endpoints["GET /orgs/{org}/hooks/{hook_id}/deliveries"]["parameters"]; + response: Endpoints["GET /orgs/{org}/hooks/{hook_id}/deliveries"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-app-installations-for-an-organization + */ + "GET /orgs/{org}/installations": { + parameters: Endpoints["GET /orgs/{org}/installations"]["parameters"]; + response: Endpoints["GET /orgs/{org}/installations"]["response"] & { + data: Endpoints["GET /orgs/{org}/installations"]["response"]["data"]["installations"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-pending-organization-invitations + */ + "GET /orgs/{org}/invitations": { + parameters: Endpoints["GET /orgs/{org}/invitations"]["parameters"]; + response: Endpoints["GET /orgs/{org}/invitations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-invitation-teams + */ + "GET /orgs/{org}/invitations/{invitation_id}/teams": { + parameters: Endpoints["GET /orgs/{org}/invitations/{invitation_id}/teams"]["parameters"]; + response: Endpoints["GET /orgs/{org}/invitations/{invitation_id}/teams"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-organization-issues-assigned-to-the-authenticated-user + */ + "GET /orgs/{org}/issues": { + parameters: Endpoints["GET /orgs/{org}/issues"]["parameters"]; + response: Endpoints["GET /orgs/{org}/issues"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-members + */ + "GET /orgs/{org}/members": { + parameters: Endpoints["GET /orgs/{org}/members"]["parameters"]; + response: Endpoints["GET /orgs/{org}/members"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-codespaces-for-user-in-org + */ + "GET /orgs/{org}/members/{username}/codespaces": { + parameters: Endpoints["GET /orgs/{org}/members/{username}/codespaces"]["parameters"]; + response: Endpoints["GET /orgs/{org}/members/{username}/codespaces"]["response"] & { + data: Endpoints["GET /orgs/{org}/members/{username}/codespaces"]["response"]["data"]["codespaces"]; + }; + }; + /** + * @see https://docs.github.com/rest/migrations/orgs#list-organization-migrations + */ + "GET /orgs/{org}/migrations": { + parameters: Endpoints["GET /orgs/{org}/migrations"]["parameters"]; + response: Endpoints["GET /orgs/{org}/migrations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/migrations/orgs#list-repositories-in-an-organization-migration + */ + "GET /orgs/{org}/migrations/{migration_id}/repositories": { + parameters: Endpoints["GET /orgs/{org}/migrations/{migration_id}/repositories"]["parameters"]; + response: Endpoints["GET /orgs/{org}/migrations/{migration_id}/repositories"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-outside-collaborators-for-an-organization + */ + "GET /orgs/{org}/outside_collaborators": { + parameters: Endpoints["GET /orgs/{org}/outside_collaborators"]["parameters"]; + response: Endpoints["GET /orgs/{org}/outside_collaborators"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-an-organization + */ + "GET /orgs/{org}/packages": { + parameters: Endpoints["GET /orgs/{org}/packages"]["parameters"]; + response: Endpoints["GET /orgs/{org}/packages"]["response"]; + }; + /** + * @see https://docs.github.com/rest/packages#get-all-package-versions-for-a-package-owned-by-an-organization + */ + "GET 
/orgs/{org}/packages/{package_type}/{package_name}/versions": { + parameters: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"]["parameters"]; + response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/projects#list-organization-projects + */ + "GET /orgs/{org}/projects": { + parameters: Endpoints["GET /orgs/{org}/projects"]["parameters"]; + response: Endpoints["GET /orgs/{org}/projects"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-public-organization-members + */ + "GET /orgs/{org}/public_members": { + parameters: Endpoints["GET /orgs/{org}/public_members"]["parameters"]; + response: Endpoints["GET /orgs/{org}/public_members"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-organization-repositories + */ + "GET /orgs/{org}/repos": { + parameters: Endpoints["GET /orgs/{org}/repos"]["parameters"]; + response: Endpoints["GET /orgs/{org}/repos"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-an-organization + */ + "GET /orgs/{org}/secret-scanning/alerts": { + parameters: Endpoints["GET /orgs/{org}/secret-scanning/alerts"]["parameters"]; + response: Endpoints["GET /orgs/{org}/secret-scanning/alerts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-teams + */ + "GET /orgs/{org}/teams": { + parameters: Endpoints["GET /orgs/{org}/teams"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussions + */ + "GET /orgs/{org}/teams/{team_slug}/discussions": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussion-comments + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-pending-team-invitations + */ + "GET /orgs/{org}/teams/{team_slug}/invitations": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/invitations"]["parameters"]; + response: 
Endpoints["GET /orgs/{org}/teams/{team_slug}/invitations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-members + */ + "GET /orgs/{org}/teams/{team_slug}/members": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/members"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/members"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-projects + */ + "GET /orgs/{org}/teams/{team_slug}/projects": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/projects"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/projects"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-repositories + */ + "GET /orgs/{org}/teams/{team_slug}/repos": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/repos"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/repos"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-child-teams + */ + "GET /orgs/{org}/teams/{team_slug}/teams": { + parameters: Endpoints["GET /orgs/{org}/teams/{team_slug}/teams"]["parameters"]; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/teams"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-cards + */ + "GET /projects/columns/{column_id}/cards": { + parameters: Endpoints["GET /projects/columns/{column_id}/cards"]["parameters"]; + response: Endpoints["GET /projects/columns/{column_id}/cards"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-collaborators + */ + "GET /projects/{project_id}/collaborators": { + parameters: Endpoints["GET /projects/{project_id}/collaborators"]["parameters"]; + response: Endpoints["GET /projects/{project_id}/collaborators"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-columns + */ + "GET /projects/{project_id}/columns": { + parameters: Endpoints["GET /projects/{project_id}/columns"]["parameters"]; + response: Endpoints["GET /projects/{project_id}/columns"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-required-workflows + */ + "GET /repos/{org}/{repo}/actions/required_workflows": { + parameters: Endpoints["GET /repos/{org}/{repo}/actions/required_workflows"]["parameters"]; + response: Endpoints["GET /repos/{org}/{repo}/actions/required_workflows"]["response"] & { + data: Endpoints["GET /repos/{org}/{repo}/actions/required_workflows"]["response"]["data"]["required_workflows"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-artifacts-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/artifacts": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts"]["response"]["data"]["artifacts"]; + }; + }; + /** + * @see https://docs.github.com/rest/actions/cache#list-github-actions-caches-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/caches": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/caches"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/caches"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/caches"]["response"]["data"]["actions_caches"]; + }; + }; + /** + * @see 
https://docs.github.com/rest/actions/secrets#list-repository-organization-secrets + */ + "GET /repos/{owner}/{repo}/actions/organization-secrets": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/organization-secrets"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/organization-secrets"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/organization-secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/actions/variables#list-repository-organization-variables + */ + "GET /repos/{owner}/{repo}/actions/organization-variables": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/organization-variables"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/organization-variables"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/organization-variables"]["response"]["data"]["variables"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-required-workflow-runs + */ + "GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs"]["response"]["data"]["workflow_runs"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/runners"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runners"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/runners"]["response"]["data"]["runners"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-runs-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runs": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/runs"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/runs"]["response"]["data"]["workflow_runs"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-run-artifacts + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"]["response"]["data"]["artifacts"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-jobs-for-a-workflow-run-attempt + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"]["response"]["data"]["jobs"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-jobs-for-a-workflow-run 
+ */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"]["response"]["data"]["jobs"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/actions/secrets": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/secrets"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/secrets"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/actions/variables#list-repository-variables + */ + "GET /repos/{owner}/{repo}/actions/variables": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/variables"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/variables"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/variables"]["response"]["data"]["variables"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-workflows + */ + "GET /repos/{owner}/{repo}/actions/workflows": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/workflows"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/workflows"]["response"]["data"]["workflows"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-runs + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs": { + parameters: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"]["response"]["data"]["workflow_runs"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-assignees + */ + "GET /repos/{owner}/{repo}/assignees": { + parameters: Endpoints["GET /repos/{owner}/{repo}/assignees"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/assignees"]["response"]; + }; + /** + * @see https://docs.github.com/rest/branches/branches#list-branches + */ + "GET /repos/{owner}/{repo}/branches": { + parameters: Endpoints["GET /repos/{owner}/{repo}/branches"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/branches"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-run-annotations + */ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations": { + parameters: Endpoints["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-runs-in-a-check-suite + */ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs": { + parameters: Endpoints["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"]["response"] & { + data: Endpoints["GET 
/repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"]["response"]["data"]["check_runs"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts": { + parameters: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-instances-of-a-code-scanning-alert + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances": { + parameters: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-analyses-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/analyses": { + parameters: Endpoints["GET /repos/{owner}/{repo}/code-scanning/analyses"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/analyses"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-codespaces-in-a-repository-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/codespaces": { + parameters: Endpoints["GET /repos/{owner}/{repo}/codespaces"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/codespaces"]["response"]["data"]["codespaces"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-devcontainers-in-a-repository-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/codespaces/devcontainers": { + parameters: Endpoints["GET /repos/{owner}/{repo}/codespaces/devcontainers"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/devcontainers"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/codespaces/devcontainers"]["response"]["data"]["devcontainers"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/codespaces/secrets": { + parameters: Endpoints["GET /repos/{owner}/{repo}/codespaces/secrets"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/secrets"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/codespaces/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#list-repository-collaborators + */ + "GET /repos/{owner}/{repo}/collaborators": { + parameters: Endpoints["GET /repos/{owner}/{repo}/collaborators"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/collaborators"]["response"]; + }; + /** + * @see https://docs.github.com/rest/commits/comments#list-commit-comments-for-a-repository + */ + "GET /repos/{owner}/{repo}/comments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-commit-comment + */ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions": { + parameters: Endpoints["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"]["parameters"]; + response: Endpoints["GET 
/repos/{owner}/{repo}/comments/{comment_id}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/commits/commits#list-commits + */ + "GET /repos/{owner}/{repo}/commits": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits"]["response"]; + }; + /** + * @see https://docs.github.com/rest/commits/comments#list-commit-comments + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/commits/commits#list-pull-requests-associated-with-a-commit + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-runs-for-a-git-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"]["response"]["data"]["check_runs"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-suites-for-a-git-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"]["response"]["data"]["check_suites"]; + }; + }; + /** + * @see https://docs.github.com/rest/commits/statuses#get-the-combined-status-for-a-specific-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/status": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/status"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/status"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/status"]["response"]["data"]["statuses"]; + }; + }; + /** + * @see https://docs.github.com/rest/commits/statuses#list-commit-statuses-for-a-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses": { + parameters: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/statuses"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/statuses"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-contributors + */ + "GET /repos/{owner}/{repo}/contributors": { + parameters: Endpoints["GET /repos/{owner}/{repo}/contributors"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/contributors"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-dependabot-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/dependabot/alerts": { + parameters: Endpoints["GET /repos/{owner}/{repo}/dependabot/alerts"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/dependabot/alerts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-repository-secrets + */ + "GET 
/repos/{owner}/{repo}/dependabot/secrets": { + parameters: Endpoints["GET /repos/{owner}/{repo}/dependabot/secrets"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/dependabot/secrets"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/dependabot/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/deployments/deployments#list-deployments + */ + "GET /repos/{owner}/{repo}/deployments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/deployments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/deployments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/deployments/statuses#list-deployment-statuses + */ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses": { + parameters: Endpoints["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"]["response"]; + }; + /** + * @see https://docs.github.com/rest/deployments/environments#list-environments + */ + "GET /repos/{owner}/{repo}/environments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/environments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/environments"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/environments"]["response"]["data"]["environments"]; + }; + }; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#list-deployment-branch-policies + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies": { + parameters: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"]["response"]["data"]["branch_policies"]; + }; + }; + /** + * @see https://docs.github.com/rest/deployments/protection-rules#list-custom-deployment-rule-integrations + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps": { + parameters: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps"]["response"]["data"]["available_custom_deployment_protection_rule_integrations"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-repository-events + */ + "GET /repos/{owner}/{repo}/events": { + parameters: Endpoints["GET /repos/{owner}/{repo}/events"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-forks + */ + "GET /repos/{owner}/{repo}/forks": { + parameters: Endpoints["GET /repos/{owner}/{repo}/forks"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/forks"]["response"]; + }; + /** + * @see https://docs.github.com/rest/webhooks/repos#list-repository-webhooks + */ + "GET /repos/{owner}/{repo}/hooks": { + parameters: Endpoints["GET /repos/{owner}/{repo}/hooks"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/hooks"]["response"]; + }; + /** + * @see 
https://docs.github.com/rest/webhooks/repo-deliveries#list-deliveries-for-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries": { + parameters: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"]["response"]; + }; + /** + * @see https://docs.github.com/rest/collaborators/invitations#list-repository-invitations + */ + "GET /repos/{owner}/{repo}/invitations": { + parameters: Endpoints["GET /repos/{owner}/{repo}/invitations"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/invitations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-repository-issues + */ + "GET /repos/{owner}/{repo}/issues": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-comments-for-a-repository + */ + "GET /repos/{owner}/{repo}/issues/comments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue-comment + */ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-events-for-a-repository + */ + "GET /repos/{owner}/{repo}/issues/events": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/events"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-comments + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-events + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/events": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/events"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions": { + parameters: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-timeline-events-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline": { + parameters: 
Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"]["response"]; + }; + /** + * @see https://docs.github.com/rest/deploy-keys#list-deploy-keys + */ + "GET /repos/{owner}/{repo}/keys": { + parameters: Endpoints["GET /repos/{owner}/{repo}/keys"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/keys"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-a-repository + */ + "GET /repos/{owner}/{repo}/labels": { + parameters: Endpoints["GET /repos/{owner}/{repo}/labels"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/labels"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-milestones + */ + "GET /repos/{owner}/{repo}/milestones": { + parameters: Endpoints["GET /repos/{owner}/{repo}/milestones"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/milestones"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-issues-in-a-milestone + */ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels": { + parameters: Endpoints["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/notifications": { + parameters: Endpoints["GET /repos/{owner}/{repo}/notifications"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/notifications"]["response"]; + }; + /** + * @see https://docs.github.com/rest/pages#list-github-pages-builds + */ + "GET /repos/{owner}/{repo}/pages/builds": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pages/builds"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pages/builds"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/projects#list-repository-projects + */ + "GET /repos/{owner}/{repo}/projects": { + parameters: Endpoints["GET /repos/{owner}/{repo}/projects"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/projects"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests + */ + "GET /repos/{owner}/{repo}/pulls": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-review-comments-in-a-repository + */ + "GET /repos/{owner}/{repo}/pulls/comments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-pull-request-review-comment + */ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-review-comments-on-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments": { + parameters: Endpoints["GET 
/repos/{owner}/{repo}/pulls/{pull_number}/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-commits-on-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests-files + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-reviews-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/pulls#list-comments-for-a-pull-request-review + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments": { + parameters: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-releases + */ + "GET /repos/{owner}/{repo}/releases": { + parameters: Endpoints["GET /repos/{owner}/{repo}/releases"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/releases"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-release-assets + */ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets": { + parameters: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}/assets"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}/assets"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-release + */ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions": { + parameters: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts": { + parameters: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-locations-for-a-secret-scanning-alert + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations": { + parameters: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/security-advisories/repository-advisories#list-repository-security-advisories + */ + "GET 
/repos/{owner}/{repo}/security-advisories": { + parameters: Endpoints["GET /repos/{owner}/{repo}/security-advisories"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/security-advisories"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-stargazers + */ + "GET /repos/{owner}/{repo}/stargazers": { + parameters: Endpoints["GET /repos/{owner}/{repo}/stargazers"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/stargazers"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-watchers + */ + "GET /repos/{owner}/{repo}/subscribers": { + parameters: Endpoints["GET /repos/{owner}/{repo}/subscribers"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/subscribers"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-tags + */ + "GET /repos/{owner}/{repo}/tags": { + parameters: Endpoints["GET /repos/{owner}/{repo}/tags"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/tags"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-teams + */ + "GET /repos/{owner}/{repo}/teams": { + parameters: Endpoints["GET /repos/{owner}/{repo}/teams"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/teams"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#get-all-repository-topics + */ + "GET /repos/{owner}/{repo}/topics": { + parameters: Endpoints["GET /repos/{owner}/{repo}/topics"]["parameters"]; + response: Endpoints["GET /repos/{owner}/{repo}/topics"]["response"] & { + data: Endpoints["GET /repos/{owner}/{repo}/topics"]["response"]["data"]["names"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-public-repositories + */ + "GET /repositories": { + parameters: Endpoints["GET /repositories"]["parameters"]; + response: Endpoints["GET /repositories"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/actions#list-environment-secrets + */ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets": { + parameters: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets"]["parameters"]; + response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets"]["response"] & { + data: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/actions/variables#list-environment-variables + */ + "GET /repositories/{repository_id}/environments/{environment_name}/variables": { + parameters: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/variables"]["parameters"]; + response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/variables"]["response"] & { + data: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/variables"]["response"]["data"]["variables"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-code + */ + "GET /search/code": { + parameters: Endpoints["GET /search/code"]["parameters"]; + response: Endpoints["GET /search/code"]["response"] & { + data: Endpoints["GET /search/code"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-commits + */ + "GET /search/commits": { + parameters: Endpoints["GET /search/commits"]["parameters"]; + response: Endpoints["GET 
/search/commits"]["response"] & { + data: Endpoints["GET /search/commits"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-issues-and-pull-requests + */ + "GET /search/issues": { + parameters: Endpoints["GET /search/issues"]["parameters"]; + response: Endpoints["GET /search/issues"]["response"] & { + data: Endpoints["GET /search/issues"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-labels + */ + "GET /search/labels": { + parameters: Endpoints["GET /search/labels"]["parameters"]; + response: Endpoints["GET /search/labels"]["response"] & { + data: Endpoints["GET /search/labels"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-repositories + */ + "GET /search/repositories": { + parameters: Endpoints["GET /search/repositories"]["parameters"]; + response: Endpoints["GET /search/repositories"]["response"] & { + data: Endpoints["GET /search/repositories"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-topics + */ + "GET /search/topics": { + parameters: Endpoints["GET /search/topics"]["parameters"]; + response: Endpoints["GET /search/topics"]["response"] & { + data: Endpoints["GET /search/topics"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/search#search-users + */ + "GET /search/users": { + parameters: Endpoints["GET /search/users"]["parameters"]; + response: Endpoints["GET /search/users"]["response"] & { + data: Endpoints["GET /search/users"]["response"]["data"]["items"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussions-legacy + */ + "GET /teams/{team_id}/discussions": { + parameters: Endpoints["GET /teams/{team_id}/discussions"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/discussions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussion-comments-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments": { + parameters: Endpoints["GET /teams/{team_id}/discussions/{discussion_number}/comments"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/discussions/{discussion_number}/comments"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion-comment-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": { + parameters: Endpoints["GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/reactions": { + parameters: Endpoints["GET /teams/{team_id}/discussions/{discussion_number}/reactions"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/discussions/{discussion_number}/reactions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-pending-team-invitations-legacy + */ + "GET /teams/{team_id}/invitations": { + parameters: Endpoints["GET /teams/{team_id}/invitations"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/invitations"]["response"]; + }; + /** + * @see 
https://docs.github.com/rest/reference/teams#list-team-members-legacy + */ + "GET /teams/{team_id}/members": { + parameters: Endpoints["GET /teams/{team_id}/members"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/members"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams/#list-team-projects-legacy + */ + "GET /teams/{team_id}/projects": { + parameters: Endpoints["GET /teams/{team_id}/projects"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/projects"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams/#list-team-repositories-legacy + */ + "GET /teams/{team_id}/repos": { + parameters: Endpoints["GET /teams/{team_id}/repos"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/repos"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams/#list-child-teams-legacy + */ + "GET /teams/{team_id}/teams": { + parameters: Endpoints["GET /teams/{team_id}/teams"]["parameters"]; + response: Endpoints["GET /teams/{team_id}/teams"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-users-blocked-by-the-authenticated-user + */ + "GET /user/blocks": { + parameters: Endpoints["GET /user/blocks"]["parameters"]; + response: Endpoints["GET /user/blocks"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-codespaces-for-the-authenticated-user + */ + "GET /user/codespaces": { + parameters: Endpoints["GET /user/codespaces"]["parameters"]; + response: Endpoints["GET /user/codespaces"]["response"] & { + data: Endpoints["GET /user/codespaces"]["response"]["data"]["codespaces"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-secrets-for-the-authenticated-user + */ + "GET /user/codespaces/secrets": { + parameters: Endpoints["GET /user/codespaces/secrets"]["parameters"]; + response: Endpoints["GET /user/codespaces/secrets"]["response"] & { + data: Endpoints["GET /user/codespaces/secrets"]["response"]["data"]["secrets"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-email-addresses-for-the-authenticated-user + */ + "GET /user/emails": { + parameters: Endpoints["GET /user/emails"]["parameters"]; + response: Endpoints["GET /user/emails"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-followers-of-the-authenticated-user + */ + "GET /user/followers": { + parameters: Endpoints["GET /user/followers"]["parameters"]; + response: Endpoints["GET /user/followers"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-the-people-the-authenticated-user-follows + */ + "GET /user/following": { + parameters: Endpoints["GET /user/following"]["parameters"]; + response: Endpoints["GET /user/following"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-gpg-keys-for-the-authenticated-user + */ + "GET /user/gpg_keys": { + parameters: Endpoints["GET /user/gpg_keys"]["parameters"]; + response: Endpoints["GET /user/gpg_keys"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-app-installations-accessible-to-the-user-access-token + */ + "GET /user/installations": { + parameters: Endpoints["GET /user/installations"]["parameters"]; + response: Endpoints["GET /user/installations"]["response"] & { + data: Endpoints["GET /user/installations"]["response"]["data"]["installations"]; + }; + }; + /** + * @see 
https://docs.github.com/rest/reference/apps#list-repositories-accessible-to-the-user-access-token + */ + "GET /user/installations/{installation_id}/repositories": { + parameters: Endpoints["GET /user/installations/{installation_id}/repositories"]["parameters"]; + response: Endpoints["GET /user/installations/{installation_id}/repositories"]["response"] & { + data: Endpoints["GET /user/installations/{installation_id}/repositories"]["response"]["data"]["repositories"]; + }; + }; + /** + * @see https://docs.github.com/rest/reference/issues#list-user-account-issues-assigned-to-the-authenticated-user + */ + "GET /user/issues": { + parameters: Endpoints["GET /user/issues"]["parameters"]; + response: Endpoints["GET /user/issues"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-public-ssh-keys-for-the-authenticated-user + */ + "GET /user/keys": { + parameters: Endpoints["GET /user/keys"]["parameters"]; + response: Endpoints["GET /user/keys"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-subscriptions-for-the-authenticated-user + */ + "GET /user/marketplace_purchases": { + parameters: Endpoints["GET /user/marketplace_purchases"]["parameters"]; + response: Endpoints["GET /user/marketplace_purchases"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/apps#list-subscriptions-for-the-authenticated-user-stubbed + */ + "GET /user/marketplace_purchases/stubbed": { + parameters: Endpoints["GET /user/marketplace_purchases/stubbed"]["parameters"]; + response: Endpoints["GET /user/marketplace_purchases/stubbed"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-memberships-for-the-authenticated-user + */ + "GET /user/memberships/orgs": { + parameters: Endpoints["GET /user/memberships/orgs"]["parameters"]; + response: Endpoints["GET /user/memberships/orgs"]["response"]; + }; + /** + * @see https://docs.github.com/rest/migrations/users#list-user-migrations + */ + "GET /user/migrations": { + parameters: Endpoints["GET /user/migrations"]["parameters"]; + response: Endpoints["GET /user/migrations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/migrations/users#list-repositories-for-a-user-migration + */ + "GET /user/migrations/{migration_id}/repositories": { + parameters: Endpoints["GET /user/migrations/{migration_id}/repositories"]["parameters"]; + response: Endpoints["GET /user/migrations/{migration_id}/repositories"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user + */ + "GET /user/orgs": { + parameters: Endpoints["GET /user/orgs"]["parameters"]; + response: Endpoints["GET /user/orgs"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-the-authenticated-user + */ + "GET /user/packages": { + parameters: Endpoints["GET /user/packages"]["parameters"]; + response: Endpoints["GET /user/packages"]["response"]; + }; + /** + * @see https://docs.github.com/rest/packages#get-all-package-versions-for-a-package-owned-by-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}/versions": { + parameters: Endpoints["GET /user/packages/{package_type}/{package_name}/versions"]["parameters"]; + response: Endpoints["GET /user/packages/{package_type}/{package_name}/versions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-public-email-addresses-for-the-authenticated-user + */ + "GET 
/user/public_emails": { + parameters: Endpoints["GET /user/public_emails"]["parameters"]; + response: Endpoints["GET /user/public_emails"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-the-authenticated-user + */ + "GET /user/repos": { + parameters: Endpoints["GET /user/repos"]["parameters"]; + response: Endpoints["GET /user/repos"]["response"]; + }; + /** + * @see https://docs.github.com/rest/collaborators/invitations#list-repository-invitations-for-the-authenticated-user + */ + "GET /user/repository_invitations": { + parameters: Endpoints["GET /user/repository_invitations"]["parameters"]; + response: Endpoints["GET /user/repository_invitations"]["response"]; + }; + /** + * @see https://docs.github.com/rest/users/social-accounts#list-social-accounts-for-the-authenticated-user + */ + "GET /user/social_accounts": { + parameters: Endpoints["GET /user/social_accounts"]["parameters"]; + response: Endpoints["GET /user/social_accounts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-public-ssh-signing-keys-for-the-authenticated-user + */ + "GET /user/ssh_signing_keys": { + parameters: Endpoints["GET /user/ssh_signing_keys"]["parameters"]; + response: Endpoints["GET /user/ssh_signing_keys"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-starred-by-the-authenticated-user + */ + "GET /user/starred": { + parameters: Endpoints["GET /user/starred"]["parameters"]; + response: Endpoints["GET /user/starred"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-watched-by-the-authenticated-user + */ + "GET /user/subscriptions": { + parameters: Endpoints["GET /user/subscriptions"]["parameters"]; + response: Endpoints["GET /user/subscriptions"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/teams#list-teams-for-the-authenticated-user + */ + "GET /user/teams": { + parameters: Endpoints["GET /user/teams"]["parameters"]; + response: Endpoints["GET /user/teams"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-users + */ + "GET /users": { + parameters: Endpoints["GET /users"]["parameters"]; + response: Endpoints["GET /users"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-events-for-the-authenticated-user + */ + "GET /users/{username}/events": { + parameters: Endpoints["GET /users/{username}/events"]["parameters"]; + response: Endpoints["GET /users/{username}/events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-organization-events-for-the-authenticated-user + */ + "GET /users/{username}/events/orgs/{org}": { + parameters: Endpoints["GET /users/{username}/events/orgs/{org}"]["parameters"]; + response: Endpoints["GET /users/{username}/events/orgs/{org}"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-for-a-user + */ + "GET /users/{username}/events/public": { + parameters: Endpoints["GET /users/{username}/events/public"]["parameters"]; + response: Endpoints["GET /users/{username}/events/public"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-followers-of-a-user + */ + "GET /users/{username}/followers": { + parameters: Endpoints["GET /users/{username}/followers"]["parameters"]; + response: Endpoints["GET /users/{username}/followers"]["response"]; + }; + /** + * @see 
https://docs.github.com/rest/reference/users#list-the-people-a-user-follows + */ + "GET /users/{username}/following": { + parameters: Endpoints["GET /users/{username}/following"]["parameters"]; + response: Endpoints["GET /users/{username}/following"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/gists#list-gists-for-a-user + */ + "GET /users/{username}/gists": { + parameters: Endpoints["GET /users/{username}/gists"]["parameters"]; + response: Endpoints["GET /users/{username}/gists"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-gpg-keys-for-a-user + */ + "GET /users/{username}/gpg_keys": { + parameters: Endpoints["GET /users/{username}/gpg_keys"]["parameters"]; + response: Endpoints["GET /users/{username}/gpg_keys"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-public-keys-for-a-user + */ + "GET /users/{username}/keys": { + parameters: Endpoints["GET /users/{username}/keys"]["parameters"]; + response: Endpoints["GET /users/{username}/keys"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-a-user + */ + "GET /users/{username}/orgs": { + parameters: Endpoints["GET /users/{username}/orgs"]["parameters"]; + response: Endpoints["GET /users/{username}/orgs"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-user + */ + "GET /users/{username}/packages": { + parameters: Endpoints["GET /users/{username}/packages"]["parameters"]; + response: Endpoints["GET /users/{username}/packages"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/projects#list-user-projects + */ + "GET /users/{username}/projects": { + parameters: Endpoints["GET /users/{username}/projects"]["parameters"]; + response: Endpoints["GET /users/{username}/projects"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-events-received-by-the-authenticated-user + */ + "GET /users/{username}/received_events": { + parameters: Endpoints["GET /users/{username}/received_events"]["parameters"]; + response: Endpoints["GET /users/{username}/received_events"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-received-by-a-user + */ + "GET /users/{username}/received_events/public": { + parameters: Endpoints["GET /users/{username}/received_events/public"]["parameters"]; + response: Endpoints["GET /users/{username}/received_events/public"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-a-user + */ + "GET /users/{username}/repos": { + parameters: Endpoints["GET /users/{username}/repos"]["parameters"]; + response: Endpoints["GET /users/{username}/repos"]["response"]; + }; + /** + * @see https://docs.github.com/rest/users/social-accounts#list-social-accounts-for-a-user + */ + "GET /users/{username}/social_accounts": { + parameters: Endpoints["GET /users/{username}/social_accounts"]["parameters"]; + response: Endpoints["GET /users/{username}/social_accounts"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/users#list-ssh-signing-keys-for-a-user + */ + "GET /users/{username}/ssh_signing_keys": { + parameters: Endpoints["GET /users/{username}/ssh_signing_keys"]["parameters"]; + response: Endpoints["GET /users/{username}/ssh_signing_keys"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-starred-by-a-user + */ + "GET 
/users/{username}/starred": { + parameters: Endpoints["GET /users/{username}/starred"]["parameters"]; + response: Endpoints["GET /users/{username}/starred"]["response"]; + }; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-watched-by-a-user + */ + "GET /users/{username}/subscriptions": { + parameters: Endpoints["GET /users/{username}/subscriptions"]["parameters"]; + response: Endpoints["GET /users/{username}/subscriptions"]["response"]; + }; +} +export declare const paginatingEndpoints: (keyof PaginatingEndpoints)[]; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/index.d.ts b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/index.d.ts new file mode 100644 index 0000000..f5dde53 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/index.d.ts @@ -0,0 +1,15 @@ +import type { Octokit } from "@octokit/core"; +import type { PaginateInterface } from "./types"; +export type { PaginateInterface, PaginatingEndpoints } from "./types"; +export { composePaginateRest } from "./compose-paginate"; +export { isPaginatingEndpoint, paginatingEndpoints, } from "./paginating-endpoints"; +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ +export declare function paginateRest(octokit: Octokit): { + paginate: PaginateInterface; +}; +export declare namespace paginateRest { + var VERSION: string; +} diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/iterator.d.ts b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/iterator.d.ts new file mode 100644 index 0000000..66ef5af --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/iterator.d.ts @@ -0,0 +1,20 @@ +import type { Octokit } from "@octokit/core"; +import type { RequestInterface, RequestParameters, Route } from "./types"; +export declare function iterator(octokit: Octokit, route: Route | RequestInterface, parameters?: RequestParameters): { + [Symbol.asyncIterator]: () => { + next(): Promise<{ + done: boolean; + value?: undefined; + } | { + value: import("@octokit/types/dist-types/OctokitResponse").OctokitResponse; + done?: undefined; + } | { + value: { + status: number; + headers: {}; + data: never[]; + }; + done?: undefined; + }>; + }; +}; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/normalize-paginated-list-response.d.ts b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/normalize-paginated-list-response.d.ts new file mode 100644 index 0000000..925aa1a --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/normalize-paginated-list-response.d.ts @@ -0,0 +1,18 @@ +/** + * Some “list” response that can be paginated have a different response structure + * + * They have a `total_count` key in the response (search also has `incomplete_results`, + * /installation/repositories also has `repository_selection`), as well as a key with + * the list of the items which name varies from endpoint to endpoint. + * + * Octokit normalizes these responses so that paginated results are always returned following + * the same structure. One challenge is that if the list response has only one page, no Link + * header is provided, so this header alone is not sufficient to check wether a response is + * paginated or not. 
+ * + * We check if a "total_count" key is present in the response data, but also make sure that + * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would + * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + */ +import type { OctokitResponse } from "./types"; +export declare function normalizePaginatedListResponse(response: OctokitResponse): OctokitResponse; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/paginate.d.ts b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/paginate.d.ts new file mode 100644 index 0000000..7ccd3de --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/paginate.d.ts @@ -0,0 +1,3 @@ +import type { Octokit } from "@octokit/core"; +import type { MapFunction, PaginationResults, RequestParameters, Route, RequestInterface } from "./types"; +export declare function paginate(octokit: Octokit, route: Route | RequestInterface, parameters?: RequestParameters, mapFn?: MapFunction): Promise; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/paginating-endpoints.d.ts b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/paginating-endpoints.d.ts new file mode 100644 index 0000000..f6a4d7b --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/paginating-endpoints.d.ts @@ -0,0 +1,3 @@ +import { PaginatingEndpoints } from "./generated/paginating-endpoints"; +export { paginatingEndpoints } from "./generated/paginating-endpoints"; +export declare function isPaginatingEndpoint(arg: unknown): arg is keyof PaginatingEndpoints; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/types.d.ts b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/types.d.ts new file mode 100644 index 0000000..536c267 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/types.d.ts @@ -0,0 +1,242 @@ +import { Octokit } from "@octokit/core"; +import * as OctokitTypes from "@octokit/types"; +export { EndpointOptions, RequestInterface, OctokitResponse, RequestParameters, Route, } from "@octokit/types"; +export { PaginatingEndpoints } from "./generated/paginating-endpoints"; +import { PaginatingEndpoints } from "./generated/paginating-endpoints"; +type KnownKeys = Extract<{ + [K in keyof T]: string extends K ? never : number extends K ? never : K; +} extends { + [_ in keyof T]: infer U; +} ? U : never, keyof T>; +type KeysMatching = { + [K in keyof T]: T[K] extends V ? K : never; +}[keyof T]; +type KnownKeysMatching = KeysMatching>, V>; +type GetResultsType = T extends { + data: any[]; +} ? T["data"] : T extends { + data: object; +} ? T["data"][KnownKeysMatching] : never; +type NormalizeResponse = T & { + data: GetResultsType; +}; +type DataType = "data" extends keyof T ? T["data"] : unknown; +export interface MapFunction>, M = unknown[]> { + (response: T, done: () => void): M; +} +export type PaginationResults = T[]; +export interface PaginateInterface { + /** + * Paginate a request using endpoint options and map each response to a custom array + * + * @param {object} options Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. 
+ * @param {function} mapFn Optional method to map each response to a custom array + */ + (options: OctokitTypes.EndpointOptions, mapFn: MapFunction>, M[]>): Promise>; + /** + * Paginate a request using endpoint options + * + * @param {object} options Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (options: OctokitTypes.EndpointOptions): Promise>; + /** + * Paginate a request using a known endpoint route string and map each response to a custom array + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {function} mapFn Optional method to map each response to a custom array + */ + (route: R, mapFn: MapFunction): Promise; + /** + * Paginate a request using a known endpoint route string and parameters, and map each response to a custom array + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} parameters URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + * @param {function} mapFn Optional method to map each response to a custom array + */ + (route: R, parameters: PaginatingEndpoints[R]["parameters"], mapFn: MapFunction): Promise; + /** + * Paginate a request using a known endpoint route string + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} parameters? URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (route: R, parameters?: PaginatingEndpoints[R]["parameters"]): Promise>; + /** + * Paginate a request using an unknown endpoint route string + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} parameters? URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (route: R, parameters?: R extends keyof PaginatingEndpoints ? PaginatingEndpoints[R]["parameters"] : OctokitTypes.RequestParameters): Promise; + /** + * Paginate a request using an endpoint method and a map function + * + * @param {string} request Request method (`octokit.request` or `@octokit/request`) + * @param {function} mapFn? Optional method to map each response to a custom array + */ + (request: R, mapFn: MapFunction>, M>): Promise; + /** + * Paginate a request using an endpoint method, parameters, and a map function + * + * @param {string} request Request method (`octokit.request` or `@octokit/request`) + * @param {object} parameters URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + * @param {function} mapFn? Optional method to map each response to a custom array + */ + (request: R, parameters: Parameters[0], mapFn: MapFunction>, M>): Promise; + /** + * Paginate a request using an endpoint method and parameters + * + * @param {string} request Request method (`octokit.request` or `@octokit/request`) + * @param {object} parameters? URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (request: R, parameters?: Parameters[0]): Promise>["data"]>; + iterator: { + /** + * Get an async iterator to paginate a request using endpoint options + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * @param {object} options Must set `method` and `url`. 
Plus URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (options: OctokitTypes.EndpointOptions): AsyncIterableIterator>>; + /** + * Get an async iterator to paginate a request using a known endpoint route string and optional parameters + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (route: R, parameters?: PaginatingEndpoints[R]["parameters"]): AsyncIterableIterator>>; + /** + * Get an async iterator to paginate a request using an unknown endpoint route string and optional parameters + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (route: R, parameters?: R extends keyof PaginatingEndpoints ? PaginatingEndpoints[R]["parameters"] : OctokitTypes.RequestParameters): AsyncIterableIterator>>; + /** + * Get an async iterator to paginate a request using a request method and optional parameters + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * @param {string} request `@octokit/request` or `octokit.request` method + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (request: R, parameters?: Parameters[0]): AsyncIterableIterator>>; + }; +} +export interface ComposePaginateInterface { + /** + * Paginate a request using endpoint options and map each response to a custom array + * + * @param {object} octokit Octokit instance + * @param {object} options Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + * @param {function} mapFn Optional method to map each response to a custom array + */ + (octokit: Octokit, options: OctokitTypes.EndpointOptions, mapFn: MapFunction>, M[]>): Promise>; + /** + * Paginate a request using endpoint options + * + * @param {object} octokit Octokit instance + * @param {object} options Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (octokit: Octokit, options: OctokitTypes.EndpointOptions): Promise>; + /** + * Paginate a request using a known endpoint route string and map each response to a custom array + * + * @param {object} octokit Octokit instance + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {function} mapFn Optional method to map each response to a custom array + */ + (octokit: Octokit, route: R, mapFn: MapFunction): Promise; + /** + * Paginate a request using a known endpoint route string and parameters, and map each response to a custom array + * + * @param {object} octokit Octokit instance + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} parameters URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. 
+ * @param {function} mapFn Optional method to map each response to a custom array + */ + (octokit: Octokit, route: R, parameters: PaginatingEndpoints[R]["parameters"], mapFn: MapFunction): Promise; + /** + * Paginate a request using a known endpoint route string + * + * @param {object} octokit Octokit instance + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} parameters? URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (octokit: Octokit, route: R, parameters?: PaginatingEndpoints[R]["parameters"]): Promise>; + /** + * Paginate a request using an unknown endpoint route string + * + * @param {object} octokit Octokit instance + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} parameters? URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (octokit: Octokit, route: R, parameters?: R extends keyof PaginatingEndpoints ? PaginatingEndpoints[R]["parameters"] : OctokitTypes.RequestParameters): Promise; + /** + * Paginate a request using an endpoint method and a map function + * + * @param {object} octokit Octokit instance + * @param {string} request Request method (`octokit.request` or `@octokit/request`) + * @param {function} mapFn? Optional method to map each response to a custom array + */ + (octokit: Octokit, request: R, mapFn: MapFunction>, M>): Promise; + /** + * Paginate a request using an endpoint method, parameters, and a map function + * + * @param {object} octokit Octokit instance + * @param {string} request Request method (`octokit.request` or `@octokit/request`) + * @param {object} parameters URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + * @param {function} mapFn? Optional method to map each response to a custom array + */ + (octokit: Octokit, request: R, parameters: Parameters[0], mapFn: MapFunction>, M>): Promise; + /** + * Paginate a request using an endpoint method and parameters + * + * @param {object} octokit Octokit instance + * @param {string} request Request method (`octokit.request` or `@octokit/request`) + * @param {object} parameters? URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (octokit: Octokit, request: R, parameters?: Parameters[0]): Promise>["data"]>; + iterator: { + /** + * Get an async iterator to paginate a request using endpoint options + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * + * @param {object} octokit Octokit instance + * @param {object} options Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (octokit: Octokit, options: OctokitTypes.EndpointOptions): AsyncIterableIterator>>; + /** + * Get an async iterator to paginate a request using a known endpoint route string and optional parameters + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * + * @param {object} octokit Octokit instance + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. 
+ */ + (octokit: Octokit, route: R, parameters?: PaginatingEndpoints[R]["parameters"]): AsyncIterableIterator>>; + /** + * Get an async iterator to paginate a request using an unknown endpoint route string and optional parameters + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * + * @param {object} octokit Octokit instance + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (octokit: Octokit, route: R, parameters?: R extends keyof PaginatingEndpoints ? PaginatingEndpoints[R]["parameters"] : OctokitTypes.RequestParameters): AsyncIterableIterator>>; + /** + * Get an async iterator to paginate a request using a request method and optional parameters + * + * @see {link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of} for await...of + * + * @param {object} octokit Octokit instance + * @param {string} request `@octokit/request` or `octokit.request` method + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.format`, `request`, or `baseUrl`. + */ + (octokit: Octokit, request: R, parameters?: Parameters[0]): AsyncIterableIterator>>; + }; +} diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/version.d.ts b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/version.d.ts new file mode 100644 index 0000000..4cd7ce6 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "6.1.2"; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js b/dist/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js new file mode 100644 index 0000000..53b44b4 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js @@ -0,0 +1,356 @@ +// pkg/dist-src/version.js +var VERSION = "6.1.2"; + +// pkg/dist-src/normalize-paginated-list-response.js +function normalizePaginatedListResponse(response) { + if (!response.data) { + return { + ...response, + data: [] + }; + } + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) + return response; + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + response.data.total_count = totalCount; + return response; +} + +// pkg/dist-src/iterator.js +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? 
route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; + } catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } + }) + }; +} + +// pkg/dist-src/paginate.js +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = void 0; + } + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); +} +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { + if (result.done) { + return results; + } + let earlyExit = false; + function done() { + earlyExit = true; + } + results = results.concat( + mapFn ? mapFn(result.value, done) : result.value.data + ); + if (earlyExit) { + return results; + } + return gather(octokit, results, iterator2, mapFn); + }); +} + +// pkg/dist-src/compose-paginate.js +var composePaginateRest = Object.assign(paginate, { + iterator +}); + +// pkg/dist-src/generated/paginating-endpoints.js +var paginatingEndpoints = [ + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /organizations/{org}/personal-access-token-requests", + "GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /organizations/{org}/personal-access-tokens", + "GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/required_workflows", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET 
/orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{org}/{repo}/actions/required_workflows", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET 
/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET 
/user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET /users/{username}/starred", + "GET /users/{username}/subscriptions" +]; + +// pkg/dist-src/paginating-endpoints.js +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } +} + +// pkg/dist-src/index.js +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; +export { + composePaginateRest, + isPaginatingEndpoint, + paginateRest, + paginatingEndpoints +}; diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js.map b/dist/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js.map new file mode 100644 index 0000000..002ca71 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/version.js", "../dist-src/normalize-paginated-list-response.js", "../dist-src/iterator.js", "../dist-src/paginate.js", "../dist-src/compose-paginate.js", "../dist-src/generated/paginating-endpoints.js", "../dist-src/paginating-endpoints.js", "../dist-src/index.js"], + "sourcesContent": ["const VERSION = \"6.1.2\";\nexport {\n VERSION\n};\n", "function normalizePaginatedListResponse(response) {\n if (!response.data) {\n return {\n ...response,\n data: []\n };\n }\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization)\n return response;\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\nexport {\n normalizePaginatedListResponse\n};\n", "import { normalizePaginatedListResponse } from \"./normalize-paginated-list-response\";\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? 
route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url)\n return { done: true };\n try {\n const response = await requestMethod({ method, url, headers });\n const normalizedResponse = normalizePaginatedListResponse(response);\n url = ((normalizedResponse.headers.link || \"\").match(\n /<([^>]+)>;\\s*rel=\"next\"/\n ) || [])[1];\n return { value: normalizedResponse };\n } catch (error) {\n if (error.status !== 409)\n throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n })\n };\n}\nexport {\n iterator\n};\n", "import { iterator } from \"./iterator\";\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = void 0;\n }\n return gather(\n octokit,\n [],\n iterator(octokit, route, parameters)[Symbol.asyncIterator](),\n mapFn\n );\n}\nfunction gather(octokit, results, iterator2, mapFn) {\n return iterator2.next().then((result) => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(\n mapFn ? mapFn(result.value, done) : result.value.data\n );\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator2, mapFn);\n });\n}\nexport {\n paginate\n};\n", "import { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\nexport {\n composePaginateRest\n};\n", "const paginatingEndpoints = [\n \"GET /app/hook/deliveries\",\n \"GET /app/installation-requests\",\n \"GET /app/installations\",\n \"GET /enterprises/{enterprise}/dependabot/alerts\",\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\",\n \"GET /events\",\n \"GET /gists\",\n \"GET /gists/public\",\n \"GET /gists/starred\",\n \"GET /gists/{gist_id}/comments\",\n \"GET /gists/{gist_id}/commits\",\n \"GET /gists/{gist_id}/forks\",\n \"GET /installation/repositories\",\n \"GET /issues\",\n \"GET /licenses\",\n \"GET /marketplace_listing/plans\",\n \"GET /marketplace_listing/plans/{plan_id}/accounts\",\n \"GET /marketplace_listing/stubbed/plans\",\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\",\n \"GET /networks/{owner}/{repo}/events\",\n \"GET /notifications\",\n \"GET /organizations\",\n \"GET /organizations/{org}/personal-access-token-requests\",\n \"GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories\",\n \"GET /organizations/{org}/personal-access-tokens\",\n \"GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories\",\n \"GET /orgs/{org}/actions/cache/usage-by-repository\",\n \"GET /orgs/{org}/actions/permissions/repositories\",\n \"GET /orgs/{org}/actions/required_workflows\",\n \"GET /orgs/{org}/actions/runners\",\n \"GET /orgs/{org}/actions/secrets\",\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/actions/variables\",\n \"GET /orgs/{org}/actions/variables/{name}/repositories\",\n \"GET /orgs/{org}/blocks\",\n \"GET /orgs/{org}/code-scanning/alerts\",\n \"GET /orgs/{org}/codespaces\",\n \"GET /orgs/{org}/codespaces/secrets\",\n \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/dependabot/alerts\",\n \"GET 
/orgs/{org}/dependabot/secrets\",\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\",\n \"GET /orgs/{org}/events\",\n \"GET /orgs/{org}/failed_invitations\",\n \"GET /orgs/{org}/hooks\",\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries\",\n \"GET /orgs/{org}/installations\",\n \"GET /orgs/{org}/invitations\",\n \"GET /orgs/{org}/invitations/{invitation_id}/teams\",\n \"GET /orgs/{org}/issues\",\n \"GET /orgs/{org}/members\",\n \"GET /orgs/{org}/members/{username}/codespaces\",\n \"GET /orgs/{org}/migrations\",\n \"GET /orgs/{org}/migrations/{migration_id}/repositories\",\n \"GET /orgs/{org}/outside_collaborators\",\n \"GET /orgs/{org}/packages\",\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n \"GET /orgs/{org}/projects\",\n \"GET /orgs/{org}/public_members\",\n \"GET /orgs/{org}/repos\",\n \"GET /orgs/{org}/secret-scanning/alerts\",\n \"GET /orgs/{org}/teams\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\",\n \"GET /orgs/{org}/teams/{team_slug}/invitations\",\n \"GET /orgs/{org}/teams/{team_slug}/members\",\n \"GET /orgs/{org}/teams/{team_slug}/projects\",\n \"GET /orgs/{org}/teams/{team_slug}/repos\",\n \"GET /orgs/{org}/teams/{team_slug}/teams\",\n \"GET /projects/columns/{column_id}/cards\",\n \"GET /projects/{project_id}/collaborators\",\n \"GET /projects/{project_id}/columns\",\n \"GET /repos/{org}/{repo}/actions/required_workflows\",\n \"GET /repos/{owner}/{repo}/actions/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/caches\",\n \"GET /repos/{owner}/{repo}/actions/organization-secrets\",\n \"GET /repos/{owner}/{repo}/actions/organization-variables\",\n \"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs\",\n \"GET /repos/{owner}/{repo}/actions/runners\",\n \"GET /repos/{owner}/{repo}/actions/runs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\",\n \"GET /repos/{owner}/{repo}/actions/secrets\",\n \"GET /repos/{owner}/{repo}/actions/variables\",\n \"GET /repos/{owner}/{repo}/actions/workflows\",\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\",\n \"GET /repos/{owner}/{repo}/assignees\",\n \"GET /repos/{owner}/{repo}/branches\",\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\",\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n \"GET /repos/{owner}/{repo}/code-scanning/analyses\",\n \"GET /repos/{owner}/{repo}/codespaces\",\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\",\n \"GET /repos/{owner}/{repo}/codespaces/secrets\",\n \"GET /repos/{owner}/{repo}/collaborators\",\n \"GET /repos/{owner}/{repo}/comments\",\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/commits\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\",\n \"GET 
/repos/{owner}/{repo}/commits/{ref}/status\",\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\",\n \"GET /repos/{owner}/{repo}/contributors\",\n \"GET /repos/{owner}/{repo}/dependabot/alerts\",\n \"GET /repos/{owner}/{repo}/dependabot/secrets\",\n \"GET /repos/{owner}/{repo}/deployments\",\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\",\n \"GET /repos/{owner}/{repo}/environments\",\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\",\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps\",\n \"GET /repos/{owner}/{repo}/events\",\n \"GET /repos/{owner}/{repo}/forks\",\n \"GET /repos/{owner}/{repo}/hooks\",\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\",\n \"GET /repos/{owner}/{repo}/invitations\",\n \"GET /repos/{owner}/{repo}/issues\",\n \"GET /repos/{owner}/{repo}/issues/comments\",\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\",\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\",\n \"GET /repos/{owner}/{repo}/keys\",\n \"GET /repos/{owner}/{repo}/labels\",\n \"GET /repos/{owner}/{repo}/milestones\",\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\",\n \"GET /repos/{owner}/{repo}/notifications\",\n \"GET /repos/{owner}/{repo}/pages/builds\",\n \"GET /repos/{owner}/{repo}/projects\",\n \"GET /repos/{owner}/{repo}/pulls\",\n \"GET /repos/{owner}/{repo}/pulls/comments\",\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\",\n \"GET /repos/{owner}/{repo}/releases\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\",\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts\",\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\",\n \"GET /repos/{owner}/{repo}/security-advisories\",\n \"GET /repos/{owner}/{repo}/stargazers\",\n \"GET /repos/{owner}/{repo}/subscribers\",\n \"GET /repos/{owner}/{repo}/tags\",\n \"GET /repos/{owner}/{repo}/teams\",\n \"GET /repos/{owner}/{repo}/topics\",\n \"GET /repositories\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\",\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables\",\n \"GET /search/code\",\n \"GET /search/commits\",\n \"GET /search/issues\",\n \"GET /search/labels\",\n \"GET /search/repositories\",\n \"GET /search/topics\",\n \"GET /search/users\",\n \"GET /teams/{team_id}/discussions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\",\n \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\",\n \"GET /teams/{team_id}/invitations\",\n \"GET /teams/{team_id}/members\",\n \"GET /teams/{team_id}/projects\",\n \"GET /teams/{team_id}/repos\",\n \"GET 
/teams/{team_id}/teams\",\n \"GET /user/blocks\",\n \"GET /user/codespaces\",\n \"GET /user/codespaces/secrets\",\n \"GET /user/emails\",\n \"GET /user/followers\",\n \"GET /user/following\",\n \"GET /user/gpg_keys\",\n \"GET /user/installations\",\n \"GET /user/installations/{installation_id}/repositories\",\n \"GET /user/issues\",\n \"GET /user/keys\",\n \"GET /user/marketplace_purchases\",\n \"GET /user/marketplace_purchases/stubbed\",\n \"GET /user/memberships/orgs\",\n \"GET /user/migrations\",\n \"GET /user/migrations/{migration_id}/repositories\",\n \"GET /user/orgs\",\n \"GET /user/packages\",\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n \"GET /user/public_emails\",\n \"GET /user/repos\",\n \"GET /user/repository_invitations\",\n \"GET /user/social_accounts\",\n \"GET /user/ssh_signing_keys\",\n \"GET /user/starred\",\n \"GET /user/subscriptions\",\n \"GET /user/teams\",\n \"GET /users\",\n \"GET /users/{username}/events\",\n \"GET /users/{username}/events/orgs/{org}\",\n \"GET /users/{username}/events/public\",\n \"GET /users/{username}/followers\",\n \"GET /users/{username}/following\",\n \"GET /users/{username}/gists\",\n \"GET /users/{username}/gpg_keys\",\n \"GET /users/{username}/keys\",\n \"GET /users/{username}/orgs\",\n \"GET /users/{username}/packages\",\n \"GET /users/{username}/projects\",\n \"GET /users/{username}/received_events\",\n \"GET /users/{username}/received_events/public\",\n \"GET /users/{username}/repos\",\n \"GET /users/{username}/social_accounts\",\n \"GET /users/{username}/ssh_signing_keys\",\n \"GET /users/{username}/starred\",\n \"GET /users/{username}/subscriptions\"\n];\nexport {\n paginatingEndpoints\n};\n", "import {\n paginatingEndpoints\n} from \"./generated/paginating-endpoints\";\nimport { paginatingEndpoints as paginatingEndpoints2 } from \"./generated/paginating-endpoints\";\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\nexport {\n isPaginatingEndpoint,\n paginatingEndpoints2 as paginatingEndpoints\n};\n", "import { VERSION } from \"./version\";\nimport { paginate } from \"./paginate\";\nimport { iterator } from \"./iterator\";\nimport { composePaginateRest } from \"./compose-paginate\";\nimport {\n isPaginatingEndpoint,\n paginatingEndpoints\n} from \"./paginating-endpoints\";\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\nexport {\n composePaginateRest,\n isPaginatingEndpoint,\n paginateRest,\n paginatingEndpoints\n};\n"], + "mappings": 
";AAAA,IAAM,UAAU;;;ACAhB,SAAS,+BAA+B,UAAU;AAChD,MAAI,CAAC,SAAS,MAAM;AAClB,WAAO;AAAA,MACL,GAAG;AAAA,MACH,MAAM,CAAC;AAAA,IACT;AAAA,EACF;AACA,QAAM,6BAA6B,iBAAiB,SAAS,QAAQ,EAAE,SAAS,SAAS;AACzF,MAAI,CAAC;AACH,WAAO;AACT,QAAM,oBAAoB,SAAS,KAAK;AACxC,QAAM,sBAAsB,SAAS,KAAK;AAC1C,QAAM,aAAa,SAAS,KAAK;AACjC,SAAO,SAAS,KAAK;AACrB,SAAO,SAAS,KAAK;AACrB,SAAO,SAAS,KAAK;AACrB,QAAM,eAAe,OAAO,KAAK,SAAS,IAAI,EAAE,CAAC;AACjD,QAAM,OAAO,SAAS,KAAK,YAAY;AACvC,WAAS,OAAO;AAChB,MAAI,OAAO,sBAAsB,aAAa;AAC5C,aAAS,KAAK,qBAAqB;AAAA,EACrC;AACA,MAAI,OAAO,wBAAwB,aAAa;AAC9C,aAAS,KAAK,uBAAuB;AAAA,EACvC;AACA,WAAS,KAAK,cAAc;AAC5B,SAAO;AACT;;;AC1BA,SAAS,SAAS,SAAS,OAAO,YAAY;AAC5C,QAAM,UAAU,OAAO,UAAU,aAAa,MAAM,SAAS,UAAU,IAAI,QAAQ,QAAQ,SAAS,OAAO,UAAU;AACrH,QAAM,gBAAgB,OAAO,UAAU,aAAa,QAAQ,QAAQ;AACpE,QAAM,SAAS,QAAQ;AACvB,QAAM,UAAU,QAAQ;AACxB,MAAI,MAAM,QAAQ;AAClB,SAAO;AAAA,IACL,CAAC,OAAO,aAAa,GAAG,OAAO;AAAA,MAC7B,MAAM,OAAO;AACX,YAAI,CAAC;AACH,iBAAO,EAAE,MAAM,KAAK;AACtB,YAAI;AACF,gBAAM,WAAW,MAAM,cAAc,EAAE,QAAQ,KAAK,QAAQ,CAAC;AAC7D,gBAAM,qBAAqB,+BAA+B,QAAQ;AAClE,kBAAQ,mBAAmB,QAAQ,QAAQ,IAAI;AAAA,YAC7C;AAAA,UACF,KAAK,CAAC,GAAG,CAAC;AACV,iBAAO,EAAE,OAAO,mBAAmB;AAAA,QACrC,SAAS,OAAP;AACA,cAAI,MAAM,WAAW;AACnB,kBAAM;AACR,gBAAM;AACN,iBAAO;AAAA,YACL,OAAO;AAAA,cACL,QAAQ;AAAA,cACR,SAAS,CAAC;AAAA,cACV,MAAM,CAAC;AAAA,YACT;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;ACjCA,SAAS,SAAS,SAAS,OAAO,YAAY,OAAO;AACnD,MAAI,OAAO,eAAe,YAAY;AACpC,YAAQ;AACR,iBAAa;AAAA,EACf;AACA,SAAO;AAAA,IACL;AAAA,IACA,CAAC;AAAA,IACD,SAAS,SAAS,OAAO,UAAU,EAAE,OAAO,aAAa,EAAE;AAAA,IAC3D;AAAA,EACF;AACF;AACA,SAAS,OAAO,SAAS,SAAS,WAAW,OAAO;AAClD,SAAO,UAAU,KAAK,EAAE,KAAK,CAAC,WAAW;AACvC,QAAI,OAAO,MAAM;AACf,aAAO;AAAA,IACT;AACA,QAAI,YAAY;AAChB,aAAS,OAAO;AACd,kBAAY;AAAA,IACd;AACA,cAAU,QAAQ;AAAA,MAChB,QAAQ,MAAM,OAAO,OAAO,IAAI,IAAI,OAAO,MAAM;AAAA,IACnD;AACA,QAAI,WAAW;AACb,aAAO;AAAA,IACT;AACA,WAAO,OAAO,SAAS,SAAS,WAAW,KAAK;AAAA,EAClD,CAAC;AACH;;;AC5BA,IAAM,sBAAsB,OAAO,OAAO,UAAU;AAAA,EAClD;AACF,CAAC;;;ACJD,IAAM,sBAAsB;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;A
AAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;AC3NA,SAAS,qBAAqB,KAAK;AACjC,MAAI,OAAO,QAAQ,UAAU;AAC3B,WAAO,oBAAoB,SAAS,GAAG;AAAA,EACzC,OAAO;AACL,WAAO;AAAA,EACT;AACF;;;ACFA,SAAS,aAAa,SAAS;AAC7B,SAAO;AAAA,IACL,UAAU,OAAO,OAAO,SAAS,KAAK,MAAM,OAAO,GAAG;AAAA,MACpD,UAAU,SAAS,KAAK,MAAM,OAAO;AAAA,IACvC,CAAC;AAAA,EACH;AACF;AACA,aAAa,UAAU;", + "names": [] +} diff --git a/dist/node_modules/@octokit/plugin-paginate-rest/package.json b/dist/node_modules/@octokit/plugin-paginate-rest/package.json new file mode 100644 index 0000000..bc9dc9d --- /dev/null +++ b/dist/node_modules/@octokit/plugin-paginate-rest/package.json @@ -0,0 +1,52 @@ +{ + "name": "@octokit/plugin-paginate-rest", + "publishConfig": { + "access": "public" + }, + "version": "6.1.2", + "description": "Octokit plugin to paginate REST API endpoint responses", + "repository": "github:octokit/plugin-paginate-rest.js", + "keywords": [ + "github", + "api", + "sdk", + "toolkit" + ], + "license": "MIT", + "dependencies": { + "@octokit/tsconfig": "^1.0.2", + "@octokit/types": "^9.2.3" + }, + "peerDependencies": { + "@octokit/core": ">=4" + }, + "devDependencies": { + "@octokit/core": "^4.0.0", + "@octokit/plugin-rest-endpoint-methods": "^7.1.0", + "@types/fetch-mock": "^7.3.1", + "@types/jest": "^29.0.0", + "@types/node": "^18.0.0", + "esbuild": "^0.17.19", + "fetch-mock": "^9.0.0", + "github-openapi-graphql-query": "^4.0.0", + "glob": "^10.2.5", + "jest": "^29.0.0", + "npm-run-all": "^4.1.5", + "prettier": "2.8.8", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "source": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/LICENSE b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/LICENSE new file mode 100644 index 0000000..57bee5f --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/LICENSE @@ -0,0 +1,7 @@ +MIT License Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/README.md b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/README.md new file mode 100644 index 0000000..b23ff58 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/README.md @@ -0,0 +1,76 @@ +# plugin-rest-endpoint-methods.js + +> Octokit plugin adding one method for all of api.github.com REST API endpoints + +[![@latest](https://img.shields.io/npm/v/@octokit/plugin-rest-endpoint-methods.svg)](https://www.npmjs.com/package/@octokit/plugin-rest-endpoint-methods) +[![Build Status](https://github.com/octokit/plugin-rest-endpoint-methods.js/workflows/Test/badge.svg)](https://github.com/octokit/plugin-rest-endpoint-methods.js/actions?workflow=Test) + +## Usage + + + + + + +
+Browsers + + +Load `@octokit/plugin-rest-endpoint-methods` and [`@octokit/core`](https://github.com/octokit/core.js) (or core-compatible module) directly from [cdn.skypack.dev](https://cdn.skypack.dev) + +```html +<script type="module"> +import { Octokit } from "https://cdn.skypack.dev/@octokit/core"; +import { restEndpointMethods } from "https://cdn.skypack.dev/@octokit/plugin-rest-endpoint-methods"; +</script> +``` + +
+Node + + +Install with `npm install @octokit/core @octokit/plugin-rest-endpoint-methods`. Optionally replace `@octokit/core` with a compatible module. + +```js +const { Octokit } = require("@octokit/core"); +const { + restEndpointMethods, +} = require("@octokit/plugin-rest-endpoint-methods"); +``` + +
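For reference, every method this plugin adds is generated from a route template in the `Endpoints` table bundled in `dist-node/index.js` later in this diff; `actions.listWorkflowRunsForRepo`, for instance, is defined there as `GET /repos/{owner}/{repo}/actions/runs`. The sketch below is illustrative only: the owner/repo values are placeholders, and it assumes an `octokit` instance created with the plugin as in the usage example that follows. It shows that calling the generated method and issuing the same route via `octokit.request` hit the same endpoint.

```js
// Sketch only: assumes `octokit` was created via
// Octokit.plugin(restEndpointMethods), as in the usage example below,
// and that this code runs inside an async function (or an ESM module
// with top-level await). "octocat"/"hello-world" are placeholder values.

// Generated endpoint method, defined by the Endpoints table in dist-node/index.js:
const viaMethod = await octokit.rest.actions.listWorkflowRunsForRepo({
  owner: "octocat",
  repo: "hello-world",
});

// Equivalent raw request using the same route template:
const viaRequest = await octokit.request(
  "GET /repos/{owner}/{repo}/actions/runs",
  { owner: "octocat", repo: "hello-world" }
);

// Both calls resolve to the same endpoint and return the same workflow-run list.
console.log(viaMethod.data.total_count, viaRequest.data.total_count);
```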
+ +```js +const MyOctokit = Octokit.plugin(restEndpointMethods); +const octokit = new MyOctokit({ auth: "secret123" }); + +// https://developer.github.com/v3/users/#get-the-authenticated-user +octokit.rest.users.getAuthenticated(); +``` + +There is one method for each REST API endpoint documented at [https://developer.github.com/v3](https://developer.github.com/v3). All endpoint methods are documented in the [docs/](docs/) folder, e.g. [docs/users/getAuthenticated.md](docs/users/getAuthenticated.md) + +## TypeScript + +Parameter and response types for all endpoint methods exported as `{ RestEndpointMethodTypes }`. + +Example + +```ts +import { RestEndpointMethodTypes } from "@octokit/plugin-rest-endpoint-methods"; + +type UpdateLabelParameters = + RestEndpointMethodTypes["issues"]["updateLabel"]["parameters"]; +type UpdateLabelResponse = + RestEndpointMethodTypes["issues"]["updateLabel"]["response"]; +``` + +In order to get types beyond parameters and responses, check out [`@octokit/openapi-types`](https://github.com/octokit/openapi-types.ts/#readme), which is a direct transpilation from GitHub's official OpenAPI specification. + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js new file mode 100644 index 0000000..562def7 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js @@ -0,0 +1,2013 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + legacyRestEndpointMethods: () => legacyRestEndpointMethods, + restEndpointMethods: () => restEndpointMethods +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/version.js +var VERSION = "7.2.3"; + +// pkg/dist-src/generated/endpoints.js +var Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + addSelectedRepoToRequiredWorkflow: [ + "PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST 
/repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createRequiredWorkflow: ["POST /orgs/{org}/actions/required_workflows"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteRequiredWorkflow: [ + "DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + 
getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoRequiredWorkflow: [ + "GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}" + ], + getRepoRequiredWorkflowUsage: [ + "GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing" + ], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getRequiredWorkflow: [ + "GET /orgs/{org}/actions/required_workflows/{required_workflow_id}" + ], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + 
listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], + listRepoRequiredWorkflows: [ + "GET /repos/{org}/{repo}/actions/required_workflows" + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRequiredWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs" + ], + listRequiredWorkflows: ["GET /orgs/{org}/actions/required_workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], + listSelectedRepositoriesRequiredWorkflow: [ + "GET /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories" + ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + removeSelectedRepoFromRequiredWorkflow: [ + "DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + 
reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedReposToRequiredWorkflow: [ + "PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ], + updateRequiredWorkflow: [ + "PATCH /orgs/{org}/actions/required_workflows/{required_workflow_id}" + ] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription" + ], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET 
/repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE 
/app/installations/{installation_id}/suspended" + ], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines" + ], + 
createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], + deleteCodespacesBillingUsers: [ + "DELETE /orgs/{org}/codespaces/billing/selected_users" + ], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + setCodespacesBilling: ["PUT /orgs/{org}/codespaces/billing"], + setCodespacesBillingUsers: [ + "POST /orgs/{org}/codespaces/billing/selected_users" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT 
/orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] + }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] + }, + emojis: { get: ["GET /emojis"] }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST 
/repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] + }, + issues: { + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET 
/orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] + }, + meta: { + get: ["GET /meta"], + getAllVersions: ["GET /versions"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: ["DELETE /repos/{owner}/{repo}/import"], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], + getImportStatus: ["GET /repos/{owner}/{repo}/import"], + getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], + setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: ["PUT /repos/{owner}/{repo}/import"], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: ["PATCH /repos/{owner}/{repo}/import"] + }, + orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE 
/orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], + createInvitation: ["POST /orgs/{org}/invitations"], + createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], + get: ["GET /orgs/{org}"], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: [ + "GET /organizations/{org}/personal-access-token-requests" + ], + listPatGrants: ["GET /organizations/{org}/personal-access-tokens"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /organizations/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /organizations/{org}/personal-access-token-requests" + ], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: [ + "POST /organizations/{org}/personal-access-tokens/{pat_id}" + ], + updatePatAccesses: ["POST 
/organizations/{org}/personal-access-tokens"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] + } + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST 
/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" + ], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], + update: 
["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] + }, + rateLimit: { get: ["GET /rate_limit"] }, + reactions: { + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] + }, + repos: { + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + 
addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE 
/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" + ], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: [ + "GET 
/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET 
/repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", 
+ {}, + { mapToData: "users" } + ], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + getRepositoryAdvisory: [ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] + }, + teams: { + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + 
create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], + addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE 
/user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + list: ["GET /users"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } + ], + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; +var endpoints_default = Endpoints; + +// pkg/dist-src/endpoints-to-methods.js +var endpointMethodsMap = /* @__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, 
url] = route.split(/ /); + const endpointDefaults = Object.assign( + { + method, + url + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } +} +var handler = { + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const { decorations, endpointDefaults } = endpointMethodsMap.get(scope).get(methodName); + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = octokit.request.defaults(endpointDefaults); + } + return cache[methodName]; + } +}; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); + } + return newMethods; +} +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + function withDecorations(...args) { + let options = requestWithDefaults.endpoint.merge(...args); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: void 0 + }); + return requestWithDefaults(options); + } + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); + } + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + if (decorations.renamedParameters) { + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; + } + delete options2[name]; + } + } + return requestWithDefaults(options2); + } + return requestWithDefaults(...args); + } + return Object.assign(withDecorations, requestWithDefaults); +} + +// pkg/dist-src/index.js +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + rest: api + }; +} +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + ...api, + rest: api + }; +} +legacyRestEndpointMethods.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + legacyRestEndpointMethods, + restEndpointMethods +}); diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map new file mode 100644 index 0000000..e62eb08 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js", "../dist-src/generated/endpoints.js", "../dist-src/endpoints-to-methods.js"], + "sourcesContent": ["import { VERSION } from \"./version\";\nimport { endpointsToMethods } from \"./endpoints-to-methods\";\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit);\n return {\n ...api,\n rest: api\n };\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\nexport {\n legacyRestEndpointMethods,\n restEndpointMethods\n};\n", "const VERSION = \"7.2.3\";\nexport {\n VERSION\n};\n", "const Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\n \"POST /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n addCustomLabelsToSelfHostedRunnerForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToOrgVariable: [\n \"PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToRequiredWorkflow: [\n \"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}\"\n ],\n approveWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"\n ],\n createEnvironmentVariable: [\n \"POST /repositories/{repository_id}/environments/{environment_name}/variables\"\n ],\n createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"\n ],\n createOrgVariable: [\"POST /orgs/{org}/actions/variables\"],\n createRegistrationTokenForOrg: [\n \"POST /orgs/{org}/actions/runners/registration-token\"\n ],\n createRegistrationTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\"\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST 
/repos/{owner}/{repo}/actions/runners/remove-token\"\n ],\n createRepoVariable: [\"POST /repos/{owner}/{repo}/actions/variables\"],\n createRequiredWorkflow: [\"POST /orgs/{org}/actions/required_workflows\"],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"\n ],\n deleteActionsCacheById: [\n \"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\"\n ],\n deleteActionsCacheByKey: [\n \"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\"\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n deleteEnvironmentVariable: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteOrgVariable: [\"DELETE /orgs/{org}/actions/variables/{name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"\n ],\n deleteRepoVariable: [\n \"DELETE /repos/{owner}/{repo}/actions/variables/{name}\"\n ],\n deleteRequiredWorkflow: [\n \"DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}\"\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\"\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"\n ],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"\n ],\n disableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"\n ],\n downloadArtifact: [\n \"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"\n ],\n downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"\n ],\n downloadWorkflowRunAttemptLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"\n ],\n downloadWorkflowRunLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"\n ],\n enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"\n ],\n generateRunnerJitconfigForOrg: [\n \"POST /orgs/{org}/actions/runners/generate-jitconfig\"\n ],\n generateRunnerJitconfigForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig\"\n ],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\n \"GET /orgs/{org}/actions/cache/usage-by-repository\"\n ],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\"\n ],\n getAllowedActionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"\n ],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"\n ],\n getEnvironmentSecret: [\n \"GET 
/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n getEnvironmentVariable: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/workflow\"\n ],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/workflow\"\n ],\n getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\"\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\"\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getOrgVariable: [\"GET /orgs/{org}/actions/variables/{name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] }\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoRequiredWorkflow: [\n \"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}\"\n ],\n getRepoRequiredWorkflowUsage: [\n \"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing\"\n ],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getRepoVariable: [\"GET /repos/{owner}/{repo}/actions/variables/{name}\"],\n getRequiredWorkflow: [\n \"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}\"\n ],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/access\"\n ],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"\n ],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"\n ],\n listEnvironmentVariables: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables\"\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"\n ],\n listJobsForWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"\n ],\n listLabelsForSelfHostedRunnerForOrg: [\n \"GET /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n listLabelsForSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listOrgVariables: [\"GET /orgs/{org}/actions/variables\"],\n listRepoOrganizationSecrets: [\n \"GET 
/repos/{owner}/{repo}/actions/organization-secrets\"\n ],\n listRepoOrganizationVariables: [\n \"GET /repos/{owner}/{repo}/actions/organization-variables\"\n ],\n listRepoRequiredWorkflows: [\n \"GET /repos/{org}/{repo}/actions/required_workflows\"\n ],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoVariables: [\"GET /repos/{owner}/{repo}/actions/variables\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRequiredWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs\"\n ],\n listRequiredWorkflows: [\"GET /orgs/{org}/actions/required_workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\"\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"\n ],\n listSelectedReposForOrgVariable: [\n \"GET /orgs/{org}/actions/variables/{name}/repositories\"\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\"\n ],\n listSelectedRepositoriesRequiredWorkflow: [\n \"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories\"\n ],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"\n ],\n listWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"\n ],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\"\n ],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\"\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\"\n ],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromOrgVariable: [\n \"DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromRequiredWorkflow: [\n \"DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}\"\n ],\n reviewCustomGatesForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule\"\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"\n ],\n setAllowedActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/selected-actions\"\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"\n ],\n setCustomLabelsForSelfHostedRunnerForOrg: [\n \"PUT /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n setCustomLabelsForSelfHostedRunnerForRepo: [\n \"PUT 
/repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/workflow\"\n ],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/workflow\"\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\"\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"\n ],\n setSelectedReposForOrgVariable: [\n \"PUT /orgs/{org}/actions/variables/{name}/repositories\"\n ],\n setSelectedReposToRequiredWorkflow: [\n \"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories\"\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\"\n ],\n setWorkflowAccessToRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/access\"\n ],\n updateEnvironmentVariable: [\n \"PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n updateOrgVariable: [\"PATCH /orgs/{org}/actions/variables/{name}\"],\n updateRepoVariable: [\n \"PATCH /repos/{owner}/{repo}/actions/variables/{name}\"\n ],\n updateRequiredWorkflow: [\n \"PATCH /orgs/{org}/actions/required_workflows/{required_workflow_id}\"\n ]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\"\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\"\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\"\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\"\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\"\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\"\n ],\n 
starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"] }\n ],\n addRepoToInstallationForAuthenticatedUser: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\"\n ],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\"\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\"\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\"\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\"\n ],\n listInstallationRequestsForAuthenticatedApp: [\n \"GET /app/installation-requests\"\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\"\n ],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\n \"POST /app/hook/deliveries/{delivery_id}/attempts\"\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"] }\n ],\n removeRepoFromInstallationForAuthenticatedUser: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"\n ],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\"\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\"\n ],\n 
getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\"\n ],\n getSharedStorageBillingOrg: [\n \"GET /orgs/{org}/settings/billing/shared-storage\"\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\"\n ]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"\n ],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\n \"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"\n ],\n rerequestSuite: [\n \"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"\n ],\n setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\"\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"\n ],\n getAlert: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" } }\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"\n ],\n getCodeqlDatabase: [\n \"GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}\"\n ],\n getDefaultSetup: [\"GET /repos/{owner}/{repo}/code-scanning/default-setup\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n {},\n { renamed: [\"codeScanning\", \"listAlertInstances\"] }\n ],\n listCodeqlDatabases: [\n \"GET /repos/{owner}/{repo}/code-scanning/codeql/databases\"\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"\n ],\n updateDefaultSetup: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/default-setup\"\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n codespaceMachinesForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/machines\"\n ],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}\"\n ],\n createOrUpdateRepoSecret: [\n \"PUT 
/repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n createOrUpdateSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}\"\n ],\n createWithPrForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\"\n ],\n createWithRepoForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/codespaces\"\n ],\n deleteCodespacesBillingUsers: [\n \"DELETE /orgs/{org}/codespaces/billing/selected_users\"\n ],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\n \"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/codespaces/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n deleteSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}\"\n ],\n exportForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/exports\"\n ],\n getCodespacesForUserInOrg: [\n \"GET /orgs/{org}/members/{username}/codespaces\"\n ],\n getExportDetailsForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/exports/{export_id}\"\n ],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/codespaces/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/codespaces/secrets/{secret_name}\"],\n getPublicKeyForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/public-key\"\n ],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\"\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n getSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}\"\n ],\n listDevcontainersInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\"\n ],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\n \"GET /orgs/{org}/codespaces\",\n {},\n { renamedParameters: { org_id: \"org\" } }\n ],\n listInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces\"\n ],\n listOrgSecrets: [\"GET /orgs/{org}/codespaces/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}/repositories\"\n ],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"\n ],\n preFlightWithRepoForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/new\"\n ],\n publishForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/publish\"\n ],\n removeRepositoryForSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n repoMachinesForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/machines\"\n ],\n setCodespacesBilling: [\"PUT /orgs/{org}/codespaces/billing\"],\n setCodespacesBillingUsers: [\n \"POST /orgs/{org}/codespaces/billing/selected_users\"\n ],\n setRepositoriesForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"\n ],\n startForAuthenticatedUser: [\"POST 
/user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\n \"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\"\n ],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"]\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}\"\n ],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n getAlert: [\"GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\"\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/dependabot/alerts\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/dependabot/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/dependabot/alerts\"],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"\n ]\n },\n dependencyGraph: {\n createRepositorySnapshot: [\n \"POST /repos/{owner}/{repo}/dependency-graph/snapshots\"\n ],\n diffRange: [\n \"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\"\n ],\n exportSbom: [\"GET /repos/{owner}/{repo}/dependency-graph/sbom\"]\n },\n emojis: { get: [\"GET /emojis\"] },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST 
/repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] }\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\"\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] }\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] }\n ]\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n checkUserCanBeAssignedToIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}\"\n ],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET 
/repos/{owner}/{repo}/issues/{issue_number}/timeline\"\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"\n ],\n removeAssignees: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"\n ]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } }\n ]\n },\n meta: {\n get: [\"GET /meta\"],\n getAllVersions: [\"GET /versions\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\"\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\"\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\"\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\"\n ],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n listReposForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/repositories\"\n ],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n {},\n { renamed: [\"migrations\", \"listReposForAuthenticatedUser\"] }\n ],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"\n ],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"\n ],\n updateImport: 
[\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n addSecurityManagerTeam: [\n \"PUT /orgs/{org}/security-managers/teams/{team_slug}\"\n ],\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\"\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n delete: [\"DELETE /orgs/{org}\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n enableOrDisableSecurityProductOnAllOrgRepos: [\n \"POST /orgs/{org}/{security_product}/{enablement}\"\n ],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"\n ],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPatGrantRepositories: [\n \"GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories\"\n ],\n listPatGrantRequestRepositories: [\n \"GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories\"\n ],\n listPatGrantRequests: [\n \"GET /organizations/{org}/personal-access-token-requests\"\n ],\n listPatGrants: [\"GET /organizations/{org}/personal-access-tokens\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listSecurityManagerTeams: [\"GET /orgs/{org}/security-managers\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"\n ],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE /orgs/{org}/outside_collaborators/{username}\"\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\"\n ],\n removeSecurityManagerTeam: [\n \"DELETE /orgs/{org}/security-managers/teams/{team_slug}\"\n ],\n reviewPatGrantRequest: [\n \"POST /organizations/{org}/personal-access-token-requests/{pat_request_id}\"\n ],\n reviewPatGrantRequestsInBulk: [\n \"POST /organizations/{org}/personal-access-token-requests\"\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT 
/orgs/{org}/public_members/{username}\"\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\"\n ],\n updatePatAccess: [\n \"POST /organizations/{org}/personal-access-tokens/{pat_id}\"\n ],\n updatePatAccesses: [\"POST /organizations/{org}/personal-access-tokens\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}\"\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"\n ],\n deletePackageForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}\"\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n deletePackageVersionForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n deletePackageVersionForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] }\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"\n ]\n }\n ],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\"\n ],\n getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\"\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\"\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\"\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\"\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n listDockerMigrationConflictingPackagesForAuthenticatedUser: [\n \"GET /user/docker/conflicts\"\n ],\n listDockerMigrationConflictingPackagesForOrganization: [\n \"GET /orgs/{org}/docker/conflicts\"\n ],\n listDockerMigrationConflictingPackagesForUser: [\n \"GET /users/{username}/docker/conflicts\"\n ],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageForUser: [\n \"POST 
/users/{username}/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ],\n restorePackageVersionForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST /projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\n \"GET /projects/{project_id}/collaborators/{username}/permission\"\n ],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\"\n ],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"\n ],\n listReviewCommentsForRepo: [\"GET 
/repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"\n ],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"\n ]\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"\n ],\n createForIssue: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"\n ],\n createForRelease: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"\n ],\n createForTeamDiscussionInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"\n ],\n deleteForCommitComment: [\n \"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForPullRequestComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForRelease: [\n \"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\"\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"\n ],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"\n ],\n listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"\n ],\n listForRelease: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\"\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"\n ],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"\n ]\n },\n repos: {\n acceptInvitation: [\n \"PATCH /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"] }\n ],\n acceptInvitationForAuthenticatedUser: [\n \"PATCH 
/user/repository_invitations/{invitation_id}\"\n ],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\n \"GET /repos/{owner}/{repo}/compare/{basehead}\"\n ],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentBranchPolicy: [\n \"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"\n ],\n createDeploymentProtectionRule: [\n \"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules\"\n ],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createOrgRuleset: [\"POST /orgs/{org}/rulesets\"],\n createPagesDeployment: [\"POST /repos/{owner}/{repo}/pages/deployment\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createRepoRuleset: [\"POST /repos/{owner}/{repo}/rulesets\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\"\n ],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\n \"DELETE /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"] }\n ],\n declineInvitationForAuthenticatedUser: [\n \"DELETE /user/repository_invitations/{invitation_id}\"\n ],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n deleteAnEnvironment: [\n \"DELETE 
/repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"\n ],\n deleteDeploymentBranchPolicy: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"\n ],\n deleteOrgRuleset: [\"DELETE /orgs/{org}/rulesets/{ruleset_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"\n ],\n deleteRepoRuleset: [\"DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n deleteTagProtection: [\n \"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\"\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE /repos/{owner}/{repo}/automated-security-fixes\"\n ],\n disableDeploymentProtectionRule: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}\"\n ],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\n \"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] }\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\"\n ],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n generateReleaseNotes: [\n \"POST /repos/{owner}/{repo}/releases/generate-notes\"\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n getAllDeploymentProtectionRules: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules\"\n ],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"\n ],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"\n ],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET 
/repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n getBranchRules: [\"GET /repos/{owner}/{repo}/rules/branches/{branch}\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getCustomDeploymentProtectionRule: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}\"\n ],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentBranchPolicy: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n getDeploymentStatus: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"\n ],\n getEnvironment: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getOrgRuleset: [\"GET /orgs/{org}/rulesets/{ruleset_id}\"],\n getOrgRulesets: [\"GET /orgs/{org}/rulesets\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getRepoRuleset: [\"GET /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n getRepoRulesets: [\"GET /repos/{owner}/{repo}/rulesets\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET 
/repos/{owner}/{repo}/hooks/{hook_id}/config\"\n ],\n getWebhookDelivery: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"\n ],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listCustomDeploymentRuleIntegrations: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps\"\n ],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentBranchPolicies: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"\n ],\n listDeploymentStatuses: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"\n ],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"\n ],\n removeAppAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n removeCollaborator: [\n \"DELETE /repos/{owner}/{repo}/collaborators/{username}\"\n ],\n removeStatusCheckContexts: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n 
renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateDeploymentBranchPolicy: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"\n ],\n updateOrgRuleset: [\"PUT /orgs/{org}/rulesets/{ruleset_id}\"],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n \"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"\n ],\n updateRepoRuleset: [\"PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] }\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" }\n ]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\"\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"\n ]\n },\n securityAdvisories: {\n 
createPrivateVulnerabilityReport: [\n \"POST /repos/{owner}/{repo}/security-advisories/reports\"\n ],\n createRepositoryAdvisory: [\n \"POST /repos/{owner}/{repo}/security-advisories\"\n ],\n getRepositoryAdvisory: [\n \"GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}\"\n ],\n listRepositoryAdvisories: [\"GET /repos/{owner}/{repo}/security-advisories\"],\n updateRepositoryAdvisory: [\n \"PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}\"\n ]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"\n ],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\"\n ],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\n \"POST /user/emails\",\n {},\n { renamed: [\"users\", \"addEmailForAuthenticatedUser\"] }\n ],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n addSocialAccountForAuthenticatedUser: [\"POST /user/social_accounts\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n 
checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\n \"POST /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"] }\n ],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\n \"POST /user/keys\",\n {},\n { renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"] }\n ],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n createSshSigningKeyForAuthenticatedUser: [\"POST /user/ssh_signing_keys\"],\n deleteEmailForAuthenticated: [\n \"DELETE /user/emails\",\n {},\n { renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"] }\n ],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\n \"DELETE /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"] }\n ],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\n \"DELETE /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"] }\n ],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n deleteSocialAccountForAuthenticatedUser: [\"DELETE /user/social_accounts\"],\n deleteSshSigningKeyForAuthenticatedUser: [\n \"DELETE /user/ssh_signing_keys/{ssh_signing_key_id}\"\n ],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\n \"GET /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"] }\n ],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\n \"GET /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"] }\n ],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n getSshSigningKeyForAuthenticatedUser: [\n \"GET /user/ssh_signing_keys/{ssh_signing_key_id}\"\n ],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\n \"GET /user/blocks\",\n {},\n { renamed: [\"users\", \"listBlockedByAuthenticatedUser\"] }\n ],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\n \"GET /user/emails\",\n {},\n { renamed: [\"users\", \"listEmailsForAuthenticatedUser\"] }\n ],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\n \"GET /user/following\",\n {},\n { renamed: [\"users\", \"listFollowedByAuthenticatedUser\"] }\n ],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\n \"GET /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"] }\n ],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\n \"GET /user/public_emails\",\n {},\n { renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"] }\n ],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n 
listPublicSshKeysForAuthenticated: [\n \"GET /user/keys\",\n {},\n { renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"] }\n ],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n listSocialAccountsForAuthenticatedUser: [\"GET /user/social_accounts\"],\n listSocialAccountsForUser: [\"GET /users/{username}/social_accounts\"],\n listSshSigningKeysForAuthenticatedUser: [\"GET /user/ssh_signing_keys\"],\n listSshSigningKeysForUser: [\"GET /users/{username}/ssh_signing_keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\n \"PATCH /user/email/visibility\",\n {},\n { renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"] }\n ],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\n \"PATCH /user/email/visibility\"\n ],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\nvar endpoints_default = Endpoints;\nexport {\n endpoints_default as default\n};\n", "import ENDPOINTS from \"./generated/endpoints\";\nconst endpointMethodsMap = /* @__PURE__ */ new Map();\nfor (const [scope, endpoints] of Object.entries(ENDPOINTS)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign(\n {\n method,\n url\n },\n defaults\n );\n if (!endpointMethodsMap.has(scope)) {\n endpointMethodsMap.set(scope, /* @__PURE__ */ new Map());\n }\n endpointMethodsMap.get(scope).set(methodName, {\n scope,\n methodName,\n endpointDefaults,\n decorations\n });\n }\n}\nconst handler = {\n get({ octokit, scope, cache }, methodName) {\n if (cache[methodName]) {\n return cache[methodName];\n }\n const { decorations, endpointDefaults } = endpointMethodsMap.get(scope).get(methodName);\n if (decorations) {\n cache[methodName] = decorate(\n octokit,\n scope,\n methodName,\n endpointDefaults,\n decorations\n );\n } else {\n cache[methodName] = octokit.request.defaults(endpointDefaults);\n }\n return cache[methodName];\n }\n};\nfunction endpointsToMethods(octokit) {\n const newMethods = {};\n for (const scope of endpointMethodsMap.keys()) {\n newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler);\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n function withDecorations(...args) {\n let options = requestWithDefaults.endpoint.merge(...args);\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: void 0\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(\n `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`\n );\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n const options2 = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(\n decorations.renamedParameters\n )) {\n if (name in options2) {\n octokit.log.warn(\n `\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". 
Use \"${alias}\" instead`\n );\n if (!(alias in options2)) {\n options2[alias] = options2[name];\n }\n delete options2[name];\n }\n }\n return requestWithDefaults(options2);\n }\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\nexport {\n endpointsToMethods\n};\n"], + "mappings": ";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAM,UAAU;;;ACAhB,IAAM,YAAY;AAAA,EAChB,SAAS;AAAA,IACP,yCAAyC;AAAA,MACvC;AAAA,IACF;AAAA,IACA,0CAA0C;AAAA,MACxC;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,mBAAmB;AAAA,MACjB;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,yBAAyB,CAAC,+CAA+C;AAAA,IACzE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,oCAAoC;AAAA,IACxD,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,yBAAyB,CAAC,+CAA+C;AAAA,IACzE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,oBAAoB,CAAC,8CAA8C;AAAA,IACnE,wBAAwB,CAAC,6CAA6C;AAAA,IACtE,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,kDAAkD;AAAA,IACpE,mBAAmB,CAAC,6CAA6C;AAAA,IACjE,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,oDAAoD;AAAA,IACxE,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,oDAAoD;AAAA,MAClD;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,mDAAmD;AAAA,MACjD;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,0CAA0C;AAAA,IAChE,sBAAsB,CAAC,+CAA+C;AAAA,IACtE,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,4BAA4B,CAAC,qCAAqC;AAAA,IAClE,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,aAAa,CAAC,2DAA2D;AAAA,IACzE,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,wDAAwD;AAAA,MACtD;AAAA,IACF;AAAA,IACA,sDAAsD;AAAA,MACpD;AAAA,IACF;AAAA,IACA,yCAAyC;AAAA,MACvC;AAAA,IACF;AAAA,IACA,uCAAuC;AAAA,MACrC;AAAA,IACF;AAAA,IACA,sBAAsB,CAAC,iDAAiD;AAAA,IACxE,iBAAiB,CAAC,4CAA4C;AAAA,IAC9D,cAAc,CAAC,+CAA+C;AAAA,IAC9D,gBAAgB,CAAC,0CAA0C;AAAA,IAC3D,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,WAAW,uCAAuC,EAAE;AAAA,IAClE;AAAA,IACA,kBAAkB,CAAC,sDAAsD;AAAA,IACzE,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,eAAe,CAAC,yDAAyD;AAAA,IACzE,iBAAiB,CAAC,oDAAoD;AAAA,IACtE,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,2BAA2B,CAAC,6CAA6C;AAAA,IACzE,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,aAAa,CAAC,2DAA2D;AAAA,IACzE,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,iDAAiD;AAAA,IAClE,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,sBAAsB,CAAC,6CAA6C;AAAA,IACpE,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,qCAAqC;AAAA,MACnC;AAAA,IACF;AAAA,IACA,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,gBAAgB,CAA
C,iCAAiC;AAAA,IAClD,kBAAkB,CAAC,mCAAmC;AAAA,IACtD,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,2CAA2C;AAAA,IAC7D,mBAAmB,CAAC,6CAA6C;AAAA,IACjE,mBAAmB,CAAC,6CAA6C;AAAA,IACjE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,uBAAuB,CAAC,4CAA4C;AAAA,IACpE,8BAA8B,CAAC,2CAA2C;AAAA,IAC1E,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,0DAA0D;AAAA,MACxD;AAAA,IACF;AAAA,IACA,0CAA0C;AAAA,MACxC;AAAA,IACF;AAAA,IACA,6BAA6B,CAAC,iCAAiC;AAAA,IAC/D,8BAA8B,CAAC,2CAA2C;AAAA,IAC1E,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,yBAAyB,CAAC,wCAAwC;AAAA,IAClE,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,eAAe,CAAC,wDAAwD;AAAA,IACxE,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,iDAAiD;AAAA,MAC/C;AAAA,IACF;AAAA,IACA,kDAAkD;AAAA,MAChD;AAAA,IACF;AAAA,IACA,6CAA6C;AAAA,MAC3C;AAAA,IACF;AAAA,IACA,8CAA8C;AAAA,MAC5C;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,wCAAwC;AAAA,MACtC;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,0CAA0C;AAAA,MACxC;AAAA,IACF;AAAA,IACA,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,wDAAwD;AAAA,MACtD;AAAA,IACF;AAAA,IACA,sDAAsD;AAAA,MACpD;AAAA,IACF;AAAA,IACA,yCAAyC;AAAA,MACvC;AAAA,IACF;AAAA,IACA,uCAAuC;AAAA,MACrC;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,oCAAoC;AAAA,MAClC;AAAA,IACF;AAAA,IACA,yDAAyD;AAAA,MACvD;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,4CAA4C;AAAA,IAChE,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AAAA,EACA,UAAU;AAAA,IACR,uCAAuC,CAAC,kCAAkC;AAAA,IAC1E,wBAAwB,CAAC,2CAA2C;AAAA,IACpE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,UAAU,CAAC,YAAY;AAAA,IACvB,qBAAqB,CAAC,wCAAwC;AAAA,IAC9D,WAAW,CAAC,wCAAwC;AAAA,IACpD,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,gCAAgC,CAAC,8BAA8B;AAAA,IAC/D,uCAAuC,CAAC,oBAAoB;AAAA,IAC5D,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,aAAa;AAAA,IAChC,gCAAgC,CAAC,qCAAqC;AAAA,IACtE,yBAAyB,CAAC,qCAAqC;AAAA,IAC/D,qBAAqB,CAAC,wBAAwB;AAAA,IAC9C,2BAA2B,CAAC,uCAAuC;AAAA,IACnE,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,kCAAkC;AAAA,IACnD,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,qCAAqC,CAAC,mBAAmB;AAAA,IACzD,wBAAwB,CAAC,+BAA+B;AAAA,IACxD,wBAAwB,CAAC,qCAAqC;AAAA,IAC9D,uBAAuB,CAAC,sCAAsC;AAAA,IAC9D,sCAAsC,CAAC,yBAAyB;AAAA,IAChE,qBAAqB,CAAC,uCAAuC;AAAA,IAC7D,yBAAyB,CAAC,oBAAoB;AAAA,IAC9C,6BAA6B,CAAC,yCAAyC;AAAA,IACvE,kBAAkB,CAAC,0CAA0C;AAAA,IAC7D,qBAAqB,CAAC,wCAAwC;AAAA,IAC9D,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,8BAA8B,CAAC,kCAAkC;AAAA,IACjE,gCAAgC,CAAC,qCAAqC;AAAA,EACxE;AAAA,EACA,MAAM;AAAA,IACJ,uBAAuB;AAAA,MACrB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,QAAQ,2CAA2C,EAAE;AAAA,IACnE;AAAA,IACA,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,YAAY,CAAC,sCAAsC;AAAA,IACnD,oBAAoB,CAAC,wCAAwC;AAAA,IAC7D,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,wCAAwC;AAAA,IAC9D,oBAAoB,CAAC,6CAA6C;AAAA,IAClE,aAAa,CAAC,wCAAwC;AAAA,IACtD,kBAAkB,CAAC,UAAU;AAAA,IAC7B,WAAW,CAAC,sBAAsB;AAAA,IAClC,iBAAiB,CAAC,0CAA0C;AAAA,IAC5D,oBAAoB,CAAC,8BAA8B;AAAA,IACnD,qBAAqB,CAAC,wCAAwC;AAAA,IAC9D,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,oCAAoC;AAAA,IAC1D,wBAAwB,CAAC,sBAAsB;AAAA,IAC/C,oBAAoB,CAAC,wCAAwC;AAAA,IAC7D,qBAAqB,CAAC,mDAAmD;AAAA,IACzE,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,6CAA6C;
AAAA,MAC3C;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,wBAAwB;AAAA,IAC5C,uCAAuC,CAAC,yBAAyB;AAAA,IACjE,WAAW,CAAC,gCAAgC;AAAA,IAC5C,kBAAkB,CAAC,wCAAwC;AAAA,IAC3D,mCAAmC,CAAC,gCAAgC;AAAA,IACpE,uCAAuC,CAAC,iCAAiC;AAAA,IACzE,8CAA8C;AAAA,MAC5C;AAAA,IACF;AAAA,IACA,uBAAuB,CAAC,0BAA0B;AAAA,IAClD,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,QAAQ,gDAAgD,EAAE;AAAA,IACxE;AAAA,IACA,gDAAgD;AAAA,MAC9C;AAAA,IACF;AAAA,IACA,YAAY,CAAC,uCAAuC;AAAA,IACpD,+BAA+B,CAAC,4BAA4B;AAAA,IAC5D,YAAY,CAAC,6CAA6C;AAAA,IAC1D,qBAAqB,CAAC,oDAAoD;AAAA,IAC1E,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,2BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAAA,EACA,SAAS;AAAA,IACP,4BAA4B,CAAC,0CAA0C;AAAA,IACvE,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,6BAA6B,CAAC,2CAA2C;AAAA,IACzE,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAAA,EACA,QAAQ;AAAA,IACN,QAAQ,CAAC,uCAAuC;AAAA,IAChD,aAAa,CAAC,yCAAyC;AAAA,IACvD,KAAK,CAAC,qDAAqD;AAAA,IAC3D,UAAU,CAAC,yDAAyD;AAAA,IACpE,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,YAAY,CAAC,oDAAoD;AAAA,IACjE,cAAc;AAAA,MACZ;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,sDAAsD;AAAA,IACzE,cAAc;AAAA,MACZ;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,uDAAuD;AAAA,EAClE;AAAA,EACA,cAAc;AAAA,IACZ,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,UAAU;AAAA,MACR;AAAA,MACA,CAAC;AAAA,MACD,EAAE,mBAAmB,EAAE,UAAU,eAAe,EAAE;AAAA,IACpD;AAAA,IACA,aAAa;AAAA,MACX;AAAA,IACF;AAAA,IACA,mBAAmB;AAAA,MACjB;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,uDAAuD;AAAA,IACzE,UAAU,CAAC,2DAA2D;AAAA,IACtE,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,sCAAsC;AAAA,IACzD,mBAAmB,CAAC,gDAAgD;AAAA,IACpE,qBAAqB;AAAA,MACnB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,gBAAgB,oBAAoB,EAAE;AAAA,IACpD;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,oBAAoB,CAAC,kDAAkD;AAAA,IACvE,aAAa;AAAA,MACX;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,aAAa,CAAC,iDAAiD;AAAA,EACjE;AAAA,EACA,gBAAgB;AAAA,IACd,sBAAsB,CAAC,uBAAuB;AAAA,IAC9C,gBAAgB,CAAC,6BAA6B;AAAA,EAChD;AAAA,EACA,YAAY;AAAA,IACV,4CAA4C;AAAA,MAC1C;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,uCAAuC;AAAA,MACrC;AAAA,IACF;AAAA,IACA,4BAA4B,CAAC,uBAAuB;AAAA,IACpD,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,0CAA0C;AAAA,MACxC;AAAA,IACF;AAAA,IACA,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,oCAAoC;AAAA,MAClC;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,4BAA4B,CAAC,0CAA0C;AAAA,IACvE,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,qDAAqD;AAAA,IACvE,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,yBAAyB,CAAC,uCAAuC;AAAA,IACjE,iBAAiB,CAAC,+CAA+C;AAAA,IACjE,cAAc,CAAC,kDAAkD;AAAA,IACjE,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,eAAe;AAAA,MACb;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,mDAAmD;AAAA,MACjD;AAAA,IACF;AAAA,IACA,0BAA0B,CAAC,sBAAsB;AAAA,IACjD,oBAAoB;AAAA,MAClB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,mBAAmB,EAAE,QAAQ,MAAM,EAAE;AAAA,IACzC;AAAA,IACA,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,oCAAoC;AAAA,IACrD,iBAAiB,CAAC,8CAA8C;AAAA,IAChE,+CAA+C;AAAA,MAC7C;AAAA,IACF;AAAA,IACA,iCAAiC,CAAC,8BAA8B;AAAA,IAChE,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,uCAAuC;AAAA,MACrC;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,+CAA+C;AAAA,MAC7C;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,sBAAsB,CAAC,oCAAoC;AAAA,IAC3D,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA
,IACA,8CAA8C;AAAA,MAC5C;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,2BAA2B,CAAC,8CAA8C;AAAA,IAC1E,0BAA0B,CAAC,6CAA6C;AAAA,IACxE,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,4BAA4B,CAAC,yCAAyC;AAAA,EACxE;AAAA,EACA,YAAY;AAAA,IACV,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,qDAAqD;AAAA,IACvE,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,UAAU,CAAC,4DAA4D;AAAA,IACvE,iBAAiB,CAAC,+CAA+C;AAAA,IACjE,cAAc,CAAC,kDAAkD;AAAA,IACjE,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,eAAe;AAAA,MACb;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,mCAAmC;AAAA,IACtD,mBAAmB,CAAC,6CAA6C;AAAA,IACjE,gBAAgB,CAAC,oCAAoC;AAAA,IACrD,iBAAiB,CAAC,8CAA8C;AAAA,IAChE,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,aAAa;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAAA,EACA,iBAAiB;AAAA,IACf,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,WAAW;AAAA,MACT;AAAA,IACF;AAAA,IACA,YAAY,CAAC,iDAAiD;AAAA,EAChE;AAAA,EACA,QAAQ,EAAE,KAAK,CAAC,aAAa,EAAE;AAAA,EAC/B,OAAO;AAAA,IACL,gBAAgB,CAAC,2BAA2B;AAAA,IAC5C,QAAQ,CAAC,aAAa;AAAA,IACtB,eAAe,CAAC,gCAAgC;AAAA,IAChD,QAAQ,CAAC,yBAAyB;AAAA,IAClC,eAAe,CAAC,+CAA+C;AAAA,IAC/D,MAAM,CAAC,6BAA6B;AAAA,IACpC,KAAK,CAAC,sBAAsB;AAAA,IAC5B,YAAY,CAAC,4CAA4C;AAAA,IACzD,aAAa,CAAC,4BAA4B;AAAA,IAC1C,MAAM,CAAC,YAAY;AAAA,IACnB,cAAc,CAAC,+BAA+B;AAAA,IAC9C,aAAa,CAAC,8BAA8B;AAAA,IAC5C,aAAa,CAAC,6BAA6B;AAAA,IAC3C,WAAW,CAAC,4BAA4B;AAAA,IACxC,YAAY,CAAC,mBAAmB;AAAA,IAChC,aAAa,CAAC,oBAAoB;AAAA,IAClC,MAAM,CAAC,2BAA2B;AAAA,IAClC,QAAQ,CAAC,8BAA8B;AAAA,IACvC,QAAQ,CAAC,wBAAwB;AAAA,IACjC,eAAe,CAAC,8CAA8C;AAAA,EAChE;AAAA,EACA,KAAK;AAAA,IACH,YAAY,CAAC,sCAAsC;AAAA,IACnD,cAAc,CAAC,wCAAwC;AAAA,IACvD,WAAW,CAAC,qCAAqC;AAAA,IACjD,WAAW,CAAC,qCAAqC;AAAA,IACjD,YAAY,CAAC,sCAAsC;AAAA,IACnD,WAAW,CAAC,6CAA6C;AAAA,IACzD,SAAS,CAAC,gDAAgD;AAAA,IAC1D,WAAW,CAAC,oDAAoD;AAAA,IAChE,QAAQ,CAAC,yCAAyC;AAAA,IAClD,QAAQ,CAAC,8CAA8C;AAAA,IACvD,SAAS,CAAC,gDAAgD;AAAA,IAC1D,kBAAkB,CAAC,mDAAmD;AAAA,IACtE,WAAW,CAAC,4CAA4C;AAAA,EAC1D;AAAA,EACA,WAAW;AAAA,IACT,iBAAiB,CAAC,0BAA0B;AAAA,IAC5C,aAAa,CAAC,iCAAiC;AAAA,EACjD;AAAA,EACA,cAAc;AAAA,IACZ,qCAAqC,CAAC,8BAA8B;AAAA,IACpE,uBAAuB,CAAC,oCAAoC;AAAA,IAC5D,wBAAwB,CAAC,8CAA8C;AAAA,IACvE,mCAAmC;AAAA,MACjC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,gBAAgB,qCAAqC,EAAE;AAAA,IACrE;AAAA,IACA,wCAAwC,CAAC,iCAAiC;AAAA,IAC1E,0BAA0B,CAAC,uCAAuC;AAAA,IAClE,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,sCAAsC;AAAA,MACpC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,gBAAgB,wCAAwC,EAAE;AAAA,IACxE;AAAA,IACA,qCAAqC,CAAC,8BAA8B;AAAA,IACpE,uBAAuB,CAAC,oCAAoC;AAAA,IAC5D,wBAAwB,CAAC,8CAA8C;AAAA,IACvE,mCAAmC;AAAA,MACjC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,gBAAgB,qCAAqC,EAAE;AAAA,IACrE;AAAA,EACF;AAAA,EACA,QAAQ;AAAA,IACN,cAAc;AAAA,MACZ;AAAA,IACF;AAAA,IACA,WAAW,CAAC,yDAAyD;AAAA,IACrE,wBAAwB,CAAC,gDAAgD;AAAA,IACzE,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,mCAAmC;AAAA,IAC5C,eAAe;AAAA,MACb;AAAA,IACF;AAAA,IACA,aAAa,CAAC,mCAAmC;AAAA,IACjD,iBAAiB,CAAC,uCAAuC;AAAA,IACzD,eAAe;AAAA,MACb;AAAA,IACF;AAAA,IACA,aAAa,CAAC,4CAA4C;AAAA,IAC1D,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,KAAK,CAAC,iDAAiD;AAAA,IACvD,YAAY,CAAC,wDAAwD;AAAA,IACrE,UAAU,CAAC,oDAAoD;AAAA,IAC/D,UAAU,CAAC,yCAAyC;AAAA,IACpD,cAAc,CAAC,yDAAyD;AAAA,IACxE,MAAM,CAAC,aAAa;AAAA,IACpB,eAAe,CAAC,qCAAqC;AAAA,IACrD,cAAc,CAAC,0DAA0D;AAAA,IACzE,qBAAqB,CAAC,2CAA2C;AAAA,IACjE,YAAY,CAAC,wDAAwD;AAAA,IACrE,mBAAmB,CAAC,yCAAyC;AAAA,IAC7D,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,0BAA0B,CAAC,kBAAkB;AAAA,IAC7C,YAAY,CAAC,wBAAwB;AAAA,IACrC,aAAa,CAAC,kCAAkC;AAAA,IAChD,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,
mBAAmB,CAAC,kCAAkC;AAAA,IACtD,mBAAmB;AAAA,MACjB;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,sCAAsC;AAAA,IACvD,MAAM,CAAC,sDAAsD;AAAA,IAC7D,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,aAAa;AAAA,MACX;AAAA,IACF;AAAA,IACA,WAAW,CAAC,wDAAwD;AAAA,IACpE,QAAQ,CAAC,yDAAyD;AAAA,IAClE,QAAQ,CAAC,mDAAmD;AAAA,IAC5D,eAAe,CAAC,0DAA0D;AAAA,IAC1E,aAAa,CAAC,2CAA2C;AAAA,IACzD,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,EACF;AAAA,EACA,UAAU;AAAA,IACR,KAAK,CAAC,yBAAyB;AAAA,IAC/B,oBAAoB,CAAC,eAAe;AAAA,IACpC,YAAY,CAAC,mCAAmC;AAAA,EAClD;AAAA,EACA,UAAU;AAAA,IACR,QAAQ,CAAC,gBAAgB;AAAA,IACzB,WAAW;AAAA,MACT;AAAA,MACA,EAAE,SAAS,EAAE,gBAAgB,4BAA4B,EAAE;AAAA,IAC7D;AAAA,EACF;AAAA,EACA,MAAM;AAAA,IACJ,KAAK,CAAC,WAAW;AAAA,IACjB,gBAAgB,CAAC,eAAe;AAAA,IAChC,YAAY,CAAC,cAAc;AAAA,IAC3B,QAAQ,CAAC,UAAU;AAAA,IACnB,MAAM,CAAC,OAAO;AAAA,EAChB;AAAA,EACA,YAAY;AAAA,IACV,cAAc,CAAC,qCAAqC;AAAA,IACpD,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,0CAA0C;AAAA,IAC7D,iBAAiB,CAAC,kCAAkC;AAAA,IACpD,eAAe,CAAC,8CAA8C;AAAA,IAC9D,+BAA+B,CAAC,qCAAqC;AAAA,IACrE,iBAAiB,CAAC,2CAA2C;AAAA,IAC7D,0BAA0B,CAAC,sBAAsB;AAAA,IACjD,YAAY,CAAC,4BAA4B;AAAA,IACzC,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,wDAAwD;AAAA,IAC1E,kBAAkB;AAAA,MAChB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,cAAc,+BAA+B,EAAE;AAAA,IAC7D;AAAA,IACA,iBAAiB,CAAC,wDAAwD;AAAA,IAC1E,kBAAkB,CAAC,wCAAwC;AAAA,IAC3D,2BAA2B,CAAC,uBAAuB;AAAA,IACnD,aAAa,CAAC,6BAA6B;AAAA,IAC3C,aAAa,CAAC,kCAAkC;AAAA,IAChD,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,cAAc,CAAC,oCAAoC;AAAA,EACrD;AAAA,EACA,MAAM;AAAA,IACJ,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,WAAW,CAAC,mCAAmC;AAAA,IAC/C,kBAAkB,CAAC,gDAAgD;AAAA,IACnE,kBAAkB,CAAC,mCAAmC;AAAA,IACtD,wBAAwB,CAAC,oCAAoC;AAAA,IAC7D,8BAA8B,CAAC,2CAA2C;AAAA,IAC1E,oCAAoC;AAAA,MAClC;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,8BAA8B;AAAA,IACjD,eAAe,CAAC,wBAAwB;AAAA,IACxC,QAAQ,CAAC,oBAAoB;AAAA,IAC7B,eAAe,CAAC,oCAAoC;AAAA,IACpD,6CAA6C;AAAA,MAC3C;AAAA,IACF;AAAA,IACA,KAAK,CAAC,iBAAiB;AAAA,IACvB,mCAAmC,CAAC,kCAAkC;AAAA,IACtE,sBAAsB,CAAC,wCAAwC;AAAA,IAC/D,YAAY,CAAC,iCAAiC;AAAA,IAC9C,wBAAwB,CAAC,wCAAwC;AAAA,IACjE,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,MAAM,CAAC,oBAAoB;AAAA,IAC3B,sBAAsB,CAAC,+BAA+B;AAAA,IACtD,kBAAkB,CAAC,wBAAwB;AAAA,IAC3C,uBAAuB,CAAC,oCAAoC;AAAA,IAC5D,0BAA0B,CAAC,gBAAgB;AAAA,IAC3C,aAAa,CAAC,4BAA4B;AAAA,IAC1C,qBAAqB,CAAC,mDAAmD;AAAA,IACzE,aAAa,CAAC,yBAAyB;AAAA,IACvC,qCAAqC,CAAC,4BAA4B;AAAA,IAClE,0BAA0B,CAAC,uCAAuC;AAAA,IAClE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,eAAe,CAAC,iDAAiD;AAAA,IACjE,wBAAwB,CAAC,6BAA6B;AAAA,IACtD,mBAAmB,CAAC,gCAAgC;AAAA,IACpD,0BAA0B,CAAC,mCAAmC;AAAA,IAC9D,uBAAuB,CAAC,4CAA4C;AAAA,IACpE,cAAc,CAAC,uBAAuB;AAAA,IACtC,aAAa,CAAC,wCAAwC;AAAA,IACtD,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,cAAc,CAAC,uCAAuC;AAAA,IACtD,yBAAyB,CAAC,2CAA2C;AAAA,IACrE,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,4CAA4C;AAAA,MAC1C;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,sBAAsB,CAAC,wCAAwC;AAAA,IAC/D,yCAAyC;AAAA,MACvC;AAAA,IACF;AAAA,IACA,aAAa,CAAC,sCAAsC;AAAA,IACpD,QAAQ,CAAC,mBAAmB;AAAA,IAC5B,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,kDAAkD;AAAA,IACtE,eAAe,CAAC,mCAAmC;AAAA,IACnD,2BAA2B,CAAC,0CAA0C;AAAA,EACxE;AAAA,EACA,UAAU;AAAA,IACR,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,0CAA0C;AAA
A,MACxC;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,8CAA8C;AAAA,MAC5C;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,YAAY,2CAA2C,EAAE;AAAA,IACvE;AAAA,IACA,6DAA6D;AAAA,MAC3D;AAAA,MACA,CAAC;AAAA,MACD;AAAA,QACE,SAAS;AAAA,UACP;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IACA,yDAAyD;AAAA,MACvD;AAAA,IACF;AAAA,IACA,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,4CAA4C;AAAA,MAC1C;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,mBAAmB;AAAA,MACjB;AAAA,IACF;AAAA,IACA,uCAAuC;AAAA,MACrC;AAAA,IACF;AAAA,IACA,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,4DAA4D;AAAA,MAC1D;AAAA,IACF;AAAA,IACA,uDAAuD;AAAA,MACrD;AAAA,IACF;AAAA,IACA,+CAA+C;AAAA,MAC7C;AAAA,IACF;AAAA,IACA,kCAAkC,CAAC,oBAAoB;AAAA,IACvD,6BAA6B,CAAC,0BAA0B;AAAA,IACxD,qBAAqB,CAAC,gCAAgC;AAAA,IACtD,oCAAoC;AAAA,MAClC;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AAAA,EACA,UAAU;AAAA,IACR,iBAAiB,CAAC,qDAAqD;AAAA,IACvE,YAAY,CAAC,0CAA0C;AAAA,IACvD,cAAc,CAAC,qCAAqC;AAAA,IACpD,4BAA4B,CAAC,qBAAqB;AAAA,IAClD,cAAc,CAAC,2BAA2B;AAAA,IAC1C,eAAe,CAAC,qCAAqC;AAAA,IACrD,QAAQ,CAAC,+BAA+B;AAAA,IACxC,YAAY,CAAC,0CAA0C;AAAA,IACvD,cAAc,CAAC,sCAAsC;AAAA,IACrD,KAAK,CAAC,4BAA4B;AAAA,IAClC,SAAS,CAAC,uCAAuC;AAAA,IACjD,WAAW,CAAC,mCAAmC;AAAA,IAC/C,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,WAAW,CAAC,yCAAyC;AAAA,IACrD,mBAAmB,CAAC,0CAA0C;AAAA,IAC9D,aAAa,CAAC,oCAAoC;AAAA,IAClD,YAAY,CAAC,0BAA0B;AAAA,IACvC,aAAa,CAAC,oCAAoC;AAAA,IAClD,aAAa,CAAC,gCAAgC;AAAA,IAC9C,UAAU,CAAC,8CAA8C;AAAA,IACzD,YAAY,CAAC,0CAA0C;AAAA,IACvD,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,8BAA8B;AAAA,IACvC,YAAY,CAAC,yCAAyC;AAAA,IACtD,cAAc,CAAC,qCAAqC;AAAA,EACtD;AAAA,EACA,OAAO;AAAA,IACL,eAAe,CAAC,qDAAqD;AAAA,IACrE,QAAQ,CAAC,kCAAkC;AAAA,IAC3C,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,cAAc,CAAC,wDAAwD;AAAA,IACvE,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,eAAe;AAAA,MACb;AAAA,IACF;AAAA,IACA,KAAK,CAAC,+CAA+C;AAAA,IACrD,WAAW;AAAA,MACT;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,uDAAuD;AAAA,IAC1E,MAAM,CAAC,iCAAiC;AAAA,IACxC,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,aAAa,CAAC,uDAAuD;AAAA,IACrE,WAAW,CAAC,qDAAqD;AAAA,IACjE,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,2BAA2B,CAAC,0CAA0C;AAAA,IACtE,aAAa,CAAC,uDAAuD;AAAA,IACrE,OAAO,CAAC,qDAAqD;AAAA,IAC7D,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,cAAc;AAAA,MACZ;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,iDAAiD;AAAA,IAC1D,cAAc;AAAA,MACZ;AAAA,IACF;AAAA,IACA,cAAc;AAAA,MACZ;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AAAA,EACA,WAAW,EAAE,KAAK,CAAC,iBAAiB,EAAE;AAAA,EACtC,WAAW;AAAA,IACT,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,qCAAqC;AAAA,MACnC;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,cAAc,CAAC,2DAA2D;AAAA,IAC1E,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,mCAAmC;AAAA,MAC
jC;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AAAA,EACA,OAAO;AAAA,IACL,kBAAkB;AAAA,MAChB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,sCAAsC,EAAE;AAAA,IAC/D;AAAA,IACA,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,OAAO;AAAA,IACtB;AAAA,IACA,iBAAiB,CAAC,oDAAoD;AAAA,IACtE,wBAAwB;AAAA,MACtB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,WAAW;AAAA,IAC1B;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,QAAQ;AAAA,IACvB;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,QAAQ;AAAA,IACvB;AAAA,IACA,mBAAmB,CAAC,oDAAoD;AAAA,IACxE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,6CAA6C;AAAA,IAChE,gBAAgB,CAAC,mDAAmD;AAAA,IACpE,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,sCAAsC;AAAA,IACvD,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,oBAAoB,CAAC,2CAA2C;AAAA,IAChE,iBAAiB,CAAC,iCAAiC;AAAA,IACnD,kBAAkB,CAAC,wCAAwC;AAAA,IAC3D,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,uCAAuC;AAAA,IAC7D,4BAA4B,CAAC,kBAAkB;AAAA,IAC/C,YAAY,CAAC,kCAAkC;AAAA,IAC/C,aAAa,CAAC,wBAAwB;AAAA,IACtC,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,4BAA4B,CAAC,2CAA2C;AAAA,IACxE,kBAAkB,CAAC,2BAA2B;AAAA,IAC9C,uBAAuB,CAAC,6CAA6C;AAAA,IACrE,iBAAiB,CAAC,kCAAkC;AAAA,IACpD,eAAe,CAAC,qCAAqC;AAAA,IACrD,mBAAmB,CAAC,qCAAqC;AAAA,IACzD,qBAAqB,CAAC,4CAA4C;AAAA,IAClE,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,eAAe,CAAC,kCAAkC;AAAA,IAClD,mBAAmB;AAAA,MACjB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,uCAAuC,EAAE;AAAA,IAChE;AAAA,IACA,uCAAuC;AAAA,MACrC;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,8BAA8B;AAAA,IACvC,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,sDAAsD;AAAA,IACvE,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,oDAAoD;AAAA,IAC1E,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,4CAA4C;AAAA,IAC9D,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,YAAY,CAAC,8CAA8C;AAAA,IAC3D,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,0CAA0C;AAAA,IAC7D,iBAAiB,CAAC,oCAAoC;AAAA,IACtD,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,eAAe,CAAC,oDAAoD;AAAA,IACpE,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,oDAAoD;AAAA,IACxE,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,eAAe,CAAC,8CAA8C;AAAA,IAC9D,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,kCAAkC;AAAA,IACtD,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,MACf;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,wBAAwB,EAAE;AAAA,IACjD;AAAA,IACA,wBAAwB,CAAC,yCAAyC;AAAA,IAClE,wBAAwB,CAAC,yCAAyC;AAAA,IAClE,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,+BAA+B;AAAA,IAClD,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,KAAK,CAAC,2BAA2B;AAAA,IACjC,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,oBAAoB,CAAC,wCAAwC;AAAA,IAC7D,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,cAAc,CAAC,kCAAkC;AAAA,IACjD,oCAAoC;AAAA,MAClC;AAAA,IACF;AAAA,IACA,aAAa,CAAC,mDAAmD;AAAA,IACjE,WAAW,CAAC,6CAA6C;AAAA,IACzD,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,mDAAmD;AAAA,IACpE,WAAW,CAAC,0CAA0C;AAAA,IACtD,uBAAuB,CAAC,gDAAgD;AAAA,IACxE,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,yBAAyB,CAAC,gDAAgD;AAAA,IAC1E,WAAW,CAAC,yCAAyC;AAAA,IACrD,wBAAwB,CAAC,iDAAiD;AAAA,IAC1E,kBAAkB,CAAC,iDAAiD;AAAA,IACpE,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,4BAA4B,CAAC,6CAA6C;AAAA,IAC1E,YAAY,CAAC,2CAA2C;AAAA,IACxD,sBAAsB,CAAC,8CAA8C;AAAA,IACrE,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,cAAc,CAAC,yCAAyC;AAA
A,IACxD,eAAe,CAAC,uDAAuD;AAAA,IACvE,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,+CAA+C;AAAA,IACrE,kBAAkB,CAAC,2CAA2C;AAAA,IAC9D,eAAe,CAAC,uCAAuC;AAAA,IACvD,gBAAgB,CAAC,0BAA0B;AAAA,IAC3C,UAAU,CAAC,iCAAiC;AAAA,IAC5C,eAAe,CAAC,mDAAmD;AAAA,IACnE,qBAAqB,CAAC,wCAAwC;AAAA,IAC9D,uBAAuB,CAAC,+CAA+C;AAAA,IACvE,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,4CAA4C;AAAA,IAChE,WAAW,CAAC,kCAAkC;AAAA,IAC9C,sBAAsB,CAAC,wCAAwC;AAAA,IAC/D,YAAY,CAAC,iDAAiD;AAAA,IAC9D,iBAAiB,CAAC,sDAAsD;AAAA,IACxE,iBAAiB,CAAC,+CAA+C;AAAA,IACjE,gBAAgB,CAAC,iDAAiD;AAAA,IAClE,iBAAiB,CAAC,oCAAoC;AAAA,IACtD,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,qCAAqC;AAAA,MACnC;AAAA,IACF;AAAA,IACA,aAAa,CAAC,iDAAiD;AAAA,IAC/D,iBAAiB,CAAC,qDAAqD;AAAA,IACvE,qCAAqC;AAAA,MACnC;AAAA,IACF;AAAA,IACA,UAAU,CAAC,yCAAyC;AAAA,IACpD,YAAY,CAAC,2CAA2C;AAAA,IACxD,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,eAAe,CAAC,qCAAqC;AAAA,IACrD,cAAc,CAAC,oCAAoC;AAAA,IACnD,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,yCAAyC;AAAA,IAC7D,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,2BAA2B,CAAC,oCAAoC;AAAA,IAChE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,aAAa,CAAC,mCAAmC;AAAA,IACjD,kBAAkB,CAAC,wCAAwC;AAAA,IAC3D,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,gCAAgC;AAAA,IACjD,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,uCAAuC;AAAA,IACzD,0BAA0B,CAAC,iBAAiB;AAAA,IAC5C,YAAY,CAAC,uBAAuB;AAAA,IACpC,aAAa,CAAC,6BAA6B;AAAA,IAC3C,WAAW,CAAC,iCAAiC;AAAA,IAC7C,iBAAiB,CAAC,uCAAuC;AAAA,IACzD,qCAAqC,CAAC,kCAAkC;AAAA,IACxE,eAAe,CAAC,qCAAqC;AAAA,IACrD,iBAAiB,CAAC,wCAAwC;AAAA,IAC1D,YAAY,CAAC,mBAAmB;AAAA,IAChC,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,mBAAmB;AAAA,MACjB;AAAA,IACF;AAAA,IACA,cAAc,CAAC,oCAAoC;AAAA,IACnD,mBAAmB,CAAC,2CAA2C;AAAA,IAC/D,UAAU,CAAC,gCAAgC;AAAA,IAC3C,WAAW,CAAC,iCAAiC;AAAA,IAC7C,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,cAAc,CAAC,iCAAiC;AAAA,IAChD,OAAO,CAAC,mCAAmC;AAAA,IAC3C,eAAe,CAAC,2CAA2C;AAAA,IAC3D,aAAa,CAAC,kDAAkD;AAAA,IAChE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,OAAO;AAAA,IACtB;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,WAAW;AAAA,IAC1B;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,QAAQ;AAAA,IACvB;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,QAAQ;AAAA,IACvB;AAAA,IACA,cAAc,CAAC,qDAAqD;AAAA,IACpE,kBAAkB,CAAC,kCAAkC;AAAA,IACrD,mBAAmB,CAAC,yCAAyC;AAAA,IAC7D,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,OAAO;AAAA,IACtB;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,WAAW;AAAA,IAC1B;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,QAAQ;AAAA,IACvB;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,QAAQ;AAAA,IACvB;AAAA,IACA,iBAAiB,CAAC,kDAAkD;AAAA,IACpE,UAAU,CAAC,qCAAqC;AAAA,IAChD,QAAQ,CAAC,6BAA6B;AAAA,IACtC,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,mDAAmD;AAAA,IACzE,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,iCAAiC,CAAC,iCAAiC;AAAA,IACnE,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,uCAAuC;AAAA,IAC1D,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,eAAe,CAAC,mDAAmD;AAAA,IACnE,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,iDAAiD;AAAA,IACrE,4BAA4B;AAAA,MAC1B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,6BAA6B,EAAE;AAAA,IACtD;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,eAAe,CAAC,6CAA6C;AAAA,IAC7D,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,MACA,EAAE,SAA
S,6BAA6B;AAAA,IAC1C;AAAA,EACF;AAAA,EACA,QAAQ;AAAA,IACN,MAAM,CAAC,kBAAkB;AAAA,IACzB,SAAS,CAAC,qBAAqB;AAAA,IAC/B,uBAAuB,CAAC,oBAAoB;AAAA,IAC5C,QAAQ,CAAC,oBAAoB;AAAA,IAC7B,OAAO,CAAC,0BAA0B;AAAA,IAClC,QAAQ,CAAC,oBAAoB;AAAA,IAC7B,OAAO,CAAC,mBAAmB;AAAA,EAC7B;AAAA,EACA,gBAAgB;AAAA,IACd,UAAU;AAAA,MACR;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,wCAAwC;AAAA,IAC3D,mBAAmB,CAAC,kDAAkD;AAAA,IACtE,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,aAAa;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAAA,EACA,oBAAoB;AAAA,IAClB,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,0BAA0B,CAAC,+CAA+C;AAAA,IAC1E,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,EACF;AAAA,EACA,OAAO;AAAA,IACL,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,oCAAoC;AAAA,MAClC;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,wBAAwB;AAAA,IACjC,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,uBAAuB,CAAC,gDAAgD;AAAA,IACxE,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,aAAa,CAAC,sCAAsC;AAAA,IACpD,WAAW,CAAC,mCAAmC;AAAA,IAC/C,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,MAAM,CAAC,uBAAuB;AAAA,IAC9B,gBAAgB,CAAC,yCAAyC;AAAA,IAC1D,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,sBAAsB,CAAC,+CAA+C;AAAA,IACtE,0BAA0B,CAAC,iBAAiB;AAAA,IAC5C,kBAAkB,CAAC,2CAA2C;AAAA,IAC9D,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,4CAA4C;AAAA,IAChE,gBAAgB,CAAC,yCAAyC;AAAA,IAC1D,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,aAAa,CAAC,qCAAqC;AAAA,EACrD;AAAA,EACA,OAAO;AAAA,IACL,0BAA0B;AAAA,MACxB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,8BAA8B,EAAE;AAAA,IACvD;AAAA,IACA,8BAA8B,CAAC,mBAAmB;AAAA,IAClD,sCAAsC,CAAC,4BAA4B;AAAA,IACnE,OAAO,CAAC,6BAA6B;AAAA,IACrC,cAAc,CAAC,6BAA6B;AAAA,IAC5C,uBAAuB,CAAC,+CAA+C;AAAA,IACvE,sCAAsC,CAAC,gCAAgC;AAAA,IACvE,8BAA8B;AAAA,MAC5B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,kCAAkC,EAAE;AAAA,IAC3D;AAAA,IACA,kCAAkC,CAAC,qBAAqB;AAAA,IACxD,oCAAoC;AAAA,MAClC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,wCAAwC,EAAE;AAAA,IACjE;AAAA,IACA,wCAAwC,CAAC,iBAAiB;AAAA,IAC1D,yCAAyC,CAAC,6BAA6B;AAAA,IACvE,6BAA6B;AAAA,MAC3B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,iCAAiC,EAAE;AAAA,IAC1D;AAAA,IACA,iCAAiC,CAAC,qBAAqB;AAAA,IACvD,8BAA8B;AAAA,MAC5B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,kCAAkC,EAAE;AAAA,IAC3D;AAAA,IACA,kCAAkC,CAAC,oCAAoC;AAAA,IACvE,oCAAoC;AAAA,MAClC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,wCAAwC,EAAE;AAAA,IACjE;AAAA,IACA,wCAAwC,CAAC,4BAA4B;AAAA,IACrE,yCAAyC,CAAC,8BAA8B;AAAA,IACxE,yCAAyC;AAAA,MACvC;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,gCAAgC;AAAA,IACzC,kBAAkB,CAAC,WAAW;AAAA,IAC9B,eAAe,CAAC,uBAAuB;AAAA,IACvC,mBAAmB,CAAC,iCAAiC;AAAA,IACrD,2BAA2B;AAAA,MACzB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,+BAA+B,EAAE;AAAA,IACxD;AAAA,IACA,+BAA+B,CAAC,iCAAiC;AAAA,IACjE,iCAAiC;AAAA,MAC/B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,qCAAqC,EAAE;AAAA,IAC9D;AAAA,IACA,qCAAqC,CAAC,yBAAyB;AAAA,IAC/D,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,MAAM,CAAC,YAAY;AAAA,IACnB,4BAA4B;AAAA,MAC1B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,gCAAgC,EAAE;AAAA,IACzD;AAAA,IACA,gCAAgC,CAAC,kBAAkB;AAAA,IACnD,4BAA4B;AAAA,MAC1B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,gCAAgC,EAAE;AAAA,IACzD;AAAA,IACA,gCAAgC,CAAC,kBAAkB;AAAA,IACnD,6BAA6B;AAAA,MAC3B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,iCAAiC,EAAE;AAAA,IAC1D;AAAA,IAC
A,iCAAiC,CAAC,qBAAqB;AAAA,IACvD,mCAAmC,CAAC,qBAAqB;AAAA,IACzD,sBAAsB,CAAC,iCAAiC;AAAA,IACxD,sBAAsB,CAAC,iCAAiC;AAAA,IACxD,6BAA6B;AAAA,MAC3B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,iCAAiC,EAAE;AAAA,IAC1D;AAAA,IACA,iCAAiC,CAAC,oBAAoB;AAAA,IACtD,oBAAoB,CAAC,gCAAgC;AAAA,IACrD,kCAAkC;AAAA,MAChC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,sCAAsC,EAAE;AAAA,IAC/D;AAAA,IACA,sCAAsC,CAAC,yBAAyB;AAAA,IAChE,uBAAuB,CAAC,4BAA4B;AAAA,IACpD,mCAAmC;AAAA,MACjC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,uCAAuC,EAAE;AAAA,IAChE;AAAA,IACA,uCAAuC,CAAC,gBAAgB;AAAA,IACxD,wCAAwC,CAAC,2BAA2B;AAAA,IACpE,2BAA2B,CAAC,uCAAuC;AAAA,IACnE,wCAAwC,CAAC,4BAA4B;AAAA,IACrE,2BAA2B,CAAC,wCAAwC;AAAA,IACpE,2CAA2C;AAAA,MACzC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,+CAA+C,EAAE;AAAA,IACxE;AAAA,IACA,+CAA+C;AAAA,MAC7C;AAAA,IACF;AAAA,IACA,SAAS,CAAC,gCAAgC;AAAA,IAC1C,UAAU,CAAC,mCAAmC;AAAA,IAC9C,qBAAqB,CAAC,aAAa;AAAA,EACrC;AACF;AACA,IAAI,oBAAoB;;;ACz0DxB,IAAM,qBAAqC,oBAAI,IAAI;AACnD,WAAW,CAAC,OAAO,SAAS,KAAK,OAAO,QAAQ,iBAAS,GAAG;AAC1D,aAAW,CAAC,YAAY,QAAQ,KAAK,OAAO,QAAQ,SAAS,GAAG;AAC9D,UAAM,CAAC,OAAO,UAAU,WAAW,IAAI;AACvC,UAAM,CAAC,QAAQ,GAAG,IAAI,MAAM,MAAM,GAAG;AACrC,UAAM,mBAAmB,OAAO;AAAA,MAC9B;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,MACA;AAAA,IACF;AACA,QAAI,CAAC,mBAAmB,IAAI,KAAK,GAAG;AAClC,yBAAmB,IAAI,OAAuB,oBAAI,IAAI,CAAC;AAAA,IACzD;AACA,uBAAmB,IAAI,KAAK,EAAE,IAAI,YAAY;AAAA,MAC5C;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACF;AACA,IAAM,UAAU;AAAA,EACd,IAAI,EAAE,SAAS,OAAO,MAAM,GAAG,YAAY;AACzC,QAAI,MAAM,UAAU,GAAG;AACrB,aAAO,MAAM,UAAU;AAAA,IACzB;AACA,UAAM,EAAE,aAAa,iBAAiB,IAAI,mBAAmB,IAAI,KAAK,EAAE,IAAI,UAAU;AACtF,QAAI,aAAa;AACf,YAAM,UAAU,IAAI;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF,OAAO;AACL,YAAM,UAAU,IAAI,QAAQ,QAAQ,SAAS,gBAAgB;AAAA,IAC/D;AACA,WAAO,MAAM,UAAU;AAAA,EACzB;AACF;AACA,SAAS,mBAAmB,SAAS;AACnC,QAAM,aAAa,CAAC;AACpB,aAAW,SAAS,mBAAmB,KAAK,GAAG;AAC7C,eAAW,KAAK,IAAI,IAAI,MAAM,EAAE,SAAS,OAAO,OAAO,CAAC,EAAE,GAAG,OAAO;AAAA,EACtE;AACA,SAAO;AACT;AACA,SAAS,SAAS,SAAS,OAAO,YAAY,UAAU,aAAa;AACnE,QAAM,sBAAsB,QAAQ,QAAQ,SAAS,QAAQ;AAC7D,WAAS,mBAAmB,MAAM;AAChC,QAAI,UAAU,oBAAoB,SAAS,MAAM,GAAG,IAAI;AACxD,QAAI,YAAY,WAAW;AACzB,gBAAU,OAAO,OAAO,CAAC,GAAG,SAAS;AAAA,QACnC,MAAM,QAAQ,YAAY,SAAS;AAAA,QACnC,CAAC,YAAY,SAAS,GAAG;AAAA,MAC3B,CAAC;AACD,aAAO,oBAAoB,OAAO;AAAA,IACpC;AACA,QAAI,YAAY,SAAS;AACvB,YAAM,CAAC,UAAU,aAAa,IAAI,YAAY;AAC9C,cAAQ,IAAI;AAAA,QACV,WAAW,SAAS,4CAA4C,YAAY;AAAA,MAC9E;AAAA,IACF;AACA,QAAI,YAAY,YAAY;AAC1B,cAAQ,IAAI,KAAK,YAAY,UAAU;AAAA,IACzC;AACA,QAAI,YAAY,mBAAmB;AACjC,YAAM,WAAW,oBAAoB,SAAS,MAAM,GAAG,IAAI;AAC3D,iBAAW,CAAC,MAAM,KAAK,KAAK,OAAO;AAAA,QACjC,YAAY;AAAA,MACd,GAAG;AACD,YAAI,QAAQ,UAAU;AACpB,kBAAQ,IAAI;AAAA,YACV,IAAI,8CAA8C,SAAS,uBAAuB;AAAA,UACpF;AACA,cAAI,EAAE,SAAS,WAAW;AACxB,qBAAS,KAAK,IAAI,SAAS,IAAI;AAAA,UACjC;AACA,iBAAO,SAAS,IAAI;AAAA,QACtB;AAAA,MACF;AACA,aAAO,oBAAoB,QAAQ;AAAA,IACrC;AACA,WAAO,oBAAoB,GAAG,IAAI;AAAA,EACpC;AACA,SAAO,OAAO,OAAO,iBAAiB,mBAAmB;AAC3D;;;AHzFA,SAAS,oBAAoB,SAAS;AACpC,QAAM,MAAM,mBAAmB,OAAO;AACtC,SAAO;AAAA,IACL,MAAM;AAAA,EACR;AACF;AACA,oBAAoB,UAAU;AAC9B,SAAS,0BAA0B,SAAS;AAC1C,QAAM,MAAM,mBAAmB,OAAO;AACtC,SAAO;AAAA,IACL,GAAG;AAAA,IACH,MAAM;AAAA,EACR;AACF;AACA,0BAA0B,UAAU;", + "names": [] +} diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js new file mode 100644 index 0000000..2344d62 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js @@ -0,0 +1,95 @@ +import ENDPOINTS from 
"./generated/endpoints"; +const endpointMethodsMap = /* @__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(ENDPOINTS)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + { + method, + url + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } +} +const handler = { + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const { decorations, endpointDefaults } = endpointMethodsMap.get(scope).get(methodName); + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = octokit.request.defaults(endpointDefaults); + } + return cache[methodName]; + } +}; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); + } + return newMethods; +} +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + function withDecorations(...args) { + let options = requestWithDefaults.endpoint.merge(...args); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: void 0 + }); + return requestWithDefaults(options); + } + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); + } + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + if (decorations.renamedParameters) { + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; + } + delete options2[name]; + } + } + return requestWithDefaults(options2); + } + return requestWithDefaults(...args); + } + return Object.assign(withDecorations, requestWithDefaults); +} +export { + endpointsToMethods +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js new file mode 100644 index 0000000..4674837 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js @@ -0,0 +1,1870 @@ +const Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + addSelectedRepoToRequiredWorkflow: [ + "PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createRequiredWorkflow: ["POST /orgs/{org}/actions/required_workflows"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteRequiredWorkflow: [ + "DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}" + 
], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoRequiredWorkflow: [ + "GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}" + 
], + getRepoRequiredWorkflowUsage: [ + "GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing" + ], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getRequiredWorkflow: [ + "GET /orgs/{org}/actions/required_workflows/{required_workflow_id}" + ], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], + listRepoRequiredWorkflows: [ + "GET /repos/{org}/{repo}/actions/required_workflows" + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRequiredWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs" + ], + listRequiredWorkflows: ["GET /orgs/{org}/actions/required_workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], + listSelectedRepositoriesRequiredWorkflow: [ + "GET /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories" + ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + removeSelectedRepoFromRequiredWorkflow: [ + "DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedReposToRequiredWorkflow: [ + "PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ], + updateRequiredWorkflow: [ + "PATCH /orgs/{org}/actions/required_workflows/{required_workflow_id}" + ] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE 
/repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription" + ], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: [ + "GET 
/marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + ], + getCodeqlDatabase: [ + "GET 
/repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines" + ], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], + deleteCodespacesBillingUsers: [ + "DELETE /orgs/{org}/codespaces/billing/selected_users" + ], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET 
/repos/{owner}/{repo}/codespaces/devcontainers" + ], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + setCodespacesBilling: ["PUT /orgs/{org}/codespaces/billing"], + setCodespacesBillingUsers: [ + "POST /orgs/{org}/codespaces/billing/selected_users" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + 
"PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] + }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] + }, + emojis: { get: ["GET /emojis"] }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] + }, + issues: { + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + 
checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] + }, + meta: { + get: ["GET /meta"], + getAllVersions: ["GET /versions"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: ["DELETE /repos/{owner}/{repo}/import"], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET 
/user/migrations/{migration_id}/archive" + ], + getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], + getImportStatus: ["GET /repos/{owner}/{repo}/import"], + getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], + setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: ["PUT /repos/{owner}/{repo}/import"], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: ["PATCH /repos/{owner}/{repo}/import"] + }, + orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], + createInvitation: ["POST /orgs/{org}/invitations"], + createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], + get: ["GET /orgs/{org}"], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: [ + "GET /organizations/{org}/personal-access-token-requests" + ], + listPatGrants: ["GET /organizations/{org}/personal-access-tokens"], + 
listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /organizations/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /organizations/{org}/personal-access-token-requests" + ], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: [ + "POST /organizations/{org}/personal-access-tokens/{pat_id}" + ], + updatePatAccesses: ["POST /organizations/{org}/personal-access-tokens"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] + } + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + 
getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" + ], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ 
+ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] + }, + rateLimit: { get: ["GET /rate_limit"] }, + reactions: { + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ 
+ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] + }, + repos: { + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST 
/repos/{owner}/{repo}/rulesets"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" + ], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET 
/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + 
getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: 
[ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + 
listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + getRepositoryAdvisory: [ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] + }, + teams: { + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + updateInOrg: ["PATCH 
/orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], + addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + list: ["GET /users"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } + ], + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + 
listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; +var endpoints_default = Endpoints; +export { + endpoints_default as default +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/method-types.js b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/method-types.js new file mode 100644 index 0000000..e69de29 diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/parameters-and-response-types.js b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/parameters-and-response-types.js new file mode 100644 index 0000000..e69de29 diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js new file mode 100644 index 0000000..2fe13ac --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js @@ -0,0 +1,21 @@ +import { VERSION } from "./version"; +import { endpointsToMethods } from "./endpoints-to-methods"; +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + rest: api + }; +} +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + ...api, + rest: api + }; +} +legacyRestEndpointMethods.VERSION = VERSION; +export { + legacyRestEndpointMethods, + restEndpointMethods +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js new file mode 100644 index 0000000..3c1d9ad --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "7.2.3"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/endpoints-to-methods.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/endpoints-to-methods.d.ts new file mode 100644 index 0000000..d9eb8ca --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/endpoints-to-methods.d.ts @@ -0,0 +1,3 @@ +import type { Octokit } from "@octokit/core"; +import type { RestEndpointMethods } from "./generated/method-types"; +export declare function endpointsToMethods(octokit: Octokit): RestEndpointMethods; diff --git 
a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/endpoints.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/endpoints.d.ts new file mode 100644 index 0000000..930616e --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/endpoints.d.ts @@ -0,0 +1,3 @@ +import type { EndpointsDefaultsAndDecorations } from "../types"; +declare const Endpoints: EndpointsDefaultsAndDecorations; +export default Endpoints; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts new file mode 100644 index 0000000..94375de --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types.d.ts @@ -0,0 +1,11405 @@ +import type { EndpointInterface, RequestInterface } from "@octokit/types"; +import type { RestEndpointMethodTypes } from "./parameters-and-response-types"; +export type RestEndpointMethods = { + actions: { + /** + * Add custom labels to a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + addCustomLabelsToSelfHostedRunnerForOrg: { + (params?: RestEndpointMethodTypes["actions"]["addCustomLabelsToSelfHostedRunnerForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Add custom labels to a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + addCustomLabelsToSelfHostedRunnerForRepo: { + (params?: RestEndpointMethodTypes["actions"]["addCustomLabelsToSelfHostedRunnerForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + addSelectedRepoToOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["addSelectedRepoToOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a repository to an organization variable that is available to selected repositories. Organization variables that are available to selected repositories have their `visibility` field set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + addSelectedRepoToOrgVariable: { + (params?: RestEndpointMethodTypes["actions"]["addSelectedRepoToOrgVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a repository to a required workflow. To use this endpoint, the required workflow must be configured to run on selected repositories. 
+ * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + addSelectedRepoToRequiredWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["addSelectedRepoToRequiredWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Approves a workflow run for a pull request from a public fork of a first time contributor. For more information, see ["Approving workflow runs from public forks](https://docs.github.com/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + approveWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["approveWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Cancels a workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + cancelWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["cancelWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create an environment variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `environment:write` repository permission to use this endpoint. + */ + createEnvironmentVariable: { + (params?: RestEndpointMethodTypes["actions"]["createEnvironmentVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates an environment secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. 
+ * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateEnvironmentSecret: { + (params?: RestEndpointMethodTypes["actions"]["createOrUpdateEnvironmentSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to + * use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. 
+ * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["createOrUpdateOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. 
+ * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateRepoSecret: { + (params?: RestEndpointMethodTypes["actions"]["createOrUpdateRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates an organization variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + createOrgVariable: { + (params?: RestEndpointMethodTypes["actions"]["createOrgVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. 
+ * + * ``` + * ./config.sh --url https://github.com/octo-org --token TOKEN + * ``` + */ + createRegistrationTokenForOrg: { + (params?: RestEndpointMethodTypes["actions"]["createRegistrationTokenForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. You must authenticate + * using an access token with the `repo` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token TOKEN + * ``` + */ + createRegistrationTokenForRepo: { + (params?: RestEndpointMethodTypes["actions"]["createRegistrationTokenForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an organization. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from an organization, replace `TOKEN` with the remove token provided by this + * endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + createRemoveTokenForOrg: { + (params?: RestEndpointMethodTypes["actions"]["createRemoveTokenForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a token that you can pass to remove a self-hosted runner from a repository. The token expires after one hour. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from a repository, replace TOKEN with the remove token provided by this endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + createRemoveTokenForRepo: { + (params?: RestEndpointMethodTypes["actions"]["createRemoveTokenForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a repository variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions_variables:write` repository permission to use this endpoint. + */ + createRepoVariable: { + (params?: RestEndpointMethodTypes["actions"]["createRepoVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a required workflow in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + createRequiredWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["createRequiredWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can use this endpoint to manually trigger a GitHub Actions workflow run. 
You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must configure your GitHub Actions workflow to run when the [`workflow_dispatch` webhook](/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch) event occurs. The `inputs` are configured in the workflow file. For more information about how to configure the `workflow_dispatch` event in the workflow file, see "[Events that trigger workflows](/actions/reference/events-that-trigger-workflows#workflow_dispatch)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)." + */ + createWorkflowDispatch: { + (params?: RestEndpointMethodTypes["actions"]["createWorkflowDispatch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a GitHub Actions cache for a repository, using a cache ID. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + deleteActionsCacheById: { + (params?: RestEndpointMethodTypes["actions"]["deleteActionsCacheById"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes one or more GitHub Actions caches for a repository, using a complete cache key. By default, all caches that match the provided key are deleted, but you can optionally provide a Git ref to restrict deletions to caches that match both the provided key and the Git ref. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + deleteActionsCacheByKey: { + (params?: RestEndpointMethodTypes["actions"]["deleteActionsCacheByKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes an artifact for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + deleteArtifact: { + (params?: RestEndpointMethodTypes["actions"]["deleteArtifact"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret in an environment using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + deleteEnvironmentSecret: { + (params?: RestEndpointMethodTypes["actions"]["deleteEnvironmentSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes an environment variable using the variable name. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `environment:write` repository permission to use this endpoint. 
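+ *
+ * #### Example deleting an environment variable (sketch)
+ *
+ * A minimal sketch of calling this method from an authenticated Octokit instance that includes
+ * this plugin (for example, `@octokit/rest`). The parameter names shown are assumptions; check
+ * `RestEndpointMethodTypes["actions"]["deleteEnvironmentVariable"]["parameters"]` for the exact shape.
+ *
+ * ```
+ * // `octokit` is assumed to be an authenticated client; values below are placeholders.
+ * await octokit.rest.actions.deleteEnvironmentVariable({
+ *   repository_id: 123456,          // assumed: environment endpoints key off the repository ID
+ *   environment_name: 'production',
+ *   name: 'MY_VARIABLE',
+ * });
+ * ```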
+ */ + deleteEnvironmentVariable: { + (params?: RestEndpointMethodTypes["actions"]["deleteEnvironmentVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + deleteOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["deleteOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes an organization variable using the variable name. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + deleteOrgVariable: { + (params?: RestEndpointMethodTypes["actions"]["deleteOrgVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + deleteRepoSecret: { + (params?: RestEndpointMethodTypes["actions"]["deleteRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a repository variable using the variable name. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions_variables:write` repository permission to use this endpoint. + */ + deleteRepoVariable: { + (params?: RestEndpointMethodTypes["actions"]["deleteRepoVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a required workflow configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + deleteRequiredWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["deleteRequiredWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Forces the removal of a self-hosted runner from an organization. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + deleteSelfHostedRunnerFromOrg: { + (params?: RestEndpointMethodTypes["actions"]["deleteSelfHostedRunnerFromOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Forces the removal of a self-hosted runner from a repository. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `repo` + * scope to use this endpoint. 
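+ *
+ * #### Example removing a self-hosted runner (sketch)
+ *
+ * A minimal sketch, assuming an authenticated Octokit instance that includes this plugin; the
+ * values are placeholders, and `runner_id` would normally come from a `listSelfHostedRunnersForRepo` call.
+ *
+ * ```
+ * await octokit.rest.actions.deleteSelfHostedRunnerFromRepo({
+ *   owner: 'octo-org',
+ *   repo: 'octo-repo',
+ *   runner_id: 42,   // placeholder runner ID
+ * });
+ * ```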
+ */ + deleteSelfHostedRunnerFromRepo: { + (params?: RestEndpointMethodTypes["actions"]["deleteSelfHostedRunnerFromRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Delete a specific workflow run. Anyone with write access to the repository can use this endpoint. If the repository is + * private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:write` permission to use + * this endpoint. + */ + deleteWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["deleteWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes all logs for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + deleteWorkflowRunLogs: { + (params?: RestEndpointMethodTypes["actions"]["deleteWorkflowRunLogs"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a repository from the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + disableSelectedRepositoryGithubActionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["disableSelectedRepositoryGithubActionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Disables a workflow and sets the `state` of the workflow to `disabled_manually`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + disableWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["disableWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a redirect URL to download an archive for a repository. This URL expires after 1 minute. Look for `Location:` in + * the response header to find the URL for the download. The `:archive_format` must be `zip`. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + downloadArtifact: { + (params?: RestEndpointMethodTypes["actions"]["downloadArtifact"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a redirect URL to download a plain text file of logs for a workflow job. This link expires after 1 minute. Look + * for `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can + * use this endpoint. 
If the repository is private you must use an access token with the `repo` scope. GitHub Apps must
+ * have the `actions:read` permission to use this endpoint.
+ */
+ downloadJobLogsForWorkflowRun: {
+ (params?: RestEndpointMethodTypes["actions"]["downloadJobLogsForWorkflowRun"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Gets a redirect URL to download an archive of log files for a specific workflow run attempt. This link expires after
+ * 1 minute. Look for `Location:` in the response header to find the URL for the download. Anyone with read access to
+ * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope.
+ * GitHub Apps must have the `actions:read` permission to use this endpoint.
+ */
+ downloadWorkflowRunAttemptLogs: {
+ (params?: RestEndpointMethodTypes["actions"]["downloadWorkflowRunAttemptLogs"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Gets a redirect URL to download an archive of log files for a workflow run. This link expires after 1 minute. Look for
+ * `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can use
+ * this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have
+ * the `actions:read` permission to use this endpoint.
+ */
+ downloadWorkflowRunLogs: {
+ (params?: RestEndpointMethodTypes["actions"]["downloadWorkflowRunLogs"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."
+ *
+ * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API.
+ */
+ enableSelectedRepositoryGithubActionsOrganization: {
+ (params?: RestEndpointMethodTypes["actions"]["enableSelectedRepositoryGithubActionsOrganization"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Enables a workflow and sets the `state` of the workflow to `active`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`.
+ *
+ * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint.
+ */
+ enableWorkflow: {
+ (params?: RestEndpointMethodTypes["actions"]["enableWorkflow"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Generates a configuration that can be passed to the runner application at startup.
+ *
+ * You must authenticate using an access token with the `admin:org` scope to use this endpoint.
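+ *
+ * #### Example generating a JIT runner configuration (sketch)
+ *
+ * A minimal sketch, assuming an authenticated Octokit instance that includes this plugin. The
+ * request body fields shown (`name`, `runner_group_id`, `labels`) are assumptions; confirm them
+ * against `RestEndpointMethodTypes["actions"]["generateRunnerJitconfigForOrg"]["parameters"]`.
+ *
+ * ```
+ * const { data } = await octokit.rest.actions.generateRunnerJitconfigForOrg({
+ *   org: 'octo-org',
+ *   name: 'ephemeral-runner-1',   // placeholder runner name
+ *   runner_group_id: 1,           // placeholder runner group ID
+ *   labels: ['self-hosted', 'linux', 'x64'],
+ * });
+ *
+ * // The response is expected to include an encoded JIT config to pass to the runner binary.
+ * console.log(data.encoded_jit_config);
+ * ```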
+ */
+ generateRunnerJitconfigForOrg: {
+ (params?: RestEndpointMethodTypes["actions"]["generateRunnerJitconfigForOrg"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Generates a configuration that can be passed to the runner application at startup.
+ *
+ * You must authenticate using an access token with the `repo` scope to use this endpoint.
+ */
+ generateRunnerJitconfigForRepo: {
+ (params?: RestEndpointMethodTypes["actions"]["generateRunnerJitconfigForRepo"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Lists the GitHub Actions caches for a repository.
+ * You must authenticate using an access token with the `repo` scope to use this endpoint.
+ * GitHub Apps must have the `actions:read` permission to use this endpoint.
+ */
+ getActionsCacheList: {
+ (params?: RestEndpointMethodTypes["actions"]["getActionsCacheList"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Gets GitHub Actions cache usage for a repository.
+ * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated.
+ * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
+ */
+ getActionsCacheUsage: {
+ (params?: RestEndpointMethodTypes["actions"]["getActionsCacheUsage"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Lists repositories and their GitHub Actions cache usage for an organization.
+ * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated.
+ * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_administration:read` permission to use this endpoint.
+ */
+ getActionsCacheUsageByRepoForOrg: {
+ (params?: RestEndpointMethodTypes["actions"]["getActionsCacheUsageByRepoForOrg"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Gets the total GitHub Actions cache usage for an organization.
+ * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated.
+ * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_administration:read` permission to use this endpoint.
+ */
+ getActionsCacheUsageForOrg: {
+ (params?: RestEndpointMethodTypes["actions"]["getActionsCacheUsageForOrg"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Gets the selected actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. 
For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."
+ *
+ * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API.
+ */
+ getAllowedActionsOrganization: {
+ (params?: RestEndpointMethodTypes["actions"]["getAllowedActionsOrganization"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Gets the settings for selected actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)."
+ *
+ * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API.
+ */
+ getAllowedActionsRepository: {
+ (params?: RestEndpointMethodTypes["actions"]["getAllowedActionsRepository"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Gets a specific artifact for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
+ */
+ getArtifact: {
+ (params?: RestEndpointMethodTypes["actions"]["getArtifact"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Get the public key for an environment, which you need to encrypt environment secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint.
+ */
+ getEnvironmentPublicKey: {
+ (params?: RestEndpointMethodTypes["actions"]["getEnvironmentPublicKey"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Gets a single environment secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint.
+ */
+ getEnvironmentSecret: {
+ (params?: RestEndpointMethodTypes["actions"]["getEnvironmentSecret"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Gets a specific variable in an environment. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `environments:read` repository permission to use this endpoint.
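+ *
+ * #### Example reading an environment variable (sketch)
+ *
+ * A minimal sketch, assuming an authenticated Octokit instance that includes this plugin. The
+ * parameter names and the response fields shown are assumptions; verify them against
+ * `RestEndpointMethodTypes["actions"]["getEnvironmentVariable"]["parameters"]` and the generated response types.
+ *
+ * ```
+ * const { data: variable } = await octokit.rest.actions.getEnvironmentVariable({
+ *   repository_id: 123456,          // assumed: environment endpoints key off the repository ID
+ *   environment_name: 'production',
+ *   name: 'MY_VARIABLE',
+ * });
+ *
+ * console.log(variable.name, variable.value);
+ * ```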
+ */ + getEnvironmentVariable: { + (params?: RestEndpointMethodTypes["actions"]["getEnvironmentVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, + * as well as whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + getGithubActionsDefaultWorkflowPermissionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["getGithubActionsDefaultWorkflowPermissionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, + * as well as if GitHub Actions can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + getGithubActionsDefaultWorkflowPermissionsRepository: { + (params?: RestEndpointMethodTypes["actions"]["getGithubActionsDefaultWorkflowPermissionsRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + getGithubActionsPermissionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["getGithubActionsPermissionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions and reusable workflows allowed to run in the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + getGithubActionsPermissionsRepository: { + (params?: RestEndpointMethodTypes["actions"]["getGithubActionsPermissionsRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific job in a workflow run. Anyone with read access to the repository can use this endpoint. 
If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getJobForWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["getJobForWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + getOrgPublicKey: { + (params?: RestEndpointMethodTypes["actions"]["getOrgPublicKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + getOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["getOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific variable in an organization. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:read` organization permission to use this endpoint. + */ + getOrgVariable: { + (params?: RestEndpointMethodTypes["actions"]["getOrgVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get all deployment environments for a workflow run that are waiting for protection rules to pass. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getPendingDeploymentsForRun: { + (params?: RestEndpointMethodTypes["actions"]["getPendingDeploymentsForRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions and reusable workflows allowed to run in the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + * @deprecated octokit.rest.actions.getRepoPermissions() has been renamed to octokit.rest.actions.getGithubActionsPermissionsRepository() (2020-11-10) + */ + getRepoPermissions: { + (params?: RestEndpointMethodTypes["actions"]["getRepoPermissions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. 
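+ *
+ * #### Example fetching the key and storing a secret (sketch)
+ *
+ * A minimal end-to-end sketch, assuming an authenticated Octokit instance that includes this
+ * plugin and the `libsodium-wrappers` package shown in the other examples; the owner, repository,
+ * and secret values are placeholders.
+ *
+ * ```
+ * const sodium = require('libsodium-wrappers');
+ *
+ * // 1. Fetch the repository public key.
+ * const { data: key } = await octokit.rest.actions.getRepoPublicKey({
+ *   owner: 'octo-org',
+ *   repo: 'octo-repo',
+ * });
+ *
+ * // 2. Encrypt the plain-text secret with the public key (sealed box).
+ * await sodium.ready;
+ * const binKey = sodium.from_base64(key.key, sodium.base64_variants.ORIGINAL);
+ * const binSecret = sodium.from_string('plain-text-secret');
+ * const encryptedValue = sodium.to_base64(
+ *   sodium.crypto_box_seal(binSecret, binKey),
+ *   sodium.base64_variants.ORIGINAL
+ * );
+ *
+ * // 3. Store the encrypted value, passing along the ID of the key that was used.
+ * await octokit.rest.actions.createOrUpdateRepoSecret({
+ *   owner: 'octo-org',
+ *   repo: 'octo-repo',
+ *   secret_name: 'MY_SECRET',
+ *   encrypted_value: encryptedValue,
+ *   key_id: key.key_id,
+ * });
+ * ```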
+ */ + getRepoPublicKey: { + (params?: RestEndpointMethodTypes["actions"]["getRepoPublicKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific required workflow present in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + getRepoRequiredWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["getRepoRequiredWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the number of billable minutes used by a specific required workflow during the current billing cycle. + * + * Billable minutes only apply to required workflows running in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)." + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getRepoRequiredWorkflowUsage: { + (params?: RestEndpointMethodTypes["actions"]["getRepoRequiredWorkflowUsage"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + getRepoSecret: { + (params?: RestEndpointMethodTypes["actions"]["getRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific variable in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions_variables:read` repository permission to use this endpoint. + */ + getRepoVariable: { + (params?: RestEndpointMethodTypes["actions"]["getRepoVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get a required workflow configured in an organization. + * + * You must authenticate using an access token with the `read:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + getRequiredWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["getRequiredWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Anyone with read access to the repository can use this endpoint. 
If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getReviewsForRun: { + (params?: RestEndpointMethodTypes["actions"]["getReviewsForRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + getSelfHostedRunnerForOrg: { + (params?: RestEndpointMethodTypes["actions"]["getSelfHostedRunnerForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + getSelfHostedRunnerForRepo: { + (params?: RestEndpointMethodTypes["actions"]["getSelfHostedRunnerForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["getWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to private repositories. + * For more information, see "[Allowing access to components in a private repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-a-private-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. + */ + getWorkflowAccessToRepository: { + (params?: RestEndpointMethodTypes["actions"]["getWorkflowAccessToRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["getWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific workflow run attempt. Anyone with read access to the repository + * can use this endpoint. If the repository is private you must use an access token + * with the `repo` scope. GitHub Apps must have the `actions:read` permission to + * use this endpoint. 
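+ *
+ * #### Example fetching a run attempt (sketch)
+ *
+ * A minimal sketch, assuming an authenticated Octokit instance that includes this plugin; the
+ * IDs are placeholders you would normally take from a `listWorkflowRunsForRepo` response.
+ *
+ * ```
+ * const { data: attempt } = await octokit.rest.actions.getWorkflowRunAttempt({
+ *   owner: 'octo-org',
+ *   repo: 'octo-repo',
+ *   run_id: 123456789,     // placeholder workflow run ID
+ *   attempt_number: 2,     // placeholder attempt number
+ * });
+ *
+ * console.log(attempt.status, attempt.conclusion);
+ * ```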
+ */ + getWorkflowRunAttempt: { + (params?: RestEndpointMethodTypes["actions"]["getWorkflowRunAttempt"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the number of billable minutes and total run time for a specific workflow run. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getWorkflowRunUsage: { + (params?: RestEndpointMethodTypes["actions"]["getWorkflowRunUsage"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the number of billable minutes used by a specific workflow during the current billing cycle. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getWorkflowUsage: { + (params?: RestEndpointMethodTypes["actions"]["getWorkflowUsage"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all artifacts for a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + listArtifactsForRepo: { + (params?: RestEndpointMethodTypes["actions"]["listArtifactsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all secrets available in an environment without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. + */ + listEnvironmentSecrets: { + (params?: RestEndpointMethodTypes["actions"]["listEnvironmentSecrets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all environment variables. 
You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `environments:read` repository permission to use this endpoint. + */ + listEnvironmentVariables: { + (params?: RestEndpointMethodTypes["actions"]["listEnvironmentVariables"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists jobs for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + */ + listJobsForWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["listJobsForWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists jobs for a specific workflow run attempt. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + */ + listJobsForWorkflowRunAttempt: { + (params?: RestEndpointMethodTypes["actions"]["listJobsForWorkflowRunAttempt"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all labels for a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + listLabelsForSelfHostedRunnerForOrg: { + (params?: RestEndpointMethodTypes["actions"]["listLabelsForSelfHostedRunnerForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all labels for a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + listLabelsForSelfHostedRunnerForRepo: { + (params?: RestEndpointMethodTypes["actions"]["listLabelsForSelfHostedRunnerForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + listOrgSecrets: { + (params?: RestEndpointMethodTypes["actions"]["listOrgSecrets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all organization variables. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:read` organization permission to use this endpoint. 
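+ *
+ * #### Example listing organization variables (sketch)
+ *
+ * A minimal sketch, assuming an authenticated Octokit instance that includes this plugin. The
+ * response shape (`total_count` plus a `variables` array) is an assumption based on the endpoint
+ * description, so check the generated response types before relying on it.
+ *
+ * ```
+ * const { data } = await octokit.rest.actions.listOrgVariables({
+ *   org: 'octo-org',
+ *   per_page: 100,   // page size; combine with `page` (or a pagination helper) for more results
+ * });
+ *
+ * for (const variable of data.variables) {
+ *   console.log(variable.name, variable.value);
+ * }
+ * ```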
+ */
+ listOrgVariables: {
+ (params?: RestEndpointMethodTypes["actions"]["listOrgVariables"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Lists all organization secrets shared with a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint.
+ */
+ listRepoOrganizationSecrets: {
+ (params?: RestEndpointMethodTypes["actions"]["listRepoOrganizationSecrets"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Lists all organization variables shared with a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions_variables:read` repository permission to use this endpoint.
+ */
+ listRepoOrganizationVariables: {
+ (params?: RestEndpointMethodTypes["actions"]["listRepoOrganizationVariables"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Lists the required workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)."
+ */
+ listRepoRequiredWorkflows: {
+ (params?: RestEndpointMethodTypes["actions"]["listRepoRequiredWorkflows"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint.
+ */
+ listRepoSecrets: {
+ (params?: RestEndpointMethodTypes["actions"]["listRepoSecrets"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Lists all repository variables. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions_variables:read` repository permission to use this endpoint.
+ */
+ listRepoVariables: {
+ (params?: RestEndpointMethodTypes["actions"]["listRepoVariables"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * Lists the workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint.
+ */
+ listRepoWorkflows: {
+ (params?: RestEndpointMethodTypes["actions"]["listRepoWorkflows"]["parameters"]): Promise;
+ defaults: RequestInterface["defaults"];
+ endpoint: EndpointInterface<{
+ url: string;
+ }>;
+ };
+ /**
+ * List all workflow runs for a required workflow. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters).
+ * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + listRequiredWorkflowRuns: { + (params?: RestEndpointMethodTypes["actions"]["listRequiredWorkflowRuns"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all required workflows in an organization. + * + * You must authenticate using an access token with the `read:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + listRequiredWorkflows: { + (params?: RestEndpointMethodTypes["actions"]["listRequiredWorkflows"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + listRunnerApplicationsForOrg: { + (params?: RestEndpointMethodTypes["actions"]["listRunnerApplicationsForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + listRunnerApplicationsForRepo: { + (params?: RestEndpointMethodTypes["actions"]["listRunnerApplicationsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + listSelectedReposForOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["listSelectedReposForOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all repositories that can access an organization variable that is available to selected repositories. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:read` organization permission to use this endpoint. + */ + listSelectedReposForOrgVariable: { + (params?: RestEndpointMethodTypes["actions"]["listSelectedReposForOrgVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. 
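+ *
+ * #### Example listing the enabled repositories (sketch)
+ *
+ * A minimal sketch, assuming an authenticated Octokit instance that includes this plugin. The
+ * response shape (`total_count` plus a `repositories` array) is an assumption, so verify it
+ * against the generated response types.
+ *
+ * ```
+ * const { data } = await octokit.rest.actions.listSelectedRepositoriesEnabledGithubActionsOrganization({
+ *   org: 'octo-org',
+ * });
+ *
+ * for (const repository of data.repositories) {
+ *   console.log(repository.full_name);
+ * }
+ * ```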
+ */ + listSelectedRepositoriesEnabledGithubActionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["listSelectedRepositoriesEnabledGithubActionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the selected repositories that are configured for a required workflow in an organization. To use this endpoint, the required workflow must be configured to run on selected repositories. + * + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + listSelectedRepositoriesRequiredWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["listSelectedRepositoriesRequiredWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all self-hosted runners configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + listSelfHostedRunnersForOrg: { + (params?: RestEndpointMethodTypes["actions"]["listSelfHostedRunnersForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all self-hosted runners configured in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + listSelfHostedRunnersForRepo: { + (params?: RestEndpointMethodTypes["actions"]["listSelfHostedRunnersForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists artifacts for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + listWorkflowRunArtifacts: { + (params?: RestEndpointMethodTypes["actions"]["listWorkflowRunArtifacts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all workflow runs for a workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + */ + listWorkflowRuns: { + (params?: RestEndpointMethodTypes["actions"]["listWorkflowRuns"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all workflow runs for a repository. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. 
GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + listWorkflowRunsForRepo: { + (params?: RestEndpointMethodTypes["actions"]["listWorkflowRunsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Re-run a job and its dependent jobs in a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + reRunJobForWorkflowRun: { + (params?: RestEndpointMethodTypes["actions"]["reRunJobForWorkflowRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Re-runs your workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + reRunWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["reRunWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Re-run all of the failed jobs and their dependent jobs in a workflow run using the `id` of the workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + reRunWorkflowFailedJobs: { + (params?: RestEndpointMethodTypes["actions"]["reRunWorkflowFailedJobs"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove all custom labels from a self-hosted runner configured in an + * organization. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + removeAllCustomLabelsFromSelfHostedRunnerForOrg: { + (params?: RestEndpointMethodTypes["actions"]["removeAllCustomLabelsFromSelfHostedRunnerForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove all custom labels from a self-hosted runner configured in a + * repository. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + removeAllCustomLabelsFromSelfHostedRunnerForRepo: { + (params?: RestEndpointMethodTypes["actions"]["removeAllCustomLabelsFromSelfHostedRunnerForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove a custom label from a self-hosted runner configured + * in an organization. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + removeCustomLabelFromSelfHostedRunnerForOrg: { + (params?: RestEndpointMethodTypes["actions"]["removeCustomLabelFromSelfHostedRunnerForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove a custom label from a self-hosted runner configured + * in a repository. Returns the remaining labels from the runner. 
+ * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + removeCustomLabelFromSelfHostedRunnerForRepo: { + (params?: RestEndpointMethodTypes["actions"]["removeCustomLabelFromSelfHostedRunnerForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. + */ + removeSelectedRepoFromOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["removeSelectedRepoFromOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a repository from an organization variable that is available to selected repositories. Organization variables that are available to selected repositories have their `visibility` field set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + removeSelectedRepoFromOrgVariable: { + (params?: RestEndpointMethodTypes["actions"]["removeSelectedRepoFromOrgVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a repository from a required workflow. To use this endpoint, the required workflow must be configured to run on selected repositories. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + removeSelectedRepoFromRequiredWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["removeSelectedRepoFromRequiredWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Approve or reject custom deployment protection rules provided by a GitHub App for a workflow run. For more information, see "[Using environments for deployment](https://docs.github.com/actions/deployment/targeting-different-environments/using-environments-for-deployment)." + * + * **Note:** GitHub Apps can only review their own custom deployment protection rules. + * To approve or reject pending deployments that are waiting for review from a specific person or team, see [`POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments`](/rest/actions/workflow-runs#review-pending-deployments-for-a-workflow-run). + * + * GitHub Apps must have read and write permission for **Deployments** to use this endpoint. 
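A minimal usage sketch for the workflow-run helpers declared earlier in this block (listWorkflowRunsForRepo, reRunWorkflowFailedJobs), assuming an @octokit/rest client and placeholder owner/repo values; it is an illustration, not part of the vendored declaration file.

    import { Octokit } from "@octokit/rest";

    const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

    // List recent failed runs, then re-run only their failed jobs.
    const { data } = await octokit.rest.actions.listWorkflowRunsForRepo({
      owner: "octo-org",   // placeholder
      repo: "octo-repo",   // placeholder
      status: "failure",
      per_page: 5,
    });
    for (const run of data.workflow_runs) {
      await octokit.rest.actions.reRunWorkflowFailedJobs({
        owner: "octo-org",
        repo: "octo-repo",
        run_id: run.id,
      });
    }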
+ */ + reviewCustomGatesForRun: { + (params?: RestEndpointMethodTypes["actions"]["reviewCustomGatesForRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Approve or reject pending deployments that are waiting on approval by a required reviewer. + * + * Required reviewers with read access to the repository contents and deployments can use this endpoint. Required reviewers must authenticate using an access token with the `repo` scope to use this endpoint. + */ + reviewPendingDeploymentsForRun: { + (params?: RestEndpointMethodTypes["actions"]["reviewPendingDeploymentsForRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + setAllowedActionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["setAllowedActionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + setAllowedActionsRepository: { + (params?: RestEndpointMethodTypes["actions"]["setAllowedActionsRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + setCustomLabelsForSelfHostedRunnerForOrg: { + (params?: RestEndpointMethodTypes["actions"]["setCustomLabelsForSelfHostedRunnerForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + setCustomLabelsForSelfHostedRunnerForRepo: { + (params?: RestEndpointMethodTypes["actions"]["setCustomLabelsForSelfHostedRunnerForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, and sets if GitHub Actions + * can submit approving pull request reviews. 
For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + setGithubActionsDefaultWorkflowPermissionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["setGithubActionsDefaultWorkflowPermissionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, and sets if GitHub Actions + * can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + setGithubActionsDefaultWorkflowPermissionsRepository: { + (params?: RestEndpointMethodTypes["actions"]["setGithubActionsDefaultWorkflowPermissionsRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + setGithubActionsPermissionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["setGithubActionsPermissionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the GitHub Actions permissions policy for enabling GitHub Actions and allowed actions and reusable workflows in the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + setGithubActionsPermissionsRepository: { + (params?: RestEndpointMethodTypes["actions"]["setGithubActionsPermissionsRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. 
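A hedged sketch of the org-secret call described above, assuming an @octokit/rest client; the org name, secret name, and repository ids are placeholders, and note that the endpoint takes numeric repository ids rather than names.

    import { Octokit } from "@octokit/rest";

    const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // token with admin:org scope

    // Replace the full set of repositories allowed to use the organization secret.
    await octokit.rest.actions.setSelectedReposForOrgSecret({
      org: "octo-org",                     // placeholder
      secret_name: "NPM_TOKEN",            // placeholder
      selected_repository_ids: [123, 456], // placeholder numeric repository ids
    });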
+ */ + setSelectedReposForOrgSecret: { + (params?: RestEndpointMethodTypes["actions"]["setSelectedReposForOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Replaces all repositories for an organization variable that is available to selected repositories. Organization variables that are available to selected repositories have their `visibility` field set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + setSelectedReposForOrgVariable: { + (params?: RestEndpointMethodTypes["actions"]["setSelectedReposForOrgVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the repositories for a required workflow that is required for selected repositories. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + setSelectedReposToRequiredWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["setSelectedReposToRequiredWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Replaces the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + setSelectedRepositoriesEnabledGithubActionsOrganization: { + (params?: RestEndpointMethodTypes["actions"]["setSelectedRepositoriesEnabledGithubActionsOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to private repositories. + * For more information, see "[Allowing access to components in a private repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-a-private-repository)". + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. + */ + setWorkflowAccessToRepository: { + (params?: RestEndpointMethodTypes["actions"]["setWorkflowAccessToRepository"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates an environment variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `repo` scope to use this endpoint. 
+ * GitHub Apps must have the `environment:write` repository permission to use this endpoint. + */ + updateEnvironmentVariable: { + (params?: RestEndpointMethodTypes["actions"]["updateEnvironmentVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates an organization variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * GitHub Apps must have the `organization_actions_variables:write` organization permission to use this endpoint. + */ + updateOrgVariable: { + (params?: RestEndpointMethodTypes["actions"]["updateOrgVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates a repository variable that you can reference in a GitHub Actions workflow. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions_variables:write` repository permission to use this endpoint. + */ + updateRepoVariable: { + (params?: RestEndpointMethodTypes["actions"]["updateRepoVariable"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Update a required workflow in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * For more information, see "[Required Workflows](https://docs.github.com/actions/using-workflows/required-workflows)." + */ + updateRequiredWorkflow: { + (params?: RestEndpointMethodTypes["actions"]["updateRequiredWorkflow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + activity: { + checkRepoIsStarredByAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["checkRepoIsStarredByAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint should only be used to stop watching a repository. To control whether or not you wish to receive notifications from a repository, [set the repository's subscription manually](https://docs.github.com/rest/reference/activity#set-a-repository-subscription). + */ + deleteRepoSubscription: { + (params?: RestEndpointMethodTypes["activity"]["deleteRepoSubscription"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Mutes all future notifications for a conversation until you comment on the thread or get an **@mention**. If you are watching the repository of the thread, you will still receive notifications. To ignore future notifications for a repository you are watching, use the [Set a thread subscription](https://docs.github.com/rest/reference/activity#set-a-thread-subscription) endpoint and set `ignore` to `true`. + */ + deleteThreadSubscription: { + (params?: RestEndpointMethodTypes["activity"]["deleteThreadSubscription"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * GitHub provides several timeline resources in [Atom](http://en.wikipedia.org/wiki/Atom_(standard)) format. 
The Feeds API lists all the feeds available to the authenticated user: + * + * * **Timeline**: The GitHub global public timeline + * * **User**: The public timeline for any user, using [URI template](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) + * * **Current user public**: The public timeline for the authenticated user + * * **Current user**: The private timeline for the authenticated user + * * **Current user actor**: The private timeline for activity created by the authenticated user + * * **Current user organizations**: The private timeline for the organizations the authenticated user is a member of. + * * **Security advisories**: A collection of public announcements that provide information about security-related vulnerabilities in software on GitHub. + * + * **Note**: Private feeds are only returned when [authenticating via Basic Auth](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) since current feed URIs use the older, non revocable auth tokens. + */ + getFeeds: { + (params?: RestEndpointMethodTypes["activity"]["getFeeds"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getRepoSubscription: { + (params?: RestEndpointMethodTypes["activity"]["getRepoSubscription"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets information about a notification thread. + */ + getThread: { + (params?: RestEndpointMethodTypes["activity"]["getThread"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This checks to see if the current user is subscribed to a thread. You can also [get a repository subscription](https://docs.github.com/rest/reference/activity#get-a-repository-subscription). + * + * Note that subscriptions are only generated if a user is participating in a conversation--for example, they've replied to the thread, were **@mentioned**, or manually subscribe to a thread. + */ + getThreadSubscriptionForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["getThreadSubscriptionForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * If you are authenticated as the given user, you will see your private events. Otherwise, you'll only see public events. + */ + listEventsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["listEventsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all notifications for the current user, sorted by most recently updated. + */ + listNotificationsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["listNotificationsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This is the user's organization dashboard. You must be authenticated as the user to view this. 
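For the organization-dashboard events endpoint described above, a small sketch assuming an @octokit/rest client; the username and org are placeholders, and the parameter names mirror the /users/{username}/events/orgs/{org} route, so treat them as assumptions.

    import { Octokit } from "@octokit/rest";

    const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

    // Must be authenticated as the same user whose dashboard is being read.
    const { data: events } = await octokit.rest.activity.listOrgEventsForAuthenticatedUser({
      username: "octocat", // placeholder
      org: "octo-org",     // placeholder
      per_page: 30,
    });
    console.log(events.length);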
+ */ + listOrgEventsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["listOrgEventsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * We delay the public events feed by five minutes, which means the most recent event returned by the public events API actually occurred at least five minutes ago. + */ + listPublicEvents: { + (params?: RestEndpointMethodTypes["activity"]["listPublicEvents"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listPublicEventsForRepoNetwork: { + (params?: RestEndpointMethodTypes["activity"]["listPublicEventsForRepoNetwork"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listPublicEventsForUser: { + (params?: RestEndpointMethodTypes["activity"]["listPublicEventsForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listPublicOrgEvents: { + (params?: RestEndpointMethodTypes["activity"]["listPublicOrgEvents"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * These are events that you've received by watching repos and following users. If you are authenticated as the given user, you will see private events. Otherwise, you'll only see public events. + */ + listReceivedEventsForUser: { + (params?: RestEndpointMethodTypes["activity"]["listReceivedEventsForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listReceivedPublicEventsForUser: { + (params?: RestEndpointMethodTypes["activity"]["listReceivedPublicEventsForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note**: This API is not built to serve real-time use cases. Depending on the time of day, event latency can be anywhere from 30s to 6h. + */ + listRepoEvents: { + (params?: RestEndpointMethodTypes["activity"]["listRepoEvents"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all notifications for the current user in the specified repository. + */ + listRepoNotificationsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["listRepoNotificationsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists repositories the authenticated user has starred. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + listReposStarredByAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["listReposStarredByAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists repositories a user has starred. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. 
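The starred-repository endpoints above mention the application/vnd.github.star+json media type; a sketch of requesting it by overriding the Accept header per call (the username is a placeholder, and the starred_at field noted in the comment is an assumption about the response shape).

    import { Octokit } from "@octokit/rest";

    const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

    const { data: stars } = await octokit.rest.activity.listReposStarredByUser({
      username: "octocat", // placeholder
      headers: { accept: "application/vnd.github.star+json" }, // adds starred_at timestamps
    });
    console.log(stars[0]);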
+ */ + listReposStarredByUser: { + (params?: RestEndpointMethodTypes["activity"]["listReposStarredByUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists repositories a user is watching. + */ + listReposWatchedByUser: { + (params?: RestEndpointMethodTypes["activity"]["listReposWatchedByUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people that have starred the repository. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + listStargazersForRepo: { + (params?: RestEndpointMethodTypes["activity"]["listStargazersForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists repositories the authenticated user is watching. + */ + listWatchedReposForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["listWatchedReposForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people watching the specified repository. + */ + listWatchersForRepo: { + (params?: RestEndpointMethodTypes["activity"]["listWatchersForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Marks all notifications as "read" for the current user. If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. + */ + markNotificationsAsRead: { + (params?: RestEndpointMethodTypes["activity"]["markNotificationsAsRead"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Marks all notifications in a repository as "read" for the current user. If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List repository notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. + */ + markRepoNotificationsAsRead: { + (params?: RestEndpointMethodTypes["activity"]["markRepoNotificationsAsRead"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Marks a thread as "read." Marking a thread as "read" is equivalent to clicking a notification in your notification inbox on GitHub: https://github.com/notifications. 
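A short sketch for the notification helpers above, assuming an @octokit/rest client: fetch one unread thread and mark it as read (thread ids arrive as strings, while the endpoint expects a number).

    import { Octokit } from "@octokit/rest";

    const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

    const { data: threads } = await octokit.rest.activity.listNotificationsForAuthenticatedUser({ per_page: 1 });
    if (threads.length > 0) {
      await octokit.rest.activity.markThreadAsRead({ thread_id: Number(threads[0].id) });
    }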
+ */ + markThreadAsRead: { + (params?: RestEndpointMethodTypes["activity"]["markThreadAsRead"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * If you would like to watch a repository, set `subscribed` to `true`. If you would like to ignore notifications made within a repository, set `ignored` to `true`. If you would like to stop watching a repository, [delete the repository's subscription](https://docs.github.com/rest/reference/activity#delete-a-repository-subscription) completely. + */ + setRepoSubscription: { + (params?: RestEndpointMethodTypes["activity"]["setRepoSubscription"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * If you are watching a repository, you receive notifications for all threads by default. Use this endpoint to ignore future notifications for threads until you comment on the thread or get an **@mention**. + * + * You can also use this endpoint to subscribe to threads that you are currently not receiving notifications for or to subscribed to threads that you have previously ignored. + * + * Unsubscribing from a conversation in a repository that you are not watching is functionally equivalent to the [Delete a thread subscription](https://docs.github.com/rest/reference/activity#delete-a-thread-subscription) endpoint. + */ + setThreadSubscription: { + (params?: RestEndpointMethodTypes["activity"]["setThreadSubscription"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + starRepoForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["starRepoForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + unstarRepoForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["activity"]["unstarRepoForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + apps: { + /** + * Add a single repository to an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + * @deprecated octokit.rest.apps.addRepoToInstallation() has been renamed to octokit.rest.apps.addRepoToInstallationForAuthenticatedUser() (2021-10-05) + */ + addRepoToInstallation: { + (params?: RestEndpointMethodTypes["apps"]["addRepoToInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Add a single repository to an installation. The authenticated user must have admin access to the repository. 
+ * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + addRepoToInstallationForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["apps"]["addRepoToInstallationForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * OAuth applications and GitHub applications with OAuth authorizations can use this API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) to use this endpoint, where the username is the application `client_id` and the password is its `client_secret`. Invalid tokens will return `404 NOT FOUND`. + */ + checkToken: { + (params?: RestEndpointMethodTypes["apps"]["checkToken"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Use this endpoint to complete the handshake necessary when implementing the [GitHub App Manifest flow](https://docs.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/). When you create a GitHub App with the manifest flow, you receive a temporary `code` used to retrieve the GitHub App's `id`, `pem` (private key), and `webhook_secret`. + */ + createFromManifest: { + (params?: RestEndpointMethodTypes["apps"]["createFromManifest"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates an installation access token that enables a GitHub App to make authenticated API requests for the app's installation on an organization or individual account. Installation tokens expire one hour from the time you create them. Using an expired token produces a status code of `401 - Unauthorized`, and requires creating a new installation token. By default the installation token has access to all repositories that the installation can access. To restrict the access to specific repositories, you can provide the `repository_ids` when creating the token. When you omit `repository_ids`, the response does not contain the `repositories` key. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + createInstallationAccessToken: { + (params?: RestEndpointMethodTypes["apps"]["createInstallationAccessToken"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * OAuth and GitHub application owners can revoke a grant for their application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid OAuth `access_token` as an input parameter and the grant for the token's owner will be deleted. 
+ * Deleting an application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). + */ + deleteAuthorization: { + (params?: RestEndpointMethodTypes["apps"]["deleteAuthorization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Uninstalls a GitHub App on a user, organization, or business account. If you prefer to temporarily suspend an app's access to your account's resources, then we recommend the "[Suspend an app installation](https://docs.github.com/rest/reference/apps/#suspend-an-app-installation)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + deleteInstallation: { + (params?: RestEndpointMethodTypes["apps"]["deleteInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * OAuth or GitHub application owners can revoke a single token for an OAuth application or a GitHub application with an OAuth authorization. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the application's `client_id` and `client_secret` as the username and password. + */ + deleteToken: { + (params?: RestEndpointMethodTypes["apps"]["deleteToken"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the GitHub App associated with the authentication credentials used. To see how many app installations are associated with this GitHub App, see the `installations_count` in the response. For more details about your app's installations, see the "[List installations for the authenticated app](https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getAuthenticated: { + (params?: RestEndpointMethodTypes["apps"]["getAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note**: The `:app_slug` is just the URL-friendly name of your GitHub App. You can find this on the settings page for your GitHub App (e.g., `https://github.com/settings/apps/:app_slug`). + * + * If the GitHub App you specify is public, you can access this endpoint without authenticating. If the GitHub App you specify is private, you must authenticate with a [personal access token](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. 
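A minimal sketch for getBySlug as described above; the slug is a placeholder, and no authentication is needed when the app is public.

    import { Octokit } from "@octokit/rest";

    const octokit = new Octokit(); // unauthenticated is enough for a public app

    const { data: app } = await octokit.rest.apps.getBySlug({ app_slug: "my-app" }); // placeholder slug
    console.log(app?.name);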
+ */ + getBySlug: { + (params?: RestEndpointMethodTypes["apps"]["getBySlug"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables an authenticated GitHub App to find an installation's information using the installation id. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getInstallation: { + (params?: RestEndpointMethodTypes["apps"]["getInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables an authenticated GitHub App to find the organization's installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getOrgInstallation: { + (params?: RestEndpointMethodTypes["apps"]["getOrgInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables an authenticated GitHub App to find the repository's installation information. The installation's account type will be either an organization or a user account, depending which account the repository belongs to. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getRepoInstallation: { + (params?: RestEndpointMethodTypes["apps"]["getRepoInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + getSubscriptionPlanForAccount: { + (params?: RestEndpointMethodTypes["apps"]["getSubscriptionPlanForAccount"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. 
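Several of the apps endpoints above require authenticating as the GitHub App (a JWT). One common way is @octokit/auth-app, sketched here with placeholder app and account ids; it assumes the auth strategy sends the app JWT for marketplace routes.

    import { Octokit } from "@octokit/rest";
    import { createAppAuth } from "@octokit/auth-app";

    const appOctokit = new Octokit({
      authStrategy: createAppAuth,
      auth: { appId: 12345, privateKey: process.env.APP_PRIVATE_KEY! }, // placeholders
    });

    const { data: purchase } = await appOctokit.rest.apps.getSubscriptionPlanForAccount({
      account_id: 67890, // placeholder account id
    });
    console.log(purchase);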
+ */ + getSubscriptionPlanForAccountStubbed: { + (params?: RestEndpointMethodTypes["apps"]["getSubscriptionPlanForAccountStubbed"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables an authenticated GitHub App to find the user’s installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getUserInstallation: { + (params?: RestEndpointMethodTypes["apps"]["getUserInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getWebhookConfigForApp: { + (params?: RestEndpointMethodTypes["apps"]["getWebhookConfigForApp"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a delivery for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + getWebhookDelivery: { + (params?: RestEndpointMethodTypes["apps"]["getWebhookDelivery"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns user and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + listAccountsForPlan: { + (params?: RestEndpointMethodTypes["apps"]["listAccountsForPlan"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns repository and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. 
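Plan-account listings are paginated; a sketch using octokit.paginate (bundled with @octokit/rest) against the non-stubbed listAccountsForPlan, with placeholder ids and the same assumed app authentication as the previous sketch.

    import { Octokit } from "@octokit/rest";
    import { createAppAuth } from "@octokit/auth-app";

    const appOctokit = new Octokit({
      authStrategy: createAppAuth,
      auth: { appId: 12345, privateKey: process.env.APP_PRIVATE_KEY! }, // placeholders
    });

    // Gather every account on the plan across all pages.
    const accounts = await appOctokit.paginate(appOctokit.rest.apps.listAccountsForPlan, {
      plan_id: 1111, // placeholder
      per_page: 100,
    });
    console.log(accounts.length);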
+ */ + listAccountsForPlanStubbed: { + (params?: RestEndpointMethodTypes["apps"]["listAccountsForPlanStubbed"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access for an installation. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The access the user has to each repository is included in the hash under the `permissions` key. + */ + listInstallationReposForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["apps"]["listInstallationReposForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all the pending installation requests for the authenticated GitHub App. + */ + listInstallationRequestsForAuthenticatedApp: { + (params?: RestEndpointMethodTypes["apps"]["listInstallationRequestsForAuthenticatedApp"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + * + * The permissions the installation has are included under the `permissions` key. + */ + listInstallations: { + (params?: RestEndpointMethodTypes["apps"]["listInstallations"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists installations of your GitHub App that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You can find the permissions for the installation under the `permissions` key. + */ + listInstallationsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["apps"]["listInstallationsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. 
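For the installation-listing helpers declared above, a sketch with a user-to-server token (the token source is a placeholder); it walks each installation visible to the user and counts the repositories it may access.

    import { Octokit } from "@octokit/rest";

    const octokit = new Octokit({ auth: process.env.USER_TO_SERVER_TOKEN }); // placeholder token source

    const { data } = await octokit.rest.apps.listInstallationsForAuthenticatedUser();
    for (const installation of data.installations) {
      const { data: repos } = await octokit.rest.apps.listInstallationReposForAuthenticatedUser({
        installation_id: installation.id,
      });
      console.log(installation.id, repos.total_count);
    }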
+ */ + listPlans: { + (params?: RestEndpointMethodTypes["apps"]["listPlans"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + listPlansStubbed: { + (params?: RestEndpointMethodTypes["apps"]["listPlansStubbed"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List repositories that an app installation can access. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + listReposAccessibleToInstallation: { + (params?: RestEndpointMethodTypes["apps"]["listReposAccessibleToInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. . OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). + */ + listSubscriptionsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["apps"]["listSubscriptionsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. . OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). + */ + listSubscriptionsForAuthenticatedUserStubbed: { + (params?: RestEndpointMethodTypes["apps"]["listSubscriptionsForAuthenticatedUserStubbed"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a list of webhook deliveries for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + listWebhookDeliveries: { + (params?: RestEndpointMethodTypes["apps"]["listWebhookDeliveries"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Redeliver a delivery for the webhook configured for a GitHub App. 
+ * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + redeliverWebhookDelivery: { + (params?: RestEndpointMethodTypes["apps"]["redeliverWebhookDelivery"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove a single repository from an installation. The authenticated user must have admin access to the repository. The installation must have the `repository_selection` of `selected`. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + * @deprecated octokit.rest.apps.removeRepoFromInstallation() has been renamed to octokit.rest.apps.removeRepoFromInstallationForAuthenticatedUser() (2021-10-05) + */ + removeRepoFromInstallation: { + (params?: RestEndpointMethodTypes["apps"]["removeRepoFromInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Remove a single repository from an installation. The authenticated user must have admin access to the repository. The installation must have the `repository_selection` of `selected`. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + removeRepoFromInstallationForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["apps"]["removeRepoFromInstallationForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * OAuth applications and GitHub applications with OAuth authorizations can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. + */ + resetToken: { + (params?: RestEndpointMethodTypes["apps"]["resetToken"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Revokes the installation token you're using to authenticate as an installation and access this endpoint. + * + * Once an installation token is revoked, the token is invalidated and cannot be used. Other endpoints that require the revoked installation token must have a new installation token to work. You can create a new token using the "[Create an installation access token for an app](https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app)" endpoint. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. 
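revokeInstallationAccessToken, described above, acts on the installation token the client itself is using; a sketch with an installation-authenticated client (app id, private key source, and installation id are placeholders).

    import { Octokit } from "@octokit/rest";
    import { createAppAuth } from "@octokit/auth-app";

    const installationOctokit = new Octokit({
      authStrategy: createAppAuth,
      auth: {
        appId: 12345,                             // placeholder
        privateKey: process.env.APP_PRIVATE_KEY!, // placeholder source
        installationId: 987654,                   // placeholder
      },
    });

    // Invalidates the token this client has been using; later calls need a fresh one.
    await installationOctokit.rest.apps.revokeInstallationAccessToken();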
+ */ + revokeInstallationAccessToken: { + (params?: RestEndpointMethodTypes["apps"]["revokeInstallationAccessToken"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Use a non-scoped user-to-server access token to create a repository scoped and/or permission scoped user-to-server access token. You can specify which repositories the token can access and which permissions are granted to the token. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the `client_id` and `client_secret` of the GitHub App as the username and password. Invalid tokens will return `404 NOT FOUND`. + */ + scopeToken: { + (params?: RestEndpointMethodTypes["apps"]["scopeToken"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Suspends a GitHub App on a user, organization, or business account, which blocks the app from accessing the account's resources. When a GitHub App is suspended, the app's access to the GitHub API or webhook events is blocked for that account. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + suspendInstallation: { + (params?: RestEndpointMethodTypes["apps"]["suspendInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a GitHub App installation suspension. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + unsuspendInstallation: { + (params?: RestEndpointMethodTypes["apps"]["unsuspendInstallation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + updateWebhookConfigForApp: { + (params?: RestEndpointMethodTypes["apps"]["updateWebhookConfigForApp"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + billing: { + /** + * Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `repo` or `admin:org` scope. 
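A minimal sketch for the organization Actions-billing summary described above; the org is a placeholder and the printed field names are assumptions about the response shape.

    import { Octokit } from "@octokit/rest";

    const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // token with repo or admin:org scope

    const { data: usage } = await octokit.rest.billing.getGithubActionsBillingOrg({ org: "octo-org" }); // placeholder
    console.log(`${usage.total_minutes_used} of ${usage.included_minutes} included minutes used`);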
+ */ + getGithubActionsBillingOrg: { + (params?: RestEndpointMethodTypes["billing"]["getGithubActionsBillingOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `user` scope. + */ + getGithubActionsBillingUser: { + (params?: RestEndpointMethodTypes["billing"]["getGithubActionsBillingUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + getGithubPackagesBillingOrg: { + (params?: RestEndpointMethodTypes["billing"]["getGithubPackagesBillingOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. + */ + getGithubPackagesBillingUser: { + (params?: RestEndpointMethodTypes["billing"]["getGithubPackagesBillingUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + getSharedStorageBillingOrg: { + (params?: RestEndpointMethodTypes["billing"]["getSharedStorageBillingOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. 
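+ *
+ * **Example calling this endpoint with `@octokit/rest`** (a hedged sketch; the username and token variable are placeholders):
+ *
+ * ```
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // token with `user` scope
+ * const { data } = await octokit.rest.billing.getSharedStorageBillingUser({ username: "octocat" });
+ * console.log(data); // estimated shared storage usage for the user
+ * ```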
+ */ + getSharedStorageBillingUser: { + (params?: RestEndpointMethodTypes["billing"]["getSharedStorageBillingUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + checks: { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Creates a new check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to create check runs. + * + * In a check suite, GitHub limits the number of check runs with the same name to 1000. Once these check runs exceed 1000, GitHub will start to automatically delete older check runs. + */ + create: { + (params?: RestEndpointMethodTypes["checks"]["create"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * By default, check suites are automatically created when you create a [check run](https://docs.github.com/rest/reference/checks#check-runs). You only need to use this endpoint for manually creating check suites when you've disabled automatic creation using "[Update repository preferences for check suites](https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites)". Your GitHub App must have the `checks:write` permission to create check suites. + */ + createSuite: { + (params?: RestEndpointMethodTypes["checks"]["createSuite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Gets a single check run using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + get: { + (params?: RestEndpointMethodTypes["checks"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Gets a single check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + getSuite: { + (params?: RestEndpointMethodTypes["checks"]["getSuite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists annotations for a check run using the annotation `id`. 
GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get annotations for a check run. OAuth Apps and authenticated users must have the `repo` scope to get annotations for a check run in a private repository. + */ + listAnnotations: { + (params?: RestEndpointMethodTypes["checks"]["listAnnotations"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a commit ref. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + listForRef: { + (params?: RestEndpointMethodTypes["checks"]["listForRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + listForSuite: { + (params?: RestEndpointMethodTypes["checks"]["listForSuite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Lists check suites for a commit `ref`. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to list check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + listSuitesForRef: { + (params?: RestEndpointMethodTypes["checks"]["listSuitesForRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Triggers GitHub to rerequest an existing check run, without pushing new code to a repository. This endpoint will trigger the [`check_run` webhook](https://docs.github.com/webhooks/event-payloads/#check_run) event with the action `rerequested`. When a check run is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check run, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. 
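+ *
+ * **Example calling this endpoint with `@octokit/rest`** (a hedged sketch; the owner, repository, and check run id are placeholders):
+ *
+ * ```
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.INSTALLATION_TOKEN }); // GitHub App installation token
+ * await octokit.rest.checks.rerequestRun({ owner: "my-org", repo: "my-repo", check_run_id: 42 });
+ * ```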
+ */ + rerequestRun: { + (params?: RestEndpointMethodTypes["checks"]["rerequestRun"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. This endpoint will trigger the [`check_suite` webhook](https://docs.github.com/webhooks/event-payloads/#check_suite) event with the action `rerequested`. When a check suite is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check suite, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. + */ + rerequestSuite: { + (params?: RestEndpointMethodTypes["checks"]["rerequestSuite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Changes the default automatic flow when creating check suites. By default, a check suite is automatically created each time code is pushed to a repository. When you disable the automatic creation of check suites, you can manually [Create a check suite](https://docs.github.com/rest/reference/checks#create-a-check-suite). You must have admin permissions in the repository to set preferences for check suites. + */ + setSuitesPreferences: { + (params?: RestEndpointMethodTypes["checks"]["setSuitesPreferences"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Updates a check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to edit check runs. + */ + update: { + (params?: RestEndpointMethodTypes["checks"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + codeScanning: { + /** + * Deletes a specified code scanning analysis from a repository. For + * private repositories, you must use an access token with the `repo` scope. For public repositories, + * you must use an access token with `public_repo` scope. + * GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * You can delete one analysis at a time. + * To delete a series of analyses, start with the most recent analysis and work backwards. + * Conceptually, the process is similar to the undo function in a text editor. + * + * When you list the analyses for a repository, + * one or more will be identified as deletable in the response: + * + * ``` + * "deletable": true + * ``` + * + * An analysis is deletable when it's the most recent in a set of analyses. + * Typically, a repository will have multiple sets of analyses + * for each enabled code scanning tool, + * where a set is determined by a unique combination of analysis values: + * + * * `ref` + * * `tool` + * * `category` + * + * If you attempt to delete an analysis that is not the most recent in a set, + * you'll get a 400 response with the message: + * + * ``` + * Analysis specified is not deletable. 
+ * ``` + * + * The response from a successful `DELETE` operation provides you with + * two alternative URLs for deleting the next analysis in the set: + * `next_analysis_url` and `confirm_delete_url`. + * Use the `next_analysis_url` URL if you want to avoid accidentally deleting the final analysis + * in a set. This is a useful option if you want to preserve at least one analysis + * for the specified tool in your repository. + * Use the `confirm_delete_url` URL if you are content to remove all analyses for a tool. + * When you delete the last analysis in a set, the value of `next_analysis_url` and `confirm_delete_url` + * in the 200 response is `null`. + * + * As an example of the deletion process, + * let's imagine that you added a workflow that configured a particular code scanning tool + * to analyze the code in a repository. This tool has added 15 analyses: + * 10 on the default branch, and another 5 on a topic branch. + * You therefore have two separate sets of analyses for this tool. + * You've now decided that you want to remove all of the analyses for the tool. + * To do this you must make 15 separate deletion requests. + * To start, you must find an analysis that's identified as deletable. + * Each set of analyses always has one that's identified as deletable. + * Having found the deletable analysis for one of the two sets, + * delete this analysis and then continue deleting the next analysis in the set until they're all deleted. + * Then repeat the process for the second set. + * The procedure therefore consists of a nested loop: + * + * **Outer loop**: + * * List the analyses for the repository, filtered by tool. + * * Parse this list to find a deletable analysis. If found: + * + * **Inner loop**: + * * Delete the identified analysis. + * * Parse the response for the value of `confirm_delete_url` and, if found, use this in the next iteration. + * + * The above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the `confirm_delete_url` value. Alternatively, you could use the `next_analysis_url` value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely. + */ + deleteAnalysis: { + (params?: RestEndpointMethodTypes["codeScanning"]["deleteAnalysis"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + getAlert: { + (params?: RestEndpointMethodTypes["codeScanning"]["getAlert"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specified code scanning analysis for a repository. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * The default JSON response contains fields that describe the analysis. 
+ * This includes the Git reference and commit SHA to which the analysis relates, + * the datetime of the analysis, the name of the code scanning tool, + * and the number of alerts. + * + * The `rules_count` field in the default response give the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * If you use the Accept header `application/sarif+json`, + * the response contains the analysis data that was uploaded. + * This is formatted as + * [SARIF version 2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html). + */ + getAnalysis: { + (params?: RestEndpointMethodTypes["codeScanning"]["getAnalysis"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a CodeQL database for a language in a repository. + * + * By default this endpoint returns JSON metadata about the CodeQL database. To + * download the CodeQL database binary content, set the `Accept` header of the request + * to [`application/zip`](https://docs.github.com/rest/overview/media-types), and make sure + * your HTTP client is configured to follow redirects or use the `Location` header + * to make a second request to get the redirect URL. + * + * For private repositories, you must use an access token with the `security_events` scope. + * For public repositories, you can use tokens with the `security_events` or `public_repo` scope. + * GitHub Apps must have the `contents` read permission to use this endpoint. + */ + getCodeqlDatabase: { + (params?: RestEndpointMethodTypes["codeScanning"]["getCodeqlDatabase"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a code scanning default setup configuration. + * You must use an access token with the `repo` scope to use this endpoint with private repos or the `public_repo` + * scope for public repos. GitHub Apps must have the `repo` write permission to use this endpoint. + */ + getDefaultSetup: { + (params?: RestEndpointMethodTypes["codeScanning"]["getDefaultSetup"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets information about a SARIF upload, including the status and the URL of the analysis that was uploaded so that you can retrieve details of the analysis. For more information, see "[Get a code scanning analysis for a repository](/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository)." You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + getSarif: { + (params?: RestEndpointMethodTypes["codeScanning"]["getSarif"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all instances of the specified code scanning alert. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. 
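+ *
+ * **Example calling this endpoint with `@octokit/rest`** (a hedged sketch; the owner, repository, and alert number are placeholders):
+ *
+ * ```
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // token with `security_events` scope
+ * const { data } = await octokit.rest.codeScanning.listAlertInstances({
+ *   owner: "my-org",
+ *   repo: "my-repo",
+ *   alert_number: 4,
+ * });
+ * console.log(data.length); // number of instances of the alert
+ * ```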
+ */ + listAlertInstances: { + (params?: RestEndpointMethodTypes["codeScanning"]["listAlertInstances"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists code scanning alerts for the default branch for all eligible repositories in an organization. Eligible repositories are repositories that are owned by organizations that you own or for which you are a security manager. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an owner or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + listAlertsForOrg: { + (params?: RestEndpointMethodTypes["codeScanning"]["listAlertsForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all open code scanning alerts for the default branch (usually `main` + * or `master`). You must use an access token with the `security_events` scope to use + * this endpoint with private repos, the `public_repo` scope also grants permission to read + * security events on public repos only. GitHub Apps must have the `security_events` read + * permission to use this endpoint. + * + * The response includes a `most_recent_instance` object. + * This provides details of the most recent instance of this alert + * for the default branch or for the specified Git reference + * (if you used `ref` in the request). + */ + listAlertsForRepo: { + (params?: RestEndpointMethodTypes["codeScanning"]["listAlertsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all instances of the specified code scanning alert. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * @deprecated octokit.rest.codeScanning.listAlertsInstances() has been renamed to octokit.rest.codeScanning.listAlertInstances() (2021-04-30) + */ + listAlertsInstances: { + (params?: RestEndpointMethodTypes["codeScanning"]["listAlertsInstances"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the CodeQL databases that are available in a repository. + * + * For private repositories, you must use an access token with the `security_events` scope. + * For public repositories, you can use tokens with the `security_events` or `public_repo` scope. + * GitHub Apps must have the `contents` read permission to use this endpoint. + */ + listCodeqlDatabases: { + (params?: RestEndpointMethodTypes["codeScanning"]["listCodeqlDatabases"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the details of all code scanning analyses for a repository, + * starting with the most recent. 
+ * The response is paginated and you can use the `page` and `per_page` parameters + * to list the analyses you're interested in. + * By default 30 analyses are listed per page. + * + * The `rules_count` field in the response give the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * **Deprecation notice**: + * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The tool name can now be found inside the `tool` field. + */ + listRecentAnalyses: { + (params?: RestEndpointMethodTypes["codeScanning"]["listRecentAnalyses"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the status of a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint. + */ + updateAlert: { + (params?: RestEndpointMethodTypes["codeScanning"]["updateAlert"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates a code scanning default setup configuration. + * You must use an access token with the `repo` scope to use this endpoint with private repos or the `public_repo` + * scope for public repos. GitHub Apps must have the `repo` write permission to use this endpoint. + */ + updateDefaultSetup: { + (params?: RestEndpointMethodTypes["codeScanning"]["updateDefaultSetup"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint for private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * There are two places where you can upload code scanning results. + * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests)." + * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." + * + * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. 
For example: + * + * ``` + * gzip -c analysis-data.sarif | base64 -w0 + * ``` + *
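+ * The same compression and upload can be scripted end to end from Node.js. The sketch below is illustrative only; the repository, commit SHA, and token variable are placeholders:
+ *
+ * ```
+ * import { readFileSync } from "node:fs";
+ * import { gzipSync } from "node:zlib";
+ * import { Octokit } from "@octokit/rest";
+ *
+ * // gzip the SARIF file and encode it as a Base64 string, as required by this endpoint.
+ * const sarif = gzipSync(readFileSync("analysis-data.sarif")).toString("base64");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // token with `security_events` scope
+ * await octokit.rest.codeScanning.uploadSarif({
+ *   owner: "my-org",
+ *   repo: "my-repo",
+ *   commit_sha: "0123456789abcdef0123456789abcdef01234567", // placeholder: the analyzed commit
+ *   ref: "refs/heads/my-branch",
+ *   sarif,
+ * });
+ * ```
+ *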
+ * SARIF upload supports a maximum number of entries per the following data objects, and an analysis will be rejected if any of these objects is above its maximum value. For some objects, there are additional values over which the entries will be ignored while keeping the most important entries whenever applicable. + * To get the most out of your analysis when it includes data above the supported limits, try to optimize the analysis configuration. For example, for the CodeQL tool, identify and remove the most noisy queries. + * + * + * | **SARIF data** | **Maximum values** | **Additional limits** | + * |----------------------------------|:------------------:|----------------------------------------------------------------------------------| + * | Runs per file | 20 | | + * | Results per run | 25,000 | Only the top 5,000 results will be included, prioritized by severity. | + * | Rules per run | 25,000 | | + * | Tool extensions per run | 100 | | + * | Thread Flow Locations per result | 10,000 | Only the top 1,000 Thread Flow Locations will be included, using prioritization. | + * | Location per result | 1,000 | Only 100 locations will be included. | + * | Tags per rule | 20 | Only 10 tags will be included. | + * + * + * The `202 Accepted` response includes an `id` value. + * You can use this ID to check the status of the upload by using it in the `/sarifs/{sarif_id}` endpoint. + * For more information, see "[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload)." + */ + uploadSarif: { + (params?: RestEndpointMethodTypes["codeScanning"]["uploadSarif"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + codesOfConduct: { + getAllCodesOfConduct: { + (params?: RestEndpointMethodTypes["codesOfConduct"]["getAllCodesOfConduct"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getConductCode: { + (params?: RestEndpointMethodTypes["codesOfConduct"]["getConductCode"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + codespaces: { + /** + * Adds a repository to the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on the referenced repository to use this endpoint. + */ + addRepositoryForSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["addRepositoryForSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
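+ *
+ * **Example calling this endpoint with `@octokit/rest`** (a hedged sketch; the organization, secret name, and repository id are placeholders):
+ *
+ * ```
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // token with `admin:org` scope
+ * await octokit.rest.codespaces.addSelectedRepoToOrgSecret({
+ *   org: "my-org",
+ *   secret_name: "MY_SECRET",
+ *   repository_id: 123456,
+ * });
+ * ```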
+ */ + addSelectedRepoToOrgSecret: { + (params?: RestEndpointMethodTypes["codespaces"]["addSelectedRepoToOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the machine types a codespace can transition to use. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + codespaceMachinesForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["codespaceMachinesForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new codespace, owned by the authenticated user. + * + * This endpoint requires either a `repository_id` OR a `pull_request` but not both. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + createForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["createForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
+ * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateOrgSecret: { + (params?: RestEndpointMethodTypes["codespaces"]["createOrUpdateOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have write access to the `codespaces_secrets` + * repository permission to use this endpoint. + * + * #### Example of encrypting a secret using Node.js + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * #### Example of encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example of encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
+ * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example of encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateRepoSecret: { + (params?: RestEndpointMethodTypes["codespaces"]["createOrUpdateRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates a secret for a user's codespace with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must also have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
+ * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["createOrUpdateSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a codespace owned by the authenticated user for the specified pull request. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + createWithPrForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["createWithPrForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a codespace owned by the authenticated user in the specified repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + createWithRepoForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["createWithRepoForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Codespaces for the specified users will no longer be billed to the organization. + * To use this endpoint, the billing settings for the organization must be set to `selected_members`. For information on how to change this setting, please see [these docs](https://docs.github.com/rest/codespaces/organizations#manage-access-control-for-organization-codespaces). You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + deleteCodespacesBillingUsers: { + (params?: RestEndpointMethodTypes["codespaces"]["deleteCodespacesBillingUsers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + deleteForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["deleteForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a user's codespace.
+ * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + deleteFromOrganization: { + (params?: RestEndpointMethodTypes["codespaces"]["deleteFromOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes an organization secret using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + deleteOrgSecret: { + (params?: RestEndpointMethodTypes["codespaces"]["deleteOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have write access to the `codespaces_secrets` repository permission to use this endpoint. + */ + deleteRepoSecret: { + (params?: RestEndpointMethodTypes["codespaces"]["deleteRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret from a user's codespaces using the secret name. Deleting the secret will remove access from all codespaces that were allowed to access the secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + deleteSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["deleteSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Triggers an export of the specified codespace and returns a URL and ID where the status of the export can be monitored. + * + * If changes cannot be pushed to the codespace's repository, they will be pushed to a new or previously-existing fork instead. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + exportForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["exportForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the codespaces that a member of an organization has for repositories in that organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + getCodespacesForUserInOrg: { + (params?: RestEndpointMethodTypes["codespaces"]["getCodespacesForUserInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets information about an export of a codespace. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. 
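+ *
+ * **Example calling this endpoint with `@octokit/rest`** (a hedged sketch; the codespace name and export id are placeholders, and the parameter names shown simply mirror the endpoint's path segments):
+ *
+ * ```
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // personal access token with `codespace` scope
+ * const { data } = await octokit.rest.codespaces.getExportDetailsForAuthenticatedUser({
+ *   codespace_name: "my-codespace",
+ *   export_id: "latest",
+ * });
+ * console.log(data.state); // export status
+ * ```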
+ */ + getExportDetailsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["getExportDetailsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets information about a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + getForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["getForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a public key for an organization, which is required in order to encrypt secrets. You need to encrypt the value of a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + getOrgPublicKey: { + (params?: RestEndpointMethodTypes["codespaces"]["getOrgPublicKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets an organization secret without revealing its encrypted value. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + getOrgSecret: { + (params?: RestEndpointMethodTypes["codespaces"]["getOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + getPublicKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["getPublicKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have write access to the `codespaces_secrets` repository permission to use this endpoint. + */ + getRepoPublicKey: { + (params?: RestEndpointMethodTypes["codespaces"]["getRepoPublicKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have write access to the `codespaces_secrets` repository permission to use this endpoint. + */ + getRepoSecret: { + (params?: RestEndpointMethodTypes["codespaces"]["getRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a secret available to a user's codespaces without revealing its encrypted value. 
+ * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + getSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["getSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the devcontainer.json files associated with a specified repository and the authenticated user. These files + * specify launchpoint configurations for codespaces created within the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + listDevcontainersInRepositoryForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["listDevcontainersInRepositoryForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the authenticated user's codespaces. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + listForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["listForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the codespaces associated to a specified organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + listInOrganization: { + (params?: RestEndpointMethodTypes["codespaces"]["listInOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the codespaces associated to a specified repository and the authenticated user. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + listInRepositoryForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["listInRepositoryForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all Codespaces secrets available at the organization-level without revealing their encrypted values. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + listOrgSecrets: { + (params?: RestEndpointMethodTypes["codespaces"]["listOrgSecrets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have write access to the `codespaces_secrets` repository permission to use this endpoint. 
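+ *
+ * **Example calling this endpoint with `@octokit/rest`** (a hedged sketch; the owner and repository are placeholders):
+ *
+ * ```
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // token with `repo` scope
+ * const { data } = await octokit.rest.codespaces.listRepoSecrets({ owner: "my-org", repo: "my-repo" });
+ * console.log(data.total_count, data.secrets.map((secret) => secret.name));
+ * ```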
+ */ + listRepoSecrets: { + (params?: RestEndpointMethodTypes["codespaces"]["listRepoSecrets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the repositories that have been granted the ability to use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + listRepositoriesForSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["listRepositoriesForSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all secrets available for a user's Codespaces without revealing their + * encrypted values. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + listSecretsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["listSecretsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + listSelectedReposForOrgSecret: { + (params?: RestEndpointMethodTypes["codespaces"]["listSelectedReposForOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the default attributes for codespaces created by the user with the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + preFlightWithRepoForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["preFlightWithRepoForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Publishes an unpublished codespace, creating a new repository and assigning it to the codespace. + * + * The codespace's token is granted write permissions to the repository, allowing the user to push their changes. + * + * This will fail for a codespace that is already published, meaning it has an associated repository. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. 
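+ *
+ * **Example calling this endpoint with `@octokit/rest`** (a hedged sketch; the codespace name, the new repository name, and the `private` flag are placeholders and assumptions):
+ *
+ * ```
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // personal access token with `codespace` scope
+ * const { data } = await octokit.rest.codespaces.publishForAuthenticatedUser({
+ *   codespace_name: "my-codespace",
+ *   name: "my-new-repo",
+ *   private: true,
+ * });
+ * console.log(data);
+ * ```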
+ */ + publishForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["publishForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a repository from the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + removeRepositoryForSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["removeRepositoryForSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + removeSelectedRepoFromOrgSecret: { + (params?: RestEndpointMethodTypes["codespaces"]["removeSelectedRepoFromOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the machine types available for a given repository based on its configuration. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_metadata` repository permission to use this endpoint. + */ + repoMachinesForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["repoMachinesForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets which users can access codespaces in an organization. This is synonymous with granting or revoking codespaces billing permissions for users according to the visibility. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + setCodespacesBilling: { + (params?: RestEndpointMethodTypes["codespaces"]["setCodespacesBilling"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Codespaces for the specified users will be billed to the organization. + * To use this endpoint, the billing settings for the organization must be set to `selected_members`. For information on how to change this setting, please see [these docs](https://docs.github.com/rest/codespaces/organizations#manage-access-control-for-organization-codespaces). You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + setCodespacesBillingUsers: { + (params?: RestEndpointMethodTypes["codespaces"]["setCodespacesBillingUsers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Select the repositories that will use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint.
User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + setRepositoriesForSecretForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["setRepositoriesForSecretForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + setSelectedReposForOrgSecret: { + (params?: RestEndpointMethodTypes["codespaces"]["setSelectedReposForOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Starts a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + startForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["startForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Stops a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + stopForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["stopForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Stops a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + stopInOrganization: { + (params?: RestEndpointMethodTypes["codespaces"]["stopInOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates a codespace owned by the authenticated user. Currently only the codespace's machine type and recent folders can be modified using this endpoint. + * + * If you specify a new machine type it will be applied the next time your codespace is started. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + updateForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["codespaces"]["updateForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + dependabot: { + /** + * Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. 
The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + addSelectedRepoToOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["addSelectedRepoToOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization + * permission to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. 
+ * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["createOrUpdateOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository + * permission to use this endpoint. + * + * **Example encrypting a secret using Node.js** + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * const sodium = require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with the secret you want to encrypt + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key + * + * //Check if libsodium is ready and then proceed. + * sodium.ready.then(() => { + * // Convert Secret & Base64 key to Uint8Array. + * let binkey = sodium.from_base64(key, sodium.base64_variants.ORIGINAL) + * let binsec = sodium.from_string(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes = sodium.crypto_box_seal(binsec, binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output = sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) + * + * console.log(output) + * }); + * ``` + * + * **Example encrypting a secret using Python** + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * **Example encrypting a secret using C#** + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * **Example encrypting a secret using Ruby** + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. 
+ * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + createOrUpdateRepoSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["createOrUpdateRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + deleteOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["deleteOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + deleteRepoSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["deleteRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** read permission to use this endpoint. + */ + getAlert: { + (params?: RestEndpointMethodTypes["dependabot"]["getAlert"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + getOrgPublicKey: { + (params?: RestEndpointMethodTypes["dependabot"]["getOrgPublicKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + getOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["getOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. 
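+ *
+ * For illustration only (hypothetical owner/repo; the `octokit` instance is assumed), fetching the key that the encryption examples above rely on might look like:
+ *
+ * ```typescript
+ * // The returned `key` is the Base64 public key to encrypt against, and the
+ * // `key_id` must accompany the encrypted value when a secret is created or updated.
+ * const { data: publicKey } = await octokit.rest.dependabot.getRepoPublicKey({
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ * });
+ * console.log(publicKey.key_id, publicKey.key);
+ * ```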
+ */ + getRepoPublicKey: { + (params?: RestEndpointMethodTypes["dependabot"]["getRepoPublicKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. + */ + getRepoSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["getRepoSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists Dependabot alerts for repositories that are owned by the specified enterprise. + * To use this endpoint, you must be a member of the enterprise, and you must use an + * access token with the `repo` scope or `security_events` scope. + * Alerts are only returned for organizations in the enterprise for which you are an organization owner or a security manager. For more information about security managers, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + */ + listAlertsForEnterprise: { + (params?: RestEndpointMethodTypes["dependabot"]["listAlertsForEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists Dependabot alerts for an organization. + * + * To use this endpoint, you must be an owner or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have **Dependabot alerts** read permission to use this endpoint. + */ + listAlertsForOrg: { + (params?: RestEndpointMethodTypes["dependabot"]["listAlertsForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** read permission to use this endpoint. + */ + listAlertsForRepo: { + (params?: RestEndpointMethodTypes["dependabot"]["listAlertsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + listOrgSecrets: { + (params?: RestEndpointMethodTypes["dependabot"]["listOrgSecrets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. 
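+ *
+ * A sketch of a typical call (hypothetical owner/repo; the `octokit.paginate` helper is assumed to be available from the pagination plugin and is not declared in this file):
+ *
+ * ```typescript
+ * // Collect secret names across all pages; encrypted values are never returned.
+ * const secrets = await octokit.paginate(octokit.rest.dependabot.listRepoSecrets, {
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ *   per_page: 100,
+ * });
+ * console.log(secrets.map((secret) => secret.name));
+ * ```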
+ */ + listRepoSecrets: { + (params?: RestEndpointMethodTypes["dependabot"]["listRepoSecrets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + listSelectedReposForOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["listSelectedReposForOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + removeSelectedRepoFromOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["removeSelectedRepoFromOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. + */ + setSelectedReposForOrgSecret: { + (params?: RestEndpointMethodTypes["dependabot"]["setSelectedReposForOrgSecret"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** write permission to use this endpoint. + * + * To use this endpoint, you must have access to security alerts for the repository. For more information, see "[Granting access to security alerts](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-security-and-analysis-settings-for-your-repository#granting-access-to-security-alerts)." + */ + updateAlert: { + (params?: RestEndpointMethodTypes["dependabot"]["updateAlert"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + dependencyGraph: { + /** + * Create a new snapshot of a repository's dependencies. You must authenticate using an access token with the `repo` scope to use this endpoint for a repository that the requesting user has access to. 
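+ *
+ * A heavily simplified sketch of a snapshot submission (all values are placeholders, and the payload fields follow the dependency submission format as commonly documented; verify them against the current schema before relying on this):
+ *
+ * ```typescript
+ * await octokit.rest.dependencyGraph.createRepositorySnapshot({
+ *   owner: "octocat",                     // hypothetical owner
+ *   repo: "hello-world",                  // hypothetical repository
+ *   version: 0,
+ *   sha: "ce587453ced02b1526dfb4cb910479d431683101", // commit the snapshot describes
+ *   ref: "refs/heads/main",
+ *   job: { correlator: "ci/build", id: "1234567890" },
+ *   detector: { name: "example-detector", version: "1.0.0", url: "https://example.com" },
+ *   scanned: new Date().toISOString(),
+ *   manifests: {},                        // dependency manifests keyed by filename
+ * });
+ * ```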
+ */ + createRepositorySnapshot: { + (params?: RestEndpointMethodTypes["dependencyGraph"]["createRepositorySnapshot"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the diff of the dependency changes between two commits of a repository, based on the changes to the dependency manifests made in those commits. + */ + diffRange: { + (params?: RestEndpointMethodTypes["dependencyGraph"]["diffRange"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Exports the software bill of materials (SBOM) for a repository in SPDX JSON format. + */ + exportSbom: { + (params?: RestEndpointMethodTypes["dependencyGraph"]["exportSbom"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + emojis: { + /** + * Lists all the emojis available to use on GitHub. + */ + get: { + (params?: RestEndpointMethodTypes["emojis"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + gists: { + checkIsStarred: { + (params?: RestEndpointMethodTypes["gists"]["checkIsStarred"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Allows you to add a new gist with one or more files. + * + * **Note:** Don't name your files "gistfile" with a numerical suffix. This is the format of the automatic naming scheme that Gist uses internally. + */ + create: { + (params?: RestEndpointMethodTypes["gists"]["create"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + createComment: { + (params?: RestEndpointMethodTypes["gists"]["createComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + delete: { + (params?: RestEndpointMethodTypes["gists"]["delete"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteComment: { + (params?: RestEndpointMethodTypes["gists"]["deleteComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + fork: { + (params?: RestEndpointMethodTypes["gists"]["fork"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + get: { + (params?: RestEndpointMethodTypes["gists"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getComment: { + (params?: RestEndpointMethodTypes["gists"]["getComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getRevision: { + (params?: RestEndpointMethodTypes["gists"]["getRevision"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the authenticated user's gists or if called anonymously, this endpoint returns all public gists: + */ + list: { + (params?: RestEndpointMethodTypes["gists"]["list"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listComments: { + (params?: 
RestEndpointMethodTypes["gists"]["listComments"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listCommits: { + (params?: RestEndpointMethodTypes["gists"]["listCommits"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists public gists for the specified user: + */ + listForUser: { + (params?: RestEndpointMethodTypes["gists"]["listForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listForks: { + (params?: RestEndpointMethodTypes["gists"]["listForks"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List public gists sorted by most recently updated to least recently updated. + * + * Note: With [pagination](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination), you can fetch up to 3000 gists. For example, you can fetch 100 pages with 30 gists per page or 30 pages with 100 gists per page. + */ + listPublic: { + (params?: RestEndpointMethodTypes["gists"]["listPublic"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the authenticated user's starred gists: + */ + listStarred: { + (params?: RestEndpointMethodTypes["gists"]["listStarred"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + star: { + (params?: RestEndpointMethodTypes["gists"]["star"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + unstar: { + (params?: RestEndpointMethodTypes["gists"]["unstar"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Allows you to update a gist's description and to update, delete, or rename gist files. Files from the previous version of the gist that aren't explicitly changed during an edit are unchanged. + */ + update: { + (params?: RestEndpointMethodTypes["gists"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateComment: { + (params?: RestEndpointMethodTypes["gists"]["updateComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + git: { + createBlob: { + (params?: RestEndpointMethodTypes["git"]["createBlob"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. 
The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + createCommit: { + (params?: RestEndpointMethodTypes["git"]["createCommit"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a reference for your repository. You are unable to create new references for empty repositories, even if the commit SHA-1 hash used exists. Empty repositories are repositories without branches. + */ + createRef: { + (params?: RestEndpointMethodTypes["git"]["createRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Note that creating a tag object does not create the reference that makes a tag in Git. If you want to create an annotated tag in Git, you have to do this call to create the tag object, and then [create](https://docs.github.com/rest/reference/git#create-a-reference) the `refs/tags/[tag]` reference. If you want to create a lightweight tag, you only have to [create](https://docs.github.com/rest/reference/git#create-a-reference) the tag reference - this call would be unnecessary. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. 
| + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + createTag: { + (params?: RestEndpointMethodTypes["git"]["createTag"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The tree creation API accepts nested entries. If you specify both a tree and a nested path modifying that tree, this endpoint will overwrite the contents of the tree with the new path contents, and create a new tree structure. + * + * If you use this endpoint to add, delete, or modify the file contents in a tree, you will need to commit the tree and then update a branch to point to the commit. For more information see "[Create a commit](https://docs.github.com/rest/reference/git#create-a-commit)" and "[Update a reference](https://docs.github.com/rest/reference/git#update-a-reference)." + * + * Returns an error if you try to delete a file that does not exist. + */ + createTree: { + (params?: RestEndpointMethodTypes["git"]["createTree"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteRef: { + (params?: RestEndpointMethodTypes["git"]["deleteRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The `content` in the response will always be Base64 encoded. + * + * _Note_: This API supports blobs up to 100 megabytes in size. + */ + getBlob: { + (params?: RestEndpointMethodTypes["git"]["getBlob"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). 
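+ *
+ * As an illustrative sketch (the `octokit` instance, owner, repo, and SHA are hypothetical), the commit and the `verification` object described below can be read like this:
+ *
+ * ```typescript
+ * const { data: commit } = await octokit.rest.git.getCommit({
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ *   commit_sha: "7638417db6d59f3c431d3e1f261cc637155684cd",
+ * });
+ * console.log(commit.message, commit.verification.verified, commit.verification.reason);
+ * ```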
+ * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + getCommit: { + (params?: RestEndpointMethodTypes["git"]["getCommit"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a single reference from your Git database. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't match an existing ref, a `404` is returned. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + */ + getRef: { + (params?: RestEndpointMethodTypes["git"]["getRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. 
The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + getTag: { + (params?: RestEndpointMethodTypes["git"]["getTag"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a single tree using the SHA1 value for that tree. + * + * If `truncated` is `true` in the response then the number of items in the `tree` array exceeded our maximum limit. If you need to fetch more items, use the non-recursive method of fetching trees, and fetch one sub-tree at a time. + * + * + * **Note**: The limit for the `tree` array is 100,000 entries with a maximum size of 7 MB when using the `recursive` parameter. + */ + getTree: { + (params?: RestEndpointMethodTypes["git"]["getTree"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns an array of references from your Git database that match the supplied name. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't exist in the repository, but existing refs start with `:ref`, they will be returned as an array. + * + * When you use this endpoint without providing a `:ref`, it will return an array of all the references from your Git database, including notes and stashes if they exist on the server. Anything in the namespace is returned, not just `heads` and `tags`. 
+ * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * If you request matching references for a branch named `feature` but the branch `feature` doesn't exist, the response can still include other matching head refs that start with the word `feature`, such as `featureA` and `featureB`. + */ + listMatchingRefs: { + (params?: RestEndpointMethodTypes["git"]["listMatchingRefs"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateRef: { + (params?: RestEndpointMethodTypes["git"]["updateRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + gitignore: { + /** + * List all templates available to pass as an option when [creating a repository](https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user). + */ + getAllTemplates: { + (params?: RestEndpointMethodTypes["gitignore"]["getAllTemplates"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The API also allows fetching the source of a single template. + * Use the raw [media type](https://docs.github.com/rest/overview/media-types/) to get the raw contents. + */ + getTemplate: { + (params?: RestEndpointMethodTypes["gitignore"]["getTemplate"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + interactions: { + /** + * Shows which type of GitHub user can interact with your public repositories and when the restriction expires. + */ + getRestrictionsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["interactions"]["getRestrictionsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Shows which type of GitHub user can interact with this organization and when the restriction expires. If there are no restrictions, you will see an empty response. + */ + getRestrictionsForOrg: { + (params?: RestEndpointMethodTypes["interactions"]["getRestrictionsForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Shows which type of GitHub user can interact with this repository and when the restriction expires. If there are no restrictions, you will see an empty response. + */ + getRestrictionsForRepo: { + (params?: RestEndpointMethodTypes["interactions"]["getRestrictionsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Shows which type of GitHub user can interact with your public repositories and when the restriction expires.
+ * @deprecated octokit.rest.interactions.getRestrictionsForYourPublicRepos() has been renamed to octokit.rest.interactions.getRestrictionsForAuthenticatedUser() (2021-02-02) + */ + getRestrictionsForYourPublicRepos: { + (params?: RestEndpointMethodTypes["interactions"]["getRestrictionsForYourPublicRepos"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes any interaction restrictions from your public repositories. + */ + removeRestrictionsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["interactions"]["removeRestrictionsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes all interaction restrictions from public repositories in the given organization. You must be an organization owner to remove restrictions. + */ + removeRestrictionsForOrg: { + (params?: RestEndpointMethodTypes["interactions"]["removeRestrictionsForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes all interaction restrictions from the given repository. You must have owner or admin access to remove restrictions. If the interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. + */ + removeRestrictionsForRepo: { + (params?: RestEndpointMethodTypes["interactions"]["removeRestrictionsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes any interaction restrictions from your public repositories. + * @deprecated octokit.rest.interactions.removeRestrictionsForYourPublicRepos() has been renamed to octokit.rest.interactions.removeRestrictionsForAuthenticatedUser() (2021-02-02) + */ + removeRestrictionsForYourPublicRepos: { + (params?: RestEndpointMethodTypes["interactions"]["removeRestrictionsForYourPublicRepos"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user. + */ + setRestrictionsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["interactions"]["setRestrictionsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Temporarily restricts interactions to a certain type of GitHub user in any public repository in the given organization. You must be an organization owner to set these restrictions. Setting the interaction limit at the organization level will overwrite any interaction limits that are set for individual repositories owned by the organization. + */ + setRestrictionsForOrg: { + (params?: RestEndpointMethodTypes["interactions"]["setRestrictionsForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Temporarily restricts interactions to a certain type of GitHub user within the given repository. 
You must have owner or admin access to set these restrictions. If an interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. + */ + setRestrictionsForRepo: { + (params?: RestEndpointMethodTypes["interactions"]["setRestrictionsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user. + * @deprecated octokit.rest.interactions.setRestrictionsForYourPublicRepos() has been renamed to octokit.rest.interactions.setRestrictionsForAuthenticatedUser() (2021-02-02) + */ + setRestrictionsForYourPublicRepos: { + (params?: RestEndpointMethodTypes["interactions"]["setRestrictionsForYourPublicRepos"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + issues: { + /** + * Adds up to 10 assignees to an issue. Users already assigned to an issue are not replaced. + */ + addAssignees: { + (params?: RestEndpointMethodTypes["issues"]["addAssignees"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds labels to an issue. If you provide an empty array of labels, all labels are removed from the issue. + */ + addLabels: { + (params?: RestEndpointMethodTypes["issues"]["addLabels"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Checks if a user has permission to be assigned to an issue in this repository. + * + * If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. + * + * Otherwise a `404` status code is returned. + */ + checkUserCanBeAssigned: { + (params?: RestEndpointMethodTypes["issues"]["checkUserCanBeAssigned"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Checks if a user has permission to be assigned to a specific issue. + * + * If the `assignee` can be assigned to this issue, a `204` status code with no content is returned. + * + * Otherwise a `404` status code is returned. + */ + checkUserCanBeAssignedToIssue: { + (params?: RestEndpointMethodTypes["issues"]["checkUserCanBeAssignedToIssue"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Any user with pull access to a repository can create an issue. If [issues are disabled in the repository](https://docs.github.com/articles/disabling-issues/), the API returns a `410 Gone` status. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. 
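+ *
+ * A minimal usage sketch (hypothetical owner/repo; only `title` is required, the remaining fields are optional):
+ *
+ * ```typescript
+ * const { data: issue } = await octokit.rest.issues.create({
+ *   owner: "octocat",
+ *   repo: "hello-world",
+ *   title: "Found a bug",
+ *   body: "Steps to reproduce: ...",
+ *   labels: ["bug"],
+ *   assignees: ["octocat"],
+ * });
+ * console.log(`Created issue #${issue.number}`);
+ * ```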
+ */ + create: { + (params?: RestEndpointMethodTypes["issues"]["create"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can use the REST API to create comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). + * Creating content too quickly using this endpoint may result in secondary rate limiting. + * See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" + * and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" + * for details. + */ + createComment: { + (params?: RestEndpointMethodTypes["issues"]["createComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a label for the specified repository with the given name and color. The name and color parameters are required. The color must be a valid [hexadecimal color code](http://www.color-hex.com/). + */ + createLabel: { + (params?: RestEndpointMethodTypes["issues"]["createLabel"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a milestone. + */ + createMilestone: { + (params?: RestEndpointMethodTypes["issues"]["createMilestone"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can use the REST API to delete comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request. + */ + deleteComment: { + (params?: RestEndpointMethodTypes["issues"]["deleteComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a label using the given label name. + */ + deleteLabel: { + (params?: RestEndpointMethodTypes["issues"]["deleteLabel"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a milestone using the given milestone number. + */ + deleteMilestone: { + (params?: RestEndpointMethodTypes["issues"]["deleteMilestone"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The API returns a [`301 Moved Permanently` status](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-redirects-redirects) if the issue was + * [transferred](https://docs.github.com/articles/transferring-an-issue-to-another-repository/) to another repository. If + * the issue was transferred to or deleted from a repository where the authenticated user lacks read access, the API + * returns a `404 Not Found` status. If the issue was deleted from a repository where the authenticated user has read + * access, the API returns a `410 Gone` status. To receive webhook events for transferred and deleted issues, subscribe + * to the [`issues`](https://docs.github.com/webhooks/event-payloads/#issues) webhook. + * + * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. 
For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: { + (params?: RestEndpointMethodTypes["issues"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can use the REST API to get comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request. + */ + getComment: { + (params?: RestEndpointMethodTypes["issues"]["getComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a single event by the event id. + */ + getEvent: { + (params?: RestEndpointMethodTypes["issues"]["getEvent"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a label using the given name. + */ + getLabel: { + (params?: RestEndpointMethodTypes["issues"]["getLabel"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a milestone using the given milestone number. + */ + getMilestone: { + (params?: RestEndpointMethodTypes["issues"]["getMilestone"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List issues assigned to the authenticated user across all visible repositories including owned repositories, member + * repositories, and organization repositories. You can use the `filter` query parameter to fetch issues that are not + * necessarily assigned to you. + * + * + * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + list: { + (params?: RestEndpointMethodTypes["issues"]["list"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the [available assignees](https://docs.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) for issues in a repository. + */ + listAssignees: { + (params?: RestEndpointMethodTypes["issues"]["listAssignees"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can use the REST API to list comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request. + * + * Issue comments are ordered by ascending ID. 
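+ * + * @example + * // Hedged sketch (editor's addition): fetches every page of comments; assumes an authenticated Octokit instance whose `paginate` helper is available (as on the full @octokit/rest client) and placeholder owner/repo/issue_number values. + * const comments = await octokit.paginate(octokit.rest.issues.listComments, { owner: "octo-org", repo: "hello-world", issue_number: 1, per_page: 100 }); + * console.log(comments.length);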
+ */ + listComments: { + (params?: RestEndpointMethodTypes["issues"]["listComments"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can use the REST API to list comments on issues and pull requests for a repository. Every pull request is an issue, but not every issue is a pull request. + * + * By default, issue comments are ordered by ascending ID. + */ + listCommentsForRepo: { + (params?: RestEndpointMethodTypes["issues"]["listCommentsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all events for an issue. + */ + listEvents: { + (params?: RestEndpointMethodTypes["issues"]["listEvents"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists events for a repository. + */ + listEventsForRepo: { + (params?: RestEndpointMethodTypes["issues"]["listEventsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all timeline events for an issue. + */ + listEventsForTimeline: { + (params?: RestEndpointMethodTypes["issues"]["listEventsForTimeline"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List issues across owned and member repositories assigned to the authenticated user. + * + * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + listForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["issues"]["listForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List issues in an organization assigned to the authenticated user. + * + * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + listForOrg: { + (params?: RestEndpointMethodTypes["issues"]["listForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List issues in a repository. Only open issues will be listed. + * + * **Note**: GitHub's REST API considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. 
To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + listForRepo: { + (params?: RestEndpointMethodTypes["issues"]["listForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists labels for issues in a milestone. + */ + listLabelsForMilestone: { + (params?: RestEndpointMethodTypes["issues"]["listLabelsForMilestone"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all labels for a repository. + */ + listLabelsForRepo: { + (params?: RestEndpointMethodTypes["issues"]["listLabelsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all labels for an issue. + */ + listLabelsOnIssue: { + (params?: RestEndpointMethodTypes["issues"]["listLabelsOnIssue"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists milestones for a repository. + */ + listMilestones: { + (params?: RestEndpointMethodTypes["issues"]["listMilestones"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access can lock an issue or pull request's conversation. + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + lock: { + (params?: RestEndpointMethodTypes["issues"]["lock"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes all labels from an issue. + */ + removeAllLabels: { + (params?: RestEndpointMethodTypes["issues"]["removeAllLabels"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes one or more assignees from an issue. + */ + removeAssignees: { + (params?: RestEndpointMethodTypes["issues"]["removeAssignees"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes the specified label from the issue, and returns the remaining labels on the issue. This endpoint returns a `404 Not Found` status if the label does not exist. + */ + removeLabel: { + (params?: RestEndpointMethodTypes["issues"]["removeLabel"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes any previous labels and sets the new labels for an issue. + */ + setLabels: { + (params?: RestEndpointMethodTypes["issues"]["setLabels"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access can unlock an issue's conversation. + */ + unlock: { + (params?: RestEndpointMethodTypes["issues"]["unlock"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Issue owners and users with push access can edit an issue. 
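+ * + * @example + * // Hedged sketch (editor's addition): closes an issue; assumes an authenticated Octokit instance and placeholder owner/repo/issue_number values. + * await octokit.rest.issues.update({ owner: "octo-org", repo: "hello-world", issue_number: 1, state: "closed" });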
+ */ + update: { + (params?: RestEndpointMethodTypes["issues"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can use the REST API to update comments on issues and pull requests. Every pull request is an issue, but not every issue is a pull request. + */ + updateComment: { + (params?: RestEndpointMethodTypes["issues"]["updateComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates a label using the given label name. + */ + updateLabel: { + (params?: RestEndpointMethodTypes["issues"]["updateLabel"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateMilestone: { + (params?: RestEndpointMethodTypes["issues"]["updateMilestone"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + licenses: { + get: { + (params?: RestEndpointMethodTypes["licenses"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getAllCommonlyUsed: { + (params?: RestEndpointMethodTypes["licenses"]["getAllCommonlyUsed"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This method returns the contents of the repository's license file, if one is detected. + * + * Similar to [Get repository content](https://docs.github.com/rest/reference/repos#get-repository-content), this method also supports [custom media types](https://docs.github.com/rest/overview/media-types) for retrieving the raw license content or rendered license HTML. + */ + getForRepo: { + (params?: RestEndpointMethodTypes["licenses"]["getForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + markdown: { + render: { + (params?: RestEndpointMethodTypes["markdown"]["render"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You must send Markdown as plain text (using a `Content-Type` header of `text/plain` or `text/x-markdown`) to this endpoint, rather than using JSON format. In raw mode, [GitHub Flavored Markdown](https://github.github.com/gfm/) is not supported and Markdown will be rendered in plain format like a README.md file. Markdown content must be 400 KB or less. + */ + renderRaw: { + (params?: RestEndpointMethodTypes["markdown"]["renderRaw"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + meta: { + /** + * Returns meta information about GitHub, including a list of GitHub's IP addresses. For more information, see "[About GitHub's IP addresses](https://docs.github.com/articles/about-github-s-ip-addresses/)." + * + * The API's response also includes a list of GitHub's domain names. + * + * The values shown in the documentation's response are example values. You must always query the API directly to get the latest values. + * + * **Note:** This endpoint returns both IPv4 and IPv6 addresses. However, not all features support IPv6. You should refer to the specific documentation for each feature to determine if IPv6 is supported. 
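+ * + * @example + * // Hedged sketch (editor's addition): no parameters are required; assumes an Octokit instance named `octokit`. + * const { data: meta } = await octokit.rest.meta.get(); + * console.log(meta.hooks, meta.api);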
+ */ + get: { + (params?: RestEndpointMethodTypes["meta"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get all supported GitHub API versions. + */ + getAllVersions: { + (params?: RestEndpointMethodTypes["meta"]["getAllVersions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get the octocat as ASCII art + */ + getOctocat: { + (params?: RestEndpointMethodTypes["meta"]["getOctocat"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get a random sentence from the Zen of GitHub + */ + getZen: { + (params?: RestEndpointMethodTypes["meta"]["getZen"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get Hypermedia links to resources accessible in GitHub's REST API + */ + root: { + (params?: RestEndpointMethodTypes["meta"]["root"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + migrations: { + /** + * Stop an import for a repository. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + cancelImport: { + (params?: RestEndpointMethodTypes["migrations"]["cancelImport"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a previous migration archive. Downloadable migration archives are automatically deleted after seven days. Migration metadata, which is returned in the [List user migrations](https://docs.github.com/rest/migrations/users#list-user-migrations) and [Get a user migration status](https://docs.github.com/rest/migrations/users#get-a-user-migration-status) endpoints, will continue to be available even after an archive is deleted. + */ + deleteArchiveForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["deleteArchiveForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a previous migration archive. Migration archives are automatically deleted after seven days. + */ + deleteArchiveForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["deleteArchiveForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Fetches the URL to a migration archive. + */ + downloadArchiveForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["downloadArchiveForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Fetches the URL to download the migration archive as a `tar.gz` file. 
Depending on the resources your repository uses, the migration archive can contain JSON files with data for these objects: + * + * * attachments + * * bases + * * commit\_comments + * * issue\_comments + * * issue\_events + * * issues + * * milestones + * * organizations + * * projects + * * protected\_branches + * * pull\_request\_reviews + * * pull\_requests + * * releases + * * repositories + * * review\_comments + * * schema + * * users + * + * The archive will also contain an `attachments` directory that includes all attachment files uploaded to GitHub.com and a `repositories` directory that contains the repository's Git data. + */ + getArchiveForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["getArchiveForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Each type of source control system represents authors in a different way. For example, a Git commit author has a display name and an email address, but a Subversion commit author just has a username. The GitHub Importer will make the author information valid, but the author might not be correct. For example, it will change the bare Subversion username `hubot` into something like `hubot <hubot@12341234-abab-fefe-8787-fedcba987654>`. + * + * This endpoint and the [Map a commit author](https://docs.github.com/rest/migrations/source-imports#map-a-commit-author) endpoint allow you to provide correct Git author information. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + getCommitAuthors: { + (params?: RestEndpointMethodTypes["migrations"]["getCommitAuthors"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * View the progress of an import. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + * + * **Import status** + * + * This section includes details about the possible values of the `status` field of the Import Progress response. + * + * An import that does not have errors will progress through these steps: + * + * * `detecting` - the "detection" step of the import is in progress because the request did not include a `vcs` parameter. The import is identifying the type of source control present at the URL. + * * `importing` - the "raw" step of the import is in progress. This is where commit data is fetched from the original repository. The import progress response will include `commit_count` (the total number of raw commits that will be imported) and `percent` (0 - 100, the current progress through the import). + * * `mapping` - the "rewrite" step of the import is in progress. This is where SVN branches are converted to Git branches, and where author updates are applied. The import progress response does not include progress information. + * * `pushing` - the "push" step of the import is in progress.
This is where the importer updates the repository on GitHub. The import progress response will include `push_percent`, which is the percent value reported by `git push` when it is "Writing objects". + * * `complete` - the import is complete, and the repository is ready on GitHub. + * + * If there are problems, you will see one of these in the `status` field: + * + * * `auth_failed` - the import requires authentication in order to connect to the original repository. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/migrations/source-imports#update-an-import) section. + * * `error` - the import encountered an error. The import progress response will include the `failed_step` and an error message. Contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api) for more information. + * * `detection_needs_auth` - the importer requires authentication for the originating repository to continue detection. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/migrations/source-imports#update-an-import) section. + * * `detection_found_nothing` - the importer didn't recognize any source control at the URL. To resolve, [Cancel the import](https://docs.github.com/rest/migrations/source-imports#cancel-an-import) and [retry](https://docs.github.com/rest/migrations/source-imports#start-an-import) with the correct URL. + * * `detection_found_multiple` - the importer found several projects or repositories at the provided URL. When this is the case, the Import Progress response will also include a `project_choices` field with the possible project choices as values. To update project choice, please see the [Update an import](https://docs.github.com/rest/migrations/source-imports#update-an-import) section. + * + * **The project_choices field** + * + * When multiple projects are found at the provided URL, the response hash will include a `project_choices` field, the value of which is an array of hashes each representing a project choice. The exact key/value pairs of the project hashes will differ depending on the version control type. + * + * **Git LFS related fields** + * + * This section includes details about Git LFS related fields that may be present in the Import Progress response. + * + * * `use_lfs` - describes whether the import has been opted in or out of using Git LFS. The value can be `opt_in`, `opt_out`, or `undecided` if no action has been taken. + * * `has_large_files` - the boolean value describing whether files larger than 100MB were found during the `importing` step. + * * `large_files_size` - the total size in gigabytes of files larger than 100MB found in the originating repository. + * * `large_files_count` - the total number of files larger than 100MB found in the originating repository. To see a list of these files, make a "Get Large Files" request. + */ + getImportStatus: { + (params?: RestEndpointMethodTypes["migrations"]["getImportStatus"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List files larger than 100MB found during the import + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). 
In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + getLargeFiles: { + (params?: RestEndpointMethodTypes["migrations"]["getLargeFiles"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Fetches a single user migration. The response includes the `state` of the migration, which can be one of the following values: + * + * * `pending` - the migration hasn't started yet. + * * `exporting` - the migration is in progress. + * * `exported` - the migration finished successfully. + * * `failed` - the migration failed. + * + * Once the migration has been `exported` you can [download the migration archive](https://docs.github.com/rest/migrations/users#download-a-user-migration-archive). + */ + getStatusForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["getStatusForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Fetches the status of a migration. + * + * The `state` of a migration can be one of the following values: + * + * * `pending`, which means the migration hasn't started yet. + * * `exporting`, which means the migration is in progress. + * * `exported`, which means the migration finished successfully. + * * `failed`, which means the migration failed. + */ + getStatusForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["getStatusForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all migrations a user has started. + */ + listForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["listForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the most recent migrations, including both exports (which can be started through the REST API) and imports (which cannot be started using the REST API). + * + * A list of `repositories` is only returned for export migrations. + */ + listForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["listForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all the repositories for this user migration. + */ + listReposForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["listReposForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all the repositories for this organization migration. + */ + listReposForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["listReposForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all the repositories for this user migration. + * @deprecated octokit.rest.migrations.listReposForUser() has been renamed to octokit.rest.migrations.listReposForAuthenticatedUser() (2021-10-05) + */ + listReposForUser: { + (params?: RestEndpointMethodTypes["migrations"]["listReposForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Update an author's identity for the import. 
Your application can continue updating authors any time before you push + * new commits to the repository. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + mapCommitAuthor: { + (params?: RestEndpointMethodTypes["migrations"]["mapCommitAuthor"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can import repositories from Subversion, Mercurial, and TFS that include files larger than 100MB. This ability + * is powered by [Git LFS](https://git-lfs.com). + * + * You can learn more about our LFS feature and working with large files [on our help + * site](https://docs.github.com/repositories/working-with-files/managing-large-files). + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + setLfsPreference: { + (params?: RestEndpointMethodTypes["migrations"]["setLfsPreference"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Initiates the generation of a user migration archive. + */ + startForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["startForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Initiates the generation of a migration archive. + */ + startForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["startForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Start a source import to a GitHub repository using GitHub Importer. Importing into a GitHub repository with GitHub Actions enabled is not supported and will return a status `422 Unprocessable Entity` response. + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + startImport: { + (params?: RestEndpointMethodTypes["migrations"]["startImport"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Unlocks a repository. You can lock repositories when you [start a user migration](https://docs.github.com/rest/migrations/users#start-a-user-migration). Once the migration is complete you can unlock each repository to begin using it again or [delete the repository](https://docs.github.com/rest/repos/repos#delete-a-repository) if you no longer need the source data. Returns a status of `404 Not Found` if the repository is not locked. 
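+ * + * @example + * // Hedged sketch (editor's addition): assumes an authenticated Octokit instance; the migration_id and repo_name values are placeholders. + * await octokit.rest.migrations.unlockRepoForAuthenticatedUser({ migration_id: 79, repo_name: "hello-world" });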
+ */ + unlockRepoForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["migrations"]["unlockRepoForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Unlocks a repository that was locked for migration. You should unlock each migrated repository and [delete them](https://docs.github.com/rest/repos/repos#delete-a-repository) when the migration is complete and you no longer need the source data. + */ + unlockRepoForOrg: { + (params?: RestEndpointMethodTypes["migrations"]["unlockRepoForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * An import can be updated with credentials or a project choice by passing in the appropriate parameters in this API + * request. If no parameters are provided, the import will be restarted. + * + * Some servers (e.g. TFS servers) can have several projects at a single URL. In those cases the import progress will + * have the status `detection_found_multiple` and the Import Progress response will include a `project_choices` array. + * You can select the project to import by providing one of the objects in the `project_choices` array in the update request. + * + * **Warning:** Support for importing Mercurial, Subversion and Team Foundation Version Control repositories will end + * on October 17, 2023. For more details, see [changelog](https://gh.io/github-importer-non-git-eol). In the coming weeks, we will update + * these docs to reflect relevant changes to the API and will contact all integrators using the "Source imports" API. + */ + updateImport: { + (params?: RestEndpointMethodTypes["migrations"]["updateImport"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + orgs: { + /** + * Adds a team as a security manager for an organization. For more information, see "[Managing security for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization) for an organization." + * + * To use this endpoint, you must be an administrator for the organization, and you must use an access token with the `write:org` scope. + * + * GitHub Apps must have the `administration` organization read-write permission to use this endpoint. + */ + addSecurityManagerTeam: { + (params?: RestEndpointMethodTypes["orgs"]["addSecurityManagerTeam"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + blockUser: { + (params?: RestEndpointMethodTypes["orgs"]["blockUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Cancel an organization invitation. In order to cancel an organization invitation, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). 
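+ * + * @example + * // Hedged sketch (editor's addition): assumes an Octokit instance authenticated as an organization owner; org and invitation_id are placeholders. + * await octokit.rest.orgs.cancelInvitation({ org: "octo-org", invitation_id: 42 });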
+ */ + cancelInvitation: { + (params?: RestEndpointMethodTypes["orgs"]["cancelInvitation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + checkBlockedUser: { + (params?: RestEndpointMethodTypes["orgs"]["checkBlockedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Check if a user is, publicly or privately, a member of the organization. + */ + checkMembershipForUser: { + (params?: RestEndpointMethodTypes["orgs"]["checkMembershipForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Check if the provided user is a public member of the organization. + */ + checkPublicMembershipForUser: { + (params?: RestEndpointMethodTypes["orgs"]["checkPublicMembershipForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * When an organization member is converted to an outside collaborator, they'll only have access to the repositories that their current team membership allows. The user will no longer be a member of the organization. For more information, see "[Converting an organization member to an outside collaborator](https://docs.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)". Converting an organization member to an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." + */ + convertMemberToOutsideCollaborator: { + (params?: RestEndpointMethodTypes["orgs"]["convertMemberToOutsideCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Invite people to an organization by using their GitHub user ID or their email address. In order to create invitations in an organization, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + createInvitation: { + (params?: RestEndpointMethodTypes["orgs"]["createInvitation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Here's how you can create a hook that posts payloads in JSON format: + */ + createWebhook: { + (params?: RestEndpointMethodTypes["orgs"]["createWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes an organization and all its repositories. + * + * The organization login will be unavailable for 90 days after deletion. 
+ * + * Please review the Terms of Service regarding account deletion before using this endpoint: + * + * https://docs.github.com/site-policy/github-terms/github-terms-of-service + */ + delete: { + (params?: RestEndpointMethodTypes["orgs"]["delete"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteWebhook: { + (params?: RestEndpointMethodTypes["orgs"]["deleteWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables or disables the specified security feature for all eligible repositories in an organization. + * + * To use this endpoint, you must be an organization owner or be member of a team with the security manager role. + * A token with the 'write:org' scope is also required. + * + * GitHub Apps must have the `organization_administration:write` permission to use this endpoint. + * + * For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + */ + enableOrDisableSecurityProductOnAllOrgRepos: { + (params?: RestEndpointMethodTypes["orgs"]["enableOrDisableSecurityProductOnAllOrgRepos"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * To see many of the organization response values, you need to be an authenticated organization owner with the `admin:org` scope. When the value of `two_factor_requirement_enabled` is `true`, the organization requires all members, billing managers, and outside collaborators to enable [two-factor authentication](https://docs.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/). + * + * GitHub Apps with the `Organization plan` permission can use this endpoint to retrieve information about an organization's GitHub plan. See "[Authenticating with GitHub Apps](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/)" for details. For an example response, see 'Response with GitHub plan information' below." + */ + get: { + (params?: RestEndpointMethodTypes["orgs"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * If the authenticated user is an active or pending member of the organization, this endpoint will return the user's membership. If the authenticated user is not affiliated with the organization, a `404` is returned. This endpoint will return a `403` if the request is made by a GitHub App that is blocked by the organization. + */ + getMembershipForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["orgs"]["getMembershipForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * In order to get a user's membership with an organization, the authenticated user must be an organization member. The `state` parameter in the response can be used to identify the user's membership status. + */ + getMembershipForUser: { + (params?: RestEndpointMethodTypes["orgs"]["getMembershipForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a webhook configured in an organization. 
To get only the webhook `config` properties, see "[Get a webhook configuration for an organization](/rest/reference/orgs#get-a-webhook-configuration-for-an-organization)." + */ + getWebhook: { + (params?: RestEndpointMethodTypes["orgs"]["getWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the webhook configuration for an organization. To get more information about the webhook, including the `active` state and `events`, use "[Get an organization webhook ](/rest/reference/orgs#get-an-organization-webhook)." + * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:read` permission. + */ + getWebhookConfigForOrg: { + (params?: RestEndpointMethodTypes["orgs"]["getWebhookConfigForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a delivery for a webhook configured in an organization. + */ + getWebhookDelivery: { + (params?: RestEndpointMethodTypes["orgs"]["getWebhookDelivery"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all organizations, in the order that they were created on GitHub. + * + * **Note:** Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api#using-link-headers) to get the URL for the next page of organizations. + */ + list: { + (params?: RestEndpointMethodTypes["orgs"]["list"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all GitHub Apps in an organization. The installation count includes all GitHub Apps installed on repositories in the organization. You must be an organization owner with `admin:read` scope to use this endpoint. + */ + listAppInstallations: { + (params?: RestEndpointMethodTypes["orgs"]["listAppInstallations"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the users blocked by an organization. + */ + listBlockedUsers: { + (params?: RestEndpointMethodTypes["orgs"]["listBlockedUsers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The return hash contains `failed_at` and `failed_reason` fields which represent the time at which the invitation failed and the reason for the failure. + */ + listFailedInvitations: { + (params?: RestEndpointMethodTypes["orgs"]["listFailedInvitations"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List organizations for the authenticated user. + * + * **OAuth scope requirements** + * + * This only lists organizations that your authorization allows you to operate on in some way (e.g., you can list teams with `read:org` scope, you can publicize your organization membership with `user` scope, etc.). Therefore, this API requires at least `user` or `read:org` scope. OAuth requests with insufficient scope receive a `403 Forbidden` response. 
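+ * + * @example + * // Hedged sketch (editor's addition): assumes an Octokit instance authenticated with at least the `read:org` or `user` scope. + * const { data: orgs } = await octokit.rest.orgs.listForAuthenticatedUser({ per_page: 100 }); + * console.log(orgs.map((org) => org.login));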
+ */ + listForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["orgs"]["listForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List [public organization memberships](https://docs.github.com/articles/publicizing-or-concealing-organization-membership) for the specified user. + * + * This method only lists _public_ memberships, regardless of authentication. If you need to fetch all of the organization memberships (public and private) for the authenticated user, use the [List organizations for the authenticated user](https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user) API instead. + */ + listForUser: { + (params?: RestEndpointMethodTypes["orgs"]["listForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all teams associated with an invitation. In order to see invitations in an organization, the authenticated user must be an organization owner. + */ + listInvitationTeams: { + (params?: RestEndpointMethodTypes["orgs"]["listInvitationTeams"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all users who are members of an organization. If the authenticated user is also a member of this organization then both concealed and public members will be returned. + */ + listMembers: { + (params?: RestEndpointMethodTypes["orgs"]["listMembers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all of the authenticated user's organization memberships. + */ + listMembershipsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["orgs"]["listMembershipsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all users who are outside collaborators of an organization. + */ + listOutsideCollaborators: { + (params?: RestEndpointMethodTypes["orgs"]["listOutsideCollaborators"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the repositories a fine-grained personal access token has access to. Only GitHub Apps can call this API, + * using the `organization_personal_access_tokens: read` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + listPatGrantRepositories: { + (params?: RestEndpointMethodTypes["orgs"]["listPatGrantRepositories"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the repositories a fine-grained personal access token request is requesting access to. Only GitHub Apps can call this API, + * using the `organization_personal_access_token_requests: read` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. 
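+ * + * @example + * // Hedged sketch (editor's addition): callable only when authenticated as a GitHub App with the permission described above; org and pat_request_id are placeholders. + * const { data: repos } = await octokit.rest.orgs.listPatGrantRequestRepositories({ org: "octo-org", pat_request_id: 25381 });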
+ */ + listPatGrantRequestRepositories: { + (params?: RestEndpointMethodTypes["orgs"]["listPatGrantRequestRepositories"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists requests from organization members to access organization resources with a fine-grained personal access token. Only GitHub Apps can call this API, + * using the `organization_personal_access_token_requests: read` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + listPatGrantRequests: { + (params?: RestEndpointMethodTypes["orgs"]["listPatGrantRequests"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists approved fine-grained personal access tokens owned by organization members that can access organization resources. Only GitHub Apps can call this API, + * using the `organization_personal_access_tokens: read` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + listPatGrants: { + (params?: RestEndpointMethodTypes["orgs"]["listPatGrants"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, or `hiring_manager`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + */ + listPendingInvitations: { + (params?: RestEndpointMethodTypes["orgs"]["listPendingInvitations"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Members of an organization can choose to have their membership publicized or not. + */ + listPublicMembers: { + (params?: RestEndpointMethodTypes["orgs"]["listPublicMembers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists teams that are security managers for an organization. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `read:org` scope. + * + * GitHub Apps must have the `administration` organization read permission to use this endpoint. + */ + listSecurityManagerTeams: { + (params?: RestEndpointMethodTypes["orgs"]["listSecurityManagerTeams"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a list of webhook deliveries for a webhook configured in an organization. 
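+ * + * @example + * // Hedged sketch (editor's addition): assumes an Octokit instance authenticated with the `admin:org_hook` scope; org and hook_id are placeholders. + * const { data: deliveries } = await octokit.rest.orgs.listWebhookDeliveries({ org: "octo-org", hook_id: 12345, per_page: 50 });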
+ */ + listWebhookDeliveries: { + (params?: RestEndpointMethodTypes["orgs"]["listWebhookDeliveries"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listWebhooks: { + (params?: RestEndpointMethodTypes["orgs"]["listWebhooks"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. + */ + pingWebhook: { + (params?: RestEndpointMethodTypes["orgs"]["pingWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Redeliver a delivery for a webhook configured in an organization. + */ + redeliverWebhookDelivery: { + (params?: RestEndpointMethodTypes["orgs"]["redeliverWebhookDelivery"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removing a user from this list will remove them from all teams and they will no longer have any access to the organization's repositories. + */ + removeMember: { + (params?: RestEndpointMethodTypes["orgs"]["removeMember"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * In order to remove a user's membership with an organization, the authenticated user must be an organization owner. + * + * If the specified user is an active member of the organization, this will remove them from the organization. If the specified user has been invited to the organization, this will cancel their invitation. The specified user will receive an email notification in both cases. + */ + removeMembershipForUser: { + (params?: RestEndpointMethodTypes["orgs"]["removeMembershipForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removing a user from this list will remove them from all the organization's repositories. + */ + removeOutsideCollaborator: { + (params?: RestEndpointMethodTypes["orgs"]["removeOutsideCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes the public membership for the authenticated user from the specified organization, unless public visibility is enforced by default. + */ + removePublicMembershipForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["orgs"]["removePublicMembershipForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes the security manager role from a team for an organization. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization) team from an organization." + * + * To use this endpoint, you must be an administrator for the organization, and you must use an access token with the `admin:org` scope. + * + * GitHub Apps must have the `administration` organization read-write permission to use this endpoint. 
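+ * + * @example + * // Hedged sketch (editor's addition): assumes an Octokit instance authenticated as an organization administrator with the `admin:org` scope; org and team_slug are placeholders. + * await octokit.rest.orgs.removeSecurityManagerTeam({ org: "octo-org", team_slug: "security-managers" });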
+ */ + removeSecurityManagerTeam: { + (params?: RestEndpointMethodTypes["orgs"]["removeSecurityManagerTeam"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Approves or denies a pending request to access organization resources via a fine-grained personal access token. Only GitHub Apps can call this API, + * using the `organization_personal_access_token_requests: write` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + reviewPatGrantRequest: { + (params?: RestEndpointMethodTypes["orgs"]["reviewPatGrantRequest"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Approves or denies multiple pending requests to access organization resources via a fine-grained personal access token. Only GitHub Apps can call this API, + * using the `organization_personal_access_token_requests: write` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + reviewPatGrantRequestsInBulk: { + (params?: RestEndpointMethodTypes["orgs"]["reviewPatGrantRequestsInBulk"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Only authenticated organization owners can add a member to the organization or update the member's role. + * + * * If the authenticated user is _adding_ a member to the organization, the invited user will receive an email inviting them to the organization. The user's [membership status](https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user) will be `pending` until they accept the invitation. + * + * * Authenticated users can _update_ a user's membership by passing the `role` parameter. If the authenticated user changes a member's role to `admin`, the affected user will receive an email notifying them that they've been made an organization owner. If the authenticated user changes an owner's role to `member`, no email will be sent. + * + * **Rate limits** + * + * To prevent abuse, the authenticated user is limited to 50 organization invitations per 24 hour period. If the organization is more than one month old or on a paid plan, the limit is 500 invitations per 24 hour period. + */ + setMembershipForUser: { + (params?: RestEndpointMethodTypes["orgs"]["setMembershipForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The user can publicize their own membership. (A user cannot publicize the membership for another user.) + * + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." 
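+ * + * @example + * // Hedged sketch (editor's addition): publicizes your own membership; assumes an authenticated Octokit instance, and username must match the authenticated user (placeholder values shown). + * await octokit.rest.orgs.setPublicMembershipForAuthenticatedUser({ org: "octo-org", username: "octocat" });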
+ */ + setPublicMembershipForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["orgs"]["setPublicMembershipForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + unblockUser: { + (params?: RestEndpointMethodTypes["orgs"]["unblockUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Parameter Deprecation Notice:** GitHub will replace and discontinue `members_allowed_repository_creation_type` in favor of more granular permissions. The new input parameters are `members_can_create_public_repositories`, `members_can_create_private_repositories` for all organizations and `members_can_create_internal_repositories` for organizations associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see the [blog post](https://developer.github.com/changes/2019-12-03-internal-visibility-changes). + * + * Enables an authenticated organization owner with the `admin:org` scope or the `repo` scope to update the organization's profile and member privileges. + */ + update: { + (params?: RestEndpointMethodTypes["orgs"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Converts the authenticated user to an active member of the organization, if that user has a pending invitation from the organization. + */ + updateMembershipForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["orgs"]["updateMembershipForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the access an organization member has to organization resources via a fine-grained personal access token. Limited to revoking the token's existing access. Only GitHub Apps can call this API, + * using the `organization_personal_access_tokens: write` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + updatePatAccess: { + (params?: RestEndpointMethodTypes["orgs"]["updatePatAccess"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the access organization members have to organization resources via fine-grained personal access tokens. Limited to revoking a token's existing access. Only GitHub Apps can call this API, + * using the `organization_personal_access_tokens: write` permission. + * + * **Note**: Fine-grained PATs are in public beta. Related APIs, events, and functionality are subject to change. + */ + updatePatAccesses: { + (params?: RestEndpointMethodTypes["orgs"]["updatePatAccesses"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates a webhook configured in an organization. When you update a webhook, the `secret` will be overwritten. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret`; otherwise, the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for an organization](/rest/reference/orgs#update-a-webhook-configuration-for-an-organization)."
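+ *
+ * A minimal sketch (assuming `@octokit/rest`, a suitably scoped token, and placeholder `org`/`hook_id` values):
+ *
+ * ```ts
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Change which events the hook receives; re-send `config.secret` if one was previously set.
+ * await octokit.rest.orgs.updateWebhook({ org: "example-org", hook_id: 123, events: ["push", "pull_request"] });
+ * ```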
+ */ + updateWebhook: { + (params?: RestEndpointMethodTypes["orgs"]["updateWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the webhook configuration for an organization. To update more information about the webhook, including the `active` state and `events`, use "[Update an organization webhook ](/rest/reference/orgs#update-an-organization-webhook)." + * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:write` permission. + */ + updateWebhookConfigForOrg: { + (params?: RestEndpointMethodTypes["orgs"]["updateWebhookConfigForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + packages: { + /** + * Deletes a package owned by the authenticated user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `delete:packages` scopes. + * If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + deletePackageForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["deletePackageForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes an entire package in an organization. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `delete:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package you want to delete. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + deletePackageForOrg: { + (params?: RestEndpointMethodTypes["packages"]["deletePackageForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes an entire package for a user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. 
+ * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `delete:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package you want to delete. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + deletePackageForUser: { + (params?: RestEndpointMethodTypes["packages"]["deletePackageForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a specific package version for a package owned by the authenticated user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `delete:packages` scopes. + * If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + deletePackageVersionForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["deletePackageVersionForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a specific package version in an organization. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `delete:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package whose version you want to delete. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." 
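+ *
+ * A minimal sketch (assuming `@octokit/rest`, the scopes and permissions described above, and placeholder package identifiers):
+ *
+ * ```ts
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Delete one version of a container package owned by a placeholder organization.
+ * await octokit.rest.packages.deletePackageVersionForOrg({
+ *   package_type: "container",
+ *   package_name: "example-image",
+ *   org: "example-org",
+ *   package_version_id: 123456,
+ * });
+ * ```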
+ */ + deletePackageVersionForOrg: { + (params?: RestEndpointMethodTypes["packages"]["deletePackageVersionForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a specific package version for a user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `delete:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package whose version you want to delete. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + deletePackageVersionForUser: { + (params?: RestEndpointMethodTypes["packages"]["deletePackageVersionForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists package versions for a package owned by an organization. + * + * If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * @deprecated octokit.rest.packages.getAllPackageVersionsForAPackageOwnedByAnOrg() has been renamed to octokit.rest.packages.getAllPackageVersionsForPackageOwnedByOrg() (2021-03-24) + */ + getAllPackageVersionsForAPackageOwnedByAnOrg: { + (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForAPackageOwnedByAnOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists package versions for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." 
+ * @deprecated octokit.rest.packages.getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser() has been renamed to octokit.rest.packages.getAllPackageVersionsForPackageOwnedByAuthenticatedUser() (2021-03-24) + */ + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists package versions for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForPackageOwnedByAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists package versions for a package owned by an organization. + * + * If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + getAllPackageVersionsForPackageOwnedByOrg: { + (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForPackageOwnedByOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists package versions for a public package owned by a specified user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + getAllPackageVersionsForPackageOwnedByUser: { + (params?: RestEndpointMethodTypes["packages"]["getAllPackageVersionsForPackageOwnedByUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific package for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. 
For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + getPackageForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["getPackageForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific package in an organization. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + getPackageForOrganization: { + (params?: RestEndpointMethodTypes["packages"]["getPackageForOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific package metadata for a public package owned by a user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + getPackageForUser: { + (params?: RestEndpointMethodTypes["packages"]["getPackageForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific package version for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + getPackageVersionForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["getPackageVersionForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific package version in an organization. + * + * You must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. 
For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + getPackageVersionForOrganization: { + (params?: RestEndpointMethodTypes["packages"]["getPackageVersionForOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a specific package version for a public package owned by a specified user. + * + * At this time, to use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + getPackageVersionForUser: { + (params?: RestEndpointMethodTypes["packages"]["getPackageVersionForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all packages that are owned by the authenticated user within the user's namespace, and that encountered a conflict during a Docker migration. + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. + */ + listDockerMigrationConflictingPackagesForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["listDockerMigrationConflictingPackagesForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all packages that are in a specific organization, are readable by the requesting user, and that encountered a conflict during a Docker migration. + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. + */ + listDockerMigrationConflictingPackagesForOrganization: { + (params?: RestEndpointMethodTypes["packages"]["listDockerMigrationConflictingPackagesForOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all packages that are in a specific user's namespace, that the requesting user has access to, and that encountered a conflict during Docker migration. + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. + */ + listDockerMigrationConflictingPackagesForUser: { + (params?: RestEndpointMethodTypes["packages"]["listDockerMigrationConflictingPackagesForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists packages owned by the authenticated user within the user's namespace. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. 
For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + listPackagesForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["listPackagesForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists packages in an organization readable by the user. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + listPackagesForOrganization: { + (params?: RestEndpointMethodTypes["packages"]["listPackagesForOrganization"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all packages in a user's namespace for which the requesting user has access. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` scope. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + listPackagesForUser: { + (params?: RestEndpointMethodTypes["packages"]["listPackagesForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Restores a package owned by the authenticated user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `write:packages` scopes. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." 
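+ *
+ * A minimal sketch (assuming `@octokit/rest` and a placeholder npm package name):
+ *
+ * ```ts
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Restore a recently deleted package owned by the authenticated user.
+ * await octokit.rest.packages.restorePackageForAuthenticatedUser({ package_type: "npm", package_name: "example-package" });
+ * ```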
+ */ + restorePackageForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["restorePackageForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Restores an entire package in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `write:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package you want to restore. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + restorePackageForOrg: { + (params?: RestEndpointMethodTypes["packages"]["restorePackageForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Restores an entire package for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `write:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package you want to restore. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." 
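+ *
+ * A minimal sketch (assuming `@octokit/rest` and placeholder `package_name`/`username` values):
+ *
+ * ```ts
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Restore a recently deleted package in another user's namespace.
+ * await octokit.rest.packages.restorePackageForUser({ package_type: "npm", package_name: "example-package", username: "octocat" });
+ * ```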
+ */ + restorePackageForUser: { + (params?: RestEndpointMethodTypes["packages"]["restorePackageForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Restores a package version owned by the authenticated user. + * + * You can restore a deleted package version under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `write:packages` scopes. If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of GitHub Packages registries that only support repository-scoped permissions, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + */ + restorePackageVersionForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["packages"]["restorePackageVersionForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Restores a specific package version in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `read:packages` and `write:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package whose version you want to restore. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + restorePackageVersionForOrg: { + (params?: RestEndpointMethodTypes["packages"]["restorePackageVersionForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Restores a specific package version for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. 
+ * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `read:packages` and `write:packages` scopes. In addition: + * - If the `package_type` belongs to a GitHub Packages registry that only supports repository-scoped permissions, your token must also include the `repo` scope. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#permissions-for-repository-scoped-packages)." + * - If the `package_type` belongs to a GitHub Packages registry that supports granular permissions, you must have admin permissions to the package whose version you want to restore. For the list of these registries, see "[About permissions for GitHub Packages](https://docs.github.com/packages/learn-github-packages/about-permissions-for-github-packages#granular-permissions-for-userorganization-scoped-packages)." + */ + restorePackageVersionForUser: { + (params?: RestEndpointMethodTypes["packages"]["restorePackageVersionForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + projects: { + /** + * Adds a collaborator to an organization project and sets their permission level. You must be an organization owner or a project `admin` to add a collaborator. + */ + addCollaborator: { + (params?: RestEndpointMethodTypes["projects"]["addCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + createCard: { + (params?: RestEndpointMethodTypes["projects"]["createCard"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new project column. + */ + createColumn: { + (params?: RestEndpointMethodTypes["projects"]["createColumn"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a user project board. Returns a `410 Gone` status if the user does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + createForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["projects"]["createForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates an organization project board. Returns a `410 Gone` status if projects are disabled in the organization or if the organization does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + createForOrg: { + (params?: RestEndpointMethodTypes["projects"]["createForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a repository project board. Returns a `410 Gone` status if projects are disabled in the repository or if the repository does not have existing classic projects. 
If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + createForRepo: { + (params?: RestEndpointMethodTypes["projects"]["createForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a project board. Returns a `404 Not Found` status if projects are disabled. + */ + delete: { + (params?: RestEndpointMethodTypes["projects"]["delete"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a project card + */ + deleteCard: { + (params?: RestEndpointMethodTypes["projects"]["deleteCard"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a project column. + */ + deleteColumn: { + (params?: RestEndpointMethodTypes["projects"]["deleteColumn"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a project by its `id`. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + get: { + (params?: RestEndpointMethodTypes["projects"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets information about a project card. + */ + getCard: { + (params?: RestEndpointMethodTypes["projects"]["getCard"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets information about a project column. + */ + getColumn: { + (params?: RestEndpointMethodTypes["projects"]["getColumn"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the collaborator's permission level for an organization project. Possible values for the `permission` key: `admin`, `write`, `read`, `none`. You must be an organization owner or a project `admin` to review a user's permission level. + */ + getPermissionForUser: { + (params?: RestEndpointMethodTypes["projects"]["getPermissionForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the project cards in a project. + */ + listCards: { + (params?: RestEndpointMethodTypes["projects"]["listCards"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the collaborators for an organization project. For a project, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. You must be an organization owner or a project `admin` to list collaborators. + */ + listCollaborators: { + (params?: RestEndpointMethodTypes["projects"]["listCollaborators"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the project columns in a project. 
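+ *
+ * A minimal sketch (assuming `@octokit/rest` and a placeholder `project_id`):
+ *
+ * ```ts
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * const { data: columns } = await octokit.rest.projects.listColumns({ project_id: 123456 });
+ * console.log(columns.map((column) => column.name));
+ * ```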
+ */ + listColumns: { + (params?: RestEndpointMethodTypes["projects"]["listColumns"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the projects in an organization. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + listForOrg: { + (params?: RestEndpointMethodTypes["projects"]["listForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the projects in a repository. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + listForRepo: { + (params?: RestEndpointMethodTypes["projects"]["listForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists projects for a user. + */ + listForUser: { + (params?: RestEndpointMethodTypes["projects"]["listForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + moveCard: { + (params?: RestEndpointMethodTypes["projects"]["moveCard"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + moveColumn: { + (params?: RestEndpointMethodTypes["projects"]["moveColumn"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a collaborator from an organization project. You must be an organization owner or a project `admin` to remove a collaborator. + */ + removeCollaborator: { + (params?: RestEndpointMethodTypes["projects"]["removeCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates a project board's information. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. + */ + update: { + (params?: RestEndpointMethodTypes["projects"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateCard: { + (params?: RestEndpointMethodTypes["projects"]["updateCard"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateColumn: { + (params?: RestEndpointMethodTypes["projects"]["updateColumn"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + pulls: { + /** + * Checks if a pull request has been merged into the base branch. The HTTP status of the response indicates whether or not the pull request has been merged; the response body is empty. 
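+ *
+ * A minimal sketch (assuming `@octokit/rest` and placeholder identifiers); because the "not merged" case is a `404`, Octokit surfaces it as a thrown error:
+ *
+ * ```ts
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * try {
+ *   await octokit.rest.pulls.checkIfMerged({ owner: "example-owner", repo: "example-repo", pull_number: 1 });
+ *   console.log("merged"); // resolved with a 204 status
+ * } catch (error) {
+ *   console.log("not merged (404) or not accessible");
+ * }
+ * ```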
+ */ + checkIfMerged: { + (params?: RestEndpointMethodTypes["pulls"]["checkIfMerged"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. + */ + create: { + (params?: RestEndpointMethodTypes["pulls"]["create"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a reply to a review comment for a pull request. For the `comment_id`, provide the ID of the review comment you are replying to. This must be the ID of a _top-level review comment_, not a reply to that comment. Replies to replies are not supported. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + createReplyForReviewComment: { + (params?: RestEndpointMethodTypes["pulls"]["createReplyForReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Pull request reviews created in the `PENDING` state are not submitted and therefore do not include the `submitted_at` property in the response. To create a pending review for a pull request, leave the `event` parameter blank. For more information about submitting a `PENDING` review, see "[Submit a review for a pull request](https://docs.github.com/rest/pulls#submit-a-review-for-a-pull-request)." 
+ * + * **Note:** To comment on a specific line in a file, you need to first determine the _position_ of that line in the diff. The GitHub REST API offers the `application/vnd.github.v3.diff` [media type](https://docs.github.com/rest/overview/media-types#commits-commit-comparison-and-pull-requests). To see a pull request diff, add this media type to the `Accept` header of a call to the [single pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) endpoint. + * + * The `position` value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + */ + createReview: { + (params?: RestEndpointMethodTypes["pulls"]["createReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a review comment in the pull request diff. To add a regular comment to a pull request timeline, see "[Create an issue comment](https://docs.github.com/rest/reference/issues#create-an-issue-comment)." We recommend creating a review comment using `line`, `side`, and optionally `start_line` and `start_side` if your comment applies to more than one line in the pull request diff. + * + * The `position` parameter is deprecated. If you use `position`, the `line`, `side`, `start_line`, and `start_side` parameters are not required. + * + * **Note:** The position value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + createReviewComment: { + (params?: RestEndpointMethodTypes["pulls"]["createReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a pull request review that has not been submitted. Submitted reviews cannot be deleted. + */ + deletePendingReview: { + (params?: RestEndpointMethodTypes["pulls"]["deletePendingReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a review comment. + */ + deleteReviewComment: { + (params?: RestEndpointMethodTypes["pulls"]["deleteReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** To dismiss a pull request review on a [protected branch](https://docs.github.com/rest/reference/repos#branches), you must be a repository administrator or be included in the list of people or teams who can dismiss pull request reviews. 
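+ *
+ * A minimal sketch (assuming `@octokit/rest` and placeholder identifiers; a dismissal `message` is required):
+ *
+ * ```ts
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * await octokit.rest.pulls.dismissReview({
+ *   owner: "example-owner",
+ *   repo: "example-repo",
+ *   pull_number: 1,
+ *   review_id: 42,
+ *   message: "Dismissing: the requested changes have been addressed.",
+ * });
+ * ```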
+ */ + dismissReview: { + (params?: RestEndpointMethodTypes["pulls"]["dismissReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists details of a pull request by providing its number. + * + * When you get, [create](https://docs.github.com/rest/reference/pulls/#create-a-pull-request), or [edit](https://docs.github.com/rest/reference/pulls#update-a-pull-request) a pull request, GitHub creates a merge commit to test whether the pull request can be automatically merged into the base branch. This test commit is not added to the base branch or the head branch. You can review the status of the test commit using the `mergeable` key. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * The value of the `mergeable` attribute can be `true`, `false`, or `null`. If the value is `null`, then GitHub has started a background job to compute the mergeability. After giving the job time to complete, resubmit the request. When the job finishes, you will see a non-`null` value for the `mergeable` attribute in the response. If `mergeable` is `true`, then `merge_commit_sha` will be the SHA of the _test_ merge commit. + * + * The value of the `merge_commit_sha` attribute changes depending on the state of the pull request. Before merging a pull request, the `merge_commit_sha` attribute holds the SHA of the _test_ merge commit. After merging a pull request, the `merge_commit_sha` attribute changes depending on how you merged the pull request: + * + * * If merged as a [merge commit](https://docs.github.com/articles/about-merge-methods-on-github/), `merge_commit_sha` represents the SHA of the merge commit. + * * If merged via a [squash](https://docs.github.com/articles/about-merge-methods-on-github/#squashing-your-merge-commits), `merge_commit_sha` represents the SHA of the squashed commit on the base branch. + * * If [rebased](https://docs.github.com/articles/about-merge-methods-on-github/#rebasing-and-merging-your-commits), `merge_commit_sha` represents the commit that the base branch was updated to. + * + * Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + */ + get: { + (params?: RestEndpointMethodTypes["pulls"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Retrieves a pull request review by its ID. + */ + getReview: { + (params?: RestEndpointMethodTypes["pulls"]["getReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Provides details for a review comment. 
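+ *
+ * A minimal sketch (assuming `@octokit/rest` and a placeholder `comment_id`):
+ *
+ * ```ts
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * const { data: comment } = await octokit.rest.pulls.getReviewComment({ owner: "example-owner", repo: "example-repo", comment_id: 42 });
+ * console.log(comment.body);
+ * ```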
+ */ + getReviewComment: { + (params?: RestEndpointMethodTypes["pulls"]["getReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + list: { + (params?: RestEndpointMethodTypes["pulls"]["list"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List comments for a specific pull request review. + */ + listCommentsForReview: { + (params?: RestEndpointMethodTypes["pulls"]["listCommentsForReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists a maximum of 250 commits for a pull request. To receive a complete commit list for pull requests with more than 250 commits, use the [List commits](https://docs.github.com/rest/reference/repos#list-commits) endpoint. + */ + listCommits: { + (params?: RestEndpointMethodTypes["pulls"]["listCommits"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** Responses include a maximum of 3000 files. The paginated response returns 30 files per page by default. + */ + listFiles: { + (params?: RestEndpointMethodTypes["pulls"]["listFiles"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the users or teams whose review is requested for a pull request. Once a requested reviewer submits a review, they are no longer considered a requested reviewer. Their review will instead be returned by the [List reviews for a pull request](https://docs.github.com/rest/pulls/reviews#list-reviews-for-a-pull-request) operation. + */ + listRequestedReviewers: { + (params?: RestEndpointMethodTypes["pulls"]["listRequestedReviewers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all review comments for a pull request. By default, review comments are in ascending order by ID. + */ + listReviewComments: { + (params?: RestEndpointMethodTypes["pulls"]["listReviewComments"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists review comments for all pull requests in a repository. By default, review comments are in ascending order by ID. + */ + listReviewCommentsForRepo: { + (params?: RestEndpointMethodTypes["pulls"]["listReviewCommentsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The list of reviews returns in chronological order. + */ + listReviews: { + (params?: RestEndpointMethodTypes["pulls"]["listReviews"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Merges a pull request into the base branch. 
+ * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + merge: { + (params?: RestEndpointMethodTypes["pulls"]["merge"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes review requests from a pull request for a given set of users and/or teams. + */ + removeRequestedReviewers: { + (params?: RestEndpointMethodTypes["pulls"]["removeRequestedReviewers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + requestReviewers: { + (params?: RestEndpointMethodTypes["pulls"]["requestReviewers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Submits a pending review for a pull request. For more information about creating a pending review for a pull request, see "[Create a review for a pull request](https://docs.github.com/rest/pulls#create-a-review-for-a-pull-request)." + */ + submitReview: { + (params?: RestEndpointMethodTypes["pulls"]["submitReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + */ + update: { + (params?: RestEndpointMethodTypes["pulls"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the pull request branch with the latest upstream changes by merging HEAD from the base branch into the pull request branch. + */ + updateBranch: { + (params?: RestEndpointMethodTypes["pulls"]["updateBranch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Update the review summary comment with new text. 
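+ *
+ * A small illustrative sketch (the `octokit` client and all identifiers below are placeholder assumptions, not values from this package):
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * async function editReviewSummary() {
+ *   await octokit.rest.pulls.updateReview({
+ *     owner: "octocat",     // placeholder
+ *     repo: "hello-world",  // placeholder
+ *     pull_number: 1,       // placeholder
+ *     review_id: 42,        // placeholder review ID
+ *     body: "Updated review summary text.",
+ *   });
+ * }
+ *
+ * editReviewSummary().catch(console.error);
+ * ```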
+ */ + updateReview: { + (params?: RestEndpointMethodTypes["pulls"]["updateReview"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables you to edit a review comment. + */ + updateReviewComment: { + (params?: RestEndpointMethodTypes["pulls"]["updateReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + rateLimit: { + /** + * **Note:** Accessing this endpoint does not count against your REST API rate limit. + * + * **Note:** The `rate` object is deprecated. If you're writing new API client code or updating existing code, you should use the `core` object instead of the `rate` object. The `core` object contains the same information that is present in the `rate` object. + */ + get: { + (params?: RestEndpointMethodTypes["rateLimit"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + reactions: { + /** + * Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with an HTTP `200` status means that you already added the reaction type to this commit comment. + */ + createForCommitComment: { + (params?: RestEndpointMethodTypes["reactions"]["createForCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with an HTTP `200` status means that you already added the reaction type to this issue. + */ + createForIssue: { + (params?: RestEndpointMethodTypes["reactions"]["createForIssue"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). A response with an HTTP `200` status means that you already added the reaction type to this issue comment. + */ + createForIssueComment: { + (params?: RestEndpointMethodTypes["reactions"]["createForIssueComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). A response with an HTTP `200` status means that you already added the reaction type to this pull request review comment. + */ + createForPullRequestReviewComment: { + (params?: RestEndpointMethodTypes["reactions"]["createForPullRequestReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). A response with a `Status: 200 OK` means that you already added the reaction type to this release. + */ + createForRelease: { + (params?: RestEndpointMethodTypes["reactions"]["createForRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). 
OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. + */ + createForTeamDiscussionCommentInOrg: { + (params?: RestEndpointMethodTypes["reactions"]["createForTeamDiscussionCommentInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + createForTeamDiscussionInOrg: { + (params?: RestEndpointMethodTypes["reactions"]["createForTeamDiscussionInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). + */ + deleteForCommitComment: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/:issue_number/reactions/:reaction_id`. + * + * Delete a reaction to an [issue](https://docs.github.com/rest/reference/issues/). + */ + deleteForIssue: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForIssue"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). + */ + deleteForIssueComment: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForIssueComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/pulls/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments).
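+ *
+ * An illustrative sketch, assuming an authenticated `Octokit` client and placeholder IDs:
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * async function removeReaction() {
+ *   await octokit.rest.reactions.deleteForPullRequestComment({
+ *     owner: "octocat",     // placeholder
+ *     repo: "hello-world",  // placeholder
+ *     comment_id: 1,        // placeholder review comment ID
+ *     reaction_id: 2,       // placeholder reaction ID
+ *   });
+ * }
+ *
+ * removeReaction().catch(console.error);
+ * ```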
+ */ + deleteForPullRequestComment: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForPullRequestComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/releases/:release_id/reactions/:reaction_id`. + * + * Delete a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). + */ + deleteForRelease: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + deleteForTeamDiscussion: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForTeamDiscussion"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + deleteForTeamDiscussionComment: { + (params?: RestEndpointMethodTypes["reactions"]["deleteForTeamDiscussionComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to a [commit comment](https://docs.github.com/rest/reference/repos#comments). + */ + listForCommitComment: { + (params?: RestEndpointMethodTypes["reactions"]["listForCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to an [issue](https://docs.github.com/rest/reference/issues). + */ + listForIssue: { + (params?: RestEndpointMethodTypes["reactions"]["listForIssue"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to an [issue comment](https://docs.github.com/rest/reference/issues#comments). + */ + listForIssueComment: { + (params?: RestEndpointMethodTypes["reactions"]["listForIssueComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments).
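+ *
+ * A usage sketch for this method (illustrative only; client and identifiers are placeholders):
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * async function listReactions() {
+ *   const { data: reactions } = await octokit.rest.reactions.listForPullRequestReviewComment({
+ *     owner: "octocat",     // placeholder
+ *     repo: "hello-world",  // placeholder
+ *     comment_id: 1,        // placeholder review comment ID
+ *     content: "heart",     // optional filter by reaction type
+ *   });
+ *   console.log(reactions.map((r) => r.user?.login));
+ * }
+ *
+ * listReactions().catch(console.error);
+ * ```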
+ */ + listForPullRequestReviewComment: { + (params?: RestEndpointMethodTypes["reactions"]["listForPullRequestReviewComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to a [release](https://docs.github.com/rest/reference/repos#releases). + */ + listForRelease: { + (params?: RestEndpointMethodTypes["reactions"]["listForRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments/). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. + */ + listForTeamDiscussionCommentInOrg: { + (params?: RestEndpointMethodTypes["reactions"]["listForTeamDiscussionCommentInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + listForTeamDiscussionInOrg: { + (params?: RestEndpointMethodTypes["reactions"]["listForTeamDiscussionInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + repos: { + /** + * @deprecated octokit.rest.repos.acceptInvitation() has been renamed to octokit.rest.repos.acceptInvitationForAuthenticatedUser() (2021-10-05) + */ + acceptInvitation: { + (params?: RestEndpointMethodTypes["repos"]["acceptInvitation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + acceptInvitationForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["repos"]["acceptInvitationForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified apps push access for this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. 
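+ *
+ * A possible call shape (illustrative only; the app slug, branch, and repository values are placeholder assumptions):
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * async function allowAppPushes() {
+ *   const { data: apps } = await octokit.rest.repos.addAppAccessRestrictions({
+ *     owner: "octocat",         // placeholder
+ *     repo: "hello-world",      // placeholder
+ *     branch: "main",           // placeholder protected branch
+ *     apps: ["my-github-app"],  // placeholder app slug(s)
+ *   });
+ *   console.log(apps.map((app) => app.slug));
+ * }
+ *
+ * allowAppPushes().catch(console.error);
+ * ```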
+ */ + addAppAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["addAppAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Adding an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." + * + * For more information on permission levels, see "[Repository permission levels for an organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". There are restrictions on which permissions can be granted to organization members when an organization base role is in place. In this case, the permission being given must be equal to or higher than the org base permission. Otherwise, the request will fail with: + * + * ``` + * Cannot assign {member} permission of {role name} + * ``` + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * The invitee will receive a notification that they have been invited to the repository, which they must accept or decline. They may do this via the notifications page, the email they receive, or by using the [repository invitations API endpoints](https://docs.github.com/rest/reference/repos#invitations). + * + * **Updating an existing collaborator's permission level** + * + * The endpoint can also be used to change the permissions of an existing collaborator without first removing and re-adding the collaborator. To change the permissions, use the same endpoint and pass a different `permission` parameter. The response will be a `204`, with no other indication that the permission level changed. + * + * **Rate limits** + * + * You are limited to sending 50 invitations to a repository per 24 hour period. Note there is no limit if you are inviting organization members to an organization repository. + */ + addCollaborator: { + (params?: RestEndpointMethodTypes["repos"]["addCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + addStatusCheckContexts: { + (params?: RestEndpointMethodTypes["repos"]["addStatusCheckContexts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified teams push access for this branch. You can also give push access to child teams. + */ + addTeamAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["addTeamAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified people push access for this branch. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + addUserAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["addUserAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + checkCollaborator: { + (params?: RestEndpointMethodTypes["repos"]["checkCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Shows whether dependency alerts are enabled or disabled for a repository. The authenticated user must have admin read access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/articles/about-security-alerts-for-vulnerable-dependencies)". 
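+ *
+ * An illustrative way to consume this endpoint (the `octokit` client and repository values are placeholders; a successful response indicates alerts are enabled, while a `404` error typically indicates they are disabled):
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * async function alertsEnabled(): Promise<boolean> {
+ *   try {
+ *     // Resolves (204 No Content) when dependency alerts are enabled.
+ *     await octokit.rest.repos.checkVulnerabilityAlerts({
+ *       owner: "octocat",     // placeholder
+ *       repo: "hello-world",  // placeholder
+ *     });
+ *     return true;
+ *   } catch (error: any) {
+ *     if (error.status === 404) return false; // alerts appear to be disabled
+ *     throw error;
+ *   }
+ * }
+ *
+ * alertsEnabled().then(console.log).catch(console.error);
+ * ```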
+ */ + checkVulnerabilityAlerts: { + (params?: RestEndpointMethodTypes["repos"]["checkVulnerabilityAlerts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List any syntax errors that are detected in the CODEOWNERS + * file. + * + * For more information about the correct CODEOWNERS syntax, + * see "[About code owners](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners)." + */ + codeownersErrors: { + (params?: RestEndpointMethodTypes["repos"]["codeownersErrors"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Deprecated**: Use `repos.compareCommitsWithBasehead()` (`GET /repos/{owner}/{repo}/compare/{basehead}`) instead. Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `<USERNAME>:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature.
| + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + compareCommits: { + (params?: RestEndpointMethodTypes["repos"]["compareCommits"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Compares two commits against one another. You can compare branches in the same repository, or you can compare branches that exist in different repositories within the same repository network, including fork branches. For more information about how to view a repository's network, see "[Understanding connections between repositories](https://docs.github.com/repositories/viewing-activity-and-data-for-your-repository/understanding-connections-between-repositories)." + * + * This endpoint is equivalent to running the `git log BASE..HEAD` command, but it returns commits in a different order. The `git log BASE..HEAD` command returns commits in reverse chronological order, whereas the API returns commits in chronological order. You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The API response includes details about the files that were changed between the two commits. This includes the status of the change (if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * When calling this endpoint without any paging parameter (`per_page` or `page`), the returned list is limited to 250 commits, and the last commit in the list is the most recent of the entire comparison. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, use a query parameter (`per_page` or `page`) to paginate the results. When using pagination: + * + * - The list of changed files is only shown on the first page of results, but it includes all changed files for the entire comparison. + * - The results are returned in chronological order, but the last commit in the returned list may not be the most recent one in the entire set if there are more pages of results.
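+ *
+ * A minimal usage sketch (illustrative only; the `basehead` range and repository values are placeholders):
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * async function compare() {
+ *   const { data } = await octokit.rest.repos.compareCommitsWithBasehead({
+ *     owner: "octocat",            // placeholder
+ *     repo: "hello-world",         // placeholder
+ *     basehead: "main...feature",  // placeholder BASE...HEAD range
+ *     per_page: 50,
+ *   });
+ *   console.log(data.status, data.ahead_by, data.total_commits);
+ *   console.log(data.files?.map((file) => `${file.status} ${file.filename}`));
+ * }
+ *
+ * compare().catch(console.error);
+ * ```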
+ * + * For more information on working with pagination, see "[Using pagination in the REST API](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api)." + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The `verification` object includes the following fields: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + compareCommitsWithBasehead: { + (params?: RestEndpointMethodTypes["repos"]["compareCommitsWithBasehead"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with admin access to the repository can create an autolink. + */ + createAutolink: { + (params?: RestEndpointMethodTypes["repos"]["createAutolink"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a comment for a commit using its `:commit_sha`. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. 
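+ *
+ * An illustrative sketch (repository, commit SHA, and comment body below are placeholders):
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * async function commentOnCommit() {
+ *   const { data: comment } = await octokit.rest.repos.createCommitComment({
+ *     owner: "octocat",                                         // placeholder
+ *     repo: "hello-world",                                      // placeholder
+ *     commit_sha: "0123456789abcdef0123456789abcdef01234567",   // placeholder SHA
+ *     body: "Great stuff!",
+ *   });
+ *   console.log(comment.html_url);
+ * }
+ *
+ * commentOnCommit().catch(console.error);
+ * ```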
+ */ + createCommitComment: { + (params?: RestEndpointMethodTypes["repos"]["createCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to require signed commits on a branch. You must enable branch protection to require signed commits. + */ + createCommitSignatureProtection: { + (params?: RestEndpointMethodTypes["repos"]["createCommitSignatureProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access in a repository can create commit statuses for a given SHA. + * + * Note: there is a limit of 1000 statuses per `sha` and `context` within a repository. Attempts to create more than 1000 statuses will result in a validation error. + */ + createCommitStatus: { + (params?: RestEndpointMethodTypes["repos"]["createCommitStatus"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can create a read-only deploy key. + */ + createDeployKey: { + (params?: RestEndpointMethodTypes["repos"]["createDeployKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deployments offer a few configurable parameters with certain defaults. + * + * The `ref` parameter can be any named branch, tag, or SHA. At GitHub we often deploy branches and verify them + * before we merge a pull request. + * + * The `environment` parameter allows deployments to be issued to different runtime environments. Teams often have + * multiple environments for verifying their applications, such as `production`, `staging`, and `qa`. This parameter + * makes it easier to track which environments have requested deployments. The default environment is `production`. + * + * The `auto_merge` parameter is used to ensure that the requested ref is not behind the repository's default branch. If + * the ref _is_ behind the default branch for the repository, we will attempt to merge it for you. If the merge succeeds, + * the API will return a successful merge commit. If merge conflicts prevent the merge from succeeding, the API will + * return a failure response. + * + * By default, [commit statuses](https://docs.github.com/rest/commits/statuses) for every submitted context must be in a `success` + * state. The `required_contexts` parameter allows you to specify a subset of contexts that must be `success`, or to + * specify contexts that have not yet been submitted. You are not required to use commit statuses to deploy. If you do + * not require any contexts or create any commit statuses, the deployment will always succeed. + * + * The `payload` parameter is available for any extra information that a deployment system might need. It is a JSON text + * field that will be passed on when a deployment event is dispatched. 
+ * + * The `task` parameter is used by the deployment system to allow different execution paths. In the web world this might + * be `deploy:migrations` to run schema changes on the system. In the compiled world this could be a flag to compile an + * application with debugging enabled. + * + * Users with `repo` or `repo_deployment` scopes can create a deployment for a given ref. + * + * #### Merged branch response + * You will see this response when GitHub automatically merges the base branch into the topic branch instead of creating + * a deployment. This auto-merge happens when: + * * Auto-merge option is enabled in the repository + * * Topic branch does not include the latest changes on the base branch, which is `master` in the response example + * * There are no merge conflicts + * + * If there are no new commits in the base branch, a new request to create a deployment should give a successful + * response. + * + * #### Merge conflict response + * This error happens when the `auto_merge` option is enabled and when the default branch (in this case `master`), can't + * be merged into the branch that's being deployed (in this case `topic-branch`), due to merge conflicts. + * + * #### Failed commit status checks + * This error happens when the `required_contexts` parameter indicates that one or more contexts need to have a `success` + * status for the commit to be deployed, but one or more of the required contexts do not have a state of `success`. + */ + createDeployment: { + (params?: RestEndpointMethodTypes["repos"]["createDeployment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + createDeploymentBranchPolicy: { + (params?: RestEndpointMethodTypes["repos"]["createDeploymentBranchPolicy"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enable a custom deployment protection rule for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. Enabling a custom protection rule requires admin or owner permissions to the repository. GitHub Apps must have the `actions:write` permission to use this endpoint. + * + * For more information about the app that is providing this custom deployment rule, see the [documentation for the `GET /apps/{app_slug}` endpoint](https://docs.github.com/rest/apps/apps#get-an-app). + */ + createDeploymentProtectionRule: { + (params?: RestEndpointMethodTypes["repos"]["createDeploymentProtectionRule"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with `push` access can create deployment statuses for a given deployment. + * + * GitHub Apps require `read & write` access to "Deployments" and `read-only` access to "Repo contents" (for private repos). OAuth Apps require the `repo_deployment` scope. 
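+ *
+ * A sketch of a create-deployment-then-report-status flow (illustrative only; the environment name, repository values, and the decision to skip required contexts are assumptions for the example):
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * async function deploy() {
+ *   const created = await octokit.rest.repos.createDeployment({
+ *     owner: "octocat",        // placeholder
+ *     repo: "hello-world",     // placeholder
+ *     ref: "main",             // placeholder branch, tag, or SHA
+ *     environment: "staging",  // placeholder environment
+ *     auto_merge: false,
+ *     required_contexts: [],   // skip commit status checks for this sketch
+ *   });
+ *   // A 202 "merged branch" response carries a message instead of a deployment `id`.
+ *   if (!("id" in created.data)) {
+ *     console.log(created.data.message);
+ *     return;
+ *   }
+ *   await octokit.rest.repos.createDeploymentStatus({
+ *     owner: "octocat",
+ *     repo: "hello-world",
+ *     deployment_id: created.data.id,
+ *     state: "success",
+ *     description: "Deployed by the example sketch",
+ *   });
+ * }
+ *
+ * deploy().catch(console.error);
+ * ```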
+ */ + createDeploymentStatus: { + (params?: RestEndpointMethodTypes["repos"]["createDeploymentStatus"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can use this endpoint to trigger a webhook event called `repository_dispatch` when you want activity that happens outside of GitHub to trigger a GitHub Actions workflow or GitHub App webhook. You must configure your GitHub Actions workflow or GitHub App to run when the `repository_dispatch` event occurs. For an example `repository_dispatch` webhook payload, see "[RepositoryDispatchEvent](https://docs.github.com/webhooks/event-payloads/#repository_dispatch)." + * + * The `client_payload` parameter is available for any extra information that your workflow might need. This parameter is a JSON payload that will be passed on when the webhook event is dispatched. For example, the `client_payload` can include a message that a user would like to send using a GitHub Actions workflow. Or the `client_payload` can be used as a test to debug your workflow. + * + * This endpoint requires write access to the repository by providing either: + * + * - Personal access tokens with `repo` scope. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)" in the GitHub Help documentation. + * - GitHub Apps with both `metadata:read` and `contents:read&write` permissions. + * + * This input example shows how you can use the `client_payload` as a test to debug your workflow. + */ + createDispatchEvent: { + (params?: RestEndpointMethodTypes["repos"]["createDispatchEvent"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new repository for the authenticated user. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository. + */ + createForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["repos"]["createForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a fork for the authenticated user. + * + * **Note**: Forking a Repository happens asynchronously. You may have to wait a short period of time before you can access the git objects. If this takes longer than 5 minutes, be sure to contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + * + * **Note**: Although this endpoint works with GitHub Apps, the GitHub App must be installed on the destination account with access to all repositories and on the source account with access to the source repository. + */ + createFork: { + (params?: RestEndpointMethodTypes["repos"]["createFork"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new repository in the specified organization. The authenticated user must be a member of the organization. 
+ * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository + */ + createInOrg: { + (params?: RestEndpointMethodTypes["repos"]["createInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create or update an environment with protection rules, such as required reviewers. For more information about environment protection rules, see "[Environments](/actions/reference/environments#environment-protection-rules)." + * + * **Note:** To create or update name patterns that branches must match in order to deploy to this environment, see "[Deployment branch policies](/rest/deployments/branch-policies)." + * + * **Note:** To create or update secrets for an environment, see "[Secrets](/rest/reference/actions#secrets)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + createOrUpdateEnvironment: { + (params?: RestEndpointMethodTypes["repos"]["createOrUpdateEnvironment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new file or replaces an existing file in a repository. You must authenticate using an access token with the `workflow` scope to use this endpoint. + * + * **Note:** If you use this endpoint and the "[Delete a file](https://docs.github.com/rest/reference/repos/#delete-file)" endpoint in parallel, the concurrent requests will conflict and you will receive errors. You must use these endpoints serially instead. + */ + createOrUpdateFileContents: { + (params?: RestEndpointMethodTypes["repos"]["createOrUpdateFileContents"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a repository ruleset for an organization. + */ + createOrgRuleset: { + (params?: RestEndpointMethodTypes["repos"]["createOrgRuleset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a GitHub Pages deployment for a repository. + * + * Users must have write permissions. GitHub Apps must have the `pages:write` permission to use this endpoint. + */ + createPagesDeployment: { + (params?: RestEndpointMethodTypes["repos"]["createPagesDeployment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Configures a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)." + * + * To use this endpoint, you must be a repository administrator, maintainer, or have the 'manage GitHub Pages settings' permission. A token with the `repo` scope or Pages write permission is required. GitHub Apps must have the `administration:write` and `pages:write` permissions. 
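+ *
+ * As an illustrative sketch for the `createOrUpdateFileContents` method documented earlier in this block (not part of the generated description), note that `content` must be Base64-encoded; repository and path values are placeholders:
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * async function writeFile() {
+ *   await octokit.rest.repos.createOrUpdateFileContents({
+ *     owner: "octocat",       // placeholder
+ *     repo: "hello-world",    // placeholder
+ *     path: "docs/hello.md",  // placeholder path
+ *     message: "docs: add hello page",
+ *     content: Buffer.from("# Hello\n").toString("base64"), // file contents, Base64-encoded
+ *     // sha: "<existing blob sha>",  // needed only when replacing an existing file
+ *   });
+ * }
+ *
+ * writeFile().catch(console.error);
+ * ```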
+ */ + createPagesSite: { + (params?: RestEndpointMethodTypes["repos"]["createPagesSite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access to the repository can create a release. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + createRelease: { + (params?: RestEndpointMethodTypes["repos"]["createRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Create a ruleset for a repository. + */ + createRepoRuleset: { + (params?: RestEndpointMethodTypes["repos"]["createRepoRuleset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This creates a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + createTagProtection: { + (params?: RestEndpointMethodTypes["repos"]["createTagProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new repository using a repository template. Use the `template_owner` and `template_repo` route parameters to specify the repository to use as the template. If the repository is not public, the authenticated user must own or be a member of an organization that owns the repository. To check if a repository is available to use as a template, get the repository's information using the [Get a repository](https://docs.github.com/rest/reference/repos#get-a-repository) endpoint and check that the `is_template` key is `true`. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository + */ + createUsingTemplate: { + (params?: RestEndpointMethodTypes["repos"]["createUsingTemplate"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Repositories can have multiple webhooks installed. Each webhook should have a unique `config`. Multiple webhooks can + * share the same `config` as long as those webhooks do not have any `events` that overlap. 
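+ *
+ * A minimal sketch of registering such a webhook (illustrative only; the receiver URL and event list are placeholder assumptions):
+ *
+ * ```typescript
+ * import { Octokit } from "@octokit/rest";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * async function addWebhook() {
+ *   const { data: hook } = await octokit.rest.repos.createWebhook({
+ *     owner: "octocat",     // placeholder
+ *     repo: "hello-world",  // placeholder
+ *     config: {
+ *       url: "https://example.com/github-webhook",  // placeholder receiver URL
+ *       content_type: "json",
+ *     },
+ *     events: ["push", "pull_request"],
+ *   });
+ *   console.log(hook.id, hook.events);
+ * }
+ *
+ * addWebhook().catch(console.error);
+ * ```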
+ */ + createWebhook: { + (params?: RestEndpointMethodTypes["repos"]["createWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * @deprecated octokit.rest.repos.declineInvitation() has been renamed to octokit.rest.repos.declineInvitationForAuthenticatedUser() (2021-10-05) + */ + declineInvitation: { + (params?: RestEndpointMethodTypes["repos"]["declineInvitation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + declineInvitationForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["repos"]["declineInvitationForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required. + * + * If an organization owner has configured the organization to prevent members from deleting organization-owned + * repositories, you will get a `403 Forbidden` response. + */ + delete: { + (params?: RestEndpointMethodTypes["repos"]["delete"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Disables the ability to restrict who can push to this branch. + */ + deleteAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["deleteAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removing admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + deleteAdminBranchProtection: { + (params?: RestEndpointMethodTypes["repos"]["deleteAdminBranchProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You must authenticate using an access token with the repo scope to use this endpoint. + */ + deleteAnEnvironment: { + (params?: RestEndpointMethodTypes["repos"]["deleteAnEnvironment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This deletes a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks is only available to repository administrators.
+ */ + deleteAutolink: { + (params?: RestEndpointMethodTypes["repos"]["deleteAutolink"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + deleteBranchProtection: { + (params?: RestEndpointMethodTypes["repos"]["deleteBranchProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteCommitComment: { + (params?: RestEndpointMethodTypes["repos"]["deleteCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to disable required signed commits on a branch. You must enable branch protection to require signed commits. + */ + deleteCommitSignatureProtection: { + (params?: RestEndpointMethodTypes["repos"]["deleteCommitSignatureProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deploy keys are immutable. If you need to update a key, remove the key and create a new one instead. + */ + deleteDeployKey: { + (params?: RestEndpointMethodTypes["repos"]["deleteDeployKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * If the repository only has one deployment, you can delete the deployment regardless of its status. If the repository has more than one deployment, you can only delete inactive deployments. This ensures that repositories with multiple deployments will always have an active deployment. Anyone with `repo` or `repo_deployment` scopes can delete a deployment. + * + * To set a deployment as inactive, you must: + * + * * Create a new deployment that is active so that the system has a record of the current state, then delete the previously active deployment. + * * Mark the active deployment as inactive by adding any non-successful deployment status. + * + * For more information, see "[Create a deployment](https://docs.github.com/rest/deployments/deployments/#create-a-deployment)" and "[Create a deployment status](https://docs.github.com/rest/deployments/deployment-statuses#create-a-deployment-status)." + */ + deleteDeployment: { + (params?: RestEndpointMethodTypes["repos"]["deleteDeployment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. 
GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + deleteDeploymentBranchPolicy: { + (params?: RestEndpointMethodTypes["repos"]["deleteDeploymentBranchPolicy"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a file in a repository. + * + * You can provide an additional `committer` parameter, which is an object containing information about the committer. Or, you can provide an `author` parameter, which is an object containing information about the author. + * + * The `author` section is optional and is filled in with the `committer` information if omitted. If the `committer` information is omitted, the authenticated user's information is used. + * + * You must provide values for both `name` and `email`, whether you choose to use `author` or `committer`. Otherwise, you'll receive a `422` status code. + * + * **Note:** If you use this endpoint and the "[Create or update file contents](https://docs.github.com/rest/reference/repos/#create-or-update-file-contents)" endpoint in parallel, the concurrent requests will conflict and you will receive errors. You must use these endpoints serially instead. + */ + deleteFile: { + (params?: RestEndpointMethodTypes["repos"]["deleteFile"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteInvitation: { + (params?: RestEndpointMethodTypes["repos"]["deleteInvitation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Delete a ruleset for an organization. + */ + deleteOrgRuleset: { + (params?: RestEndpointMethodTypes["repos"]["deleteOrgRuleset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages). + * + * To use this endpoint, you must be a repository administrator, maintainer, or have the 'manage GitHub Pages settings' permission. A token with the `repo` scope or Pages write permission is required. GitHub Apps must have the `administration:write` and `pages:write` permissions. + */ + deletePagesSite: { + (params?: RestEndpointMethodTypes["repos"]["deletePagesSite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + deletePullRequestReviewProtection: { + (params?: RestEndpointMethodTypes["repos"]["deletePullRequestReviewProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access to the repository can delete a release. 
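// A rough sketch of the `deleteFile` call described above, passing an explicit `committer`
// with both `name` and `email` (omitting either yields a 422 per the doc comment). It assumes
// the umbrella "octokit" package; all values are placeholders, and the blob `sha` would
// normally come from a prior `getContent` call.
import { Octokit } from "octokit";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function removeObsoleteDoc(): Promise<void> {
  await octokit.rest.repos.deleteFile({
    owner: "octo-org",
    repo: "example-repo",
    path: "docs/obsolete.md",
    message: "Remove obsolete doc",
    sha: "0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c", // current blob SHA of the file being deleted
    committer: { name: "Release Bot", email: "bot@example.com" },
  });
}

removeObsoleteDoc().catch(console.error);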
+ */ + deleteRelease: { + (params?: RestEndpointMethodTypes["repos"]["deleteRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteReleaseAsset: { + (params?: RestEndpointMethodTypes["repos"]["deleteReleaseAsset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Delete a ruleset for a repository. + */ + deleteRepoRuleset: { + (params?: RestEndpointMethodTypes["repos"]["deleteRepoRuleset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This deletes a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + deleteTagProtection: { + (params?: RestEndpointMethodTypes["repos"]["deleteTagProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + deleteWebhook: { + (params?: RestEndpointMethodTypes["repos"]["deleteWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Disables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/articles/configuring-automated-security-fixes)". + */ + disableAutomatedSecurityFixes: { + (params?: RestEndpointMethodTypes["repos"]["disableAutomatedSecurityFixes"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Disables a custom deployment protection rule for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. Removing a custom protection rule requires admin or owner permissions to the repository. GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Get an app](https://docs.github.com/rest/apps/apps#get-an-app)". + */ + disableDeploymentProtectionRule: { + (params?: RestEndpointMethodTypes["repos"]["disableDeploymentProtectionRule"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Disables Git LFS for a repository. Access tokens must have the `admin:enterprise` scope. + */ + disableLfsForRepo: { + (params?: RestEndpointMethodTypes["repos"]["disableLfsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Disables dependency alerts and the dependency graph for a repository. + * The authenticated user must have admin access to the repository. For more information, + * see "[About security alerts for vulnerable dependencies](https://docs.github.com/articles/about-security-alerts-for-vulnerable-dependencies)". + */ + disableVulnerabilityAlerts: { + (params?: RestEndpointMethodTypes["repos"]["disableVulnerabilityAlerts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `main`) will be used. 
Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * + * **Note**: For private repositories, these links are temporary and expire after five minutes. If the repository is empty, you will receive a 404 when you follow the redirect. + * @deprecated octokit.rest.repos.downloadArchive() has been renamed to octokit.rest.repos.downloadZipballArchive() (2020-09-17) + */ + downloadArchive: { + (params?: RestEndpointMethodTypes["repos"]["downloadArchive"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a redirect URL to download a tar archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `main`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * **Note**: For private repositories, these links are temporary and expire after five minutes. + */ + downloadTarballArchive: { + (params?: RestEndpointMethodTypes["repos"]["downloadTarballArchive"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `main`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * + * **Note**: For private repositories, these links are temporary and expire after five minutes. If the repository is empty, you will receive a 404 when you follow the redirect. + */ + downloadZipballArchive: { + (params?: RestEndpointMethodTypes["repos"]["downloadZipballArchive"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/articles/configuring-automated-security-fixes)". + */ + enableAutomatedSecurityFixes: { + (params?: RestEndpointMethodTypes["repos"]["enableAutomatedSecurityFixes"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables Git LFS for a repository. Access tokens must have the `admin:enterprise` scope. + */ + enableLfsForRepo: { + (params?: RestEndpointMethodTypes["repos"]["enableLfsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Enables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/articles/about-security-alerts-for-vulnerable-dependencies)". + */ + enableVulnerabilityAlerts: { + (params?: RestEndpointMethodTypes["repos"]["enableVulnerabilityAlerts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Generate a name and body describing a [release](https://docs.github.com/rest/reference/repos#releases). 
The body content will be markdown formatted and contain information like the changes since last release and users who contributed. The generated release notes are not saved anywhere. They are intended to be generated and used when creating a new release. + */ + generateReleaseNotes: { + (params?: RestEndpointMethodTypes["repos"]["generateReleaseNotes"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The `parent` and `source` objects are present when the repository is a fork. `parent` is the repository this repository was forked from, `source` is the ultimate source for the network. + * + * **Note:** In order to see the `security_and_analysis` block for a repository you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + */ + get: { + (params?: RestEndpointMethodTypes["repos"]["get"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists who has access to this protected branch. + * + * **Note**: Users, apps, and teams `restrictions` are only available for organization-owned repositories. + */ + getAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["getAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + getAdminBranchProtection: { + (params?: RestEndpointMethodTypes["repos"]["getAdminBranchProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets all custom deployment protection rules that are enabled for an environment. Anyone with read access to the repository can use this endpoint. If the repository is private and you want to use a personal access token (classic), you must use an access token with the `repo` scope. GitHub Apps and fine-grained personal access tokens must have the `actions:read` permission to use this endpoint. For more information about environments, see "[Using environments for deployment](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment)." + * + * For more information about the app that is providing this custom deployment rule, see the [documentation for the `GET /apps/{app_slug}` endpoint](https://docs.github.com/rest/apps/apps#get-an-app). 
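// A sketch of the flow implied by the `generateReleaseNotes` doc above: because the generated
// notes are not saved anywhere, they are typically fed straight into a release creation call
// (here `createRelease`, another method of the same `repos` namespace). It assumes the
// umbrella "octokit" package; owner, repo, and tag values are placeholders.
import { Octokit } from "octokit";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function publishWithGeneratedNotes(): Promise<void> {
  const { data: notes } = await octokit.rest.repos.generateReleaseNotes({
    owner: "octo-org",
    repo: "example-repo",
    tag_name: "v1.2.0",
  });
  await octokit.rest.repos.createRelease({
    owner: "octo-org",
    repo: "example-repo",
    tag_name: "v1.2.0",
    name: notes.name, // generated title
    body: notes.body, // generated markdown changelog
  });
}

publishWithGeneratedNotes().catch(console.error);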
+ */ + getAllDeploymentProtectionRules: { + (params?: RestEndpointMethodTypes["repos"]["getAllDeploymentProtectionRules"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the environments for a repository. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + getAllEnvironments: { + (params?: RestEndpointMethodTypes["repos"]["getAllEnvironments"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + getAllStatusCheckContexts: { + (params?: RestEndpointMethodTypes["repos"]["getAllStatusCheckContexts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getAllTopics: { + (params?: RestEndpointMethodTypes["repos"]["getAllTopics"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the GitHub Apps that have push access to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + getAppsWithAccessToProtectedBranch: { + (params?: RestEndpointMethodTypes["repos"]["getAppsWithAccessToProtectedBranch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This returns a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + getAutolink: { + (params?: RestEndpointMethodTypes["repos"]["getAutolink"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getBranch: { + (params?: RestEndpointMethodTypes["repos"]["getBranch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
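// A minimal sketch of reading protection settings for a protected branch via the
// `getBranchProtection` method documented above, assuming the umbrella "octokit" package;
// owner, repo, and branch values are placeholders.
import { Octokit } from "octokit";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function showBranchProtection(): Promise<void> {
  const { data: protection } = await octokit.rest.repos.getBranchProtection({
    owner: "octo-org",
    repo: "example-repo",
    branch: "main",
  });
  console.log("Required status checks:", protection.required_status_checks?.contexts);
  console.log("Enforce admins:", protection.enforce_admins?.enabled);
}

showBranchProtection().catch(console.error);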
+ */ + getBranchProtection: { + (params?: RestEndpointMethodTypes["repos"]["getBranchProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns all rules that apply to the specified branch. + */ + getBranchRules: { + (params?: RestEndpointMethodTypes["repos"]["getBranchRules"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get the total number of clones and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. + */ + getClones: { + (params?: RestEndpointMethodTypes["repos"]["getClones"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a weekly aggregate of the number of additions and deletions pushed to a repository. + */ + getCodeFrequencyStats: { + (params?: RestEndpointMethodTypes["repos"]["getCodeFrequencyStats"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Checks the repository permission of a collaborator. The possible repository permissions are `admin`, `write`, `read`, and `none`. + */ + getCollaboratorPermissionLevel: { + (params?: RestEndpointMethodTypes["repos"]["getCollaboratorPermissionLevel"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with pull access in a repository can access a combined view of commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. + * + * + * Additionally, a combined `state` is returned. The `state` is one of: + * + * * **failure** if any of the contexts report as `error` or `failure` + * * **pending** if there are no statuses or a context is `pending` + * * **success** if the latest status for all contexts is `success` + */ + getCombinedStatusForRef: { + (params?: RestEndpointMethodTypes["repos"]["getCombinedStatusForRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the contents of a single commit reference. You must have `read` access for the repository to use this endpoint. + * + * **Note:** If there are more than 300 files in the commit diff, the response will include pagination link headers for the remaining files, up to a limit of 3000 files. Each page contains the static commit information, and the only changes are to the file listing. + * + * You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch `diff` and `patch` formats. Diffs with binary data will have no `patch` property. + * + * To return only the SHA-1 hash of the commit reference, you can provide the `sha` custom [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) in the `Accept` header. You can use this endpoint to check if a remote reference's SHA-1 hash is the same as your local reference's SHA-1 hash by providing the local SHA-1 reference as the ETag. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. 
The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + getCommit: { + (params?: RestEndpointMethodTypes["repos"]["getCommit"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the last year of commit activity grouped by week. The `days` array is a group of commits per day, starting on `Sunday`. + */ + getCommitActivityStats: { + (params?: RestEndpointMethodTypes["repos"]["getCommitActivityStats"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getCommitComment: { + (params?: RestEndpointMethodTypes["repos"]["getCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to check whether a branch requires signed commits. An enabled status of `true` indicates you must sign commits on this branch. For more information, see [Signing commits with GPG](https://docs.github.com/articles/signing-commits-with-gpg) in GitHub Help. 
+ * + * **Note**: You must enable branch protection to require signed commits. + */ + getCommitSignatureProtection: { + (params?: RestEndpointMethodTypes["repos"]["getCommitSignatureProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns all community profile metrics for a repository. The repository cannot be a fork. + * + * The returned metrics include an overall health score, the repository description, the presence of documentation, the + * detected code of conduct, the detected license, and the presence of ISSUE\_TEMPLATE, PULL\_REQUEST\_TEMPLATE, + * README, and CONTRIBUTING files. + * + * The `health_percentage` score is defined as a percentage of how many of + * these four documents are present: README, CONTRIBUTING, LICENSE, and + * CODE_OF_CONDUCT. For example, if all four documents are present, then + * the `health_percentage` is `100`. If only one is present, then the + * `health_percentage` is `25`. + * + * `content_reports_enabled` is only returned for organization-owned repositories. + */ + getCommunityProfileMetrics: { + (params?: RestEndpointMethodTypes["repos"]["getCommunityProfileMetrics"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. If you omit + * `:path`, you will receive the contents of the repository's root directory. See the description below regarding what the API response includes for directories. + * + * Files and symlinks support [a custom media type](https://docs.github.com/rest/reference/repos#custom-media-types) for + * retrieving the raw content or rendered HTML (when supported). All content types support [a custom media + * type](https://docs.github.com/rest/reference/repos#custom-media-types) to ensure the content is returned in a consistent + * object format. + * + * **Notes**: + * * To get a repository's contents recursively, you can [recursively get the tree](https://docs.github.com/rest/reference/git#trees). + * * This API has an upper limit of 1,000 files for a directory. If you need to retrieve more files, use the [Git Trees + * API](https://docs.github.com/rest/reference/git#get-a-tree). + * * Download URLs expire and are meant to be used just once. To ensure the download URL does not expire, please use the contents API to obtain a fresh download URL for each download. + * #### Size limits + * If the requested file's size is: + * * 1 MB or smaller: All features of this endpoint are supported. + * * Between 1-100 MB: Only the `raw` or `object` [custom media types](https://docs.github.com/rest/repos/contents#custom-media-types-for-repository-contents) are supported. Both will work as normal, except that when using the `object` media type, the `content` field will be an empty string and the `encoding` field will be `"none"`. To get the contents of these larger files, use the `raw` media type. + * * Greater than 100 MB: This endpoint is not supported. + * + * #### If the content is a directory + * The response will be an array of objects, one object for each item in the directory. + * When listing the contents of a directory, submodules have their "type" specified as "file". Logically, the value + * _should_ be "submodule". This behavior exists in API v3 [for backwards compatibility purposes](https://git.io/v1YCW). 
+ * In the next major version of the API, the type will be returned as "submodule". + * + * #### If the content is a symlink + * If the requested `:path` points to a symlink, and the symlink's target is a normal file in the repository, then the + * API responds with the content of the file (in the format shown in the example. Otherwise, the API responds with an object + * describing the symlink itself. + * + * #### If the content is a submodule + * The `submodule_git_url` identifies the location of the submodule repository, and the `sha` identifies a specific + * commit within the submodule repository. Git uses the given URL when cloning the submodule repository, and checks out + * the submodule at that specific commit. + * + * If the submodule repository is not hosted on github.com, the Git URLs (`git_url` and `_links["git"]`) and the + * github.com URLs (`html_url` and `_links["html"]`) will have null values. + */ + getContent: { + (params?: RestEndpointMethodTypes["repos"]["getContent"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the `total` number of commits authored by the contributor. In addition, the response includes a Weekly Hash (`weeks` array) with the following information: + * + * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). + * * `a` - Number of additions + * * `d` - Number of deletions + * * `c` - Number of commits + */ + getContributorsStats: { + (params?: RestEndpointMethodTypes["repos"]["getContributorsStats"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets an enabled custom deployment protection rule for an environment. Anyone with read access to the repository can use this endpoint. If the repository is private and you want to use a personal access token (classic), you must use an access token with the `repo` scope. GitHub Apps and fine-grained personal access tokens must have the `actions:read` permission to use this endpoint. For more information about environments, see "[Using environments for deployment](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment)." + * + * For more information about the app that is providing this custom deployment rule, see [`GET /apps/{app_slug}`](https://docs.github.com/rest/apps/apps#get-an-app). + */ + getCustomDeploymentProtectionRule: { + (params?: RestEndpointMethodTypes["repos"]["getCustomDeploymentProtectionRule"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getDeployKey: { + (params?: RestEndpointMethodTypes["repos"]["getDeployKey"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + getDeployment: { + (params?: RestEndpointMethodTypes["repos"]["getDeployment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a deployment branch policy for an environment. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
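// A sketch of the directory-versus-file handling described in the `getContent` doc above:
// directories come back as an array, files as an object whose `content` is base64 encoded
// (and empty, with encoding "none", for 1-100 MB files, where the `raw` media type is the
// suggested alternative). Assumes the umbrella "octokit" package; values are placeholders.
import { Octokit } from "octokit";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function readFileOrListDir(path: string): Promise<void> {
  const { data } = await octokit.rest.repos.getContent({
    owner: "octo-org",
    repo: "example-repo",
    path,
  });
  if (Array.isArray(data)) {
    // Directory listing: one entry per item (note that submodules report type "file", see above).
    data.forEach((entry) => console.log(entry.type, entry.path));
  } else if (data.type === "file" && data.content) {
    console.log(Buffer.from(data.content, "base64").toString("utf8"));
  }
}

readFileOrListDir("README.md").catch(console.error);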
+ */ + getDeploymentBranchPolicy: { + (params?: RestEndpointMethodTypes["repos"]["getDeploymentBranchPolicy"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with pull access can view a deployment status for a deployment: + */ + getDeploymentStatus: { + (params?: RestEndpointMethodTypes["repos"]["getDeploymentStatus"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** To get information about name patterns that branches must match in order to deploy to this environment, see "[Get a deployment branch policy](/rest/deployments/branch-policies#get-a-deployment-branch-policy)." + * + * Anyone with read access to the repository can use this endpoint. If the + * repository is private, you must use an access token with the `repo` scope. GitHub + * Apps must have the `actions:read` permission to use this endpoint. + */ + getEnvironment: { + (params?: RestEndpointMethodTypes["repos"]["getEnvironment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets information about the single most recent build of a GitHub Pages site. + * + * A token with the `repo` scope is required. GitHub Apps must have the `pages:read` permission. + */ + getLatestPagesBuild: { + (params?: RestEndpointMethodTypes["repos"]["getLatestPagesBuild"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * View the latest published full release for the repository. + * + * The latest release is the most recent non-prerelease, non-draft release, sorted by the `created_at` attribute. The `created_at` attribute is the date of the commit used for the release, and not the date when the release was drafted or published. + */ + getLatestRelease: { + (params?: RestEndpointMethodTypes["repos"]["getLatestRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get a repository ruleset for an organization. + */ + getOrgRuleset: { + (params?: RestEndpointMethodTypes["repos"]["getOrgRuleset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get all the repository rulesets for an organization. + */ + getOrgRulesets: { + (params?: RestEndpointMethodTypes["repos"]["getOrgRulesets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets information about a GitHub Pages site. + * + * A token with the `repo` scope is required. GitHub Apps must have the `pages:read` permission. + */ + getPages: { + (params?: RestEndpointMethodTypes["repos"]["getPages"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets information about a GitHub Pages build. + * + * A token with the `repo` scope is required. GitHub Apps must have the `pages:read` permission. + */ + getPagesBuild: { + (params?: RestEndpointMethodTypes["repos"]["getPagesBuild"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a health check of the DNS settings for the `CNAME` record configured for a repository's GitHub Pages. 
+ * + * The first request to this endpoint returns a `202 Accepted` status and starts an asynchronous background task to get the results for the domain. After the background task completes, subsequent requests to this endpoint return a `200 OK` status with the health check results in the response. + * + * To use this endpoint, you must be a repository administrator, maintainer, or have the 'manage GitHub Pages settings' permission. A token with the `repo` scope or Pages write permission is required. GitHub Apps must have the `administrative:write` and `pages:write` permissions. + */ + getPagesHealthCheck: { + (params?: RestEndpointMethodTypes["repos"]["getPagesHealthCheck"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the total commit counts for the `owner` and total commit counts in `all`. `all` is everyone combined, including the `owner` in the last 52 weeks. If you'd like to get the commit counts for non-owners, you can subtract `owner` from `all`. + * + * The array order is oldest week (index 0) to most recent week. + * + * The most recent week is seven days ago at UTC midnight to today at UTC midnight. + */ + getParticipationStats: { + (params?: RestEndpointMethodTypes["repos"]["getParticipationStats"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + getPullRequestReviewProtection: { + (params?: RestEndpointMethodTypes["repos"]["getPullRequestReviewProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Each array contains the day number, hour number, and number of commits: + * + * * `0-6`: Sunday - Saturday + * * `0-23`: Hour of day + * * Number of commits + * + * For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. + */ + getPunchCardStats: { + (params?: RestEndpointMethodTypes["repos"]["getPunchCardStats"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the preferred README for a repository. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + getReadme: { + (params?: RestEndpointMethodTypes["repos"]["getReadme"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets the README from a repository directory. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. 
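// A small sketch applying the `getParticipationStats` doc above: subtracting `owner` from
// `all` gives non-owner commit counts, one entry per week for the last 52 weeks, oldest week
// first. Assumes the umbrella "octokit" package; owner/repo values are placeholders.
import { Octokit } from "octokit";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function nonOwnerCommitsPerWeek(): Promise<number[]> {
  const { data } = await octokit.rest.repos.getParticipationStats({
    owner: "octo-org",
    repo: "example-repo",
  });
  return data.all.map((total, i) => total - data.owner[i]);
}

nonOwnerCommitsPerWeek().then(console.log).catch(console.error);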
+ */ + getReadmeInDirectory: { + (params?: RestEndpointMethodTypes["repos"]["getReadmeInDirectory"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). + */ + getRelease: { + (params?: RestEndpointMethodTypes["repos"]["getRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response. + */ + getReleaseAsset: { + (params?: RestEndpointMethodTypes["repos"]["getReleaseAsset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get a published release with the specified tag. + */ + getReleaseByTag: { + (params?: RestEndpointMethodTypes["repos"]["getReleaseByTag"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get a ruleset for a repository. + */ + getRepoRuleset: { + (params?: RestEndpointMethodTypes["repos"]["getRepoRuleset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get all the rulesets for a repository. + */ + getRepoRulesets: { + (params?: RestEndpointMethodTypes["repos"]["getRepoRulesets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + getStatusChecksProtection: { + (params?: RestEndpointMethodTypes["repos"]["getStatusChecksProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the teams who have push access to this branch. The list includes child teams. + */ + getTeamsWithAccessToProtectedBranch: { + (params?: RestEndpointMethodTypes["repos"]["getTeamsWithAccessToProtectedBranch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get the top 10 popular contents over the last 14 days. 
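// A tentative sketch of downloading a release asset's binary content per the
// `getReleaseAsset` doc above, overriding the Accept header to `application/octet-stream`
// and leaving redirect handling to the underlying HTTP layer. Treating the response data as
// an ArrayBuffer reflects typical behaviour but is an assumption, as are the placeholder IDs;
// assumes the umbrella "octokit" package.
import { writeFile } from "node:fs/promises";
import { Octokit } from "octokit";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function downloadAsset(asset_id: number): Promise<void> {
  const response = await octokit.rest.repos.getReleaseAsset({
    owner: "octo-org",
    repo: "example-repo",
    asset_id,
    headers: { accept: "application/octet-stream" },
  });
  await writeFile("asset.bin", Buffer.from(response.data as unknown as ArrayBuffer));
}

downloadAsset(1).catch(console.error);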
+ */ + getTopPaths: { + (params?: RestEndpointMethodTypes["repos"]["getTopPaths"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get the top 10 referrers over the last 14 days. + */ + getTopReferrers: { + (params?: RestEndpointMethodTypes["repos"]["getTopReferrers"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the people who have push access to this branch. + */ + getUsersWithAccessToProtectedBranch: { + (params?: RestEndpointMethodTypes["repos"]["getUsersWithAccessToProtectedBranch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get the total number of views and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. + */ + getViews: { + (params?: RestEndpointMethodTypes["repos"]["getViews"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a webhook configured in a repository. To get only the webhook `config` properties, see "[Get a webhook configuration for a repository](/rest/reference/repos#get-a-webhook-configuration-for-a-repository)." + */ + getWebhook: { + (params?: RestEndpointMethodTypes["repos"]["getWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns the webhook configuration for a repository. To get more information about the webhook, including the `active` state and `events`, use "[Get a repository webhook](/rest/reference/orgs#get-a-repository-webhook)." + * + * Access tokens must have the `read:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:read` permission. + */ + getWebhookConfigForRepo: { + (params?: RestEndpointMethodTypes["repos"]["getWebhookConfigForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a delivery for a webhook configured in a repository. + */ + getWebhookDelivery: { + (params?: RestEndpointMethodTypes["repos"]["getWebhookDelivery"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This returns a list of autolinks configured for the given repository. + * + * Information about autolinks are only available to repository administrators. 
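// A sketch combining the traffic endpoints documented above (14-day window, per-day breakdown
// via `per: "day"`), assuming the umbrella "octokit" package; owner/repo values are placeholders.
import { Octokit } from "octokit";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function trafficSummary(): Promise<void> {
  const [{ data: views }, { data: referrers }] = await Promise.all([
    octokit.rest.repos.getViews({ owner: "octo-org", repo: "example-repo", per: "day" }),
    octokit.rest.repos.getTopReferrers({ owner: "octo-org", repo: "example-repo" }),
  ]);
  console.log(`${views.count} views / ${views.uniques} uniques over the last 14 days`);
  referrers.forEach((r) => console.log(`${r.referrer}: ${r.count} views`));
}

trafficSummary().catch(console.error);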
+ */ + listAutolinks: { + (params?: RestEndpointMethodTypes["repos"]["listAutolinks"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listBranches: { + (params?: RestEndpointMethodTypes["repos"]["listBranches"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Returns all branches where the given commit SHA is the HEAD, or latest commit for the branch. + */ + listBranchesForHeadCommit: { + (params?: RestEndpointMethodTypes["repos"]["listBranchesForHeadCommit"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * Organization members with write, maintain, or admin privileges on the organization-owned repository can use this endpoint. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + listCollaborators: { + (params?: RestEndpointMethodTypes["repos"]["listCollaborators"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Use the `:commit_sha` to specify the commit that will have its comments listed. + */ + listCommentsForCommit: { + (params?: RestEndpointMethodTypes["repos"]["listCommentsForCommit"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Commit Comments use [these custom media types](https://docs.github.com/rest/reference/repos#custom-media-types). You can read more about the use of media types in the API [here](https://docs.github.com/rest/overview/media-types/). + * + * Comments are ordered by ascending ID. + */ + listCommitCommentsForRepo: { + (params?: RestEndpointMethodTypes["repos"]["listCommitCommentsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with pull access in a repository can view commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. Statuses are returned in reverse chronological order. The first status in the list will be the latest one. + * + * This resource is also available via a legacy route: `GET /repos/:owner/:repo/statuses/:ref`. 
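// A minimal sketch relying on the ordering guarantee in the `listCommitStatusesForRef` doc
// above (reverse chronological, so the first element is the latest status). Assumes the
// umbrella "octokit" package; ref and owner/repo values are placeholders.
import { Octokit } from "octokit";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function latestStatus(ref: string): Promise<void> {
  const { data: statuses } = await octokit.rest.repos.listCommitStatusesForRef({
    owner: "octo-org",
    repo: "example-repo",
    ref, // SHA, branch name, or tag name
  });
  console.log(statuses[0]?.state ?? "no statuses yet", statuses[0]?.context);
}

latestStatus("main").catch(console.error);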
+ */ + listCommitStatusesForRef: { + (params?: RestEndpointMethodTypes["repos"]["listCommitStatusesForRef"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + listCommits: { + (params?: RestEndpointMethodTypes["repos"]["listCommits"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists contributors to the specified repository and sorts them by the number of commits per contributor in descending order. This endpoint may return information that is a few hours old because the GitHub REST API caches contributor data to improve performance. + * + * GitHub identifies contributors by author email address. This endpoint groups contribution counts by GitHub user, which includes all associated email addresses. To improve performance, only the first 500 author email addresses in the repository link to GitHub users. The rest will appear as anonymous contributors without associated GitHub user information. + */ + listContributors: { + (params?: RestEndpointMethodTypes["repos"]["listContributors"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets all custom deployment protection rule integrations that are available for an environment. 
Anyone with read access to the repository can use this endpoint. If the repository is private and you want to use a personal access token (classic), you must use an access token with the `repo` scope. GitHub Apps and fine-grained personal access tokens must have the `actions:read` permission to use this endpoint. + * + * For more information about environments, see "[Using environments for deployment](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment)." + * + * For more information about the app that is providing this custom deployment rule, see "[GET an app](https://docs.github.com/rest/apps/apps#get-an-app)". + */ + listCustomDeploymentRuleIntegrations: { + (params?: RestEndpointMethodTypes["repos"]["listCustomDeploymentRuleIntegrations"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listDeployKeys: { + (params?: RestEndpointMethodTypes["repos"]["listDeployKeys"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the deployment branch policies for an environment. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + listDeploymentBranchPolicies: { + (params?: RestEndpointMethodTypes["repos"]["listDeploymentBranchPolicies"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with pull access can view deployment statuses for a deployment: + */ + listDeploymentStatuses: { + (params?: RestEndpointMethodTypes["repos"]["listDeploymentStatuses"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Simple filtering of deployments is available via query parameters: + */ + listDeployments: { + (params?: RestEndpointMethodTypes["repos"]["listDeployments"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + */ + listForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["repos"]["listForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists repositories for the specified organization. + * + * **Note:** In order to see the `security_and_analysis` block for a repository you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." 
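// One way to walk every page of `listForOrg` (documented above) using the paginate helper
// that ships with the umbrella "octokit" package; that package choice is an assumption about
// how this build is consumed, and the org name is a placeholder.
import { Octokit } from "octokit";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function listAllOrgRepos(org: string): Promise<string[]> {
  const repos = await octokit.paginate(octokit.rest.repos.listForOrg, {
    org,
    type: "all",
    per_page: 100,
  });
  return repos.map((r) => r.full_name);
}

listAllOrgRepos("octo-org")
  .then((names) => console.log(names.length, "repositories"))
  .catch(console.error);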
+ */ + listForOrg: { + (params?: RestEndpointMethodTypes["repos"]["listForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists public repositories for the specified user. Note: For GitHub AE, this endpoint will list internal repositories for the specified user. + */ + listForUser: { + (params?: RestEndpointMethodTypes["repos"]["listForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listForks: { + (params?: RestEndpointMethodTypes["repos"]["listForks"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * When authenticating as a user with admin rights to a repository, this endpoint will list all currently open repository invitations. + */ + listInvitations: { + (params?: RestEndpointMethodTypes["repos"]["listInvitations"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * When authenticating as a user, this endpoint will list all currently open repository invitations for that user. + */ + listInvitationsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["repos"]["listInvitationsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists languages for the specified repository. The value shown for each language is the number of bytes of code written in that language. + */ + listLanguages: { + (params?: RestEndpointMethodTypes["repos"]["listLanguages"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists builds of a GitHub Pages site. + * + * A token with the `repo` scope is required. GitHub Apps must have the `pages:read` permission. + */ + listPagesBuilds: { + (params?: RestEndpointMethodTypes["repos"]["listPagesBuilds"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all public repositories in the order that they were created. + * + * Note: + * - For GitHub Enterprise Server, this endpoint will only list repositories available to all users on the enterprise. + * - Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api#using-link-headers) to get the URL for the next page of repositories. + */ + listPublic: { + (params?: RestEndpointMethodTypes["repos"]["listPublic"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the merged pull request that introduced the commit to the repository. If the commit is not present in the default branch, will only return open pull requests associated with the commit.
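// A minimal sketch of looking up the pull request(s) associated with a commit, per the doc
// comment above (open PRs only when the commit is not on the default branch). Assumes the
// umbrella "octokit" package; the SHA and owner/repo values are placeholders.
import { Octokit } from "octokit";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function prsForCommit(commit_sha: string): Promise<void> {
  const { data: pulls } = await octokit.rest.repos.listPullRequestsAssociatedWithCommit({
    owner: "octo-org",
    repo: "example-repo",
    commit_sha,
  });
  pulls.forEach((pr) => console.log(`#${pr.number} ${pr.title} (${pr.state})`));
}

prsForCommit("0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c").catch(console.error);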
+ */ + listPullRequestsAssociatedWithCommit: { + (params?: RestEndpointMethodTypes["repos"]["listPullRequestsAssociatedWithCommit"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listReleaseAssets: { + (params?: RestEndpointMethodTypes["repos"]["listReleaseAssets"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This returns a list of releases, which does not include regular Git tags that have not been associated with a release. To get a list of Git tags, use the [Repository Tags API](https://docs.github.com/rest/reference/repos#list-repository-tags). + * + * Information about published releases is available to everyone. Only users with push access will receive listings for draft releases. + */ + listReleases: { + (params?: RestEndpointMethodTypes["repos"]["listReleases"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This returns the tag protection states of a repository. + * + * This information is only available to repository administrators. + */ + listTagProtection: { + (params?: RestEndpointMethodTypes["repos"]["listTagProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + listTags: { + (params?: RestEndpointMethodTypes["repos"]["listTags"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the teams that have access to the specified repository and that are also visible to the authenticated user. + * + * For a public repository, a team is listed only if that team added the public repository explicitly. + * + * Personal access tokens require the following scopes: + * * `public_repo` to call this endpoint on a public repository + * * `repo` to call this endpoint on a private repository (this scope also includes public repositories) + * + * This endpoint is not compatible with fine-grained personal access tokens. + */ + listTeams: { + (params?: RestEndpointMethodTypes["repos"]["listTeams"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Returns a list of webhook deliveries for a webhook configured in a repository. + */ + listWebhookDeliveries: { + (params?: RestEndpointMethodTypes["repos"]["listWebhookDeliveries"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists webhooks for a repository. `last response` may return null if there have not been any deliveries within 30 days. + */ + listWebhooks: { + (params?: RestEndpointMethodTypes["repos"]["listWebhooks"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + merge: { + (params?: RestEndpointMethodTypes["repos"]["merge"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sync a branch of a forked repository to keep it up-to-date with the upstream repository.
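+ *
+ * A minimal usage sketch added for illustration; it assumes an authenticated `octokit` client and that `octo-fork` is a fork with a `main` branch.
+ *
+ * @example
+ * await octokit.rest.repos.mergeUpstream({
+ *   owner: "octo-org",  // placeholder owner of the fork
+ *   repo: "octo-fork",  // placeholder fork name
+ *   branch: "main",     // fork branch to sync with its upstream counterpart
+ * });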
+ */ + mergeUpstream: { + (params?: RestEndpointMethodTypes["repos"]["mergeUpstream"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. + */ + pingWebhook: { + (params?: RestEndpointMethodTypes["repos"]["pingWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Redeliver a webhook delivery for a webhook configured in a repository. + */ + redeliverWebhookDelivery: { + (params?: RestEndpointMethodTypes["repos"]["redeliverWebhookDelivery"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of an app to push to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + removeAppAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["removeAppAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a collaborator from a repository. + * + * To use this endpoint, the authenticated user must either be an administrator of the repository or target themselves for removal. + * + * This endpoint also: + * - Cancels any outstanding invitations + * - Unassigns the user from any issues + * - Removes access to organization projects if the user is not an organization member and is not a collaborator on any other organization repositories. + * - Unstars the repository + * - Updates access permissions to packages + * + * Removing a user as a collaborator has the following effects on forks: + * - If the user had access to a fork through their membership to this repository, the user will also be removed from the fork. + * - If the user had their own fork of the repository, the fork will be deleted. + * - If the user still has read access to the repository, open pull requests by this user from a fork will be denied. + * + * **Note**: A user can still have access to the repository through organization permissions like base repository permissions. + * + * Although the API responds immediately, the additional permission updates might take some extra time to complete in the background. + * + * For more information on fork permissions, see "[About permissions and visibility of forks](https://docs.github.com/pull-requests/collaborating-with-pull-requests/working-with-forks/about-permissions-and-visibility-of-forks)".
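+ *
+ * A minimal usage sketch added for illustration; it assumes an authenticated `octokit` client with admin access and placeholder names.
+ *
+ * @example
+ * await octokit.rest.repos.removeCollaborator({
+ *   owner: "octo-org",  // placeholder owner
+ *   repo: "octo-repo",  // placeholder repository
+ *   username: "hubot",  // placeholder collaborator to remove
+ * });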
+ */ + removeCollaborator: { + (params?: RestEndpointMethodTypes["repos"]["removeCollaborator"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + removeStatusCheckContexts: { + (params?: RestEndpointMethodTypes["repos"]["removeStatusCheckContexts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + removeStatusCheckProtection: { + (params?: RestEndpointMethodTypes["repos"]["removeStatusCheckProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a team to push to this branch. You can also remove push access for child teams. + */ + removeTeamAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["removeTeamAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a user to push to this branch. + * + * | Type | Description | + * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames of the people who should no longer have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + removeUserAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["removeUserAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Renames a branch in a repository. + * + * **Note:** Although the API responds immediately, the branch rename process might take some extra time to complete in the background. 
You won't be able to push to the old branch name while the rename process is in progress. For more information, see "[Renaming a branch](https://docs.github.com/github/administering-a-repository/renaming-a-branch)". + * + * The permissions required to use this endpoint depend on whether you are renaming the default branch. + * + * To rename a non-default branch: + * + * * Users must have push access. + * * GitHub Apps must have the `contents:write` repository permission. + * + * To rename the default branch: + * + * * Users must have admin or owner permissions. + * * GitHub Apps must have the `administration:write` repository permission. + */ + renameBranch: { + (params?: RestEndpointMethodTypes["repos"]["renameBranch"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + replaceAllTopics: { + (params?: RestEndpointMethodTypes["repos"]["replaceAllTopics"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * You can request that your site be built from the latest revision on the default branch. This has the same effect as pushing a commit to your default branch, but does not require an additional commit. Manually triggering page builds can be helpful when diagnosing build warnings and failures. + * + * Build requests are limited to one concurrent build per repository and one concurrent build per requester. If you request a build while another is still in progress, the second request will be queued until the first completes. + */ + requestPagesBuild: { + (params?: RestEndpointMethodTypes["repos"]["requestPagesBuild"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Adding admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + setAdminBranchProtection: { + (params?: RestEndpointMethodTypes["repos"]["setAdminBranchProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of apps that have push access to this branch. This removes all apps that previously had push access and grants push access to the new list of apps. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch.
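+ *
+ * A minimal usage sketch added for illustration; it assumes an authenticated `octokit` client, an existing branch protection rule on `main`, and a placeholder app slug.
+ *
+ * @example
+ * await octokit.rest.repos.setAppAccessRestrictions({
+ *   owner: "octo-org",     // placeholder owner
+ *   repo: "octo-repo",     // placeholder repository
+ *   branch: "main",        // protected branch
+ *   apps: ["octo-ci-app"], // placeholder slugs of apps that should keep push access
+ * });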
+ */ + setAppAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["setAppAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + */ + setStatusCheckContexts: { + (params?: RestEndpointMethodTypes["repos"]["setStatusCheckContexts"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of teams that have push access to this branch. This removes all teams that previously had push access and grants push access to the new list of teams. Team restrictions include child teams. + */ + setTeamAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["setTeamAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of people that have push access to this branch. This removes all people that previously had push access and grants push access to the new list of people. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + setUserAccessRestrictions: { + (params?: RestEndpointMethodTypes["repos"]["setUserAccessRestrictions"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This will trigger the hook with the latest push to the current repository if the hook is subscribed to `push` events. If the hook is not subscribed to `push` events, the server will respond with 204 but no test POST will be generated. + * + * **Note**: Previously `/repos/:owner/:repo/hooks/:hook_id/test` + */ + testPushWebhook: { + (params?: RestEndpointMethodTypes["repos"]["testPushWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * A transfer request will need to be accepted by the new owner when transferring a personal repository to another user. 
The response will contain the original `owner`, and the transfer will continue asynchronously. For more details on the requirements to transfer personal and organization-owned repositories, see [about repository transfers](https://docs.github.com/articles/about-repository-transfers/). + */ + transfer: { + (params?: RestEndpointMethodTypes["repos"]["transfer"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note**: To edit a repository's topics, use the [Replace all repository topics](https://docs.github.com/rest/reference/repos#replace-all-repository-topics) endpoint. + */ + update: { + (params?: RestEndpointMethodTypes["repos"]["update"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Protecting a branch requires admin or owner permissions to the repository. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + * + * **Note**: The list of users, apps, and teams in total is limited to 100 items. + */ + updateBranchProtection: { + (params?: RestEndpointMethodTypes["repos"]["updateBranchProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateCommitComment: { + (params?: RestEndpointMethodTypes["repos"]["updateCommitComment"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + updateDeploymentBranchPolicy: { + (params?: RestEndpointMethodTypes["repos"]["updateDeploymentBranchPolicy"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates information for a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages). + * + * To use this endpoint, you must be a repository administrator, maintainer, or have the 'manage GitHub Pages settings' permission. A token with the `repo` scope or Pages write permission is required. GitHub Apps must have the `administration:write` and `pages:write` permissions. + */ + updateInformationAboutPagesSite: { + (params?: RestEndpointMethodTypes["repos"]["updateInformationAboutPagesSite"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + updateInvitation: { + (params?: RestEndpointMethodTypes["repos"]["updateInvitation"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Update a ruleset for an organization. 
+ */ + updateOrgRuleset: { + (params?: RestEndpointMethodTypes["repos"]["updateOrgRuleset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating pull request review enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + */ + updatePullRequestReviewProtection: { + (params?: RestEndpointMethodTypes["repos"]["updatePullRequestReviewProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access to the repository can edit a release. + */ + updateRelease: { + (params?: RestEndpointMethodTypes["repos"]["updateRelease"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Users with push access to the repository can edit a release asset. + */ + updateReleaseAsset: { + (params?: RestEndpointMethodTypes["repos"]["updateReleaseAsset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Update a ruleset for a repository. + */ + updateRepoRuleset: { + (params?: RestEndpointMethodTypes["repos"]["updateRepoRuleset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. + * @deprecated octokit.rest.repos.updateStatusCheckPotection() has been renamed to octokit.rest.repos.updateStatusCheckProtection() (2020-09-17) + */ + updateStatusCheckPotection: { + (params?: RestEndpointMethodTypes["repos"]["updateStatusCheckPotection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. 
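+ *
+ * A minimal usage sketch added for illustration; it assumes an authenticated `octokit` client, branch protection already enabled on `main`, and a placeholder status check context.
+ *
+ * @example
+ * await octokit.rest.repos.updateStatusCheckProtection({
+ *   owner: "octo-org",      // placeholder owner
+ *   repo: "octo-repo",      // placeholder repository
+ *   branch: "main",         // protected branch
+ *   strict: true,           // require the branch to be up to date before merging
+ *   contexts: ["ci/build"], // placeholder status check context names
+ * });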
+ */ + updateStatusCheckProtection: { + (params?: RestEndpointMethodTypes["repos"]["updateStatusCheckProtection"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates a webhook configured in a repository. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for a repository](/rest/reference/repos#update-a-webhook-configuration-for-a-repository)." + */ + updateWebhook: { + (params?: RestEndpointMethodTypes["repos"]["updateWebhook"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the webhook configuration for a repository. To update more information about the webhook, including the `active` state and `events`, use "[Update a repository webhook](/rest/reference/orgs#update-a-repository-webhook)." + * + * Access tokens must have the `write:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:write` permission. + */ + updateWebhookConfigForRepo: { + (params?: RestEndpointMethodTypes["repos"]["updateWebhookConfigForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint makes use of [a Hypermedia relation](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) to determine which URL to access. The endpoint you call to upload release assets is specific to your release. Use the `upload_url` returned in + * the response of the [Create a release endpoint](https://docs.github.com/rest/releases/releases#create-a-release) to upload a release asset. + * + * You need to use an HTTP client which supports [SNI](http://en.wikipedia.org/wiki/Server_Name_Indication) to make calls to this endpoint. + * + * Most libraries will set the required `Content-Length` header automatically. Use the required `Content-Type` header to provide the media type of the asset. For a list of media types, see [Media Types](https://www.iana.org/assignments/media-types/media-types.xhtml). For example: + * + * `application/zip` + * + * GitHub expects the asset data in its raw binary form, rather than JSON. You will send the raw binary content of the asset as the request body. Everything else about the endpoint is the same as the rest of the API. For example, + * you'll still need to pass your authentication to be able to upload an asset. + * + * When an upstream failure occurs, you will receive a `502 Bad Gateway` status. This may leave an empty asset with a state of `starter`. It can be safely deleted. + * + * **Notes:** + * * GitHub renames asset filenames that have special characters, non-alphanumeric characters, and leading or trailing periods. The "[List assets for a release](https://docs.github.com/rest/reference/repos#list-assets-for-a-release)" + * endpoint lists the renamed filenames. For more information and help, contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + * * To find the `release_id` query the [`GET /repos/{owner}/{repo}/releases/latest` endpoint](https://docs.github.com/rest/releases/releases#get-the-latest-release). + * * If you upload an asset with the same filename as another uploaded asset, you'll receive an error and must delete the old file before you can re-upload the new asset. 
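+ *
+ * A minimal usage sketch added for illustration; it assumes an authenticated `octokit` client, a Node.js runtime, an existing release id, and a local `notes.zip` file.
+ *
+ * @example
+ * import { readFileSync } from "node:fs";
+ * await octokit.rest.repos.uploadReleaseAsset({
+ *   owner: "octo-org",               // placeholder owner
+ *   repo: "octo-repo",               // placeholder repository
+ *   release_id: 1,                   // placeholder release id
+ *   name: "notes.zip",               // asset filename
+ *   data: readFileSync("notes.zip"), // raw binary content of the asset
+ *   headers: { "content-type": "application/zip" },
+ * });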
+ */ + uploadReleaseAsset: { + (params?: RestEndpointMethodTypes["repos"]["uploadReleaseAsset"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + search: { + /** + * Searches for query terms inside of a file. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for code, you can get text match metadata for the file **content** and file **path** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the definition of the `addClass` function inside [jQuery](https://github.com/jquery/jquery) repository, your query would look something like this: + * + * `q=addClass+in:file+language:js+repo:jquery/jquery` + * + * This query searches for the keyword `addClass` within a file's contents. The query limits the search to files where the language is JavaScript in the `jquery/jquery` repository. + * + * #### Considerations for code search + * + * Due to the complexity of searching code, there are a few restrictions on how searches are performed: + * + * * Only the _default branch_ is considered. In most cases, this will be the `master` branch. + * * Only files smaller than 384 KB are searchable. + * * You must always include at least one search term when searching source code. For example, searching for [`language:go`](https://github.com/search?utf8=%E2%9C%93&q=language%3Ago&type=Code) is not valid, while [`amazing + * language:go`](https://github.com/search?utf8=%E2%9C%93&q=amazing+language%3Ago&type=Code) is. + * + * This endpoint requires you to authenticate and limits you to 10 requests per minute. + */ + code: { + (params?: RestEndpointMethodTypes["search"]["code"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Find commits via various criteria on the default branch (usually `main`). This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for commits, you can get text match metadata for the **message** field when you provide the `text-match` media type. For more details about how to receive highlighted search results, see [Text match + * metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find commits related to CSS in the [octocat/Spoon-Knife](https://github.com/octocat/Spoon-Knife) repository. Your query would look something like this: + * + * `q=repo:octocat/Spoon-Knife+css` + */ + commits: { + (params?: RestEndpointMethodTypes["search"]["commits"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Find issues by state and keyword. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for issues, you can get text match metadata for the issue **title**, issue **body**, and issue **comment body** fields when you pass the `text-match` media type. For more details about how to receive highlighted + * search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). 
+ * + * For example, if you want to find the oldest unresolved Python bugs on Windows. Your query might look something like this. + * + * `q=windows+label:bug+language:python+state:open&sort=created&order=asc` + * + * This query searches for the keyword `windows`, within any open issue that is labeled as `bug`. The search runs across repositories whose primary language is Python. The results are sorted by creation date in ascending order, which means the oldest issues appear first in the search results. + * + * **Note:** For [user-to-server](https://docs.github.com/developers/apps/identifying-and-authorizing-users-for-github-apps#user-to-server-requests) GitHub App requests, you can't retrieve a combination of issues and pull requests in a single query. Requests that don't include the `is:issue` or `is:pull-request` qualifier will receive an HTTP `422 Unprocessable Entity` response. To get results for both issues and pull requests, you must send separate queries for issues and pull requests. For more information about the `is` qualifier, see "[Searching only issues or pull requests](https://docs.github.com/github/searching-for-information-on-github/searching-issues-and-pull-requests#search-only-issues-or-pull-requests)." + */ + issuesAndPullRequests: { + (params?: RestEndpointMethodTypes["search"]["issuesAndPullRequests"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Find labels in a repository with names or descriptions that match search keywords. Returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for labels, you can get text match metadata for the label **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find labels in the `linguist` repository that match `bug`, `defect`, or `enhancement`. Your query might look like this: + * + * `q=bug+defect+enhancement&repository_id=64778136` + * + * The labels that best match the query appear first in the search results. + */ + labels: { + (params?: RestEndpointMethodTypes["search"]["labels"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Find repositories via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for repositories, you can get text match metadata for the **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for popular Tetris repositories written in assembly code, your query might look like this: + * + * `q=tetris+language:assembly&sort=stars&order=desc` + * + * This query searches for repositories with the word `tetris` in the name, the description, or the README. The results are limited to repositories where the primary language is assembly. The results are sorted by stars in descending order, so that the most popular repositories appear first in the search results. 
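+ *
+ * A minimal usage sketch added for illustration; it assumes an authenticated `octokit` client and reuses the example query from the description above.
+ *
+ * @example
+ * const { data } = await octokit.rest.search.repos({
+ *   q: "tetris language:assembly",
+ *   sort: "stars",
+ *   order: "desc",
+ * });
+ * console.log(data.total_count, data.items[0]?.full_name);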
+ */ + repos: { + (params?: RestEndpointMethodTypes["search"]["repos"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Find topics via various criteria. Results are sorted by best match. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). See "[Searching topics](https://docs.github.com/articles/searching-topics/)" for a detailed list of qualifiers. + * + * When searching for topics, you can get text match metadata for the topic's **short\_description**, **description**, **name**, or **display\_name** field when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for topics related to Ruby that are featured on https://github.com/topics. Your query might look like this: + * + * `q=ruby+is:featured` + * + * This query searches for topics with the keyword `ruby` and limits the results to find only topics that are featured. The topics that are the best match for the query appear first in the search results. + */ + topics: { + (params?: RestEndpointMethodTypes["search"]["topics"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Find users via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for users, you can get text match metadata for the issue **login**, public **email**, and **name** fields when you pass the `text-match` media type. For more details about highlighting search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you're looking for a list of popular users, you might try this query: + * + * `q=tom+repos:%3E42+followers:%3E1000` + * + * This query searches for users with the name `tom`. The results are restricted to users with more than 42 repositories and over 1,000 followers. + */ + users: { + (params?: RestEndpointMethodTypes["search"]["users"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + secretScanning: { + /** + * Gets a single secret scanning alert detected in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + getAlert: { + (params?: RestEndpointMethodTypes["secretScanning"]["getAlert"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists secret scanning alerts for eligible repositories in an enterprise, from newest to oldest. 
+ * To use this endpoint, you must be a member of the enterprise, and you must use an access token with the `repo` scope or `security_events` scope. Alerts are only returned for organizations in the enterprise for which you are an organization owner or a [security manager](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization). + */ + listAlertsForEnterprise: { + (params?: RestEndpointMethodTypes["secretScanning"]["listAlertsForEnterprise"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists secret scanning alerts for eligible repositories in an organization, from newest to oldest. + * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + listAlertsForOrg: { + (params?: RestEndpointMethodTypes["secretScanning"]["listAlertsForOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists secret scanning alerts for an eligible repository, from newest to oldest. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + listAlertsForRepo: { + (params?: RestEndpointMethodTypes["secretScanning"]["listAlertsForRepo"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all locations for a given secret scanning alert for an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + listLocationsForAlert: { + (params?: RestEndpointMethodTypes["secretScanning"]["listLocationsForAlert"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Updates the status of a secret scanning alert in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` write permission to use this endpoint. 
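+ *
+ * A minimal usage sketch added for illustration; it assumes an authenticated `octokit` client with admin access and a placeholder alert number.
+ *
+ * @example
+ * await octokit.rest.secretScanning.updateAlert({
+ *   owner: "octo-org",  // placeholder owner
+ *   repo: "octo-repo",  // placeholder repository
+ *   alert_number: 42,   // placeholder alert number
+ *   state: "resolved",
+ *   resolution: "false_positive",
+ * });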
+ */ + updateAlert: { + (params?: RestEndpointMethodTypes["secretScanning"]["updateAlert"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + securityAdvisories: { + /** + * Report a security vulnerability to the maintainers of the repository. + * See "[Privately reporting a security vulnerability](https://docs.github.com/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability)" for more information about private vulnerability reporting. + */ + createPrivateVulnerabilityReport: { + (params?: RestEndpointMethodTypes["securityAdvisories"]["createPrivateVulnerabilityReport"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new repository security advisory. + * You must authenticate using an access token with the `repo` scope or `repository_advisories:write` permission to use this endpoint. + * + * In order to create a draft repository security advisory, you must be a security manager or administrator of that repository. + */ + createRepositoryAdvisory: { + (params?: RestEndpointMethodTypes["securityAdvisories"]["createRepositoryAdvisory"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get a repository security advisory using its GitHub Security Advisory (GHSA) identifier. + * You can access any published security advisory on a public repository. + * You must authenticate using an access token with the `repo` scope or `repository_advisories:read` permission + * in order to get a published security advisory in a private repository, or any unpublished security advisory that you have access to. + * + * You can access an unpublished security advisory from a repository if you are a security manager or administrator of that repository, or if you are a + * collaborator on the security advisory. + */ + getRepositoryAdvisory: { + (params?: RestEndpointMethodTypes["securityAdvisories"]["getRepositoryAdvisory"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists security advisories in a repository. + * You must authenticate using an access token with the `repo` scope or `repository_advisories:read` permission + * in order to get published security advisories in a private repository, or any unpublished security advisories that you have access to. + * + * You can access unpublished security advisories from a repository if you are a security manager or administrator of that repository, or if you are a collaborator on any security advisory. + */ + listRepositoryAdvisories: { + (params?: RestEndpointMethodTypes["securityAdvisories"]["listRepositoryAdvisories"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Update a repository security advisory using its GitHub Security Advisory (GHSA) identifier. + * You must authenticate using an access token with the `repo` scope or `repository_advisories:write` permission to use this endpoint. + * + * In order to update any security advisory, you must be a security manager or administrator of that repository, + * or a collaborator on the repository security advisory. 
+ */ + updateRepositoryAdvisory: { + (params?: RestEndpointMethodTypes["securityAdvisories"]["updateRepositoryAdvisory"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + teams: { + /** + * Adds an organization member to a team. An authenticated organization owner or team maintainer can add organization members to a team. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * An organization owner can add someone who is not part of the team's organization to a team. When an organization owner adds someone to a team who is not an organization member, this endpoint will send an invitation to the person via email. This newly-created membership will be in the "pending" state until the person accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. + * + * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + addOrUpdateMembershipForUserInOrg: { + (params?: RestEndpointMethodTypes["teams"]["addOrUpdateMembershipForUserInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + addOrUpdateProjectPermissionsInOrg: { + (params?: RestEndpointMethodTypes["teams"]["addOrUpdateProjectPermissionsInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization.
Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + * + * For more information about the permission levels, see "[Repository permission levels for an organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". + */ + addOrUpdateRepoPermissionsInOrg: { + (params?: RestEndpointMethodTypes["teams"]["addOrUpdateRepoPermissionsInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + checkPermissionsForProjectInOrg: { + (params?: RestEndpointMethodTypes["teams"]["checkPermissionsForProjectInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Checks whether a team has `admin`, `push`, `maintain`, `triage`, or `pull` permission for a repository. Repositories inherited through a parent team will also be checked. + * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `application/vnd.github.v3.repository+json` accept header. + * + * If a team doesn't have permission for the repository, you will receive a `404 Not Found` response status. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + checkPermissionsForRepoInOrg: { + (params?: RestEndpointMethodTypes["teams"]["checkPermissionsForRepoInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * To create a team, the authenticated user must be a member or owner of `{org}`. By default, organization members can create teams. Organization owners can limit team creation to organization owners. For more information, see "[Setting team creation permissions](https://docs.github.com/articles/setting-team-creation-permissions-in-your-organization)." + * + * When you create a new team, you automatically become a team maintainer without explicitly adding yourself to the optional array of `maintainers`. For more information, see "[About teams](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/about-teams)". + */ + create: { + (params?: RestEndpointMethodTypes["teams"]["create"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + createDiscussionCommentInOrg: { + (params?: RestEndpointMethodTypes["teams"]["createDiscussionCommentInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions`. + */ + createDiscussionInOrg: { + (params?: RestEndpointMethodTypes["teams"]["createDiscussionInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + deleteDiscussionCommentInOrg: { + (params?: RestEndpointMethodTypes["teams"]["deleteDiscussionCommentInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + deleteDiscussionInOrg: { + (params?: RestEndpointMethodTypes["teams"]["deleteDiscussionInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * To delete a team, the authenticated user must be an organization owner or team maintainer. + * + * If you are an organization owner, deleting a parent team will delete all of its child teams as well. 
+ * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}`. + */ + deleteInOrg: { + (params?: RestEndpointMethodTypes["teams"]["deleteInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets a team using the team's `slug`. To create the `slug`, GitHub replaces special characters in the `name` string, changes all words to lowercase, and replaces spaces with a `-` separator. For example, `"My TEam Näme"` would become `my-team-name`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}`. + */ + getByName: { + (params?: RestEndpointMethodTypes["teams"]["getByName"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + getDiscussionCommentInOrg: { + (params?: RestEndpointMethodTypes["teams"]["getDiscussionCommentInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + getDiscussionInOrg: { + (params?: RestEndpointMethodTypes["teams"]["getDiscussionInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Team members will include the members of child teams. + * + * To get a user's membership with a team, the team must be visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/memberships/{username}`. + * + * **Note:** + * The response contains the `state` of the membership and the member's `role`. + * + * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). + */ + getMembershipForUserInOrg: { + (params?: RestEndpointMethodTypes["teams"]["getMembershipForUserInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all teams in an organization that are visible to the authenticated user. + */ + list: { + (params?: RestEndpointMethodTypes["teams"]["list"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the child teams of the team specified by `{team_slug}`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/teams`.
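+ *
+ * A minimal usage sketch added for illustration; it assumes an authenticated `octokit` client and placeholder organization and team slugs.
+ *
+ * @example
+ * const { data: childTeams } = await octokit.rest.teams.listChildInOrg({
+ *   org: "octo-org",       // placeholder organization
+ *   team_slug: "frontend", // placeholder parent team slug
+ * });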
+ */ + listChildInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listChildInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + listDiscussionCommentsInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listDiscussionCommentsInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions`. + */ + listDiscussionsInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listDiscussionsInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List all of the teams across all of the organizations to which the authenticated user belongs. This method requires `user`, `repo`, or `read:org` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/) when authenticating via [OAuth](https://docs.github.com/apps/building-oauth-apps/). When using a fine-grained personal access token, the resource owner of the token [must be a single organization](https://docs.github.com/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#fine-grained-personal-access-tokens), and have at least read-only member organization permissions. The response payload only contains the teams from a single organization when using a fine-grained personal access token. + */ + listForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["teams"]["listForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Team members will include the members of child teams. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + listMembersInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listMembersInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/invitations`. + */ + listPendingInvitationsInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listPendingInvitationsInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the organization projects for a team. 
+ * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects`. + */ + listProjectsInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listProjectsInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists a team's repositories visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos`. + */ + listReposInOrg: { + (params?: RestEndpointMethodTypes["teams"]["listReposInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + removeMembershipForUserInOrg: { + (params?: RestEndpointMethodTypes["teams"]["removeMembershipForUserInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. This endpoint removes the project from the team, but does not delete the project. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + removeProjectInOrg: { + (params?: RestEndpointMethodTypes["teams"]["removeProjectInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. This does not delete the repository, it just removes it from the team. 
+ * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + removeRepoInOrg: { + (params?: RestEndpointMethodTypes["teams"]["removeRepoInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + updateDiscussionCommentInOrg: { + (params?: RestEndpointMethodTypes["teams"]["updateDiscussionCommentInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + updateDiscussionInOrg: { + (params?: RestEndpointMethodTypes["teams"]["updateDiscussionInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * To edit a team, the authenticated user must either be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}`. + */ + updateInOrg: { + (params?: RestEndpointMethodTypes["teams"]["updateInOrg"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; + users: { + /** + * This endpoint is accessible with the `user` scope. + * @deprecated octokit.rest.users.addEmailForAuthenticated() has been renamed to octokit.rest.users.addEmailForAuthenticatedUser() (2021-10-05) + */ + addEmailForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["addEmailForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint is accessible with the `user` scope. + */ + addEmailForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["addEmailForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Add one or more social accounts to the authenticated user's profile. This endpoint is accessible with the `user` scope. 
+ */ + addSocialAccountForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["addSocialAccountForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + block: { + (params?: RestEndpointMethodTypes["users"]["block"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + checkBlocked: { + (params?: RestEndpointMethodTypes["users"]["checkBlocked"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + checkFollowingForUser: { + (params?: RestEndpointMethodTypes["users"]["checkFollowingForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + checkPersonIsFollowedByAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["checkPersonIsFollowedByAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a GPG key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * @deprecated octokit.rest.users.createGpgKeyForAuthenticated() has been renamed to octokit.rest.users.createGpgKeyForAuthenticatedUser() (2021-10-05) + */ + createGpgKeyForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["createGpgKeyForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a GPG key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + createGpgKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["createGpgKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a public SSH key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * @deprecated octokit.rest.users.createPublicSshKeyForAuthenticated() has been renamed to octokit.rest.users.createPublicSshKeyForAuthenticatedUser() (2021-10-05) + */ + createPublicSshKeyForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["createPublicSshKeyForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Adds a public SSH key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ */ + createPublicSshKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["createPublicSshKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Creates an SSH signing key for the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `write:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." + */ + createSshSigningKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["createSshSigningKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint is accessible with the `user` scope. + * @deprecated octokit.rest.users.deleteEmailForAuthenticated() has been renamed to octokit.rest.users.deleteEmailForAuthenticatedUser() (2021-10-05) + */ + deleteEmailForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["deleteEmailForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * This endpoint is accessible with the `user` scope. + */ + deleteEmailForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["deleteEmailForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * @deprecated octokit.rest.users.deleteGpgKeyForAuthenticated() has been renamed to octokit.rest.users.deleteGpgKeyForAuthenticatedUser() (2021-10-05) + */ + deleteGpgKeyForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["deleteGpgKeyForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + deleteGpgKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["deleteGpgKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ * @deprecated octokit.rest.users.deletePublicSshKeyForAuthenticated() has been renamed to octokit.rest.users.deletePublicSshKeyForAuthenticatedUser() (2021-10-05) + */ + deletePublicSshKeyForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["deletePublicSshKeyForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + deletePublicSshKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["deletePublicSshKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes one or more social accounts from the authenticated user's profile. This endpoint is accessible with the `user` scope. + */ + deleteSocialAccountForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["deleteSocialAccountForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Deletes an SSH signing key from the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `admin:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." + */ + deleteSshSigningKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["deleteSshSigningKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * Following a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. + */ + follow: { + (params?: RestEndpointMethodTypes["users"]["follow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * If the authenticated user is authenticated with an OAuth token with the `user` scope, then the response lists public and private profile information. + * + * If the authenticated user is authenticated through OAuth without the `user` scope, then the response lists only public profile information. + */ + getAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["getAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Provides publicly available information about someone with a GitHub account. + * + * GitHub Apps with the `Plan` user permission can use this endpoint to retrieve information about a user's GitHub plan. The GitHub App must be authenticated as a user. See "[Identifying and authorizing users for GitHub Apps](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/)" for details about authentication. 
For an example response, see 'Response with GitHub plan information' below" + * + * The `email` key in the following response is the publicly visible email address from your GitHub [profile page](https://github.com/settings/profile). When setting up your profile, you can select a primary email address to be “public” which provides an email entry for this endpoint. If you do not set a public email address for `email`, then it will have a value of `null`. You only see publicly visible email addresses when authenticated with GitHub. For more information, see [Authentication](https://docs.github.com/rest/overview/resources-in-the-rest-api#authentication). + * + * The Emails API enables you to list all of your email addresses, and toggle a primary email to be visible publicly. For more information, see "[Emails API](https://docs.github.com/rest/reference/users#emails)". + */ + getByUsername: { + (params?: RestEndpointMethodTypes["users"]["getByUsername"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Provides hovercard information when authenticated through basic auth or OAuth with the `repo` scope. You can find out more about someone in relation to their pull requests, issues, repositories, and organizations. + * + * The `subject_type` and `subject_id` parameters provide context for the person's hovercard, which returns more information than without the parameters. For example, if you wanted to find out more about `octocat` who owns the `Spoon-Knife` repository via cURL, it would look like this: + * + * ```shell + * curl -u username:token + * https://api.github.com/users/octocat/hovercard?subject_type=repository&subject_id=1300192 + * ``` + */ + getContextForUser: { + (params?: RestEndpointMethodTypes["users"]["getContextForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * @deprecated octokit.rest.users.getGpgKeyForAuthenticated() has been renamed to octokit.rest.users.getGpgKeyForAuthenticatedUser() (2021-10-05) + */ + getGpgKeyForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["getGpgKeyForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + getGpgKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["getGpgKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ * @deprecated octokit.rest.users.getPublicSshKeyForAuthenticated() has been renamed to octokit.rest.users.getPublicSshKeyForAuthenticatedUser() (2021-10-05) + */ + getPublicSshKeyForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["getPublicSshKeyForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + getPublicSshKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["getPublicSshKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Gets extended details for an SSH signing key. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `read:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." + */ + getSshSigningKeyForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["getSshSigningKeyForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all users, in the order that they signed up on GitHub. This list includes personal user accounts and organization accounts. + * + * Note: Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/guides/using-pagination-in-the-rest-api#using-link-headers) to get the URL for the next page of users. + */ + list: { + (params?: RestEndpointMethodTypes["users"]["list"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the users you've blocked on your personal account. + * @deprecated octokit.rest.users.listBlockedByAuthenticated() has been renamed to octokit.rest.users.listBlockedByAuthenticatedUser() (2021-10-05) + */ + listBlockedByAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["listBlockedByAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * List the users you've blocked on your personal account. + */ + listBlockedByAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listBlockedByAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all of your email addresses, and specifies which one is visible to the public. This endpoint is accessible with the `user:email` scope. + * @deprecated octokit.rest.users.listEmailsForAuthenticated() has been renamed to octokit.rest.users.listEmailsForAuthenticatedUser() (2021-10-05) + */ + listEmailsForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["listEmailsForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all of your email addresses, and specifies which one is visible to the public. This endpoint is accessible with the `user:email` scope. 
+ */ + listEmailsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listEmailsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people who the authenticated user follows. + * @deprecated octokit.rest.users.listFollowedByAuthenticated() has been renamed to octokit.rest.users.listFollowedByAuthenticatedUser() (2021-10-05) + */ + listFollowedByAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["listFollowedByAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people who the authenticated user follows. + */ + listFollowedByAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listFollowedByAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people following the authenticated user. + */ + listFollowersForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listFollowersForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people following the specified user. + */ + listFollowersForUser: { + (params?: RestEndpointMethodTypes["users"]["listFollowersForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the people who the specified user follows. + */ + listFollowingForUser: { + (params?: RestEndpointMethodTypes["users"]["listFollowingForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * @deprecated octokit.rest.users.listGpgKeysForAuthenticated() has been renamed to octokit.rest.users.listGpgKeysForAuthenticatedUser() (2021-10-05) + */ + listGpgKeysForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["listGpgKeysForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + listGpgKeysForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listGpgKeysForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the GPG keys for a user. This information is accessible by anyone. + */ + listGpgKeysForUser: { + (params?: RestEndpointMethodTypes["users"]["listGpgKeysForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. 
This endpoint is accessible with the `user:email` scope. + * @deprecated octokit.rest.users.listPublicEmailsForAuthenticated() has been renamed to octokit.rest.users.listPublicEmailsForAuthenticatedUser() (2021-10-05) + */ + listPublicEmailsForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["listPublicEmailsForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. This endpoint is accessible with the `user:email` scope. + */ + listPublicEmailsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listPublicEmailsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the _verified_ public SSH keys for a user. This is accessible by anyone. + */ + listPublicKeysForUser: { + (params?: RestEndpointMethodTypes["users"]["listPublicKeysForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * @deprecated octokit.rest.users.listPublicSshKeysForAuthenticated() has been renamed to octokit.rest.users.listPublicSshKeysForAuthenticatedUser() (2021-10-05) + */ + listPublicSshKeysForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["listPublicSshKeysForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + listPublicSshKeysForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listPublicSshKeysForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists all of your social accounts. + */ + listSocialAccountsForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listSocialAccountsForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists social media accounts for a user. This endpoint is accessible by anyone. + */ + listSocialAccountsForUser: { + (params?: RestEndpointMethodTypes["users"]["listSocialAccountsForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the SSH signing keys for the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `read:ssh_signing_key` scope. 
For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." + */ + listSshSigningKeysForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["listSshSigningKeysForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Lists the SSH signing keys for a user. This operation is accessible by anyone. + */ + listSshSigningKeysForUser: { + (params?: RestEndpointMethodTypes["users"]["listSshSigningKeysForUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the visibility for your primary email addresses. + * @deprecated octokit.rest.users.setPrimaryEmailVisibilityForAuthenticated() has been renamed to octokit.rest.users.setPrimaryEmailVisibilityForAuthenticatedUser() (2021-10-05) + */ + setPrimaryEmailVisibilityForAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["setPrimaryEmailVisibilityForAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Sets the visibility for your primary email addresses. + */ + setPrimaryEmailVisibilityForAuthenticatedUser: { + (params?: RestEndpointMethodTypes["users"]["setPrimaryEmailVisibilityForAuthenticatedUser"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + unblock: { + (params?: RestEndpointMethodTypes["users"]["unblock"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * Unfollowing a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. + */ + unfollow: { + (params?: RestEndpointMethodTypes["users"]["unfollow"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + /** + * **Note:** If your email is set to private and you send an `email` parameter as part of this request to update your profile, your privacy settings are still enforced: the email address will not be displayed on your public profile or via the API. 
+ */ + updateAuthenticated: { + (params?: RestEndpointMethodTypes["users"]["updateAuthenticated"]["parameters"]): Promise; + defaults: RequestInterface["defaults"]; + endpoint: EndpointInterface<{ + url: string; + }>; + }; + }; +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts new file mode 100644 index 0000000..35c81bc --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types.d.ts @@ -0,0 +1,3599 @@ +import type { Endpoints, RequestParameters } from "@octokit/types"; +export type RestEndpointMethodTypes = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/actions/runners/{runner_id}/labels"]["response"]; + }; + addCustomLabelsToSelfHostedRunnerForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"]["response"]; + }; + addSelectedRepoToOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + addSelectedRepoToOrgVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"]["response"]; + }; + addSelectedRepoToRequiredWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}"]["response"]; + }; + approveWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"]["response"]; + }; + cancelWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"]["response"]; + }; + createEnvironmentVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repositories/{repository_id}/environments/{environment_name}/variables"]["response"]; + }; + createOrUpdateEnvironmentSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"]["response"]; + }; + createOrUpdateOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/secrets/{secret_name}"]["response"]; + }; + createOrUpdateRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"]["response"]; + }; + createOrgVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/actions/variables"]["response"]; + }; + createRegistrationTokenForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/actions/runners/registration-token"]["response"]; + }; + createRegistrationTokenForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runners/registration-token"]["response"]; + }; + createRemoveTokenForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/actions/runners/remove-token"]["response"]; + }; + createRemoveTokenForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["POST 
/repos/{owner}/{repo}/actions/runners/remove-token"]["response"]; + }; + createRepoVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/variables"]["response"]; + }; + createRequiredWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/actions/required_workflows"]["response"]; + }; + createWorkflowDispatch: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"]["response"]; + }; + deleteActionsCacheById: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"]["response"]; + }; + deleteActionsCacheByKey: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"]["response"]; + }; + deleteArtifact: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"]["response"]; + }; + deleteEnvironmentSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"]["response"]; + }; + deleteEnvironmentVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}"]["response"]; + }; + deleteOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/secrets/{secret_name}"]["response"]; + }; + deleteOrgVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/variables/{name}"]["response"]; + }; + deleteRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"]["response"]; + }; + deleteRepoVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/variables/{name}"]["response"]; + }; + deleteRequiredWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}"]["response"]; + }; + deleteSelfHostedRunnerFromOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/runners/{runner_id}"]["response"]; + }; + deleteSelfHostedRunnerFromRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"]["response"]; + }; + deleteWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"]["response"]; + }; + deleteWorkflowRunLogs: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"]["response"]; + }; + disableSelectedRepositoryGithubActionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"]["response"]; + }; + disableWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"]["response"]; + }; + downloadArtifact: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"]["response"]; + }; + downloadJobLogsForWorkflowRun: { + parameters: RequestParameters 
& Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"]["response"]; + }; + downloadWorkflowRunAttemptLogs: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"]["response"]; + }; + downloadWorkflowRunLogs: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"]["response"]; + }; + enableSelectedRepositoryGithubActionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"]["response"]; + }; + enableWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"]["response"]; + }; + generateRunnerJitconfigForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/actions/runners/generate-jitconfig"]["response"]; + }; + generateRunnerJitconfigForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig"]["response"]; + }; + getActionsCacheList: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/caches"]["response"]; + }; + getActionsCacheUsage: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/cache/usage"]["response"]; + }; + getActionsCacheUsageByRepoForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/cache/usage-by-repository"]["response"]; + }; + getActionsCacheUsageForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/cache/usage"]["response"]; + }; + getAllowedActionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/permissions/selected-actions"]["response"]; + }; + getAllowedActionsRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"]["response"]; + }; + getArtifact: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"]["response"]; + }; + getEnvironmentPublicKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"]["response"]; + }; + getEnvironmentSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"]["response"]; + }; + getEnvironmentVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}"]["response"]; + }; + getGithubActionsDefaultWorkflowPermissionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/permissions/workflow"]["response"]; + }; + getGithubActionsDefaultWorkflowPermissionsRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/permissions/workflow"]["response"]; + }; + getGithubActionsPermissionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/permissions"]["response"]; + }; + getGithubActionsPermissionsRepository: { + parameters: RequestParameters & Omit; + response: 
Endpoints["GET /repos/{owner}/{repo}/actions/permissions"]["response"]; + }; + getJobForWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"]["response"]; + }; + getOrgPublicKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/secrets/public-key"]["response"]; + }; + getOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/secrets/{secret_name}"]["response"]; + }; + getOrgVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/variables/{name}"]["response"]; + }; + getPendingDeploymentsForRun: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"]["response"]; + }; + getRepoPermissions: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/permissions"]["response"]; + }; + getRepoPublicKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/secrets/public-key"]["response"]; + }; + getRepoRequiredWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}"]["response"]; + }; + getRepoRequiredWorkflowUsage: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing"]["response"]; + }; + getRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"]["response"]; + }; + getRepoVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/variables/{name}"]["response"]; + }; + getRequiredWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/required_workflows/{required_workflow_id}"]["response"]; + }; + getReviewsForRun: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"]["response"]; + }; + getSelfHostedRunnerForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/runners/{runner_id}"]["response"]; + }; + getSelfHostedRunnerForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"]["response"]; + }; + getWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"]["response"]; + }; + getWorkflowAccessToRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/permissions/access"]["response"]; + }; + getWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}"]["response"]; + }; + getWorkflowRunAttempt: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"]["response"]; + }; + getWorkflowRunUsage: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"]["response"]; + }; + getWorkflowUsage: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"]["response"]; + }; + 
listArtifactsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/artifacts"]["response"]; + }; + listEnvironmentSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/secrets"]["response"]; + }; + listEnvironmentVariables: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repositories/{repository_id}/environments/{environment_name}/variables"]["response"]; + }; + listJobsForWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"]["response"]; + }; + listJobsForWorkflowRunAttempt: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"]["response"]; + }; + listLabelsForSelfHostedRunnerForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/runners/{runner_id}/labels"]["response"]; + }; + listLabelsForSelfHostedRunnerForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"]["response"]; + }; + listOrgSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/secrets"]["response"]; + }; + listOrgVariables: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/variables"]["response"]; + }; + listRepoOrganizationSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/organization-secrets"]["response"]; + }; + listRepoOrganizationVariables: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/organization-variables"]["response"]; + }; + listRepoRequiredWorkflows: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{org}/{repo}/actions/required_workflows"]["response"]; + }; + listRepoSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/secrets"]["response"]; + }; + listRepoVariables: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/variables"]["response"]; + }; + listRepoWorkflows: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows"]["response"]; + }; + listRequiredWorkflowRuns: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs"]["response"]; + }; + listRequiredWorkflows: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/required_workflows"]["response"]; + }; + listRunnerApplicationsForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/runners/downloads"]["response"]; + }; + listRunnerApplicationsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runners/downloads"]["response"]; + }; + listSelectedReposForOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"]["response"]; + }; + listSelectedReposForOrgVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/variables/{name}/repositories"]["response"]; + }; + 
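A second hedged sketch (also editorial, not from the diff): the `parameters` member of the same map can type a thin wrapper around one of these endpoints. The helper name, the structural `ActionsClient` interface, and the `per_page` default are assumptions for illustration.

// Sketch only: typing a helper with the generated `parameters`/`response` members.
import { RestEndpointMethodTypes } from "@octokit/plugin-rest-endpoint-methods";

type ListRepoWorkflowsParams =
  RestEndpointMethodTypes["actions"]["listRepoWorkflows"]["parameters"];
type ListRepoWorkflowsResponse =
  RestEndpointMethodTypes["actions"]["listRepoWorkflows"]["response"];

// Stands in for any Octokit instance extended with restEndpointMethods,
// as in the previous sketch; only the one method used here is typed structurally.
interface ActionsClient {
  rest: {
    actions: {
      listRepoWorkflows: (
        params: ListRepoWorkflowsParams
      ) => Promise<ListRepoWorkflowsResponse>;
    };
  };
}

async function listWorkflows(
  client: ActionsClient,
  params: ListRepoWorkflowsParams
): Promise<ListRepoWorkflowsResponse["data"]["workflows"]> {
  // GET /repos/{owner}/{repo}/actions/workflows responds with { total_count, workflows }.
  const res = await client.rest.actions.listRepoWorkflows({ per_page: 100, ...params });
  return res.data.workflows;
}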
listSelectedRepositoriesEnabledGithubActionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/permissions/repositories"]["response"]; + }; + listSelectedRepositoriesRequiredWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories"]["response"]; + }; + listSelfHostedRunnersForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/actions/runners"]["response"]; + }; + listSelfHostedRunnersForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runners"]["response"]; + }; + listWorkflowRunArtifacts: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"]["response"]; + }; + listWorkflowRuns: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"]["response"]; + }; + listWorkflowRunsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/actions/runs"]["response"]; + }; + reRunJobForWorkflowRun: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"]["response"]; + }; + reRunWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"]["response"]; + }; + reRunWorkflowFailedJobs: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"]["response"]; + }; + removeAllCustomLabelsFromSelfHostedRunnerForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"]["response"]; + }; + removeAllCustomLabelsFromSelfHostedRunnerForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"]["response"]; + }; + removeCustomLabelFromSelfHostedRunnerForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"]["response"]; + }; + removeCustomLabelFromSelfHostedRunnerForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"]["response"]; + }; + removeSelectedRepoFromOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + removeSelectedRepoFromOrgVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"]["response"]; + }; + removeSelectedRepoFromRequiredWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}"]["response"]; + }; + reviewCustomGatesForRun: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule"]["response"]; + }; + reviewPendingDeploymentsForRun: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"]["response"]; + }; + setAllowedActionsOrganization: { + parameters: RequestParameters & 
Omit; + response: Endpoints["PUT /orgs/{org}/actions/permissions/selected-actions"]["response"]; + }; + setAllowedActionsRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"]["response"]; + }; + setCustomLabelsForSelfHostedRunnerForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/runners/{runner_id}/labels"]["response"]; + }; + setCustomLabelsForSelfHostedRunnerForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"]["response"]; + }; + setGithubActionsDefaultWorkflowPermissionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/permissions/workflow"]["response"]; + }; + setGithubActionsDefaultWorkflowPermissionsRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/permissions/workflow"]["response"]; + }; + setGithubActionsPermissionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/permissions"]["response"]; + }; + setGithubActionsPermissionsRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/permissions"]["response"]; + }; + setSelectedReposForOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"]["response"]; + }; + setSelectedReposForOrgVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/variables/{name}/repositories"]["response"]; + }; + setSelectedReposToRequiredWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories"]["response"]; + }; + setSelectedRepositoriesEnabledGithubActionsOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/actions/permissions/repositories"]["response"]; + }; + setWorkflowAccessToRepository: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/actions/permissions/access"]["response"]; + }; + updateEnvironmentVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}"]["response"]; + }; + updateOrgVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}/actions/variables/{name}"]["response"]; + }; + updateRepoVariable: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/actions/variables/{name}"]["response"]; + }; + updateRequiredWorkflow: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}/actions/required_workflows/{required_workflow_id}"]["response"]; + }; + }; + activity: { + checkRepoIsStarredByAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/starred/{owner}/{repo}"]["response"]; + }; + deleteRepoSubscription: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/subscription"]["response"]; + }; + deleteThreadSubscription: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /notifications/threads/{thread_id}/subscription"]["response"]; + }; + getFeeds: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/feeds"]["response"]; + }; + getRepoSubscription: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/subscription"]["response"]; + }; + getThread: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /notifications/threads/{thread_id}"]["response"]; + }; + getThreadSubscriptionForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /notifications/threads/{thread_id}/subscription"]["response"]; + }; + listEventsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/events"]["response"]; + }; + listNotificationsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /notifications"]["response"]; + }; + listOrgEventsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/events/orgs/{org}"]["response"]; + }; + listPublicEvents: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /events"]["response"]; + }; + listPublicEventsForRepoNetwork: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /networks/{owner}/{repo}/events"]["response"]; + }; + listPublicEventsForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/events/public"]["response"]; + }; + listPublicOrgEvents: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/events"]["response"]; + }; + listReceivedEventsForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/received_events"]["response"]; + }; + listReceivedPublicEventsForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/received_events/public"]["response"]; + }; + listRepoEvents: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/events"]["response"]; + }; + listRepoNotificationsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/notifications"]["response"]; + }; + listReposStarredByAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/starred"]["response"]; + }; + listReposStarredByUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/starred"]["response"]; + }; + listReposWatchedByUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/subscriptions"]["response"]; + }; + listStargazersForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/stargazers"]["response"]; + }; + listWatchedReposForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/subscriptions"]["response"]; + }; + listWatchersForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/subscribers"]["response"]; + }; + markNotificationsAsRead: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /notifications"]["response"]; + }; + markRepoNotificationsAsRead: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/notifications"]["response"]; + }; + markThreadAsRead: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /notifications/threads/{thread_id}"]["response"]; + }; + setRepoSubscription: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT 
/repos/{owner}/{repo}/subscription"]["response"]; + }; + setThreadSubscription: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /notifications/threads/{thread_id}/subscription"]["response"]; + }; + starRepoForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/starred/{owner}/{repo}"]["response"]; + }; + unstarRepoForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/starred/{owner}/{repo}"]["response"]; + }; + }; + apps: { + addRepoToInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/installations/{installation_id}/repositories/{repository_id}"]["response"]; + }; + addRepoToInstallationForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/installations/{installation_id}/repositories/{repository_id}"]["response"]; + }; + checkToken: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /applications/{client_id}/token"]["response"]; + }; + createFromManifest: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /app-manifests/{code}/conversions"]["response"]; + }; + createInstallationAccessToken: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /app/installations/{installation_id}/access_tokens"]["response"]; + }; + deleteAuthorization: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /applications/{client_id}/grant"]["response"]; + }; + deleteInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /app/installations/{installation_id}"]["response"]; + }; + deleteToken: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /applications/{client_id}/token"]["response"]; + }; + getAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /app"]["response"]; + }; + getBySlug: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /apps/{app_slug}"]["response"]; + }; + getInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /app/installations/{installation_id}"]["response"]; + }; + getOrgInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/installation"]["response"]; + }; + getRepoInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/installation"]["response"]; + }; + getSubscriptionPlanForAccount: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /marketplace_listing/accounts/{account_id}"]["response"]; + }; + getSubscriptionPlanForAccountStubbed: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /marketplace_listing/stubbed/accounts/{account_id}"]["response"]; + }; + getUserInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/installation"]["response"]; + }; + getWebhookConfigForApp: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /app/hook/config"]["response"]; + }; + getWebhookDelivery: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /app/hook/deliveries/{delivery_id}"]["response"]; + }; + listAccountsForPlan: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /marketplace_listing/plans/{plan_id}/accounts"]["response"]; + }; + listAccountsForPlanStubbed: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/marketplace_listing/stubbed/plans/{plan_id}/accounts"]["response"]; + }; + listInstallationReposForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/installations/{installation_id}/repositories"]["response"]; + }; + listInstallationRequestsForAuthenticatedApp: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /app/installation-requests"]["response"]; + }; + listInstallations: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /app/installations"]["response"]; + }; + listInstallationsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/installations"]["response"]; + }; + listPlans: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /marketplace_listing/plans"]["response"]; + }; + listPlansStubbed: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /marketplace_listing/stubbed/plans"]["response"]; + }; + listReposAccessibleToInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /installation/repositories"]["response"]; + }; + listSubscriptionsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/marketplace_purchases"]["response"]; + }; + listSubscriptionsForAuthenticatedUserStubbed: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/marketplace_purchases/stubbed"]["response"]; + }; + listWebhookDeliveries: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /app/hook/deliveries"]["response"]; + }; + redeliverWebhookDelivery: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /app/hook/deliveries/{delivery_id}/attempts"]["response"]; + }; + removeRepoFromInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/installations/{installation_id}/repositories/{repository_id}"]["response"]; + }; + removeRepoFromInstallationForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/installations/{installation_id}/repositories/{repository_id}"]["response"]; + }; + resetToken: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /applications/{client_id}/token"]["response"]; + }; + revokeInstallationAccessToken: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /installation/token"]["response"]; + }; + scopeToken: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /applications/{client_id}/token/scoped"]["response"]; + }; + suspendInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /app/installations/{installation_id}/suspended"]["response"]; + }; + unsuspendInstallation: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /app/installations/{installation_id}/suspended"]["response"]; + }; + updateWebhookConfigForApp: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /app/hook/config"]["response"]; + }; + }; + billing: { + getGithubActionsBillingOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/settings/billing/actions"]["response"]; + }; + getGithubActionsBillingUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/settings/billing/actions"]["response"]; + }; + getGithubPackagesBillingOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/settings/billing/packages"]["response"]; + }; + 
getGithubPackagesBillingUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/settings/billing/packages"]["response"]; + }; + getSharedStorageBillingOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/settings/billing/shared-storage"]["response"]; + }; + getSharedStorageBillingUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/settings/billing/shared-storage"]["response"]; + }; + }; + checks: { + create: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/check-runs"]["response"]; + }; + createSuite: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/check-suites"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"]["response"]; + }; + getSuite: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"]["response"]; + }; + listAnnotations: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"]["response"]; + }; + listForRef: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"]["response"]; + }; + listForSuite: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"]["response"]; + }; + listSuitesForRef: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"]["response"]; + }; + rerequestRun: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"]["response"]; + }; + rerequestSuite: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"]["response"]; + }; + setSuitesPreferences: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/check-suites/preferences"]["response"]; + }; + update: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"]["response"]; + }; + }; + codeScanning: { + deleteAnalysis: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"]["response"]; + }; + getAlert: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"]["response"]; + }; + getAnalysis: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"]["response"]; + }; + getCodeqlDatabase: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}"]["response"]; + }; + getDefaultSetup: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/default-setup"]["response"]; + }; + getSarif: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"]["response"]; + }; + listAlertInstances: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"]["response"]; + }; + listAlertsForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/code-scanning/alerts"]["response"]; + }; + listAlertsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts"]["response"]; + }; + listAlertsInstances: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"]["response"]; + }; + listCodeqlDatabases: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/codeql/databases"]["response"]; + }; + listRecentAnalyses: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/code-scanning/analyses"]["response"]; + }; + updateAlert: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"]["response"]; + }; + updateDefaultSetup: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/code-scanning/default-setup"]["response"]; + }; + uploadSarif: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/code-scanning/sarifs"]["response"]; + }; + }; + codesOfConduct: { + getAllCodesOfConduct: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /codes_of_conduct"]["response"]; + }; + getConductCode: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /codes_of_conduct/{key}"]["response"]; + }; + }; + codespaces: { + addRepositoryForSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + addSelectedRepoToOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + codespaceMachinesForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/{codespace_name}/machines"]["response"]; + }; + createForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/codespaces"]["response"]; + }; + createOrUpdateOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/codespaces/secrets/{secret_name}"]["response"]; + }; + createOrUpdateRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"]["response"]; + }; + createOrUpdateSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/codespaces/secrets/{secret_name}"]["response"]; + }; + createWithPrForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"]["response"]; + }; + createWithRepoForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/codespaces"]["response"]; + }; + deleteCodespacesBillingUsers: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/codespaces/billing/selected_users"]["response"]; + }; + deleteForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/codespaces/{codespace_name}"]["response"]; + }; + 
deleteFromOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"]["response"]; + }; + deleteOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"]["response"]; + }; + deleteRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"]["response"]; + }; + deleteSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/codespaces/secrets/{secret_name}"]["response"]; + }; + exportForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/codespaces/{codespace_name}/exports"]["response"]; + }; + getCodespacesForUserInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/members/{username}/codespaces"]["response"]; + }; + getExportDetailsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/{codespace_name}/exports/{export_id}"]["response"]; + }; + getForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/{codespace_name}"]["response"]; + }; + getOrgPublicKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/codespaces/secrets/public-key"]["response"]; + }; + getOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/codespaces/secrets/{secret_name}"]["response"]; + }; + getPublicKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/secrets/public-key"]["response"]; + }; + getRepoPublicKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"]["response"]; + }; + getRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"]["response"]; + }; + getSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/secrets/{secret_name}"]["response"]; + }; + listDevcontainersInRepositoryForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/devcontainers"]["response"]; + }; + listForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces"]["response"]; + }; + listInOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/codespaces"]["response"]; + }; + listInRepositoryForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces"]["response"]; + }; + listOrgSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/codespaces/secrets"]["response"]; + }; + listRepoSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/secrets"]["response"]; + }; + listRepositoriesForSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/secrets/{secret_name}/repositories"]["response"]; + }; + listSecretsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/codespaces/secrets"]["response"]; + }; + listSelectedReposForOrgSecret: { + 
parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories"]["response"]; + }; + preFlightWithRepoForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/new"]["response"]; + }; + publishForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/codespaces/{codespace_name}/publish"]["response"]; + }; + removeRepositoryForSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + removeSelectedRepoFromOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + repoMachinesForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codespaces/machines"]["response"]; + }; + setCodespacesBilling: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/codespaces/billing"]["response"]; + }; + setCodespacesBillingUsers: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/codespaces/billing/selected_users"]["response"]; + }; + setRepositoriesForSecretForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/codespaces/secrets/{secret_name}/repositories"]["response"]; + }; + setSelectedReposForOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories"]["response"]; + }; + startForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/codespaces/{codespace_name}/start"]["response"]; + }; + stopForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/codespaces/{codespace_name}/stop"]["response"]; + }; + stopInOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"]["response"]; + }; + updateForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /user/codespaces/{codespace_name}"]["response"]; + }; + }; + dependabot: { + addSelectedRepoToOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + createOrUpdateOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/dependabot/secrets/{secret_name}"]["response"]; + }; + createOrUpdateRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"]["response"]; + }; + deleteOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"]["response"]; + }; + deleteRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"]["response"]; + }; + getAlert: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"]["response"]; + }; + getOrgPublicKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/dependabot/secrets/public-key"]["response"]; + }; + 
getOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/dependabot/secrets/{secret_name}"]["response"]; + }; + getRepoPublicKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"]["response"]; + }; + getRepoSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"]["response"]; + }; + listAlertsForEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /enterprises/{enterprise}/dependabot/alerts"]["response"]; + }; + listAlertsForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/dependabot/alerts"]["response"]; + }; + listAlertsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/dependabot/alerts"]["response"]; + }; + listOrgSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/dependabot/secrets"]["response"]; + }; + listRepoSecrets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/dependabot/secrets"]["response"]; + }; + listSelectedReposForOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]["response"]; + }; + removeSelectedRepoFromOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"]["response"]; + }; + setSelectedReposForOrgSecret: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]["response"]; + }; + updateAlert: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"]["response"]; + }; + }; + dependencyGraph: { + createRepositorySnapshot: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/dependency-graph/snapshots"]["response"]; + }; + diffRange: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"]["response"]; + }; + exportSbom: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/dependency-graph/sbom"]["response"]; + }; + }; + emojis: { + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /emojis"]["response"]; + }; + }; + gists: { + checkIsStarred: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/{gist_id}/star"]["response"]; + }; + create: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /gists"]["response"]; + }; + createComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /gists/{gist_id}/comments"]["response"]; + }; + delete: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /gists/{gist_id}"]["response"]; + }; + deleteComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /gists/{gist_id}/comments/{comment_id}"]["response"]; + }; + fork: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /gists/{gist_id}/forks"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/{gist_id}"]["response"]; + }; + getComment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/gists/{gist_id}/comments/{comment_id}"]["response"]; + }; + getRevision: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/{gist_id}/{sha}"]["response"]; + }; + list: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists"]["response"]; + }; + listComments: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/{gist_id}/comments"]["response"]; + }; + listCommits: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/{gist_id}/commits"]["response"]; + }; + listForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/gists"]["response"]; + }; + listForks: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/{gist_id}/forks"]["response"]; + }; + listPublic: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/public"]["response"]; + }; + listStarred: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gists/starred"]["response"]; + }; + star: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /gists/{gist_id}/star"]["response"]; + }; + unstar: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /gists/{gist_id}/star"]["response"]; + }; + update: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /gists/{gist_id}"]["response"]; + }; + updateComment: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /gists/{gist_id}/comments/{comment_id}"]["response"]; + }; + }; + git: { + createBlob: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/git/blobs"]["response"]; + }; + createCommit: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/git/commits"]["response"]; + }; + createRef: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/git/refs"]["response"]; + }; + createTag: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/git/tags"]["response"]; + }; + createTree: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/git/trees"]["response"]; + }; + deleteRef: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/git/refs/{ref}"]["response"]; + }; + getBlob: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"]["response"]; + }; + getCommit: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"]["response"]; + }; + getRef: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/git/ref/{ref}"]["response"]; + }; + getTag: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"]["response"]; + }; + getTree: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"]["response"]; + }; + listMatchingRefs: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"]["response"]; + }; + updateRef: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]["response"]; + }; + }; + gitignore: { + getAllTemplates: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /gitignore/templates"]["response"]; + }; + getTemplate: { 
+ parameters: RequestParameters & Omit; + response: Endpoints["GET /gitignore/templates/{name}"]["response"]; + }; + }; + interactions: { + getRestrictionsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/interaction-limits"]["response"]; + }; + getRestrictionsForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/interaction-limits"]["response"]; + }; + getRestrictionsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/interaction-limits"]["response"]; + }; + getRestrictionsForYourPublicRepos: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/interaction-limits"]["response"]; + }; + removeRestrictionsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/interaction-limits"]["response"]; + }; + removeRestrictionsForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/interaction-limits"]["response"]; + }; + removeRestrictionsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/interaction-limits"]["response"]; + }; + removeRestrictionsForYourPublicRepos: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/interaction-limits"]["response"]; + }; + setRestrictionsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/interaction-limits"]["response"]; + }; + setRestrictionsForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/interaction-limits"]["response"]; + }; + setRestrictionsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/interaction-limits"]["response"]; + }; + setRestrictionsForYourPublicRepos: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/interaction-limits"]["response"]; + }; + }; + issues: { + addAssignees: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"]["response"]; + }; + addLabels: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; + }; + checkUserCanBeAssigned: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/assignees/{assignee}"]["response"]; + }; + checkUserCanBeAssignedToIssue: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}"]["response"]; + }; + create: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/issues"]["response"]; + }; + createComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"]["response"]; + }; + createLabel: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/labels"]["response"]; + }; + createMilestone: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/milestones"]["response"]; + }; + deleteComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"]["response"]; + }; + deleteLabel: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/labels/{name}"]["response"]; + }; + deleteMilestone: { + 
parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}"]["response"]; + }; + getComment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"]["response"]; + }; + getEvent: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/events/{event_id}"]["response"]; + }; + getLabel: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/labels/{name}"]["response"]; + }; + getMilestone: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/milestones/{milestone_number}"]["response"]; + }; + list: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /issues"]["response"]; + }; + listAssignees: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/assignees"]["response"]; + }; + listComments: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"]["response"]; + }; + listCommentsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/comments"]["response"]; + }; + listEvents: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/events"]["response"]; + }; + listEventsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/events"]["response"]; + }; + listEventsForTimeline: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"]["response"]; + }; + listForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/issues"]["response"]; + }; + listForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/issues"]["response"]; + }; + listForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues"]["response"]; + }; + listLabelsForMilestone: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"]["response"]; + }; + listLabelsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/labels"]["response"]; + }; + listLabelsOnIssue: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; + }; + listMilestones: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/milestones"]["response"]; + }; + lock: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"]["response"]; + }; + removeAllLabels: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; + }; + removeAssignees: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"]["response"]; + }; + removeLabel: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"]["response"]; + }; + setLabels: { + 
parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"]["response"]; + }; + unlock: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"]["response"]; + }; + update: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/issues/{issue_number}"]["response"]; + }; + updateComment: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"]["response"]; + }; + updateLabel: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/labels/{name}"]["response"]; + }; + updateMilestone: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"]["response"]; + }; + }; + licenses: { + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /licenses/{license}"]["response"]; + }; + getAllCommonlyUsed: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /licenses"]["response"]; + }; + getForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/license"]["response"]; + }; + }; + markdown: { + render: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /markdown"]["response"]; + }; + renderRaw: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /markdown/raw"]["response"]; + }; + }; + meta: { + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /meta"]["response"]; + }; + getAllVersions: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /versions"]["response"]; + }; + getOctocat: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /octocat"]["response"]; + }; + getZen: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /zen"]["response"]; + }; + root: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /"]["response"]; + }; + }; + migrations: { + cancelImport: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/import"]["response"]; + }; + deleteArchiveForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/migrations/{migration_id}/archive"]["response"]; + }; + deleteArchiveForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/migrations/{migration_id}/archive"]["response"]; + }; + downloadArchiveForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/migrations/{migration_id}/archive"]["response"]; + }; + getArchiveForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/migrations/{migration_id}/archive"]["response"]; + }; + getCommitAuthors: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/import/authors"]["response"]; + }; + getImportStatus: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/import"]["response"]; + }; + getLargeFiles: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/import/large_files"]["response"]; + }; + getStatusForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/migrations/{migration_id}"]["response"]; + }; + getStatusForOrg: { + parameters: RequestParameters & Omit; + response: 
Endpoints["GET /orgs/{org}/migrations/{migration_id}"]["response"]; + }; + listForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/migrations"]["response"]; + }; + listForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/migrations"]["response"]; + }; + listReposForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/migrations/{migration_id}/repositories"]["response"]; + }; + listReposForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/migrations/{migration_id}/repositories"]["response"]; + }; + listReposForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/migrations/{migration_id}/repositories"]["response"]; + }; + mapCommitAuthor: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"]["response"]; + }; + setLfsPreference: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/import/lfs"]["response"]; + }; + startForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/migrations"]["response"]; + }; + startForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/migrations"]["response"]; + }; + startImport: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/import"]["response"]; + }; + unlockRepoForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"]["response"]; + }; + unlockRepoForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"]["response"]; + }; + updateImport: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/import"]["response"]; + }; + }; + orgs: { + addSecurityManagerTeam: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/security-managers/teams/{team_slug}"]["response"]; + }; + blockUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/blocks/{username}"]["response"]; + }; + cancelInvitation: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/invitations/{invitation_id}"]["response"]; + }; + checkBlockedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/blocks/{username}"]["response"]; + }; + checkMembershipForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/members/{username}"]["response"]; + }; + checkPublicMembershipForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/public_members/{username}"]["response"]; + }; + convertMemberToOutsideCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/outside_collaborators/{username}"]["response"]; + }; + createInvitation: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/invitations"]["response"]; + }; + createWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/hooks"]["response"]; + }; + delete: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}"]["response"]; + }; + deleteWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE 
/orgs/{org}/hooks/{hook_id}"]["response"]; + }; + enableOrDisableSecurityProductOnAllOrgRepos: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/{security_product}/{enablement}"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}"]["response"]; + }; + getMembershipForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/memberships/orgs/{org}"]["response"]; + }; + getMembershipForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/memberships/{username}"]["response"]; + }; + getWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/hooks/{hook_id}"]["response"]; + }; + getWebhookConfigForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/hooks/{hook_id}/config"]["response"]; + }; + getWebhookDelivery: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"]["response"]; + }; + list: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /organizations"]["response"]; + }; + listAppInstallations: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/installations"]["response"]; + }; + listBlockedUsers: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/blocks"]["response"]; + }; + listFailedInvitations: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/failed_invitations"]["response"]; + }; + listForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/orgs"]["response"]; + }; + listForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/orgs"]["response"]; + }; + listInvitationTeams: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/invitations/{invitation_id}/teams"]["response"]; + }; + listMembers: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/members"]["response"]; + }; + listMembershipsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/memberships/orgs"]["response"]; + }; + listOutsideCollaborators: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/outside_collaborators"]["response"]; + }; + listPatGrantRepositories: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories"]["response"]; + }; + listPatGrantRequestRepositories: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories"]["response"]; + }; + listPatGrantRequests: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /organizations/{org}/personal-access-token-requests"]["response"]; + }; + listPatGrants: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /organizations/{org}/personal-access-tokens"]["response"]; + }; + listPendingInvitations: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/invitations"]["response"]; + }; + listPublicMembers: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/public_members"]["response"]; + }; + listSecurityManagerTeams: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/orgs/{org}/security-managers"]["response"]; + }; + listWebhookDeliveries: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/hooks/{hook_id}/deliveries"]["response"]; + }; + listWebhooks: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/hooks"]["response"]; + }; + pingWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/hooks/{hook_id}/pings"]["response"]; + }; + redeliverWebhookDelivery: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"]["response"]; + }; + removeMember: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/members/{username}"]["response"]; + }; + removeMembershipForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/memberships/{username}"]["response"]; + }; + removeOutsideCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/outside_collaborators/{username}"]["response"]; + }; + removePublicMembershipForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/public_members/{username}"]["response"]; + }; + removeSecurityManagerTeam: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/security-managers/teams/{team_slug}"]["response"]; + }; + reviewPatGrantRequest: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /organizations/{org}/personal-access-token-requests/{pat_request_id}"]["response"]; + }; + reviewPatGrantRequestsInBulk: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /organizations/{org}/personal-access-token-requests"]["response"]; + }; + setMembershipForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/memberships/{username}"]["response"]; + }; + setPublicMembershipForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/public_members/{username}"]["response"]; + }; + unblockUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/blocks/{username}"]["response"]; + }; + update: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}"]["response"]; + }; + updateMembershipForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /user/memberships/orgs/{org}"]["response"]; + }; + updatePatAccess: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /organizations/{org}/personal-access-tokens/{pat_id}"]["response"]; + }; + updatePatAccesses: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /organizations/{org}/personal-access-tokens"]["response"]; + }; + updateWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}/hooks/{hook_id}"]["response"]; + }; + updateWebhookConfigForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}/hooks/{hook_id}/config"]["response"]; + }; + }; + packages: { + deletePackageForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/packages/{package_type}/{package_name}"]["response"]; + }; + deletePackageForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/packages/{package_type}/{package_name}"]["response"]; + }; + deletePackageForUser: { + parameters: 
RequestParameters & Omit; + response: Endpoints["DELETE /users/{username}/packages/{package_type}/{package_name}"]["response"]; + }; + deletePackageVersionForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; + }; + deletePackageVersionForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; + }; + deletePackageVersionForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; + }; + getAllPackageVersionsForAPackageOwnedByAnOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"]["response"]; + }; + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/packages/{package_type}/{package_name}/versions"]["response"]; + }; + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/packages/{package_type}/{package_name}/versions"]["response"]; + }; + getAllPackageVersionsForPackageOwnedByOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"]["response"]; + }; + getAllPackageVersionsForPackageOwnedByUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/packages/{package_type}/{package_name}/versions"]["response"]; + }; + getPackageForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/packages/{package_type}/{package_name}"]["response"]; + }; + getPackageForOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}"]["response"]; + }; + getPackageForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/packages/{package_type}/{package_name}"]["response"]; + }; + getPackageVersionForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; + }; + getPackageVersionForOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; + }; + getPackageVersionForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"]["response"]; + }; + listDockerMigrationConflictingPackagesForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/docker/conflicts"]["response"]; + }; + listDockerMigrationConflictingPackagesForOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/docker/conflicts"]["response"]; + }; + listDockerMigrationConflictingPackagesForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/docker/conflicts"]["response"]; + }; + listPackagesForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/packages"]["response"]; + }; + 
listPackagesForOrganization: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/packages"]["response"]; + }; + listPackagesForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/packages"]["response"]; + }; + restorePackageForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/packages/{package_type}/{package_name}/restore{?token}"]["response"]; + }; + restorePackageForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"]["response"]; + }; + restorePackageForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"]["response"]; + }; + restorePackageVersionForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]["response"]; + }; + restorePackageVersionForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]["response"]; + }; + restorePackageVersionForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]["response"]; + }; + }; + projects: { + addCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /projects/{project_id}/collaborators/{username}"]["response"]; + }; + createCard: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /projects/columns/{column_id}/cards"]["response"]; + }; + createColumn: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /projects/{project_id}/columns"]["response"]; + }; + createForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/projects"]["response"]; + }; + createForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/projects"]["response"]; + }; + createForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/projects"]["response"]; + }; + delete: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /projects/{project_id}"]["response"]; + }; + deleteCard: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /projects/columns/cards/{card_id}"]["response"]; + }; + deleteColumn: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /projects/columns/{column_id}"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /projects/{project_id}"]["response"]; + }; + getCard: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /projects/columns/cards/{card_id}"]["response"]; + }; + getColumn: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /projects/columns/{column_id}"]["response"]; + }; + getPermissionForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /projects/{project_id}/collaborators/{username}/permission"]["response"]; + }; + listCards: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /projects/columns/{column_id}/cards"]["response"]; + }; + listCollaborators: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/projects/{project_id}/collaborators"]["response"]; + }; + listColumns: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /projects/{project_id}/columns"]["response"]; + }; + listForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/projects"]["response"]; + }; + listForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/projects"]["response"]; + }; + listForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/projects"]["response"]; + }; + moveCard: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /projects/columns/cards/{card_id}/moves"]["response"]; + }; + moveColumn: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /projects/columns/{column_id}/moves"]["response"]; + }; + removeCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /projects/{project_id}/collaborators/{username}"]["response"]; + }; + update: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /projects/{project_id}"]["response"]; + }; + updateCard: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /projects/columns/cards/{card_id}"]["response"]; + }; + updateColumn: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /projects/columns/{column_id}"]["response"]; + }; + }; + pulls: { + checkIfMerged: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"]["response"]; + }; + create: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls"]["response"]; + }; + createReplyForReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"]["response"]; + }; + createReview: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"]["response"]; + }; + createReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"]["response"]; + }; + deletePendingReview: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"]["response"]; + }; + deleteReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"]["response"]; + }; + dismissReview: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}"]["response"]; + }; + getReview: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"]["response"]; + }; + getReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"]["response"]; + }; + list: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls"]["response"]; + }; + listCommentsForReview: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"]["response"]; + }; + listCommits: { + 
parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"]["response"]; + }; + listFiles: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"]["response"]; + }; + listRequestedReviewers: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"]["response"]; + }; + listReviewComments: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"]["response"]; + }; + listReviewCommentsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments"]["response"]; + }; + listReviews: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"]["response"]; + }; + merge: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"]["response"]; + }; + removeRequestedReviewers: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"]["response"]; + }; + requestReviewers: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"]["response"]; + }; + submitReview: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"]["response"]; + }; + update: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"]["response"]; + }; + updateBranch: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"]["response"]; + }; + updateReview: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"]["response"]; + }; + updateReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"]["response"]; + }; + }; + rateLimit: { + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /rate_limit"]["response"]; + }; + }; + reactions: { + createForCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"]["response"]; + }; + createForIssue: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"]["response"]; + }; + createForIssueComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"]["response"]; + }; + createForPullRequestReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"]["response"]; + }; + createForRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"]["response"]; + }; + createForTeamDiscussionCommentInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["response"]; + }; + createForTeamDiscussionInOrg: { + parameters: RequestParameters & Omit; + 
response: Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]["response"]; + }; + deleteForCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"]["response"]; + }; + deleteForIssue: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"]["response"]; + }; + deleteForIssueComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"]["response"]; + }; + deleteForPullRequestComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"]["response"]; + }; + deleteForRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"]["response"]; + }; + deleteForTeamDiscussion: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"]["response"]; + }; + deleteForTeamDiscussionComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"]["response"]; + }; + listForCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"]["response"]; + }; + listForIssue: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"]["response"]; + }; + listForIssueComment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"]["response"]; + }; + listForPullRequestReviewComment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"]["response"]; + }; + listForRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"]["response"]; + }; + listForTeamDiscussionCommentInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"]["response"]; + }; + listForTeamDiscussionInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]["response"]; + }; + }; + repos: { + acceptInvitation: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /user/repository_invitations/{invitation_id}"]["response"]; + }; + acceptInvitationForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /user/repository_invitations/{invitation_id}"]["response"]; + }; + addAppAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"]["response"]; + }; + addCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/collaborators/{username}"]["response"]; + }; + addStatusCheckContexts: { + parameters: RequestParameters & Omit; + response: Endpoints["POST 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"]["response"]; + }; + addTeamAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"]["response"]; + }; + addUserAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"]["response"]; + }; + checkCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/collaborators/{username}"]["response"]; + }; + checkVulnerabilityAlerts: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/vulnerability-alerts"]["response"]; + }; + codeownersErrors: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/codeowners/errors"]["response"]; + }; + compareCommits: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/compare/{base}...{head}"]["response"]; + }; + compareCommitsWithBasehead: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/compare/{basehead}"]["response"]; + }; + createAutolink: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/autolinks"]["response"]; + }; + createCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"]["response"]; + }; + createCommitSignatureProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"]["response"]; + }; + createCommitStatus: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/statuses/{sha}"]["response"]; + }; + createDeployKey: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/keys"]["response"]; + }; + createDeployment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/deployments"]["response"]; + }; + createDeploymentBranchPolicy: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"]["response"]; + }; + createDeploymentProtectionRule: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"]["response"]; + }; + createDeploymentStatus: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"]["response"]; + }; + createDispatchEvent: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/dispatches"]["response"]; + }; + createForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/repos"]["response"]; + }; + createFork: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/forks"]["response"]; + }; + createInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/repos"]["response"]; + }; + createOrUpdateEnvironment: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/environments/{environment_name}"]["response"]; + }; + createOrUpdateFileContents: { + parameters: RequestParameters & Omit; + 
response: Endpoints["PUT /repos/{owner}/{repo}/contents/{path}"]["response"]; + }; + createOrgRuleset: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /orgs/{org}/rulesets"]["response"]; + }; + createPagesDeployment: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pages/deployment"]["response"]; + }; + createPagesSite: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pages"]["response"]; + }; + createRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/releases"]["response"]; + }; + createRepoRuleset: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/rulesets"]["response"]; + }; + createTagProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/tags/protection"]["response"]; + }; + createUsingTemplate: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{template_owner}/{template_repo}/generate"]["response"]; + }; + createWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/hooks"]["response"]; + }; + declineInvitation: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/repository_invitations/{invitation_id}"]["response"]; + }; + declineInvitationForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/repository_invitations/{invitation_id}"]["response"]; + }; + delete: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}"]["response"]; + }; + deleteAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"]["response"]; + }; + deleteAdminBranchProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"]["response"]; + }; + deleteAnEnvironment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/environments/{environment_name}"]["response"]; + }; + deleteAutolink: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"]["response"]; + }; + deleteBranchProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"]["response"]; + }; + deleteCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/comments/{comment_id}"]["response"]; + }; + deleteCommitSignatureProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"]["response"]; + }; + deleteDeployKey: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/keys/{key_id}"]["response"]; + }; + deleteDeployment: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"]["response"]; + }; + deleteDeploymentBranchPolicy: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"]["response"]; + }; + deleteFile: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE 
/repos/{owner}/{repo}/contents/{path}"]["response"]; + }; + deleteInvitation: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"]["response"]; + }; + deleteOrgRuleset: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/rulesets/{ruleset_id}"]["response"]; + }; + deletePagesSite: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/pages"]["response"]; + }; + deletePullRequestReviewProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"]["response"]; + }; + deleteRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/releases/{release_id}"]["response"]; + }; + deleteReleaseAsset: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"]["response"]; + }; + deleteRepoRuleset: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"]["response"]; + }; + deleteTagProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"]["response"]; + }; + deleteWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"]["response"]; + }; + disableAutomatedSecurityFixes: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/automated-security-fixes"]["response"]; + }; + disableDeploymentProtectionRule: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"]["response"]; + }; + disableLfsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/lfs"]["response"]; + }; + disableVulnerabilityAlerts: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/vulnerability-alerts"]["response"]; + }; + downloadArchive: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/zipball/{ref}"]["response"]; + }; + downloadTarballArchive: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/tarball/{ref}"]["response"]; + }; + downloadZipballArchive: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/zipball/{ref}"]["response"]; + }; + enableAutomatedSecurityFixes: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/automated-security-fixes"]["response"]; + }; + enableLfsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/lfs"]["response"]; + }; + enableVulnerabilityAlerts: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/vulnerability-alerts"]["response"]; + }; + generateReleaseNotes: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/releases/generate-notes"]["response"]; + }; + get: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}"]["response"]; + }; + getAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions"]["response"]; + }; + getAdminBranchProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"]["response"]; + }; + getAllDeploymentProtectionRules: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"]["response"]; + }; + getAllEnvironments: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/environments"]["response"]; + }; + getAllStatusCheckContexts: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"]["response"]; + }; + getAllTopics: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/topics"]["response"]; + }; + getAppsWithAccessToProtectedBranch: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"]["response"]; + }; + getAutolink: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"]["response"]; + }; + getBranch: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}"]["response"]; + }; + getBranchProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection"]["response"]; + }; + getBranchRules: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/rules/branches/{branch}"]["response"]; + }; + getClones: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/traffic/clones"]["response"]; + }; + getCodeFrequencyStats: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/stats/code_frequency"]["response"]; + }; + getCollaboratorPermissionLevel: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/collaborators/{username}/permission"]["response"]; + }; + getCombinedStatusForRef: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/status"]["response"]; + }; + getCommit: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}"]["response"]; + }; + getCommitActivityStats: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/stats/commit_activity"]["response"]; + }; + getCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/comments/{comment_id}"]["response"]; + }; + getCommitSignatureProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"]["response"]; + }; + getCommunityProfileMetrics: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/community/profile"]["response"]; + }; + getContent: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/contents/{path}"]["response"]; + }; + getContributorsStats: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/stats/contributors"]["response"]; + }; + getCustomDeploymentProtectionRule: { + parameters: 
RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"]["response"]; + }; + getDeployKey: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/keys/{key_id}"]["response"]; + }; + getDeployment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/deployments/{deployment_id}"]["response"]; + }; + getDeploymentBranchPolicy: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"]["response"]; + }; + getDeploymentStatus: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"]["response"]; + }; + getEnvironment: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}"]["response"]; + }; + getLatestPagesBuild: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pages/builds/latest"]["response"]; + }; + getLatestRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases/latest"]["response"]; + }; + getOrgRuleset: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/rulesets/{ruleset_id}"]["response"]; + }; + getOrgRulesets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/rulesets"]["response"]; + }; + getPages: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pages"]["response"]; + }; + getPagesBuild: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pages/builds/{build_id}"]["response"]; + }; + getPagesHealthCheck: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pages/health"]["response"]; + }; + getParticipationStats: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/stats/participation"]["response"]; + }; + getPullRequestReviewProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"]["response"]; + }; + getPunchCardStats: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/stats/punch_card"]["response"]; + }; + getReadme: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/readme"]["response"]; + }; + getReadmeInDirectory: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/readme/{dir}"]["response"]; + }; + getRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}"]["response"]; + }; + getReleaseAsset: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"]["response"]; + }; + getReleaseByTag: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases/tags/{tag}"]["response"]; + }; + getRepoRuleset: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"]["response"]; + }; + getRepoRulesets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET 
/repos/{owner}/{repo}/rulesets"]["response"]; + }; + getStatusChecksProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"]["response"]; + }; + getTeamsWithAccessToProtectedBranch: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"]["response"]; + }; + getTopPaths: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/traffic/popular/paths"]["response"]; + }; + getTopReferrers: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/traffic/popular/referrers"]["response"]; + }; + getUsersWithAccessToProtectedBranch: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"]["response"]; + }; + getViews: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/traffic/views"]["response"]; + }; + getWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}"]["response"]; + }; + getWebhookConfigForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"]["response"]; + }; + getWebhookDelivery: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"]["response"]; + }; + listAutolinks: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/autolinks"]["response"]; + }; + listBranches: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/branches"]["response"]; + }; + listBranchesForHeadCommit: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"]["response"]; + }; + listCollaborators: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/collaborators"]["response"]; + }; + listCommentsForCommit: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"]["response"]; + }; + listCommitCommentsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/comments"]["response"]; + }; + listCommitStatusesForRef: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{ref}/statuses"]["response"]; + }; + listCommits: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits"]["response"]; + }; + listContributors: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/contributors"]["response"]; + }; + listCustomDeploymentRuleIntegrations: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps"]["response"]; + }; + listDeployKeys: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/keys"]["response"]; + }; + listDeploymentBranchPolicies: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"]["response"]; + }; + listDeploymentStatuses: { + parameters: RequestParameters & Omit; + response: 
Endpoints["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"]["response"]; + }; + listDeployments: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/deployments"]["response"]; + }; + listForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/repos"]["response"]; + }; + listForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/repos"]["response"]; + }; + listForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/repos"]["response"]; + }; + listForks: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/forks"]["response"]; + }; + listInvitations: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/invitations"]["response"]; + }; + listInvitationsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/repository_invitations"]["response"]; + }; + listLanguages: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/languages"]["response"]; + }; + listPagesBuilds: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/pages/builds"]["response"]; + }; + listPublic: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repositories"]["response"]; + }; + listPullRequestsAssociatedWithCommit: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"]["response"]; + }; + listReleaseAssets: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases/{release_id}/assets"]["response"]; + }; + listReleases: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/releases"]["response"]; + }; + listTagProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/tags/protection"]["response"]; + }; + listTags: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/tags"]["response"]; + }; + listTeams: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/teams"]["response"]; + }; + listWebhookDeliveries: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"]["response"]; + }; + listWebhooks: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/hooks"]["response"]; + }; + merge: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/merges"]["response"]; + }; + mergeUpstream: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/merge-upstream"]["response"]; + }; + pingWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"]["response"]; + }; + redeliverWebhookDelivery: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"]["response"]; + }; + removeAppAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"]["response"]; + }; + removeCollaborator: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE 
/repos/{owner}/{repo}/collaborators/{username}"]["response"]; + }; + removeStatusCheckContexts: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"]["response"]; + }; + removeStatusCheckProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"]["response"]; + }; + removeTeamAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"]["response"]; + }; + removeUserAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"]["response"]; + }; + renameBranch: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/rename"]["response"]; + }; + replaceAllTopics: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/topics"]["response"]; + }; + requestPagesBuild: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/pages/builds"]["response"]; + }; + setAdminBranchProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"]["response"]; + }; + setAppAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"]["response"]; + }; + setStatusCheckContexts: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"]["response"]; + }; + setTeamAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"]["response"]; + }; + setUserAccessRestrictions: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"]["response"]; + }; + testPushWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"]["response"]; + }; + transfer: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /repos/{owner}/{repo}/transfer"]["response"]; + }; + update: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}"]["response"]; + }; + updateBranchProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/branches/{branch}/protection"]["response"]; + }; + updateCommitComment: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/comments/{comment_id}"]["response"]; + }; + updateDeploymentBranchPolicy: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"]["response"]; + }; + updateInformationAboutPagesSite: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/pages"]["response"]; + }; + updateInvitation: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"]["response"]; + }; + updateOrgRuleset: { + parameters: 
RequestParameters & Omit; + response: Endpoints["PUT /orgs/{org}/rulesets/{ruleset_id}"]["response"]; + }; + updatePullRequestReviewProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"]["response"]; + }; + updateRelease: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/releases/{release_id}"]["response"]; + }; + updateReleaseAsset: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"]["response"]; + }; + updateRepoRuleset: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"]["response"]; + }; + updateStatusCheckPotection: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"]["response"]; + }; + updateStatusCheckProtection: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"]["response"]; + }; + updateWebhook: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"]["response"]; + }; + updateWebhookConfigForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"]["response"]; + }; + uploadReleaseAsset: { + parameters: RequestParameters & Omit; + response: Endpoints["POST {origin}/repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}"]["response"]; + }; + }; + search: { + code: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/code"]["response"]; + }; + commits: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/commits"]["response"]; + }; + issuesAndPullRequests: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/issues"]["response"]; + }; + labels: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/labels"]["response"]; + }; + repos: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/repositories"]["response"]; + }; + topics: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/topics"]["response"]; + }; + users: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /search/users"]["response"]; + }; + }; + secretScanning: { + getAlert: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]["response"]; + }; + listAlertsForEnterprise: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /enterprises/{enterprise}/secret-scanning/alerts"]["response"]; + }; + listAlertsForOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/secret-scanning/alerts"]["response"]; + }; + listAlertsForRepo: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts"]["response"]; + }; + listLocationsForAlert: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"]["response"]; + }; + updateAlert: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]["response"]; + }; + }; + securityAdvisories: { + 
createPrivateVulnerabilityReport: { + parameters: RequestParameters & Omit<Endpoints["POST /repos/{owner}/{repo}/security-advisories/reports"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["POST /repos/{owner}/{repo}/security-advisories/reports"]["response"]; + }; + createRepositoryAdvisory: { + parameters: RequestParameters & Omit<Endpoints["POST /repos/{owner}/{repo}/security-advisories"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["POST /repos/{owner}/{repo}/security-advisories"]["response"]; + }; + getRepositoryAdvisory: { + parameters: RequestParameters & Omit<Endpoints["GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}"]["response"]; + }; + listRepositoryAdvisories: { + parameters: RequestParameters & Omit<Endpoints["GET /repos/{owner}/{repo}/security-advisories"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["GET /repos/{owner}/{repo}/security-advisories"]["response"]; + }; + updateRepositoryAdvisory: { + parameters: RequestParameters & Omit<Endpoints["PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}"]["response"]; + }; + }; + teams: { + addOrUpdateMembershipForUserInOrg: { + parameters: RequestParameters & Omit<Endpoints["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"]["response"]; + }; + addOrUpdateProjectPermissionsInOrg: { + parameters: RequestParameters & Omit<Endpoints["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"]["response"]; + }; + addOrUpdateRepoPermissionsInOrg: { + parameters: RequestParameters & Omit<Endpoints["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"]["response"]; + }; + checkPermissionsForProjectInOrg: { + parameters: RequestParameters & Omit<Endpoints["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"]["response"]; + }; + checkPermissionsForRepoInOrg: { + parameters: RequestParameters & Omit<Endpoints["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"]["response"]; + }; + create: { + parameters: RequestParameters & Omit<Endpoints["POST /orgs/{org}/teams"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["POST /orgs/{org}/teams"]["response"]; + }; + createDiscussionCommentInOrg: { + parameters: RequestParameters & Omit<Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"]["response"]; + }; + createDiscussionInOrg: { + parameters: RequestParameters & Omit<Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["POST /orgs/{org}/teams/{team_slug}/discussions"]["response"]; + }; + deleteDiscussionCommentInOrg: { + parameters: RequestParameters & Omit<Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"]["response"]; + }; + deleteDiscussionInOrg: { + parameters: RequestParameters & Omit<Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"]["response"]; + }; + deleteInOrg: { + parameters: RequestParameters & Omit<Endpoints["DELETE /orgs/{org}/teams/{team_slug}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}"]["response"]; + }; + getByName: { + parameters: RequestParameters & Omit<Endpoints["GET /orgs/{org}/teams/{team_slug}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}"]["response"]; + }; + getDiscussionCommentInOrg: { + parameters: RequestParameters & Omit<Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"]["response"]; + }; + getDiscussionInOrg: { + parameters: RequestParameters & Omit<Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"]["response"]; + }; + getMembershipForUserInOrg: { + parameters: RequestParameters & Omit<Endpoints["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"]["response"]; + }; + list: { + parameters: RequestParameters & Omit<Endpoints["GET /orgs/{org}/teams"]["parameters"], "baseUrl" | "headers" | "mediaType">; + response: Endpoints["GET /orgs/{org}/teams"]["response"]; + }; + listChildInOrg: { + parameters:
RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/teams"]["response"]; + }; + listDiscussionCommentsInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"]["response"]; + }; + listDiscussionsInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/discussions"]["response"]; + }; + listForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/teams"]["response"]; + }; + listMembersInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/members"]["response"]; + }; + listPendingInvitationsInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/invitations"]["response"]; + }; + listProjectsInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/projects"]["response"]; + }; + listReposInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /orgs/{org}/teams/{team_slug}/repos"]["response"]; + }; + removeMembershipForUserInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"]["response"]; + }; + removeProjectInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"]["response"]; + }; + removeRepoInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"]["response"]; + }; + updateDiscussionCommentInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"]["response"]; + }; + updateDiscussionInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"]["response"]; + }; + updateInOrg: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /orgs/{org}/teams/{team_slug}"]["response"]; + }; + }; + users: { + addEmailForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/emails"]["response"]; + }; + addEmailForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/emails"]["response"]; + }; + addSocialAccountForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/social_accounts"]["response"]; + }; + block: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/blocks/{username}"]["response"]; + }; + checkBlocked: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/blocks/{username}"]["response"]; + }; + checkFollowingForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/following/{target_user}"]["response"]; + }; + checkPersonIsFollowedByAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/following/{username}"]["response"]; + }; + createGpgKeyForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/gpg_keys"]["response"]; + }; + createGpgKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/gpg_keys"]["response"]; + }; + createPublicSshKeyForAuthenticated: { + parameters: 
RequestParameters & Omit; + response: Endpoints["POST /user/keys"]["response"]; + }; + createPublicSshKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/keys"]["response"]; + }; + createSshSigningKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["POST /user/ssh_signing_keys"]["response"]; + }; + deleteEmailForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/emails"]["response"]; + }; + deleteEmailForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/emails"]["response"]; + }; + deleteGpgKeyForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/gpg_keys/{gpg_key_id}"]["response"]; + }; + deleteGpgKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/gpg_keys/{gpg_key_id}"]["response"]; + }; + deletePublicSshKeyForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/keys/{key_id}"]["response"]; + }; + deletePublicSshKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/keys/{key_id}"]["response"]; + }; + deleteSocialAccountForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/social_accounts"]["response"]; + }; + deleteSshSigningKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/ssh_signing_keys/{ssh_signing_key_id}"]["response"]; + }; + follow: { + parameters: RequestParameters & Omit; + response: Endpoints["PUT /user/following/{username}"]["response"]; + }; + getAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user"]["response"]; + }; + getByUsername: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}"]["response"]; + }; + getContextForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/hovercard"]["response"]; + }; + getGpgKeyForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/gpg_keys/{gpg_key_id}"]["response"]; + }; + getGpgKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/gpg_keys/{gpg_key_id}"]["response"]; + }; + getPublicSshKeyForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/keys/{key_id}"]["response"]; + }; + getPublicSshKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/keys/{key_id}"]["response"]; + }; + getSshSigningKeyForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/ssh_signing_keys/{ssh_signing_key_id}"]["response"]; + }; + list: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users"]["response"]; + }; + listBlockedByAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/blocks"]["response"]; + }; + listBlockedByAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/blocks"]["response"]; + }; + listEmailsForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/emails"]["response"]; + }; + listEmailsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/emails"]["response"]; + }; + listFollowedByAuthenticated: { + 
parameters: RequestParameters & Omit; + response: Endpoints["GET /user/following"]["response"]; + }; + listFollowedByAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/following"]["response"]; + }; + listFollowersForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/followers"]["response"]; + }; + listFollowersForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/followers"]["response"]; + }; + listFollowingForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/following"]["response"]; + }; + listGpgKeysForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/gpg_keys"]["response"]; + }; + listGpgKeysForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/gpg_keys"]["response"]; + }; + listGpgKeysForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/gpg_keys"]["response"]; + }; + listPublicEmailsForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/public_emails"]["response"]; + }; + listPublicEmailsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/public_emails"]["response"]; + }; + listPublicKeysForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/keys"]["response"]; + }; + listPublicSshKeysForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/keys"]["response"]; + }; + listPublicSshKeysForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/keys"]["response"]; + }; + listSocialAccountsForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/social_accounts"]["response"]; + }; + listSocialAccountsForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/social_accounts"]["response"]; + }; + listSshSigningKeysForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /user/ssh_signing_keys"]["response"]; + }; + listSshSigningKeysForUser: { + parameters: RequestParameters & Omit; + response: Endpoints["GET /users/{username}/ssh_signing_keys"]["response"]; + }; + setPrimaryEmailVisibilityForAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /user/email/visibility"]["response"]; + }; + setPrimaryEmailVisibilityForAuthenticatedUser: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /user/email/visibility"]["response"]; + }; + unblock: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/blocks/{username}"]["response"]; + }; + unfollow: { + parameters: RequestParameters & Omit; + response: Endpoints["DELETE /user/following/{username}"]["response"]; + }; + updateAuthenticated: { + parameters: RequestParameters & Omit; + response: Endpoints["PATCH /user"]["response"]; + }; + }; +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/index.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/index.d.ts new file mode 100644 index 0000000..4f7623e --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/index.d.ts @@ -0,0 +1,11 @@ +import type { Octokit } from "@octokit/core"; +export type { RestEndpointMethodTypes } from 
"./generated/parameters-and-response-types"; +import type { Api } from "./types"; +export declare function restEndpointMethods(octokit: Octokit): Api; +export declare namespace restEndpointMethods { + var VERSION: string; +} +export declare function legacyRestEndpointMethods(octokit: Octokit): Api["rest"] & Api; +export declare namespace legacyRestEndpointMethods { + var VERSION: string; +} diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/types.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/types.d.ts new file mode 100644 index 0000000..f43e12e --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/types.d.ts @@ -0,0 +1,18 @@ +import type { Route, RequestParameters } from "@octokit/types"; +import type { RestEndpointMethods } from "./generated/method-types"; +export type Api = { + rest: RestEndpointMethods; +}; +export type EndpointDecorations = { + mapToData?: string; + deprecated?: string; + renamed?: [string, string]; + renamedParameters?: { + [name: string]: string; + }; +}; +export type EndpointsDefaultsAndDecorations = { + [scope: string]: { + [methodName: string]: [Route, RequestParameters?, EndpointDecorations?]; + }; +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts new file mode 100644 index 0000000..20671d8 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "7.2.3"; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js new file mode 100644 index 0000000..bf0c51c --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js @@ -0,0 +1,1985 @@ +// pkg/dist-src/version.js +var VERSION = "7.2.3"; + +// pkg/dist-src/generated/endpoints.js +var Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + addSelectedRepoToRequiredWorkflow: [ + "PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], + createRemoveTokenForOrg: ["POST 
/orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createRequiredWorkflow: ["POST /orgs/{org}/actions/required_workflows"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteRequiredWorkflow: [ + "DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET 
/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoRequiredWorkflow: [ + "GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}" + ], + getRepoRequiredWorkflowUsage: [ + "GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing" + ], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getRequiredWorkflow: [ + "GET /orgs/{org}/actions/required_workflows/{required_workflow_id}" + ], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET 
/repos/{owner}/{repo}/actions/organization-variables" + ], + listRepoRequiredWorkflows: [ + "GET /repos/{org}/{repo}/actions/required_workflows" + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRequiredWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs" + ], + listRequiredWorkflows: ["GET /orgs/{org}/actions/required_workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], + listSelectedRepositoriesRequiredWorkflow: [ + "GET /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories" + ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + removeSelectedRepoFromRequiredWorkflow: [ + "DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + 
setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedReposToRequiredWorkflow: [ + "PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ], + updateRequiredWorkflow: [ + "PATCH /orgs/{org}/actions/required_workflows/{required_workflow_id}" + ] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription" + ], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: 
["apps", "addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] + }, + checks: { + create: 
["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines" + ], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], + deleteCodespacesBillingUsers: [ + "DELETE /orgs/{org}/codespaces/billing/selected_users" + ], + deleteForAuthenticatedUser: ["DELETE 
/user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + setCodespacesBilling: ["PUT /orgs/{org}/codespaces/billing"], + setCodespacesBillingUsers: [ + "POST /orgs/{org}/codespaces/billing/selected_users" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT 
/repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] + }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] + }, + emojis: { get: ["GET /emojis"] }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + 
getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] + }, + issues: { + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE 
/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] + }, + meta: { + get: ["GET /meta"], + getAllVersions: ["GET /versions"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: ["DELETE /repos/{owner}/{repo}/import"], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], + getImportStatus: ["GET /repos/{owner}/{repo}/import"], + getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], + setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: ["PUT /repos/{owner}/{repo}/import"], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: ["PATCH /repos/{owner}/{repo}/import"] + }, + orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], + createInvitation: ["POST /orgs/{org}/invitations"], + createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], + get: ["GET /orgs/{org}"], 
+ getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: [ + "GET /organizations/{org}/personal-access-token-requests" + ], + listPatGrants: ["GET /organizations/{org}/personal-access-tokens"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /organizations/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /organizations/{org}/personal-access-token-requests" + ], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: [ + "POST /organizations/{org}/personal-access-tokens/{pat_id}" + ], + updatePatAccesses: ["POST /organizations/{org}/personal-access-tokens"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForOrg: [ + "DELETE 
/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] + } + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET 
/projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" + ], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] + }, + rateLimit: { get: ["GET /rate_limit"] }, + reactions: { + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + 
createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] + }, + repos: { + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + 
createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: 
[ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" + ], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET 
/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + 
listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + 
updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + getRepositoryAdvisory: [ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] + }, + teams: { + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], + addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], 
+ getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + list: ["GET /users"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } + ], + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; +var endpoints_default = Endpoints; + +// pkg/dist-src/endpoints-to-methods.js +var endpointMethodsMap = /* @__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + { + method, + url + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } +} +var handler = { + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const { decorations, endpointDefaults } = endpointMethodsMap.get(scope).get(methodName); + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = 
octokit.request.defaults(endpointDefaults); + } + return cache[methodName]; + } +}; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); + } + return newMethods; +} +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + function withDecorations(...args) { + let options = requestWithDefaults.endpoint.merge(...args); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: void 0 + }); + return requestWithDefaults(options); + } + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); + } + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + if (decorations.renamedParameters) { + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; + } + delete options2[name]; + } + } + return requestWithDefaults(options2); + } + return requestWithDefaults(...args); + } + return Object.assign(withDecorations, requestWithDefaults); +} + +// pkg/dist-src/index.js +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + rest: api + }; +} +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + ...api, + rest: api + }; +} +legacyRestEndpointMethods.VERSION = VERSION; +export { + legacyRestEndpointMethods, + restEndpointMethods +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map new file mode 100644 index 0000000..7a698da --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/version.js", "../dist-src/generated/endpoints.js", "../dist-src/endpoints-to-methods.js", "../dist-src/index.js"], + "sourcesContent": ["const VERSION = \"7.2.3\";\nexport {\n VERSION\n};\n", "const Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\n \"POST /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n addCustomLabelsToSelfHostedRunnerForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToOrgVariable: [\n \"PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToRequiredWorkflow: [\n \"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}\"\n ],\n approveWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"\n ],\n cancelWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"\n ],\n createEnvironmentVariable: [\n \"POST /repositories/{repository_id}/environments/{environment_name}/variables\"\n ],\n 
createOrUpdateEnvironmentSecret: [\n \"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"\n ],\n createOrgVariable: [\"POST /orgs/{org}/actions/variables\"],\n createRegistrationTokenForOrg: [\n \"POST /orgs/{org}/actions/runners/registration-token\"\n ],\n createRegistrationTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/registration-token\"\n ],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/remove-token\"\n ],\n createRepoVariable: [\"POST /repos/{owner}/{repo}/actions/variables\"],\n createRequiredWorkflow: [\"POST /orgs/{org}/actions/required_workflows\"],\n createWorkflowDispatch: [\n \"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"\n ],\n deleteActionsCacheById: [\n \"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\"\n ],\n deleteActionsCacheByKey: [\n \"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\"\n ],\n deleteArtifact: [\n \"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"\n ],\n deleteEnvironmentSecret: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n deleteEnvironmentVariable: [\n \"DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteOrgVariable: [\"DELETE /orgs/{org}/actions/variables/{name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"\n ],\n deleteRepoVariable: [\n \"DELETE /repos/{owner}/{repo}/actions/variables/{name}\"\n ],\n deleteRequiredWorkflow: [\n \"DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}\"\n ],\n deleteSelfHostedRunnerFromOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}\"\n ],\n deleteSelfHostedRunnerFromRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"\n ],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\n \"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"\n ],\n disableSelectedRepositoryGithubActionsOrganization: [\n \"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"\n ],\n disableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"\n ],\n downloadArtifact: [\n \"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"\n ],\n downloadJobLogsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"\n ],\n downloadWorkflowRunAttemptLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"\n ],\n downloadWorkflowRunLogs: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"\n ],\n enableSelectedRepositoryGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"\n ],\n enableWorkflow: [\n \"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"\n ],\n generateRunnerJitconfigForOrg: [\n \"POST /orgs/{org}/actions/runners/generate-jitconfig\"\n ],\n generateRunnerJitconfigForRepo: [\n \"POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig\"\n ],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: 
[\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\n \"GET /orgs/{org}/actions/cache/usage-by-repository\"\n ],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/selected-actions\"\n ],\n getAllowedActionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"\n ],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"\n ],\n getEnvironmentSecret: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"\n ],\n getEnvironmentVariable: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/workflow\"\n ],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/workflow\"\n ],\n getGithubActionsPermissionsOrganization: [\n \"GET /orgs/{org}/actions/permissions\"\n ],\n getGithubActionsPermissionsRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions\"\n ],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getOrgVariable: [\"GET /orgs/{org}/actions/variables/{name}\"],\n getPendingDeploymentsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"\n ],\n getRepoPermissions: [\n \"GET /repos/{owner}/{repo}/actions/permissions\",\n {},\n { renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"] }\n ],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoRequiredWorkflow: [\n \"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}\"\n ],\n getRepoRequiredWorkflowUsage: [\n \"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing\"\n ],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getRepoVariable: [\"GET /repos/{owner}/{repo}/actions/variables/{name}\"],\n getRequiredWorkflow: [\n \"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}\"\n ],\n getReviewsForRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"\n ],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"\n ],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\n \"GET /repos/{owner}/{repo}/actions/permissions/access\"\n ],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"\n ],\n getWorkflowRunUsage: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"\n ],\n getWorkflowUsage: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"\n ],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\n \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"\n ],\n listEnvironmentVariables: [\n \"GET 
/repositories/{repository_id}/environments/{environment_name}/variables\"\n ],\n listJobsForWorkflowRun: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"\n ],\n listJobsForWorkflowRunAttempt: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"\n ],\n listLabelsForSelfHostedRunnerForOrg: [\n \"GET /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n listLabelsForSelfHostedRunnerForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listOrgVariables: [\"GET /orgs/{org}/actions/variables\"],\n listRepoOrganizationSecrets: [\n \"GET /repos/{owner}/{repo}/actions/organization-secrets\"\n ],\n listRepoOrganizationVariables: [\n \"GET /repos/{owner}/{repo}/actions/organization-variables\"\n ],\n listRepoRequiredWorkflows: [\n \"GET /repos/{org}/{repo}/actions/required_workflows\"\n ],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoVariables: [\"GET /repos/{owner}/{repo}/actions/variables\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRequiredWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs\"\n ],\n listRequiredWorkflows: [\"GET /orgs/{org}/actions/required_workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\n \"GET /repos/{owner}/{repo}/actions/runners/downloads\"\n ],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"\n ],\n listSelectedReposForOrgVariable: [\n \"GET /orgs/{org}/actions/variables/{name}/repositories\"\n ],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"GET /orgs/{org}/actions/permissions/repositories\"\n ],\n listSelectedRepositoriesRequiredWorkflow: [\n \"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories\"\n ],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\n \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"\n ],\n listWorkflowRuns: [\n \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"\n ],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\n \"POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\"\n ],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\"\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\n \"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\"\n ],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\n \"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromOrgVariable: [\n \"DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"\n ],\n removeSelectedRepoFromRequiredWorkflow: [\n \"DELETE 
/orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}\"\n ],\n reviewCustomGatesForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule\"\n ],\n reviewPendingDeploymentsForRun: [\n \"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"\n ],\n setAllowedActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/selected-actions\"\n ],\n setAllowedActionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"\n ],\n setCustomLabelsForSelfHostedRunnerForOrg: [\n \"PUT /orgs/{org}/actions/runners/{runner_id}/labels\"\n ],\n setCustomLabelsForSelfHostedRunnerForRepo: [\n \"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"\n ],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/workflow\"\n ],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/workflow\"\n ],\n setGithubActionsPermissionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions\"\n ],\n setGithubActionsPermissionsRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"\n ],\n setSelectedReposForOrgVariable: [\n \"PUT /orgs/{org}/actions/variables/{name}/repositories\"\n ],\n setSelectedReposToRequiredWorkflow: [\n \"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories\"\n ],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\n \"PUT /orgs/{org}/actions/permissions/repositories\"\n ],\n setWorkflowAccessToRepository: [\n \"PUT /repos/{owner}/{repo}/actions/permissions/access\"\n ],\n updateEnvironmentVariable: [\n \"PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"\n ],\n updateOrgVariable: [\"PATCH /orgs/{org}/actions/variables/{name}\"],\n updateRepoVariable: [\n \"PATCH /repos/{owner}/{repo}/actions/variables/{name}\"\n ],\n updateRequiredWorkflow: [\n \"PATCH /orgs/{org}/actions/required_workflows/{required_workflow_id}\"\n ]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\n \"DELETE /notifications/threads/{thread_id}/subscription\"\n ],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\n \"GET /notifications/threads/{thread_id}/subscription\"\n ],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\n \"GET /users/{username}/events/orgs/{org}\"\n ],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\n \"GET /users/{username}/received_events/public\"\n ],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/notifications\"\n ],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n 
listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\n \"PUT /notifications/threads/{thread_id}/subscription\"\n ],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"] }\n ],\n addRepoToInstallationForAuthenticatedUser: [\n \"PUT /user/installations/{installation_id}/repositories/{repository_id}\"\n ],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\n \"POST /app/installations/{installation_id}/access_tokens\"\n ],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\n \"GET /marketplace_listing/accounts/{account_id}\"\n ],\n getSubscriptionPlanForAccountStubbed: [\n \"GET /marketplace_listing/stubbed/accounts/{account_id}\"\n ],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\n \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"\n ],\n listInstallationReposForAuthenticatedUser: [\n \"GET /user/installations/{installation_id}/repositories\"\n ],\n listInstallationRequestsForAuthenticatedApp: [\n \"GET /app/installation-requests\"\n ],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\n \"GET /user/marketplace_purchases/stubbed\"\n ],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\n \"POST /app/hook/deliveries/{delivery_id}/attempts\"\n ],\n removeRepoFromInstallation: [\n \"DELETE /user/installations/{installation_id}/repositories/{repository_id}\",\n {},\n { renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"] }\n ],\n removeRepoFromInstallationForAuthenticatedUser: [\n 
\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"\n ],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\n \"DELETE /app/installations/{installation_id}/suspended\"\n ],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\n \"GET /users/{username}/settings/billing/actions\"\n ],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\n \"GET /users/{username}/settings/billing/packages\"\n ],\n getSharedStorageBillingOrg: [\n \"GET /orgs/{org}/settings/billing/shared-storage\"\n ],\n getSharedStorageBillingUser: [\n \"GET /users/{username}/settings/billing/shared-storage\"\n ]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\n \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"\n ],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\n \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"\n ],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\n \"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"\n ],\n rerequestSuite: [\n \"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"\n ],\n setSuitesPreferences: [\n \"PATCH /repos/{owner}/{repo}/check-suites/preferences\"\n ],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\n \"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"\n ],\n getAlert: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\",\n {},\n { renamedParameters: { alert_id: \"alert_number\" } }\n ],\n getAnalysis: [\n \"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"\n ],\n getCodeqlDatabase: [\n \"GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}\"\n ],\n getDefaultSetup: [\"GET /repos/{owner}/{repo}/code-scanning/default-setup\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\n \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\",\n {},\n { renamed: [\"codeScanning\", \"listAlertInstances\"] }\n ],\n listCodeqlDatabases: [\n \"GET /repos/{owner}/{repo}/code-scanning/codeql/databases\"\n ],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"\n ],\n updateDefaultSetup: [\n \"PATCH /repos/{owner}/{repo}/code-scanning/default-setup\"\n ],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: 
[\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n codespaceMachinesForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/machines\"\n ],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}\"\n ],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n createOrUpdateSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}\"\n ],\n createWithPrForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\"\n ],\n createWithRepoForAuthenticatedUser: [\n \"POST /repos/{owner}/{repo}/codespaces\"\n ],\n deleteCodespacesBillingUsers: [\n \"DELETE /orgs/{org}/codespaces/billing/selected_users\"\n ],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\n \"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/codespaces/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n deleteSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}\"\n ],\n exportForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/exports\"\n ],\n getCodespacesForUserInOrg: [\n \"GET /orgs/{org}/members/{username}/codespaces\"\n ],\n getExportDetailsForAuthenticatedUser: [\n \"GET /user/codespaces/{codespace_name}/exports/{export_id}\"\n ],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/codespaces/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/codespaces/secrets/{secret_name}\"],\n getPublicKeyForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/public-key\"\n ],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\"\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"\n ],\n getSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}\"\n ],\n listDevcontainersInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/devcontainers\"\n ],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\n \"GET /orgs/{org}/codespaces\",\n {},\n { renamedParameters: { org_id: \"org\" } }\n ],\n listInRepositoryForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces\"\n ],\n listOrgSecrets: [\"GET /orgs/{org}/codespaces/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\n \"GET /user/codespaces/secrets/{secret_name}/repositories\"\n ],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"\n ],\n preFlightWithRepoForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/new\"\n ],\n publishForAuthenticatedUser: [\n \"POST /user/codespaces/{codespace_name}/publish\"\n ],\n removeRepositoryForSecretForAuthenticatedUser: [\n \"DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n 
removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n repoMachinesForAuthenticatedUser: [\n \"GET /repos/{owner}/{repo}/codespaces/machines\"\n ],\n setCodespacesBilling: [\"PUT /orgs/{org}/codespaces/billing\"],\n setCodespacesBillingUsers: [\n \"POST /orgs/{org}/codespaces/billing/selected_users\"\n ],\n setRepositoriesForSecretForAuthenticatedUser: [\n \"PUT /user/codespaces/secrets/{secret_name}/repositories\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"\n ],\n startForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\n \"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\"\n ],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"]\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n createOrUpdateOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}\"\n ],\n createOrUpdateRepoSecret: [\n \"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\n \"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n getAlert: [\"GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\"\n ],\n getRepoSecret: [\n \"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/dependabot/alerts\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/dependabot/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/dependabot/alerts\"],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\n \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"\n ],\n removeSelectedRepoFromOrgSecret: [\n \"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"\n ],\n setSelectedReposForOrgSecret: [\n \"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"\n ]\n },\n dependencyGraph: {\n createRepositorySnapshot: [\n \"POST /repos/{owner}/{repo}/dependency-graph/snapshots\"\n ],\n diffRange: [\n \"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\"\n ],\n exportSbom: [\"GET /repos/{owner}/{repo}/dependency-graph/sbom\"]\n },\n emojis: { get: [\"GET /emojis\"] },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET 
/users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\n \"GET /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"] }\n ],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\n \"DELETE /repos/{owner}/{repo}/interaction-limits\"\n ],\n removeRestrictionsForYourPublicRepos: [\n \"DELETE /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"] }\n ],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\n \"PUT /user/interaction-limits\",\n {},\n { renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"] }\n ]\n },\n issues: {\n addAssignees: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"\n ],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n checkUserCanBeAssignedToIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}\"\n ],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"\n ],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"\n ],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\n \"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"\n ],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET 
/repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"\n ],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\n \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"\n ],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\n \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"\n ],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"\n ],\n removeAssignees: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"\n ],\n removeLabel: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"\n ],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\n \"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"\n ]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\n \"POST /markdown/raw\",\n { headers: { \"content-type\": \"text/plain; charset=utf-8\" } }\n ]\n },\n meta: {\n get: [\"GET /meta\"],\n getAllVersions: [\"GET /versions\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/archive\"\n ],\n deleteArchiveForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/archive\"\n ],\n downloadArchiveForOrg: [\n \"GET /orgs/{org}/migrations/{migration_id}/archive\"\n ],\n getArchiveForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/archive\"\n ],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n listReposForAuthenticatedUser: [\n \"GET /user/migrations/{migration_id}/repositories\"\n ],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\n \"GET /user/migrations/{migration_id}/repositories\",\n {},\n { 
renamed: [\"migrations\", \"listReposForAuthenticatedUser\"] }\n ],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\n \"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"\n ],\n unlockRepoForOrg: [\n \"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"\n ],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n addSecurityManagerTeam: [\n \"PUT /orgs/{org}/security-managers/teams/{team_slug}\"\n ],\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\n \"PUT /orgs/{org}/outside_collaborators/{username}\"\n ],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n delete: [\"DELETE /orgs/{org}\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n enableOrDisableSecurityProductOnAllOrgRepos: [\n \"POST /orgs/{org}/{security_product}/{enablement}\"\n ],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\n \"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"\n ],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPatGrantRepositories: [\n \"GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories\"\n ],\n listPatGrantRequestRepositories: [\n \"GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories\"\n ],\n listPatGrantRequests: [\n \"GET /organizations/{org}/personal-access-token-requests\"\n ],\n listPatGrants: [\"GET /organizations/{org}/personal-access-tokens\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listSecurityManagerTeams: [\"GET /orgs/{org}/security-managers\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"\n ],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\n \"DELETE 
/orgs/{org}/outside_collaborators/{username}\"\n ],\n removePublicMembershipForAuthenticatedUser: [\n \"DELETE /orgs/{org}/public_members/{username}\"\n ],\n removeSecurityManagerTeam: [\n \"DELETE /orgs/{org}/security-managers/teams/{team_slug}\"\n ],\n reviewPatGrantRequest: [\n \"POST /organizations/{org}/personal-access-token-requests/{pat_request_id}\"\n ],\n reviewPatGrantRequestsInBulk: [\n \"POST /organizations/{org}/personal-access-token-requests\"\n ],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\n \"PUT /orgs/{org}/public_members/{username}\"\n ],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\n \"PATCH /user/memberships/orgs/{org}\"\n ],\n updatePatAccess: [\n \"POST /organizations/{org}/personal-access-tokens/{pat_id}\"\n ],\n updatePatAccesses: [\"POST /organizations/{org}/personal-access-tokens\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}\"\n ],\n deletePackageForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"\n ],\n deletePackageForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}\"\n ],\n deletePackageVersionForAuthenticatedUser: [\n \"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n deletePackageVersionForOrg: [\n \"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n deletePackageVersionForUser: [\n \"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\",\n {},\n { renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"] }\n ],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\",\n {},\n {\n renamed: [\n \"packages\",\n \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"\n ]\n }\n ],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions\"\n ],\n getAllPackageVersionsForPackageOwnedByOrg: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"\n ],\n getAllPackageVersionsForPackageOwnedByUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions\"\n ],\n getPackageForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}\"\n ],\n getPackageForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}\"\n ],\n getPackageForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}\"\n ],\n getPackageVersionForAuthenticatedUser: [\n \"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getPackageVersionForOrganization: [\n \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n getPackageVersionForUser: [\n \"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"\n ],\n listDockerMigrationConflictingPackagesForAuthenticatedUser: [\n \"GET /user/docker/conflicts\"\n ],\n listDockerMigrationConflictingPackagesForOrganization: [\n \"GET 
/orgs/{org}/docker/conflicts\"\n ],\n listDockerMigrationConflictingPackagesForUser: [\n \"GET /users/{username}/docker/conflicts\"\n ],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"\n ],\n restorePackageVersionForAuthenticatedUser: [\n \"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ],\n restorePackageVersionForOrg: [\n \"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ],\n restorePackageVersionForUser: [\n \"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"\n ]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST /projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\n \"GET /projects/{project_id}/collaborators/{username}/permission\"\n ],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\n \"DELETE /projects/{project_id}/collaborators/{username}\"\n ],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"\n ],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"\n ],\n deletePendingReview: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n deleteReviewComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"\n ],\n dismissReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"\n ],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n getReviewComment: [\"GET 
/repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n listReviewComments: [\n \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"\n ],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\n \"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n requestReviewers: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"\n ],\n submitReview: [\n \"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"\n ],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"\n ],\n updateReview: [\n \"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"\n ],\n updateReviewComment: [\n \"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"\n ]\n },\n rateLimit: { get: [\"GET /rate_limit\"] },\n reactions: {\n createForCommitComment: [\n \"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"\n ],\n createForIssue: [\n \"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"\n ],\n createForIssueComment: [\n \"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"\n ],\n createForPullRequestReviewComment: [\n \"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"\n ],\n createForRelease: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"\n ],\n createForTeamDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"\n ],\n createForTeamDiscussionInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"\n ],\n deleteForCommitComment: [\n \"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForIssue: [\n \"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"\n ],\n deleteForIssueComment: [\n \"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForPullRequestComment: [\n \"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"\n ],\n deleteForRelease: [\n \"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\"\n ],\n deleteForTeamDiscussion: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"\n ],\n deleteForTeamDiscussionComment: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"\n ],\n listForCommitComment: [\n \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"\n ],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\n \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"\n ],\n listForPullRequestReviewComment: [\n \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"\n ],\n listForRelease: [\n 
\"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\"\n ],\n listForTeamDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"\n ],\n listForTeamDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"\n ]\n },\n repos: {\n acceptInvitation: [\n \"PATCH /user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"] }\n ],\n acceptInvitationForAuthenticatedUser: [\n \"PATCH /user/repository_invitations/{invitation_id}\"\n ],\n addAppAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n addTeamAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n addUserAccessRestrictions: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\n \"GET /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\n \"GET /repos/{owner}/{repo}/compare/{basehead}\"\n ],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\n \"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"\n ],\n createCommitSignatureProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentBranchPolicy: [\n \"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"\n ],\n createDeploymentProtectionRule: [\n \"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules\"\n ],\n createDeploymentStatus: [\n \"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"\n ],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createOrgRuleset: [\"POST /orgs/{org}/rulesets\"],\n createPagesDeployment: [\"POST /repos/{owner}/{repo}/pages/deployment\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createRepoRuleset: [\"POST /repos/{owner}/{repo}/rulesets\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\n \"POST /repos/{template_owner}/{template_repo}/generate\"\n ],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\n \"DELETE 
/user/repository_invitations/{invitation_id}\",\n {},\n { renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"] }\n ],\n declineInvitationForAuthenticatedUser: [\n \"DELETE /user/repository_invitations/{invitation_id}\"\n ],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"\n ],\n deleteAdminBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n deleteAnEnvironment: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\n \"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"\n ],\n deleteDeploymentBranchPolicy: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\n \"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"\n ],\n deleteOrgRuleset: [\"DELETE /orgs/{org}/rulesets/{ruleset_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\n \"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"\n ],\n deleteRepoRuleset: [\"DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n deleteTagProtection: [\n \"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\"\n ],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\n \"DELETE /repos/{owner}/{repo}/automated-security-fixes\"\n ],\n disableDeploymentProtectionRule: [\n \"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}\"\n ],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\n \"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n downloadArchive: [\n \"GET /repos/{owner}/{repo}/zipball/{ref}\",\n {},\n { renamed: [\"repos\", \"downloadZipballArchive\"] }\n ],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\n \"PUT /repos/{owner}/{repo}/automated-security-fixes\"\n ],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\n \"PUT /repos/{owner}/{repo}/vulnerability-alerts\"\n ],\n generateReleaseNotes: [\n \"POST /repos/{owner}/{repo}/releases/generate-notes\"\n ],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"\n ],\n getAdminBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n getAllDeploymentProtectionRules: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules\"\n ],\n 
getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"\n ],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"\n ],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n getBranchRules: [\"GET /repos/{owner}/{repo}/rules/branches/{branch}\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\n \"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"\n ],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"\n ],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getCustomDeploymentProtectionRule: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}\"\n ],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentBranchPolicy: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n getDeploymentStatus: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"\n ],\n getEnvironment: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}\"\n ],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getOrgRuleset: [\"GET /orgs/{org}/rulesets/{ruleset_id}\"],\n getOrgRulesets: [\"GET /orgs/{org}/rulesets\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getRepoRuleset: [\"GET /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n getRepoRulesets: [\"GET /repos/{owner}/{repo}/rulesets\"],\n getStatusChecksProtection: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n 
getTeamsWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"\n ],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\n \"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"\n ],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"\n ],\n getWebhookDelivery: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"\n ],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"\n ],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"\n ],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\n \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"\n ],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listCustomDeploymentRuleIntegrations: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps\"\n ],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentBranchPolicies: [\n \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"\n ],\n listDeploymentStatuses: [\n \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"\n ],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\n \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"\n ],\n listReleaseAssets: [\n \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"\n ],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\n \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"\n ],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST /repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\n \"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"\n ],\n removeAppAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n removeCollaborator: [\n \"DELETE /repos/{owner}/{repo}/collaborators/{username}\"\n ],\n removeStatusCheckContexts: [\n \"DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n removeStatusCheckProtection: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n removeTeamAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n removeUserAccessRestrictions: [\n \"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\n \"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"\n ],\n setAppAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\",\n {},\n { mapToData: \"apps\" }\n ],\n setStatusCheckContexts: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\",\n {},\n { mapToData: \"contexts\" }\n ],\n setTeamAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\",\n {},\n { mapToData: \"teams\" }\n ],\n setUserAccessRestrictions: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\",\n {},\n { mapToData: \"users\" }\n ],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\n \"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"\n ],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateDeploymentBranchPolicy: [\n \"PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"\n ],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\n \"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"\n ],\n updateOrgRuleset: [\"PUT /orgs/{org}/rulesets/{ruleset_id}\"],\n updatePullRequestReviewProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"\n ],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\n \"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"\n ],\n updateRepoRuleset: [\"PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}\"],\n updateStatusCheckPotection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\",\n {},\n { renamed: [\"repos\", \"updateStatusCheckProtection\"] }\n ],\n updateStatusCheckProtection: [\n \"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"\n ],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\n \"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"\n ],\n uploadReleaseAsset: [\n \"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\",\n { baseUrl: \"https://uploads.github.com\" }\n ]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\n \"GET 
/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"\n ],\n listAlertsForEnterprise: [\n \"GET /enterprises/{enterprise}/secret-scanning/alerts\"\n ],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\n \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\"\n ],\n updateAlert: [\n \"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"\n ]\n },\n securityAdvisories: {\n createPrivateVulnerabilityReport: [\n \"POST /repos/{owner}/{repo}/security-advisories/reports\"\n ],\n createRepositoryAdvisory: [\n \"POST /repos/{owner}/{repo}/security-advisories\"\n ],\n getRepositoryAdvisory: [\n \"GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}\"\n ],\n listRepositoryAdvisories: [\"GET /repos/{owner}/{repo}/security-advisories\"],\n updateRepositoryAdvisory: [\n \"PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}\"\n ]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n addOrUpdateProjectPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n addOrUpdateRepoPermissionsInOrg: [\n \"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n checkPermissionsForProjectInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n checkPermissionsForRepoInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\n \"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"\n ],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n deleteDiscussionInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n getDiscussionInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n getMembershipForUserInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"\n ],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\n \"GET /orgs/{org}/teams/{team_slug}/invitations\"\n ],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"\n ],\n removeProjectInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"\n ],\n removeRepoInOrg: [\n \"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"\n ],\n updateDiscussionCommentInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"\n ],\n 
updateDiscussionInOrg: [\n \"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"\n ],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\n \"POST /user/emails\",\n {},\n { renamed: [\"users\", \"addEmailForAuthenticatedUser\"] }\n ],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n addSocialAccountForAuthenticatedUser: [\"POST /user/social_accounts\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\n \"POST /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"] }\n ],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\n \"POST /user/keys\",\n {},\n { renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"] }\n ],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n createSshSigningKeyForAuthenticatedUser: [\"POST /user/ssh_signing_keys\"],\n deleteEmailForAuthenticated: [\n \"DELETE /user/emails\",\n {},\n { renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"] }\n ],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\n \"DELETE /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"] }\n ],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\n \"DELETE /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"] }\n ],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n deleteSocialAccountForAuthenticatedUser: [\"DELETE /user/social_accounts\"],\n deleteSshSigningKeyForAuthenticatedUser: [\n \"DELETE /user/ssh_signing_keys/{ssh_signing_key_id}\"\n ],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\n \"GET /user/gpg_keys/{gpg_key_id}\",\n {},\n { renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"] }\n ],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\n \"GET /user/keys/{key_id}\",\n {},\n { renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"] }\n ],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n getSshSigningKeyForAuthenticatedUser: [\n \"GET /user/ssh_signing_keys/{ssh_signing_key_id}\"\n ],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\n \"GET /user/blocks\",\n {},\n { renamed: [\"users\", \"listBlockedByAuthenticatedUser\"] }\n ],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\n \"GET /user/emails\",\n {},\n { renamed: [\"users\", \"listEmailsForAuthenticatedUser\"] }\n ],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\n \"GET /user/following\",\n {},\n { renamed: [\"users\", \"listFollowedByAuthenticatedUser\"] }\n ],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n 
listGpgKeysForAuthenticated: [\n \"GET /user/gpg_keys\",\n {},\n { renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"] }\n ],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\n \"GET /user/public_emails\",\n {},\n { renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"] }\n ],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\n \"GET /user/keys\",\n {},\n { renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"] }\n ],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n listSocialAccountsForAuthenticatedUser: [\"GET /user/social_accounts\"],\n listSocialAccountsForUser: [\"GET /users/{username}/social_accounts\"],\n listSshSigningKeysForAuthenticatedUser: [\"GET /user/ssh_signing_keys\"],\n listSshSigningKeysForUser: [\"GET /users/{username}/ssh_signing_keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\n \"PATCH /user/email/visibility\",\n {},\n { renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"] }\n ],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\n \"PATCH /user/email/visibility\"\n ],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\nvar endpoints_default = Endpoints;\nexport {\n endpoints_default as default\n};\n", "import ENDPOINTS from \"./generated/endpoints\";\nconst endpointMethodsMap = /* @__PURE__ */ new Map();\nfor (const [scope, endpoints] of Object.entries(ENDPOINTS)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign(\n {\n method,\n url\n },\n defaults\n );\n if (!endpointMethodsMap.has(scope)) {\n endpointMethodsMap.set(scope, /* @__PURE__ */ new Map());\n }\n endpointMethodsMap.get(scope).set(methodName, {\n scope,\n methodName,\n endpointDefaults,\n decorations\n });\n }\n}\nconst handler = {\n get({ octokit, scope, cache }, methodName) {\n if (cache[methodName]) {\n return cache[methodName];\n }\n const { decorations, endpointDefaults } = endpointMethodsMap.get(scope).get(methodName);\n if (decorations) {\n cache[methodName] = decorate(\n octokit,\n scope,\n methodName,\n endpointDefaults,\n decorations\n );\n } else {\n cache[methodName] = octokit.request.defaults(endpointDefaults);\n }\n return cache[methodName];\n }\n};\nfunction endpointsToMethods(octokit) {\n const newMethods = {};\n for (const scope of endpointMethodsMap.keys()) {\n newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler);\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n function withDecorations(...args) {\n let options = requestWithDefaults.endpoint.merge(...args);\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: void 0\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(\n `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`\n );\n }\n if (decorations.deprecated) {\n 
octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n const options2 = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(\n decorations.renamedParameters\n )) {\n if (name in options2) {\n octokit.log.warn(\n `\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". Use \"${alias}\" instead`\n );\n if (!(alias in options2)) {\n options2[alias] = options2[name];\n }\n delete options2[name];\n }\n }\n return requestWithDefaults(options2);\n }\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\nexport {\n endpointsToMethods\n};\n", "import { VERSION } from \"./version\";\nimport { endpointsToMethods } from \"./endpoints-to-methods\";\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit);\n return {\n ...api,\n rest: api\n };\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\nexport {\n legacyRestEndpointMethods,\n restEndpointMethods\n};\n"], + "mappings": ";AAAA,IAAM,UAAU;;;ACAhB,IAAM,YAAY;AAAA,EAChB,SAAS;AAAA,IACP,yCAAyC;AAAA,MACvC;AAAA,IACF;AAAA,IACA,0CAA0C;AAAA,MACxC;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,mBAAmB;AAAA,MACjB;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,yBAAyB,CAAC,+CAA+C;AAAA,IACzE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,oCAAoC;AAAA,IACxD,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,yBAAyB,CAAC,+CAA+C;AAAA,IACzE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,oBAAoB,CAAC,8CAA8C;AAAA,IACnE,wBAAwB,CAAC,6CAA6C;AAAA,IACtE,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,kDAAkD;AAAA,IACpE,mBAAmB,CAAC,6CAA6C;AAAA,IACjE,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,oDAAoD;AAAA,IACxE,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,oDAAoD;AAAA,MAClD;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,mDAAmD;AAAA,MACjD;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,0CAA0C;AAAA,IAChE,sBAAsB,CAAC,+CAA+C;AAAA,IACtE,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,4BAA4B,CAAC,qCAAqC;AAAA,IAClE,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,aAAa,CAAC,2DAA2D;AAAA,IACzE,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,wDAAwD;AAAA,MACtD;AAAA,IACF;AAAA,IACA,sDAAsD;AAAA,MACpD;AAAA,IACF;AAAA,IACA,yCAAyC;AAAA,MACvC;AAAA,IACF;AAAA,IACA,uCAAuC;AAAA,MACrC;AAAA,IACF;AAAA,IACA,sBAAsB,CAAC,iDAAiD;AAAA,IACxE,iBAAiB,CAAC,4CAA4C;AAAA,IAC9D,cAAc,CAAC,+CAA+C;AAAA,IAC9D,gBAAgB,CAAC,0CAA0C;AAAA,IAC3D,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,WAAW,uCAA
uC,EAAE;AAAA,IAClE;AAAA,IACA,kBAAkB,CAAC,sDAAsD;AAAA,IACzE,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,eAAe,CAAC,yDAAyD;AAAA,IACzE,iBAAiB,CAAC,oDAAoD;AAAA,IACtE,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,2BAA2B,CAAC,6CAA6C;AAAA,IACzE,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,aAAa,CAAC,2DAA2D;AAAA,IACzE,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,iDAAiD;AAAA,IAClE,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,sBAAsB,CAAC,6CAA6C;AAAA,IACpE,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,qCAAqC;AAAA,MACnC;AAAA,IACF;AAAA,IACA,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,iCAAiC;AAAA,IAClD,kBAAkB,CAAC,mCAAmC;AAAA,IACtD,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,2CAA2C;AAAA,IAC7D,mBAAmB,CAAC,6CAA6C;AAAA,IACjE,mBAAmB,CAAC,6CAA6C;AAAA,IACjE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,uBAAuB,CAAC,4CAA4C;AAAA,IACpE,8BAA8B,CAAC,2CAA2C;AAAA,IAC1E,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,0DAA0D;AAAA,MACxD;AAAA,IACF;AAAA,IACA,0CAA0C;AAAA,MACxC;AAAA,IACF;AAAA,IACA,6BAA6B,CAAC,iCAAiC;AAAA,IAC/D,8BAA8B,CAAC,2CAA2C;AAAA,IAC1E,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,yBAAyB,CAAC,wCAAwC;AAAA,IAClE,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,eAAe,CAAC,wDAAwD;AAAA,IACxE,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,iDAAiD;AAAA,MAC/C;AAAA,IACF;AAAA,IACA,kDAAkD;AAAA,MAChD;AAAA,IACF;AAAA,IACA,6CAA6C;AAAA,MAC3C;AAAA,IACF;AAAA,IACA,8CAA8C;AAAA,MAC5C;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,wCAAwC;AAAA,MACtC;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,0CAA0C;AAAA,MACxC;AAAA,IACF;AAAA,IACA,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,wDAAwD;AAAA,MACtD;AAAA,IACF;AAAA,IACA,sDAAsD;AAAA,MACpD;AAAA,IACF;AAAA,IACA,yCAAyC;AAAA,MACvC;AAAA,IACF;AAAA,IACA,uCAAuC;AAAA,MACrC;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,oCAAoC;AAAA,MAClC;AAAA,IACF;AAAA,IACA,yDAAyD;AAAA,MACvD;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,4CAA4C;AAAA,IAChE,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,EACF;AAAA,EACA,UAAU;AAAA,IACR,uCAAuC,CAAC,kCAAkC;AAAA,IAC1E,wBAAwB,CAAC,2CAA2C;AAAA,IACpE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,UAAU,CAAC,YAAY;AAAA,IACvB,qBAAqB,CAAC,wCAAwC;AAAA,IAC9D,WAAW,CAAC,wCAAwC;AAAA,IACpD,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,gCAAgC,CAAC,8BAA8B;AAAA,IAC/D,uCAAuC,CAAC,oBAAoB;AAAA,IAC5D,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,aAAa;AAAA,IAChC,gCAAgC,CAAC,qCAAqC;AAAA,IACtE,yBAAyB,CAAC,qCAAqC;AAAA,IAC/D,qBAAqB,CAAC,wBAAwB;AAAA,IAC9C,2BAA2B,CAAC,uCAAuC;AAAA,IACnE,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,kCAAkC;AAAA,IACnD,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,qCAAqC,CAAC,mBAAmB;AAAA,IACzD,wBAAwB,CAAC,+BAA+B;AAAA,IACxD,wBAAwB,CAAC,qCAAqC;AAAA,IAC9D,uBAAuB,CAAC,sCAAsC;AAAA,IAC9D,sCAAsC,CAAC,yBAAyB;AAAA,IAChE,qBAAqB,CAAC,uCAAuC;AAAA,IAC7D,yBAAyB,CAAC,oBAAoB;AAAA,IAC9C,6BAA6B,CAAC,yCAAyC;AAAA,IACvE,kBAAkB,CAAC,0CAA0C;AAAA,IAC7D,qBAAqB,CAAC,wCAAwC;AAAA,IAC9D,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,8BAA8B,CAAC,k
CAAkC;AAAA,IACjE,gCAAgC,CAAC,qCAAqC;AAAA,EACxE;AAAA,EACA,MAAM;AAAA,IACJ,uBAAuB;AAAA,MACrB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,QAAQ,2CAA2C,EAAE;AAAA,IACnE;AAAA,IACA,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,YAAY,CAAC,sCAAsC;AAAA,IACnD,oBAAoB,CAAC,wCAAwC;AAAA,IAC7D,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,wCAAwC;AAAA,IAC9D,oBAAoB,CAAC,6CAA6C;AAAA,IAClE,aAAa,CAAC,wCAAwC;AAAA,IACtD,kBAAkB,CAAC,UAAU;AAAA,IAC7B,WAAW,CAAC,sBAAsB;AAAA,IAClC,iBAAiB,CAAC,0CAA0C;AAAA,IAC5D,oBAAoB,CAAC,8BAA8B;AAAA,IACnD,qBAAqB,CAAC,wCAAwC;AAAA,IAC9D,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,oCAAoC;AAAA,IAC1D,wBAAwB,CAAC,sBAAsB;AAAA,IAC/C,oBAAoB,CAAC,wCAAwC;AAAA,IAC7D,qBAAqB,CAAC,mDAAmD;AAAA,IACzE,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,6CAA6C;AAAA,MAC3C;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,wBAAwB;AAAA,IAC5C,uCAAuC,CAAC,yBAAyB;AAAA,IACjE,WAAW,CAAC,gCAAgC;AAAA,IAC5C,kBAAkB,CAAC,wCAAwC;AAAA,IAC3D,mCAAmC,CAAC,gCAAgC;AAAA,IACpE,uCAAuC,CAAC,iCAAiC;AAAA,IACzE,8CAA8C;AAAA,MAC5C;AAAA,IACF;AAAA,IACA,uBAAuB,CAAC,0BAA0B;AAAA,IAClD,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,QAAQ,gDAAgD,EAAE;AAAA,IACxE;AAAA,IACA,gDAAgD;AAAA,MAC9C;AAAA,IACF;AAAA,IACA,YAAY,CAAC,uCAAuC;AAAA,IACpD,+BAA+B,CAAC,4BAA4B;AAAA,IAC5D,YAAY,CAAC,6CAA6C;AAAA,IAC1D,qBAAqB,CAAC,oDAAoD;AAAA,IAC1E,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,2BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAAA,EACA,SAAS;AAAA,IACP,4BAA4B,CAAC,0CAA0C;AAAA,IACvE,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,6BAA6B,CAAC,2CAA2C;AAAA,IACzE,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAAA,EACA,QAAQ;AAAA,IACN,QAAQ,CAAC,uCAAuC;AAAA,IAChD,aAAa,CAAC,yCAAyC;AAAA,IACvD,KAAK,CAAC,qDAAqD;AAAA,IAC3D,UAAU,CAAC,yDAAyD;AAAA,IACpE,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,YAAY,CAAC,oDAAoD;AAAA,IACjE,cAAc;AAAA,MACZ;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,sDAAsD;AAAA,IACzE,cAAc;AAAA,MACZ;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,uDAAuD;AAAA,EAClE;AAAA,EACA,cAAc;AAAA,IACZ,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,UAAU;AAAA,MACR;AAAA,MACA,CAAC;AAAA,MACD,EAAE,mBAAmB,EAAE,UAAU,eAAe,EAAE;AAAA,IACpD;AAAA,IACA,aAAa;AAAA,MACX;AAAA,IACF;AAAA,IACA,mBAAmB;AAAA,MACjB;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,uDAAuD;AAAA,IACzE,UAAU,CAAC,2DAA2D;AAAA,IACtE,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,sCAAsC;AAAA,IACzD,mBAAmB,CAAC,gDAAgD;AAAA,IACpE,qBAAqB;AAAA,MACnB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,gBAAgB,oBAAoB,EAAE;AAAA,IACpD;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,oBAAoB,CAAC,kDAAkD;AAAA,IACvE,aAAa;AAAA,MACX;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,aAAa,CAAC,iDAAiD;AAAA,EACjE;AAAA,EACA,gBAAgB;AAAA,IACd,sBAAsB,CAAC,uBAAuB;AAAA,IAC9C,gBAAgB,CAAC,6BAA6B;AAAA,EAChD;AAAA,EACA,YAAY;AAAA,IACV,4CAA4C;AAAA,MAC1C;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,uCAAuC;AAAA,MACrC;AAAA,IACF;AAAA,IACA,4BAA4B,CAAC,uBAAuB;AAAA,IACpD,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,0CAA0C;AAAA,MACxC;AAAA,IACF;AAAA,IACA,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,oCAAoC;AAAA,MAClC;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,4BAA4B,CAAC,0CAA0C;AAAA,IACvE,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,qDAAqD;AAAA,IACvE,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,yBAAyB,CAAC,uCAAuC;AAAA,IACjE,iBAAiB,CAAC,
+CAA+C;AAAA,IACjE,cAAc,CAAC,kDAAkD;AAAA,IACjE,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,eAAe;AAAA,MACb;AAAA,IACF;AAAA,IACA,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,mDAAmD;AAAA,MACjD;AAAA,IACF;AAAA,IACA,0BAA0B,CAAC,sBAAsB;AAAA,IACjD,oBAAoB;AAAA,MAClB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,mBAAmB,EAAE,QAAQ,MAAM,EAAE;AAAA,IACzC;AAAA,IACA,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,oCAAoC;AAAA,IACrD,iBAAiB,CAAC,8CAA8C;AAAA,IAChE,+CAA+C;AAAA,MAC7C;AAAA,IACF;AAAA,IACA,iCAAiC,CAAC,8BAA8B;AAAA,IAChE,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,uCAAuC;AAAA,MACrC;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,+CAA+C;AAAA,MAC7C;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,sBAAsB,CAAC,oCAAoC;AAAA,IAC3D,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,8CAA8C;AAAA,MAC5C;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,2BAA2B,CAAC,8CAA8C;AAAA,IAC1E,0BAA0B,CAAC,6CAA6C;AAAA,IACxE,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,4BAA4B,CAAC,yCAAyC;AAAA,EACxE;AAAA,EACA,YAAY;AAAA,IACV,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,qDAAqD;AAAA,IACvE,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,UAAU,CAAC,4DAA4D;AAAA,IACvE,iBAAiB,CAAC,+CAA+C;AAAA,IACjE,cAAc,CAAC,kDAAkD;AAAA,IACjE,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,eAAe;AAAA,MACb;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,mCAAmC;AAAA,IACtD,mBAAmB,CAAC,6CAA6C;AAAA,IACjE,gBAAgB,CAAC,oCAAoC;AAAA,IACrD,iBAAiB,CAAC,8CAA8C;AAAA,IAChE,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,aAAa;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAAA,EACA,iBAAiB;AAAA,IACf,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,WAAW;AAAA,MACT;AAAA,IACF;AAAA,IACA,YAAY,CAAC,iDAAiD;AAAA,EAChE;AAAA,EACA,QAAQ,EAAE,KAAK,CAAC,aAAa,EAAE;AAAA,EAC/B,OAAO;AAAA,IACL,gBAAgB,CAAC,2BAA2B;AAAA,IAC5C,QAAQ,CAAC,aAAa;AAAA,IACtB,eAAe,CAAC,gCAAgC;AAAA,IAChD,QAAQ,CAAC,yBAAyB;AAAA,IAClC,eAAe,CAAC,+CAA+C;AAAA,IAC/D,MAAM,CAAC,6BAA6B;AAAA,IACpC,KAAK,CAAC,sBAAsB;AAAA,IAC5B,YAAY,CAAC,4CAA4C;AAAA,IACzD,aAAa,CAAC,4BAA4B;AAAA,IAC1C,MAAM,CAAC,YAAY;AAAA,IACnB,cAAc,CAAC,+BAA+B;AAAA,IAC9C,aAAa,CAAC,8BAA8B;AAAA,IAC5C,aAAa,CAAC,6BAA6B;AAAA,IAC3C,WAAW,CAAC,4BAA4B;AAAA,IACxC,YAAY,CAAC,mBAAmB;AAAA,IAChC,aAAa,CAAC,oBAAoB;AAAA,IAClC,MAAM,CAAC,2BAA2B;AAAA,IAClC,QAAQ,CAAC,8BAA8B;AAAA,IACvC,QAAQ,CAAC,wBAAwB;AAAA,IACjC,eAAe,CAAC,8CAA8C;AAAA,EAChE;AAAA,EACA,KAAK;AAAA,IACH,YAAY,CAAC,sCAAsC;AAAA,IACnD,cAAc,CAAC,wCAAwC;AAAA,IACvD,WAAW,CAAC,qCAAqC;AAAA,IACjD,WAAW,CAAC,qCAAqC;AAAA,IACjD,YAAY,CAAC,sCAAsC;AAAA,IACnD,WAAW,CAAC,6CAA6C;AAAA,IACzD,SAAS,CAAC,gDAAgD;AAAA,IAC1D,WAAW,CAAC,oDAAoD;AAAA,IAChE,QAAQ,CAAC,yCAAyC;AAAA,IAClD,QAAQ,CAAC,8CAA8C;AAAA,IACvD,SAAS,CAAC,gDAAgD;AAAA,IAC1D,kBAAkB,CAAC,mDAAmD;AAAA,IACtE,WAAW,CAAC,4CAA4C;AAAA,EAC1D;AAAA,EACA,WAAW;AAAA,IACT,iBAAiB,CAAC,0BAA0B;AAAA,IAC5C,aAAa,CAAC,iCAAiC;AAAA,EACjD;AAAA,EACA,cAAc;AAAA,IACZ,qCAAqC,CAAC,8BAA8B;AAAA,IACpE,uBAAuB,CAAC,oCAAoC;AAAA,IAC5D,wBAAwB,CAAC,8CAA8C;AAAA,IACvE,mCAAmC;AAAA,MACjC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,gBAAgB,qCAAqC,EAAE;AAAA,IACrE;AAAA,IACA,wCAAwC,CAAC,iCAAiC;AAAA,IAC1E,0BAA0B,CAAC,uCAAuC;AAAA,IAClE,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,sCAAsC;AAAA,MACpC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,gBAAgB,wCAAwC,EAAE;AAAA,IACxE;AAAA,IACA,qCAAqC,CAAC,8BAA8B;AAAA,IACpE,uBAAuB,CAAC,oCAAoC;AAAA,IAC5D,wBAAwB,CAAC,8CAA8C;AAAA,IACvE,mCAAmC;AAAA,MACjC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,gBAAgB,qCAAqC,EAAE;AAAA,IACrE;AAAA,EACF;AAAA,EACA,QAAQ;AAAA,IACN,cAAc;AAAA
,MACZ;AAAA,IACF;AAAA,IACA,WAAW,CAAC,yDAAyD;AAAA,IACrE,wBAAwB,CAAC,gDAAgD;AAAA,IACzE,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,mCAAmC;AAAA,IAC5C,eAAe;AAAA,MACb;AAAA,IACF;AAAA,IACA,aAAa,CAAC,mCAAmC;AAAA,IACjD,iBAAiB,CAAC,uCAAuC;AAAA,IACzD,eAAe;AAAA,MACb;AAAA,IACF;AAAA,IACA,aAAa,CAAC,4CAA4C;AAAA,IAC1D,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,KAAK,CAAC,iDAAiD;AAAA,IACvD,YAAY,CAAC,wDAAwD;AAAA,IACrE,UAAU,CAAC,oDAAoD;AAAA,IAC/D,UAAU,CAAC,yCAAyC;AAAA,IACpD,cAAc,CAAC,yDAAyD;AAAA,IACxE,MAAM,CAAC,aAAa;AAAA,IACpB,eAAe,CAAC,qCAAqC;AAAA,IACrD,cAAc,CAAC,0DAA0D;AAAA,IACzE,qBAAqB,CAAC,2CAA2C;AAAA,IACjE,YAAY,CAAC,wDAAwD;AAAA,IACrE,mBAAmB,CAAC,yCAAyC;AAAA,IAC7D,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,0BAA0B,CAAC,kBAAkB;AAAA,IAC7C,YAAY,CAAC,wBAAwB;AAAA,IACrC,aAAa,CAAC,kCAAkC;AAAA,IAChD,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,kCAAkC;AAAA,IACtD,mBAAmB;AAAA,MACjB;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,sCAAsC;AAAA,IACvD,MAAM,CAAC,sDAAsD;AAAA,IAC7D,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,aAAa;AAAA,MACX;AAAA,IACF;AAAA,IACA,WAAW,CAAC,wDAAwD;AAAA,IACpE,QAAQ,CAAC,yDAAyD;AAAA,IAClE,QAAQ,CAAC,mDAAmD;AAAA,IAC5D,eAAe,CAAC,0DAA0D;AAAA,IAC1E,aAAa,CAAC,2CAA2C;AAAA,IACzD,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,EACF;AAAA,EACA,UAAU;AAAA,IACR,KAAK,CAAC,yBAAyB;AAAA,IAC/B,oBAAoB,CAAC,eAAe;AAAA,IACpC,YAAY,CAAC,mCAAmC;AAAA,EAClD;AAAA,EACA,UAAU;AAAA,IACR,QAAQ,CAAC,gBAAgB;AAAA,IACzB,WAAW;AAAA,MACT;AAAA,MACA,EAAE,SAAS,EAAE,gBAAgB,4BAA4B,EAAE;AAAA,IAC7D;AAAA,EACF;AAAA,EACA,MAAM;AAAA,IACJ,KAAK,CAAC,WAAW;AAAA,IACjB,gBAAgB,CAAC,eAAe;AAAA,IAChC,YAAY,CAAC,cAAc;AAAA,IAC3B,QAAQ,CAAC,UAAU;AAAA,IACnB,MAAM,CAAC,OAAO;AAAA,EAChB;AAAA,EACA,YAAY;AAAA,IACV,cAAc,CAAC,qCAAqC;AAAA,IACpD,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,0CAA0C;AAAA,IAC7D,iBAAiB,CAAC,kCAAkC;AAAA,IACpD,eAAe,CAAC,8CAA8C;AAAA,IAC9D,+BAA+B,CAAC,qCAAqC;AAAA,IACrE,iBAAiB,CAAC,2CAA2C;AAAA,IAC7D,0BAA0B,CAAC,sBAAsB;AAAA,IACjD,YAAY,CAAC,4BAA4B;AAAA,IACzC,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,wDAAwD;AAAA,IAC1E,kBAAkB;AAAA,MAChB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,cAAc,+BAA+B,EAAE;AAAA,IAC7D;AAAA,IACA,iBAAiB,CAAC,wDAAwD;AAAA,IAC1E,kBAAkB,CAAC,wCAAwC;AAAA,IAC3D,2BAA2B,CAAC,uBAAuB;AAAA,IACnD,aAAa,CAAC,6BAA6B;AAAA,IAC3C,aAAa,CAAC,kCAAkC;AAAA,IAChD,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,cAAc,CAAC,oCAAoC;AAAA,EACrD;AAAA,EACA,MAAM;AAAA,IACJ,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,WAAW,CAAC,mCAAmC;AAAA,IAC/C,kBAAkB,CAAC,gDAAgD;AAAA,IACnE,kBAAkB,CAAC,mCAAmC;AAAA,IACtD,wBAAwB,CAAC,oCAAoC;AAAA,IAC7D,8BAA8B,CAAC,2CAA2C;AAAA,IAC1E,oCAAoC;AAAA,MAClC;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,8BAA8B;AAAA,IACjD,eAAe,CAAC,wBAAwB;AAAA,IACxC,QAAQ,CAAC,oBAAoB;AAAA,IAC7B,eAAe,CAAC,oCAAoC;AAAA,IACpD,6CAA6C;AAAA,MAC3C;AAAA,IACF;AAAA,IACA,KAAK,CAAC,iBAAiB;AAAA,IACvB,mCAAmC,CAAC,kCAAkC;AAAA,IACtE,sBAAsB,CAAC,wCAAwC;AAAA,IAC/D,YAAY,CAAC,iCAAiC;AAAA,IAC9C,wBAAwB,CAAC,wCAAwC;AAAA,IACjE,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,MAAM,CAAC,oBAAoB;AAAA,IAC3B,sBAAsB,CAAC,+BAA+B;AAAA,IACtD,kBAAkB,CAAC,wBAAwB;AAAA,IAC3C,uBAAuB,CAAC,oCAAoC;AAAA,IAC5D,0BAA0B,CAAC,gBAAgB;AAAA,IAC3C,aAAa,CAAC,4BAA4B;AAAA,IAC1C,qBAAqB,CAAC,mDAAmD;AAAA,IACzE,aAAa,CAAC,yBAAyB;AAAA,IACvC,qCAAqC,CAAC,4BAA4B;AAAA,IAClE,0BAA0B,CAAC,uCAAuC;AAAA,IAClE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,eAAe,CAAC,iDAAiD;AAAA,IACjE,wBAAwB,CAAC,6BAA6B;AAAA,IACtD,mBAAmB,CAAC,gCAAgC;AAAA,IACpD,0BAA0B,CAAC,mCAAmC;AAAA
,IAC9D,uBAAuB,CAAC,4CAA4C;AAAA,IACpE,cAAc,CAAC,uBAAuB;AAAA,IACtC,aAAa,CAAC,wCAAwC;AAAA,IACtD,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,cAAc,CAAC,uCAAuC;AAAA,IACtD,yBAAyB,CAAC,2CAA2C;AAAA,IACrE,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,4CAA4C;AAAA,MAC1C;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,sBAAsB,CAAC,wCAAwC;AAAA,IAC/D,yCAAyC;AAAA,MACvC;AAAA,IACF;AAAA,IACA,aAAa,CAAC,sCAAsC;AAAA,IACpD,QAAQ,CAAC,mBAAmB;AAAA,IAC5B,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,kDAAkD;AAAA,IACtE,eAAe,CAAC,mCAAmC;AAAA,IACnD,2BAA2B,CAAC,0CAA0C;AAAA,EACxE;AAAA,EACA,UAAU;AAAA,IACR,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,0CAA0C;AAAA,MACxC;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,8CAA8C;AAAA,MAC5C;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,YAAY,2CAA2C,EAAE;AAAA,IACvE;AAAA,IACA,6DAA6D;AAAA,MAC3D;AAAA,MACA,CAAC;AAAA,MACD;AAAA,QACE,SAAS;AAAA,UACP;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IACA,yDAAyD;AAAA,MACvD;AAAA,IACF;AAAA,IACA,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,4CAA4C;AAAA,MAC1C;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,mBAAmB;AAAA,MACjB;AAAA,IACF;AAAA,IACA,uCAAuC;AAAA,MACrC;AAAA,IACF;AAAA,IACA,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,4DAA4D;AAAA,MAC1D;AAAA,IACF;AAAA,IACA,uDAAuD;AAAA,MACrD;AAAA,IACF;AAAA,IACA,+CAA+C;AAAA,MAC7C;AAAA,IACF;AAAA,IACA,kCAAkC,CAAC,oBAAoB;AAAA,IACvD,6BAA6B,CAAC,0BAA0B;AAAA,IACxD,qBAAqB,CAAC,gCAAgC;AAAA,IACtD,oCAAoC;AAAA,MAClC;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,2CAA2C;AAAA,MACzC;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AAAA,EACA,UAAU;AAAA,IACR,iBAAiB,CAAC,qDAAqD;AAAA,IACvE,YAAY,CAAC,0CAA0C;AAAA,IACvD,cAAc,CAAC,qCAAqC;AAAA,IACpD,4BAA4B,CAAC,qBAAqB;AAAA,IAClD,cAAc,CAAC,2BAA2B;AAAA,IAC1C,eAAe,CAAC,qCAAqC;AAAA,IACrD,QAAQ,CAAC,+BAA+B;AAAA,IACxC,YAAY,CAAC,0CAA0C;AAAA,IACvD,cAAc,CAAC,sCAAsC;AAAA,IACrD,KAAK,CAAC,4BAA4B;AAAA,IAClC,SAAS,CAAC,uCAAuC;AAAA,IACjD,WAAW,CAAC,mCAAmC;AAAA,IAC/C,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,WAAW,CAAC,yCAAyC;AAAA,IACrD,mBAAmB,CAAC,0CAA0C;AAAA,IAC9D,aAAa,CAAC,oCAAoC;AAAA,IAClD,YAAY,CAAC,0BAA0B;AAAA,IACvC,aAAa,CAAC,oCAAoC;AAAA,IAClD,aAAa,CAAC,gCAAgC;AAAA,IAC9C,UAAU,CAAC,8CAA8C;AAAA,IACzD,YAAY,CAAC,0CAA0C;AAAA,IACvD,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,8BAA8B;AAAA,IACvC,YAAY,CAAC,yCAAyC;AAAA,IACtD,cAAc,CAAC,qCAAqC;AAAA,EACtD;AAAA,EACA,OAAO;AAAA,IACL,eAAe,CAAC,qDAAqD;AAAA,IACrE,QAAQ,CAAC,kCAAkC;AAAA,IAC3C,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,cAAc,CAAC,wDAAwD;AAAA,IACvE,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,eAAe;AAAA,MACb;AAAA,IACF;AAAA,IACA,KAAK,CAAC,+CAA+C;AAAA,IACrD,WAAW;AAAA,MACT;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,uDAAuD;AAAA,IAC1E,MAAM,CAAC,iCAAiC;AAAA,IACxC,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,aAAa,CAAC,uDAAuD;AAAA,IACrE,WAAW,CAAC,qDAAqD;AAAA,IACjE,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,2BAA2B,CAAC,0CAA0C;AAAA,IACtE,aAAa,CAAC,uDAAuD;AAAA,IACrE,OAAO,CAAC,qDAAqD;AAAA,IAC7D,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,cAAc;AAAA,MACZ;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,iDAAiD;AAAA,IAC1D,cAAc;AAAA,MACZ;AAAA,IACF;AAAA,IACA,cAAc;AAAA,MACZ;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MAC
nB;AAAA,IACF;AAAA,EACF;AAAA,EACA,WAAW,EAAE,KAAK,CAAC,iBAAiB,EAAE;AAAA,EACtC,WAAW;AAAA,IACT,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,qCAAqC;AAAA,MACnC;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAAA,IACF;AAAA,IACA,cAAc,CAAC,2DAA2D;AAAA,IAC1E,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AAAA,EACA,OAAO;AAAA,IACL,kBAAkB;AAAA,MAChB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,sCAAsC,EAAE;AAAA,IAC/D;AAAA,IACA,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,OAAO;AAAA,IACtB;AAAA,IACA,iBAAiB,CAAC,oDAAoD;AAAA,IACtE,wBAAwB;AAAA,MACtB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,WAAW;AAAA,IAC1B;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,QAAQ;AAAA,IACvB;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,QAAQ;AAAA,IACvB;AAAA,IACA,mBAAmB,CAAC,oDAAoD;AAAA,IACxE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,6CAA6C;AAAA,IAChE,gBAAgB,CAAC,mDAAmD;AAAA,IACpE,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,sCAAsC;AAAA,IACvD,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,oBAAoB,CAAC,2CAA2C;AAAA,IAChE,iBAAiB,CAAC,iCAAiC;AAAA,IACnD,kBAAkB,CAAC,wCAAwC;AAAA,IAC3D,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,uCAAuC;AAAA,IAC7D,4BAA4B,CAAC,kBAAkB;AAAA,IAC/C,YAAY,CAAC,kCAAkC;AAAA,IAC/C,aAAa,CAAC,wBAAwB;AAAA,IACtC,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,4BAA4B,CAAC,2CAA2C;AAAA,IACxE,kBAAkB,CAAC,2BAA2B;AAAA,IAC9C,uBAAuB,CAAC,6CAA6C;AAAA,IACrE,iBAAiB,CAAC,kCAAkC;AAAA,IACpD,eAAe,CAAC,qCAAqC;AAAA,IACrD,mBAAmB,CAAC,qCAAqC;AAAA,IACzD,qBAAqB,CAAC,4CAA4C;AAAA,IAClE,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,eAAe,CAAC,kCAAkC;AAAA,IAClD,mBAAmB;AAAA,MACjB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,uCAAuC,EAAE;AAAA,IAChE;AAAA,IACA,uCAAuC;AAAA,MACrC;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,8BAA8B;AAAA,IACvC,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,sDAAsD;AAAA,IACvE,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,oDAAoD;AAAA,IAC1E,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,4CAA4C;AAAA,IAC9D,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,YAAY,CAAC,8CAA8C;AAAA,IAC3D,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,0CAA0C;AAAA,IAC7D,iBAAiB,CAAC,oCAAoC;AAAA,IACtD,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,eAAe,CAAC,oDAAoD;AAAA,IACpE,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,oDAAoD;AAAA,IACxE,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,eAAe,CAAC,8CAA8C;AAAA,IAC9D,+BAA+B;AAAA,MAC7B;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,kCAAkC;AAAA,IACtD,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,MACf;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,wBAAwB,EAAE;AAAA,IACjD;AAAA,IACA,wBAAwB,CAAC,yCAAyC;AAAA,IAClE,wBAAwB,CAAC,yCAAyC;AAAA,IAClE,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,+BAA+B;AAAA,IAClD,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,sBAAsB;AAAA,MACpB;AAA
A,IACF;AAAA,IACA,KAAK,CAAC,2BAA2B;AAAA,IACjC,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,oBAAoB,CAAC,wCAAwC;AAAA,IAC7D,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,cAAc,CAAC,kCAAkC;AAAA,IACjD,oCAAoC;AAAA,MAClC;AAAA,IACF;AAAA,IACA,aAAa,CAAC,mDAAmD;AAAA,IACjE,WAAW,CAAC,6CAA6C;AAAA,IACzD,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,mDAAmD;AAAA,IACpE,WAAW,CAAC,0CAA0C;AAAA,IACtD,uBAAuB,CAAC,gDAAgD;AAAA,IACxE,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,yBAAyB,CAAC,gDAAgD;AAAA,IAC1E,WAAW,CAAC,yCAAyC;AAAA,IACrD,wBAAwB,CAAC,iDAAiD;AAAA,IAC1E,kBAAkB,CAAC,iDAAiD;AAAA,IACpE,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,4BAA4B,CAAC,6CAA6C;AAAA,IAC1E,YAAY,CAAC,2CAA2C;AAAA,IACxD,sBAAsB,CAAC,8CAA8C;AAAA,IACrE,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,cAAc,CAAC,yCAAyC;AAAA,IACxD,eAAe,CAAC,uDAAuD;AAAA,IACvE,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,qBAAqB;AAAA,MACnB;AAAA,IACF;AAAA,IACA,gBAAgB;AAAA,MACd;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,+CAA+C;AAAA,IACrE,kBAAkB,CAAC,2CAA2C;AAAA,IAC9D,eAAe,CAAC,uCAAuC;AAAA,IACvD,gBAAgB,CAAC,0BAA0B;AAAA,IAC3C,UAAU,CAAC,iCAAiC;AAAA,IAC5C,eAAe,CAAC,mDAAmD;AAAA,IACnE,qBAAqB,CAAC,wCAAwC;AAAA,IAC9D,uBAAuB,CAAC,+CAA+C;AAAA,IACvE,gCAAgC;AAAA,MAC9B;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,4CAA4C;AAAA,IAChE,WAAW,CAAC,kCAAkC;AAAA,IAC9C,sBAAsB,CAAC,wCAAwC;AAAA,IAC/D,YAAY,CAAC,iDAAiD;AAAA,IAC9D,iBAAiB,CAAC,sDAAsD;AAAA,IACxE,iBAAiB,CAAC,+CAA+C;AAAA,IACjE,gBAAgB,CAAC,iDAAiD;AAAA,IAClE,iBAAiB,CAAC,oCAAoC;AAAA,IACtD,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,qCAAqC;AAAA,MACnC;AAAA,IACF;AAAA,IACA,aAAa,CAAC,iDAAiD;AAAA,IAC/D,iBAAiB,CAAC,qDAAqD;AAAA,IACvE,qCAAqC;AAAA,MACnC;AAAA,IACF;AAAA,IACA,UAAU,CAAC,yCAAyC;AAAA,IACpD,YAAY,CAAC,2CAA2C;AAAA,IACxD,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,eAAe,CAAC,qCAAqC;AAAA,IACrD,cAAc,CAAC,oCAAoC;AAAA,IACnD,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,yCAAyC;AAAA,IAC7D,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,2BAA2B,CAAC,oCAAoC;AAAA,IAChE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,aAAa,CAAC,mCAAmC;AAAA,IACjD,kBAAkB,CAAC,wCAAwC;AAAA,IAC3D,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,gBAAgB,CAAC,gCAAgC;AAAA,IACjD,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,iBAAiB,CAAC,uCAAuC;AAAA,IACzD,0BAA0B,CAAC,iBAAiB;AAAA,IAC5C,YAAY,CAAC,uBAAuB;AAAA,IACpC,aAAa,CAAC,6BAA6B;AAAA,IAC3C,WAAW,CAAC,iCAAiC;AAAA,IAC7C,iBAAiB,CAAC,uCAAuC;AAAA,IACzD,qCAAqC,CAAC,kCAAkC;AAAA,IACxE,eAAe,CAAC,qCAAqC;AAAA,IACrD,iBAAiB,CAAC,wCAAwC;AAAA,IAC1D,YAAY,CAAC,mBAAmB;AAAA,IAChC,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,mBAAmB;AAAA,MACjB;AAAA,IACF;AAAA,IACA,cAAc,CAAC,oCAAoC;AAAA,IACnD,mBAAmB,CAAC,2CAA2C;AAAA,IAC/D,UAAU,CAAC,gCAAgC;AAAA,IAC3C,WAAW,CAAC,iCAAiC;AAAA,IAC7C,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,cAAc,CAAC,iCAAiC;AAAA,IAChD,OAAO,CAAC,mCAAmC;AAAA,IAC3C,eAAe,CAAC,2CAA2C;AAAA,IAC3D,aAAa,CAAC,kDAAkD;AAAA,IAChE,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,OAAO;AAAA,IACtB;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,WAAW;AAAA,IAC1B;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,QAAQ;AAAA,IACvB;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,QAAQ;AAAA,IACvB;AAAA,IACA,cAAc,CAAC,qDAAqD;AAAA,IACpE,kBAAkB,CAAC,kCAAkC;AAAA,IACrD,mBAAmB,CAAC,yCAAyC;AAAA,IAC7D,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,OAAO;AAAA,IACtB;AAAA,IACA,wBAAwB;AAAA,MACtB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,WAAW;AAAA,IAC1B;AAAA,IACA,2BA
A2B;AAAA,MACzB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,QAAQ;AAAA,IACvB;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,WAAW,QAAQ;AAAA,IACvB;AAAA,IACA,iBAAiB,CAAC,kDAAkD;AAAA,IACpE,UAAU,CAAC,qCAAqC;AAAA,IAChD,QAAQ,CAAC,6BAA6B;AAAA,IACtC,wBAAwB;AAAA,MACtB;AAAA,IACF;AAAA,IACA,qBAAqB,CAAC,mDAAmD;AAAA,IACzE,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,iCAAiC,CAAC,iCAAiC;AAAA,IACnE,kBAAkB;AAAA,MAChB;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,uCAAuC;AAAA,IAC1D,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,eAAe,CAAC,mDAAmD;AAAA,IACnE,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,iDAAiD;AAAA,IACrE,4BAA4B;AAAA,MAC1B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,6BAA6B,EAAE;AAAA,IACtD;AAAA,IACA,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,eAAe,CAAC,6CAA6C;AAAA,IAC7D,4BAA4B;AAAA,MAC1B;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,MACA,EAAE,SAAS,6BAA6B;AAAA,IAC1C;AAAA,EACF;AAAA,EACA,QAAQ;AAAA,IACN,MAAM,CAAC,kBAAkB;AAAA,IACzB,SAAS,CAAC,qBAAqB;AAAA,IAC/B,uBAAuB,CAAC,oBAAoB;AAAA,IAC5C,QAAQ,CAAC,oBAAoB;AAAA,IAC7B,OAAO,CAAC,0BAA0B;AAAA,IAClC,QAAQ,CAAC,oBAAoB;AAAA,IAC7B,OAAO,CAAC,mBAAmB;AAAA,EAC7B;AAAA,EACA,gBAAgB;AAAA,IACd,UAAU;AAAA,MACR;AAAA,IACF;AAAA,IACA,yBAAyB;AAAA,MACvB;AAAA,IACF;AAAA,IACA,kBAAkB,CAAC,wCAAwC;AAAA,IAC3D,mBAAmB,CAAC,kDAAkD;AAAA,IACtE,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,aAAa;AAAA,MACX;AAAA,IACF;AAAA,EACF;AAAA,EACA,oBAAoB;AAAA,IAClB,kCAAkC;AAAA,MAChC;AAAA,IACF;AAAA,IACA,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,0BAA0B,CAAC,+CAA+C;AAAA,IAC1E,0BAA0B;AAAA,MACxB;AAAA,IACF;AAAA,EACF;AAAA,EACA,OAAO;AAAA,IACL,mCAAmC;AAAA,MACjC;AAAA,IACF;AAAA,IACA,oCAAoC;AAAA,MAClC;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,iCAAiC;AAAA,MAC/B;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,wBAAwB;AAAA,IACjC,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,uBAAuB,CAAC,gDAAgD;AAAA,IACxE,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,aAAa,CAAC,sCAAsC;AAAA,IACpD,WAAW,CAAC,mCAAmC;AAAA,IAC/C,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,2BAA2B;AAAA,MACzB;AAAA,IACF;AAAA,IACA,MAAM,CAAC,uBAAuB;AAAA,IAC9B,gBAAgB,CAAC,yCAAyC;AAAA,IAC1D,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,sBAAsB,CAAC,+CAA+C;AAAA,IACtE,0BAA0B,CAAC,iBAAiB;AAAA,IAC5C,kBAAkB,CAAC,2CAA2C;AAAA,IAC9D,6BAA6B;AAAA,MAC3B;AAAA,IACF;AAAA,IACA,mBAAmB,CAAC,4CAA4C;AAAA,IAChE,gBAAgB,CAAC,yCAAyC;AAAA,IAC1D,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,oBAAoB;AAAA,MAClB;AAAA,IACF;AAAA,IACA,iBAAiB;AAAA,MACf;AAAA,IACF;AAAA,IACA,8BAA8B;AAAA,MAC5B;AAAA,IACF;AAAA,IACA,uBAAuB;AAAA,MACrB;AAAA,IACF;AAAA,IACA,aAAa,CAAC,qCAAqC;AAAA,EACrD;AAAA,EACA,OAAO;AAAA,IACL,0BAA0B;AAAA,MACxB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,8BAA8B,EAAE;AAAA,IACvD;AAAA,IACA,8BAA8B,CAAC,mBAAmB;AAAA,IAClD,sCAAsC,CAAC,4BAA4B;AAAA,IACnE,OAAO,CAAC,6BAA6B;AAAA,IACrC,cAAc,CAAC,6BAA6B;AAAA,IAC5C,uBAAuB,CAAC,+CAA+C;AAAA,IACvE,sCAAsC,CAAC,gCAAgC;AAAA,IACvE,8BAA8B;AAAA,MAC5B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,kCAAkC,EAAE;AAAA,IAC3D;AAAA,IACA,kCAAkC,CAAC,qBAAqB;AAAA,IACxD,oCAAoC;AAAA,MAClC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,wCAAwC,EAAE;AAAA,IACjE;AAAA,IACA,wCAAwC,CAAC,iBAAiB;AAAA,IAC1D,yCAAyC,CAAC,6BAA6B;AAAA,IACvE,6BAA6B;AAAA,MAC3B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,iCAAiC,EAAE;AAAA,IAC1D;AAAA,IACA,iCAAiC,CAAC,qBAAqB;AAAA,IACvD,8BAA8B;AAAA,MAC5B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,kCAAkC,EAAE;AAAA,IAC3D;AAAA,IACA,kCAAkC,CAAC,oCAAoC;AAAA,IACvE,oCAAoC;AAAA,MAClC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,wCAAwC,EAAE;AAAA,IACjE;AAAA,IACA,wCAAwC,CAAC,4BAA4B;AAAA,IACrE,yCAAyC,CAAC,8BAA8B;AAAA,IACxE
,yCAAyC;AAAA,MACvC;AAAA,IACF;AAAA,IACA,QAAQ,CAAC,gCAAgC;AAAA,IACzC,kBAAkB,CAAC,WAAW;AAAA,IAC9B,eAAe,CAAC,uBAAuB;AAAA,IACvC,mBAAmB,CAAC,iCAAiC;AAAA,IACrD,2BAA2B;AAAA,MACzB;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,+BAA+B,EAAE;AAAA,IACxD;AAAA,IACA,+BAA+B,CAAC,iCAAiC;AAAA,IACjE,iCAAiC;AAAA,MAC/B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,qCAAqC,EAAE;AAAA,IAC9D;AAAA,IACA,qCAAqC,CAAC,yBAAyB;AAAA,IAC/D,sCAAsC;AAAA,MACpC;AAAA,IACF;AAAA,IACA,MAAM,CAAC,YAAY;AAAA,IACnB,4BAA4B;AAAA,MAC1B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,gCAAgC,EAAE;AAAA,IACzD;AAAA,IACA,gCAAgC,CAAC,kBAAkB;AAAA,IACnD,4BAA4B;AAAA,MAC1B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,gCAAgC,EAAE;AAAA,IACzD;AAAA,IACA,gCAAgC,CAAC,kBAAkB;AAAA,IACnD,6BAA6B;AAAA,MAC3B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,iCAAiC,EAAE;AAAA,IAC1D;AAAA,IACA,iCAAiC,CAAC,qBAAqB;AAAA,IACvD,mCAAmC,CAAC,qBAAqB;AAAA,IACzD,sBAAsB,CAAC,iCAAiC;AAAA,IACxD,sBAAsB,CAAC,iCAAiC;AAAA,IACxD,6BAA6B;AAAA,MAC3B;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,iCAAiC,EAAE;AAAA,IAC1D;AAAA,IACA,iCAAiC,CAAC,oBAAoB;AAAA,IACtD,oBAAoB,CAAC,gCAAgC;AAAA,IACrD,kCAAkC;AAAA,MAChC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,sCAAsC,EAAE;AAAA,IAC/D;AAAA,IACA,sCAAsC,CAAC,yBAAyB;AAAA,IAChE,uBAAuB,CAAC,4BAA4B;AAAA,IACpD,mCAAmC;AAAA,MACjC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,uCAAuC,EAAE;AAAA,IAChE;AAAA,IACA,uCAAuC,CAAC,gBAAgB;AAAA,IACxD,wCAAwC,CAAC,2BAA2B;AAAA,IACpE,2BAA2B,CAAC,uCAAuC;AAAA,IACnE,wCAAwC,CAAC,4BAA4B;AAAA,IACrE,2BAA2B,CAAC,wCAAwC;AAAA,IACpE,2CAA2C;AAAA,MACzC;AAAA,MACA,CAAC;AAAA,MACD,EAAE,SAAS,CAAC,SAAS,+CAA+C,EAAE;AAAA,IACxE;AAAA,IACA,+CAA+C;AAAA,MAC7C;AAAA,IACF;AAAA,IACA,SAAS,CAAC,gCAAgC;AAAA,IAC1C,UAAU,CAAC,mCAAmC;AAAA,IAC9C,qBAAqB,CAAC,aAAa;AAAA,EACrC;AACF;AACA,IAAI,oBAAoB;;;ACz0DxB,IAAM,qBAAqC,oBAAI,IAAI;AACnD,WAAW,CAAC,OAAO,SAAS,KAAK,OAAO,QAAQ,iBAAS,GAAG;AAC1D,aAAW,CAAC,YAAY,QAAQ,KAAK,OAAO,QAAQ,SAAS,GAAG;AAC9D,UAAM,CAAC,OAAO,UAAU,WAAW,IAAI;AACvC,UAAM,CAAC,QAAQ,GAAG,IAAI,MAAM,MAAM,GAAG;AACrC,UAAM,mBAAmB,OAAO;AAAA,MAC9B;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,MACA;AAAA,IACF;AACA,QAAI,CAAC,mBAAmB,IAAI,KAAK,GAAG;AAClC,yBAAmB,IAAI,OAAuB,oBAAI,IAAI,CAAC;AAAA,IACzD;AACA,uBAAmB,IAAI,KAAK,EAAE,IAAI,YAAY;AAAA,MAC5C;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACF;AACA,IAAM,UAAU;AAAA,EACd,IAAI,EAAE,SAAS,OAAO,MAAM,GAAG,YAAY;AACzC,QAAI,MAAM,UAAU,GAAG;AACrB,aAAO,MAAM,UAAU;AAAA,IACzB;AACA,UAAM,EAAE,aAAa,iBAAiB,IAAI,mBAAmB,IAAI,KAAK,EAAE,IAAI,UAAU;AACtF,QAAI,aAAa;AACf,YAAM,UAAU,IAAI;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF,OAAO;AACL,YAAM,UAAU,IAAI,QAAQ,QAAQ,SAAS,gBAAgB;AAAA,IAC/D;AACA,WAAO,MAAM,UAAU;AAAA,EACzB;AACF;AACA,SAAS,mBAAmB,SAAS;AACnC,QAAM,aAAa,CAAC;AACpB,aAAW,SAAS,mBAAmB,KAAK,GAAG;AAC7C,eAAW,KAAK,IAAI,IAAI,MAAM,EAAE,SAAS,OAAO,OAAO,CAAC,EAAE,GAAG,OAAO;AAAA,EACtE;AACA,SAAO;AACT;AACA,SAAS,SAAS,SAAS,OAAO,YAAY,UAAU,aAAa;AACnE,QAAM,sBAAsB,QAAQ,QAAQ,SAAS,QAAQ;AAC7D,WAAS,mBAAmB,MAAM;AAChC,QAAI,UAAU,oBAAoB,SAAS,MAAM,GAAG,IAAI;AACxD,QAAI,YAAY,WAAW;AACzB,gBAAU,OAAO,OAAO,CAAC,GAAG,SAAS;AAAA,QACnC,MAAM,QAAQ,YAAY,SAAS;AAAA,QACnC,CAAC,YAAY,SAAS,GAAG;AAAA,MAC3B,CAAC;AACD,aAAO,oBAAoB,OAAO;AAAA,IACpC;AACA,QAAI,YAAY,SAAS;AACvB,YAAM,CAAC,UAAU,aAAa,IAAI,YAAY;AAC9C,cAAQ,IAAI;AAAA,QACV,WAAW,SAAS,4CAA4C,YAAY;AAAA,MAC9E;AAAA,IACF;AACA,QAAI,YAAY,YAAY;AAC1B,cAAQ,IAAI,KAAK,YAAY,UAAU;AAAA,IACzC;AACA,QAAI,YAAY,mBAAmB;AACjC,YAAM,WAAW,oBAAoB,SAAS,MAAM,GAAG,IAAI;AAC3D,iBAAW,CAAC,MAAM,KAAK,KAAK,OAAO;AAAA,QACjC,YAAY;AAAA,MACd,GAAG;AACD,YAAI,QAAQ,UAAU;AACpB,kBAAQ,IAAI;AAAA,YACV,IAAI,8CAA8C,SAAS,uBAAuB;AAAA,UACpF;AACA,cAAI,EAAE,SAAS,WAAW;AACxB,qBAAS,KAAK,IAAI,SAAS
,IAAI;AAAA,UACjC;AACA,iBAAO,SAAS,IAAI;AAAA,QACtB;AAAA,MACF;AACA,aAAO,oBAAoB,QAAQ;AAAA,IACrC;AACA,WAAO,oBAAoB,GAAG,IAAI;AAAA,EACpC;AACA,SAAO,OAAO,OAAO,iBAAiB,mBAAmB;AAC3D;;;ACzFA,SAAS,oBAAoB,SAAS;AACpC,QAAM,MAAM,mBAAmB,OAAO;AACtC,SAAO;AAAA,IACL,MAAM;AAAA,EACR;AACF;AACA,oBAAoB,UAAU;AAC9B,SAAS,0BAA0B,SAAS;AAC1C,QAAM,MAAM,mBAAmB,OAAO;AACtC,SAAO;AAAA,IACL,GAAG;AAAA,IACH,MAAM;AAAA,EACR;AACF;AACA,0BAA0B,UAAU;", + "names": [] +} diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/LICENSE b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/LICENSE new file mode 100644 index 0000000..57bee5f --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/LICENSE @@ -0,0 +1,7 @@ +MIT License Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/README.md b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/README.md new file mode 100644 index 0000000..c48ce42 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/README.md @@ -0,0 +1,65 @@ +# types.ts + +> Shared TypeScript definitions for Octokit projects + +[![@latest](https://img.shields.io/npm/v/@octokit/types.svg)](https://www.npmjs.com/package/@octokit/types) +[![Build Status](https://github.com/octokit/types.ts/workflows/Test/badge.svg)](https://github.com/octokit/types.ts/actions?workflow=Test) + + + +- [Usage](#usage) +- [Examples](#examples) + - [Get parameter and response data types for a REST API endpoint](#get-parameter-and-response-data-types-for-a-rest-api-endpoint) + - [Get response types from endpoint methods](#get-response-types-from-endpoint-methods) +- [Contributing](#contributing) +- [License](#license) + + + +## Usage + +See all exported types at https://octokit.github.io/types.ts + +## Examples + +### Get parameter and response data types for a REST API endpoint + +```ts +import { Endpoints } from "@octokit/types"; + +type listUserReposParameters = + Endpoints["GET /repos/{owner}/{repo}"]["parameters"]; +type listUserReposResponse = Endpoints["GET /repos/{owner}/{repo}"]["response"]; + +async function listRepos( + options: listUserReposParameters +): listUserReposResponse["data"] { + // ... 
+} +``` + +### Get response types from endpoint methods + +```ts +import { + GetResponseTypeFromEndpointMethod, + GetResponseDataTypeFromEndpointMethod, +} from "@octokit/types"; +import { Octokit } from "@octokit/rest"; + +const octokit = new Octokit(); +type CreateLabelResponseType = GetResponseTypeFromEndpointMethod< + typeof octokit.issues.createLabel +>; +type CreateLabelResponseDataType = GetResponseDataTypeFromEndpointMethod< + typeof octokit.issues.createLabel +>; +``` + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/AuthInterface.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/AuthInterface.d.ts new file mode 100644 index 0000000..8b39d61 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/AuthInterface.d.ts @@ -0,0 +1,31 @@ +import { EndpointOptions } from "./EndpointOptions"; +import { OctokitResponse } from "./OctokitResponse"; +import { RequestInterface } from "./RequestInterface"; +import { RequestParameters } from "./RequestParameters"; +import { Route } from "./Route"; +/** + * Interface to implement complex authentication strategies for Octokit. + * An object Implementing the AuthInterface can directly be passed as the + * `auth` option in the Octokit constructor. + * + * For the official implementations of the most common authentication + * strategies, see https://github.com/octokit/auth.js + */ +export interface AuthInterface { + (...args: AuthOptions): Promise; + hook: { + /** + * Sends a request using the passed `request` instance + * + * @param {object} endpoint Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (request: RequestInterface, options: EndpointOptions): Promise>; + /** + * Sends a request using the passed `request` instance + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (request: RequestInterface, route: Route, parameters?: RequestParameters): Promise>; + }; +} diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts new file mode 100644 index 0000000..ccd453a --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts @@ -0,0 +1,21 @@ +import { RequestHeaders } from "./RequestHeaders"; +import { RequestMethod } from "./RequestMethod"; +import { RequestParameters } from "./RequestParameters"; +import { Url } from "./Url"; +/** + * The `.endpoint()` method is guaranteed to set all keys defined by RequestParameters + * as well as the method property. 
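A minimal sketch of an object that satisfies the `EndpointDefaults` type declared just below; the concrete values (base URL, accept header, user agent) are illustrative assumptions, not defaults shipped by Octokit:

```ts
import { EndpointDefaults } from "@octokit/types";

// Illustrative only: every field EndpointDefaults marks as required
// (baseUrl, method, headers.accept, headers["user-agent"], mediaType.format)
// is present; `url` stays optional.
const defaults: EndpointDefaults = {
  baseUrl: "https://api.github.com",
  method: "GET",
  headers: {
    accept: "application/vnd.github.v3+json",
    "user-agent": "my-app/1.0.0",
  },
  mediaType: {
    format: "",
  },
};
```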
+ */ +export type EndpointDefaults = RequestParameters & { + baseUrl: Url; + method: RequestMethod; + url?: Url; + headers: RequestHeaders & { + accept: string; + "user-agent": string; + }; + mediaType: { + format: string; + previews?: string[]; + }; +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/EndpointInterface.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/EndpointInterface.d.ts new file mode 100644 index 0000000..d7b4009 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/EndpointInterface.d.ts @@ -0,0 +1,65 @@ +import { EndpointDefaults } from "./EndpointDefaults"; +import { RequestOptions } from "./RequestOptions"; +import { RequestParameters } from "./RequestParameters"; +import { Route } from "./Route"; +import { Endpoints } from "./generated/Endpoints"; +export interface EndpointInterface { + /** + * Transforms a GitHub REST API endpoint into generic request options + * + * @param {object} endpoint Must set `url` unless it's set defaults. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (options: O & { + method?: string; + } & ("url" extends keyof D ? { + url?: string; + } : { + url: string; + })): RequestOptions & Pick; + /** + * Transforms a GitHub REST API endpoint into generic request options + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (route: keyof Endpoints | R, parameters?: P): (R extends keyof Endpoints ? Endpoints[R]["request"] : RequestOptions) & Pick; + /** + * Object with current default route and parameters + */ + DEFAULTS: D & EndpointDefaults; + /** + * Returns a new `endpoint` interface with new defaults + */ + defaults: (newDefaults: O) => EndpointInterface; + merge: { + /** + * Merges current endpoint defaults with passed route and parameters, + * without transforming them into request options. + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + * + */ + (route: keyof Endpoints | R, parameters?: P): D & (R extends keyof Endpoints ? Endpoints[R]["request"] & Endpoints[R]["parameters"] : EndpointDefaults) & P; + /** + * Merges current endpoint defaults with passed route and parameters, + * without transforming them into request options. + * + * @param {object} endpoint Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ +
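The `EndpointInterface` overloads above are easier to follow with a concrete call. A minimal sketch, assuming the runtime implementation of this interface from `@octokit/endpoint` is available alongside these types; the hostname and parameters are placeholders:

```ts
import { endpoint } from "@octokit/endpoint";

// Route-string form: "METHOD /path" plus parameters; returns generic
// request options ({ method, url, headers, ... }) without sending anything.
const options = endpoint("GET /orgs/{org}/repos", {
  org: "octokit",
  type: "private",
});

// defaults() returns a new endpoint interface with merged defaults;
// merge() combines defaults with a route without producing request options.
const myEndpoint = endpoint.defaults({
  baseUrl: "https://github.example.com/api/v3",
});
const merged = myEndpoint.merge("GET /orgs/{org}", { org: "my-org" });
```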

(options: P): EndpointDefaults & D & P; + /** + * Returns current default options. + * + * @deprecated use endpoint.DEFAULTS instead + */ + (): D & EndpointDefaults; + }; + /** + * Stateless method to turn endpoint options into request options. + * Calling `endpoint(options)` is the same as calling `endpoint.parse(endpoint.merge(options))`. + * + * @param {object} options `method`, `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + parse: (options: O) => RequestOptions & Pick; +} diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/EndpointOptions.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/EndpointOptions.d.ts new file mode 100644 index 0000000..8eccf5e --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/EndpointOptions.d.ts @@ -0,0 +1,7 @@ +import { RequestMethod } from "./RequestMethod"; +import { Url } from "./Url"; +import { RequestParameters } from "./RequestParameters"; +export type EndpointOptions = RequestParameters & { + method: RequestMethod; + url: Url; +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/Fetch.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/Fetch.d.ts new file mode 100644 index 0000000..983c79b --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/Fetch.d.ts @@ -0,0 +1,4 @@ +/** + * Browser's fetch method (or compatible such as fetch-mock) + */ +export type Fetch = any; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts new file mode 100644 index 0000000..2daaf34 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts @@ -0,0 +1,5 @@ +type Unwrap = T extends Promise ? 
U : T; +type AnyFunction = (...args: any[]) => any; +export type GetResponseTypeFromEndpointMethod = Unwrap>; +export type GetResponseDataTypeFromEndpointMethod = Unwrap>["data"]; +export {}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/OctokitResponse.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/OctokitResponse.d.ts new file mode 100644 index 0000000..8686e7f --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/OctokitResponse.d.ts @@ -0,0 +1,17 @@ +import { ResponseHeaders } from "./ResponseHeaders"; +import { Url } from "./Url"; +export type OctokitResponse = { + headers: ResponseHeaders; + /** + * http response code + */ + status: S; + /** + * URL of response after all redirects + */ + url: Url; + /** + * Response data as documented in the REST API reference documentation at https://docs.github.com/rest/reference + */ + data: T; +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestError.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestError.d.ts new file mode 100644 index 0000000..4608392 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestError.d.ts @@ -0,0 +1,11 @@ +export type RequestError = { + name: string; + status: number; + documentation_url: string; + errors?: Array<{ + resource: string; + code: string; + field: string; + message?: string; + }>; +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestHeaders.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestHeaders.d.ts new file mode 100644 index 0000000..4231159 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestHeaders.d.ts @@ -0,0 +1,15 @@ +export type RequestHeaders = { + /** + * Avoid setting `headers.accept`, use `mediaType.{format|previews}` option instead. + */ + accept?: string; + /** + * Use `authorization` to send authenticated request, remember `token ` / `bearer ` prefixes. Example: `token 1234567890abcdef1234567890abcdef12345678` + */ + authorization?: string; + /** + * `user-agent` is set do a default and can be overwritten as needed. + */ + "user-agent"?: string; + [header: string]: string | number | undefined; +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestInterface.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestInterface.d.ts new file mode 100644 index 0000000..851811f --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestInterface.d.ts @@ -0,0 +1,34 @@ +import { EndpointInterface } from "./EndpointInterface"; +import { OctokitResponse } from "./OctokitResponse"; +import { RequestParameters } from "./RequestParameters"; +import { Route } from "./Route"; +import { Endpoints } from "./generated/Endpoints"; +export interface RequestInterface { + /** + * Sends a request based on endpoint options + * + * @param {object} endpoint Must set `method` and `url`. 
Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (options: O & { + method?: string; + } & ("url" extends keyof D ? { + url?: string; + } : { + url: string; + })): Promise>; + /** + * Sends a request based on endpoint options + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (route: keyof Endpoints | R, options?: R extends keyof Endpoints ? Endpoints[R]["parameters"] & RequestParameters : RequestParameters): R extends keyof Endpoints ? Promise : Promise>; + /** + * Returns a new `request` with updated route and parameters + */ + defaults: (newDefaults: O) => RequestInterface; + /** + * Octokit endpoint API, see {@link https://github.com/octokit/endpoint.js|@octokit/endpoint} + */ + endpoint: EndpointInterface; +} diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestMethod.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestMethod.d.ts new file mode 100644 index 0000000..4cdfe61 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestMethod.d.ts @@ -0,0 +1,4 @@ +/** + * HTTP Verb supported by GitHub's REST API + */ +export type RequestMethod = "DELETE" | "GET" | "HEAD" | "PATCH" | "POST" | "PUT"; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestOptions.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestOptions.d.ts new file mode 100644 index 0000000..f83f5ab --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestOptions.d.ts @@ -0,0 +1,14 @@ +import { RequestHeaders } from "./RequestHeaders"; +import { RequestMethod } from "./RequestMethod"; +import { RequestRequestOptions } from "./RequestRequestOptions"; +import { Url } from "./Url"; +/** + * Generic request options as they are returned by the `endpoint()` method + */ +export type RequestOptions = { + method: RequestMethod; + url: Url; + headers: RequestHeaders; + body?: any; + request?: RequestRequestOptions; +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestParameters.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestParameters.d.ts new file mode 100644 index 0000000..3bc6e97 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestParameters.d.ts @@ -0,0 +1,45 @@ +import { RequestRequestOptions } from "./RequestRequestOptions"; +import { RequestHeaders } from "./RequestHeaders"; +import { Url } from "./Url"; +/** + * Parameters that can be passed into `request(route, parameters)` or `endpoint(route, parameters)` methods + */ +export type RequestParameters = { + /** + * Base URL to be used when a relative URL is passed, such as `/orgs/{org}`. + * If `baseUrl` is `https://enterprise.acme-inc.com/api/v3`, then the request + * will be sent to `https://enterprise.acme-inc.com/api/v3/orgs/{org}`. + */ + baseUrl?: Url; + /** + * HTTP headers. Use lowercase keys. 
+ */ + headers?: RequestHeaders; + /** + * Media type options, see {@link https://developer.github.com/v3/media/|GitHub Developer Guide} + */ + mediaType?: { + /** + * `json` by default. Can be `raw`, `text`, `html`, `full`, `diff`, `patch`, `sha`, `base64`. Depending on endpoint + */ + format?: string; + /** + * Custom media type names of {@link https://docs.github.com/en/graphql/overview/schema-previews|GraphQL API Previews} without the `-preview` suffix. + * Example for single preview: `['squirrel-girl']`. + * Example for multiple previews: `['squirrel-girl', 'mister-fantastic']`. + */ + previews?: string[]; + }; + /** + * Pass custom meta information for the request. The `request` object will be returned as is. + */ + request?: RequestRequestOptions; + /** + * Any additional parameter will be passed as follows + * 1. URL parameter if `':parameter'` or `{parameter}` is part of `url` + * 2. Query parameter if `method` is `'GET'` or `'HEAD'` + * 3. Request body if `parameter` is `'data'` + * 4. JSON in the request body in the form of `body[parameter]` unless `parameter` key is `'data'` + */ + [parameter: string]: unknown; +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts new file mode 100644 index 0000000..b875b7e --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts @@ -0,0 +1,26 @@ +import { Fetch } from "./Fetch"; +import { Signal } from "./Signal"; +/** + * Octokit-specific request options which are ignored for the actual request, but can be used by Octokit or plugins to manipulate how the request is sent or how a response is handled + */ +export type RequestRequestOptions = { + /** + * Node only. Useful for custom proxy, certificate, or dns lookup. + * + * @see https://nodejs.org/api/http.html#http_class_http_agent + */ + agent?: unknown; + /** + * Custom replacement for built-in fetch method. Useful for testing or request hooks. + */ + fetch?: Fetch; + /** + * Use an `AbortController` instance to cancel a request. In node you can only cancel streamed requests. + */ + signal?: Signal; + /** + * Node only. Request/response timeout in ms, it resets on redirect. 0 to disable (OS limit applies). `options.request.signal` is recommended instead. 
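The routing rules for additional parameters listed above (URL placeholder first, then query string, then JSON body) are easiest to see in a concrete call. A minimal sketch, assuming the runtime `request` function from `@octokit/request` is available alongside these types; the owner, repo, and token values are placeholders:

```ts
import { request } from "@octokit/request";

async function fileBug() {
  // "owner" and "repo" fill the {owner}/{repo} URL placeholders; "title"
  // and "body" have no placeholder, so for a POST they are sent as the
  // JSON request body per the rules above.
  const response = await request("POST /repos/{owner}/{repo}/issues", {
    headers: { authorization: "token <personal-access-token>" },
    owner: "octocat",
    repo: "hello-world",
    title: "Found a bug",
    body: "Details about the bug...",
  });
  return response.data;
}
```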
+ */ + timeout?: number; + [option: string]: any; +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts new file mode 100644 index 0000000..ff7af38 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts @@ -0,0 +1,20 @@ +export type ResponseHeaders = { + "cache-control"?: string; + "content-length"?: number; + "content-type"?: string; + date?: string; + etag?: string; + "last-modified"?: string; + link?: string; + location?: string; + server?: string; + status?: string; + vary?: string; + "x-github-mediatype"?: string; + "x-github-request-id"?: string; + "x-oauth-scopes"?: string; + "x-ratelimit-limit"?: string; + "x-ratelimit-remaining"?: string; + "x-ratelimit-reset"?: string; + [header: string]: string | number | undefined; +}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/Route.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/Route.d.ts new file mode 100644 index 0000000..808991e --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/Route.d.ts @@ -0,0 +1,4 @@ +/** + * String consisting of an optional HTTP method and relative path or absolute URL. Examples: `'/orgs/{org}'`, `'PUT /orgs/{org}'`, `GET https://example.com/foo/bar` + */ +export type Route = string; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/Signal.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/Signal.d.ts new file mode 100644 index 0000000..bdf9700 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/Signal.d.ts @@ -0,0 +1,6 @@ +/** + * Abort signal + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ +export type Signal = any; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/StrategyInterface.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/StrategyInterface.d.ts new file mode 100644 index 0000000..405cbd2 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/StrategyInterface.d.ts @@ -0,0 +1,4 @@ +import { AuthInterface } from "./AuthInterface"; +export interface StrategyInterface { + (...args: StrategyOptions): AuthInterface; +} diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/Url.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/Url.d.ts new file mode 100644 index 0000000..521f5ad --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/Url.d.ts @@ -0,0 +1,4 @@ +/** + * Relative or absolute URL. 
Examples: `'/orgs/{org}'`, `https://example.com/foo/bar` + */ +export type Url = string; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/VERSION.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/VERSION.d.ts new file mode 100644 index 0000000..05f7047 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/VERSION.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "10.0.0"; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts new file mode 100644 index 0000000..7ad302f --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts @@ -0,0 +1,3634 @@ +import { paths } from "@octokit/openapi-types"; +import { OctokitResponse } from "../OctokitResponse"; +import { RequestHeaders } from "../RequestHeaders"; +import { RequestRequestOptions } from "../RequestRequestOptions"; +type UnionToIntersection = (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never; +type ExtractParameters = "parameters" extends keyof T ? UnionToIntersection<{ + [K in keyof T["parameters"]]-?: T["parameters"][K]; +}[keyof T["parameters"]]> : {}; +type ExtractRequestBody = "requestBody" extends keyof T ? "content" extends keyof T["requestBody"] ? "application/json" extends keyof T["requestBody"]["content"] ? T["requestBody"]["content"]["application/json"] : { + data: { + [K in keyof T["requestBody"]["content"]]: T["requestBody"]["content"][K]; + }[keyof T["requestBody"]["content"]]; +} : "application/json" extends keyof T["requestBody"] ? T["requestBody"]["application/json"] : { + data: { + [K in keyof T["requestBody"]]: T["requestBody"][K]; + }[keyof T["requestBody"]]; +} : {}; +type ToOctokitParameters = ExtractParameters & ExtractRequestBody>; +type Operation = { + parameters: ToOctokitParameters; + request: { + method: Method extends keyof MethodsMap ? MethodsMap[Method] : never; + url: Url; + headers: RequestHeaders; + request: RequestRequestOptions; + }; + response: ExtractOctokitResponse; +}; +type MethodsMap = { + delete: "DELETE"; + get: "GET"; + patch: "PATCH"; + post: "POST"; + put: "PUT"; +}; +type SuccessStatuses = 200 | 201 | 202 | 204; +type RedirectStatuses = 301 | 302; +type EmptyResponseStatuses = 201 | 204; +type KnownJsonResponseTypes = "application/json" | "application/scim+json" | "text/html"; +type SuccessResponseDataType = { + [K in SuccessStatuses & keyof Responses]: GetContentKeyIfPresent extends never ? never : OctokitResponse, K>; +}[SuccessStatuses & keyof Responses]; +type RedirectResponseDataType = { + [K in RedirectStatuses & keyof Responses]: OctokitResponse; +}[RedirectStatuses & keyof Responses]; +type EmptyResponseDataType = { + [K in EmptyResponseStatuses & keyof Responses]: OctokitResponse; +}[EmptyResponseStatuses & keyof Responses]; +type GetContentKeyIfPresent = "content" extends keyof T ? DataType : DataType; +type DataType = { + [K in KnownJsonResponseTypes & keyof T]: T[K]; +}[KnownJsonResponseTypes & keyof T]; +type ExtractOctokitResponse = "responses" extends keyof R ? SuccessResponseDataType extends never ? RedirectResponseDataType extends never ? 
EmptyResponseDataType : RedirectResponseDataType : SuccessResponseDataType : unknown; +export interface Endpoints { + /** + * @see https://docs.github.com/rest/reference/apps#delete-an-installation-for-the-authenticated-app + */ + "DELETE /app/installations/{installation_id}": Operation<"/app/installations/{installation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#unsuspend-an-app-installation + */ + "DELETE /app/installations/{installation_id}/suspended": Operation<"/app/installations/{installation_id}/suspended", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#delete-an-app-authorization + */ + "DELETE /applications/{client_id}/grant": Operation<"/applications/{client_id}/grant", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#delete-an-app-token + */ + "DELETE /applications/{client_id}/token": Operation<"/applications/{client_id}/token", "delete">; + /** + * @see https://docs.github.com/rest/reference/gists#delete-a-gist + */ + "DELETE /gists/{gist_id}": Operation<"/gists/{gist_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/gists#delete-a-gist-comment + */ + "DELETE /gists/{gist_id}/comments/{comment_id}": Operation<"/gists/{gist_id}/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/gists#unstar-a-gist + */ + "DELETE /gists/{gist_id}/star": Operation<"/gists/{gist_id}/star", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#revoke-an-installation-access-token + */ + "DELETE /installation/token": Operation<"/installation/token", "delete">; + /** + * @see https://docs.github.com/rest/reference/activity#delete-a-thread-subscription + */ + "DELETE /notifications/threads/{thread_id}/subscription": Operation<"/notifications/threads/{thread_id}/subscription", "delete">; + /** + * @see https://docs.github.com/rest/orgs/orgs/#delete-an-organization + */ + "DELETE /orgs/{org}": Operation<"/orgs/{org}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#disable-a-selected-repository-for-github-actions-in-an-organization + */ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}": Operation<"/orgs/{org}/actions/permissions/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-required-workflow + */ + "DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}": Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-repository-from-selected-repositories-list-for-a-required-workflow + */ + "DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-self-hosted-runner-from-an-organization + */ + "DELETE /orgs/{org}/actions/runners/{runner_id}": Operation<"/orgs/{org}/actions/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-all-custom-labels-from-a-self-hosted-runner-for-an-organization + */ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-custom-label-from-a-self-hosted-runner-for-an-organization + */ + "DELETE 
/orgs/{org}/actions/runners/{runner_id}/labels/{name}": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-an-organization-secret + */ + "DELETE /orgs/{org}/actions/secrets/{secret_name}": Operation<"/orgs/{org}/actions/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret + */ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/actions/variables#delete-an-organization-variable + */ + "DELETE /orgs/{org}/actions/variables/{name}": Operation<"/orgs/{org}/actions/variables/{name}", "delete">; + /** + * @see https://docs.github.com/rest/actions/variables#remove-selected-repository-from-an-organization-variable + */ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/variables/{name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#unblock-a-user-from-an-organization + */ + "DELETE /orgs/{org}/blocks/{username}": Operation<"/orgs/{org}/blocks/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-codespaces-billing-users + */ + "DELETE /orgs/{org}/codespaces/billing/selected_users": Operation<"/orgs/{org}/codespaces/billing/selected_users", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-an-organization-secret + */ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}": Operation<"/orgs/{org}/codespaces/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#remove-selected-repository-from-an-organization-secret + */ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/dependabot#delete-an-organization-secret + */ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/dependabot#remove-selected-repository-from-an-organization-secret + */ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#delete-an-organization-webhook + */ + "DELETE /orgs/{org}/hooks/{hook_id}": Operation<"/orgs/{org}/hooks/{hook_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/interactions#remove-interaction-restrictions-for-an-organization + */ + "DELETE /orgs/{org}/interaction-limits": Operation<"/orgs/{org}/interaction-limits", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#cancel-an-organization-invitation + */ + "DELETE /orgs/{org}/invitations/{invitation_id}": Operation<"/orgs/{org}/invitations/{invitation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-an-organization-member + */ + "DELETE /orgs/{org}/members/{username}": Operation<"/orgs/{org}/members/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces + */ + "DELETE 
/orgs/{org}/members/{username}/codespaces/{codespace_name}": Operation<"/orgs/{org}/members/{username}/codespaces/{codespace_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-organization-membership-for-a-user + */ + "DELETE /orgs/{org}/memberships/{username}": Operation<"/orgs/{org}/memberships/{username}", "delete">; + /** + * @see https://docs.github.com/rest/migrations/orgs#delete-an-organization-migration-archive + */ + "DELETE /orgs/{org}/migrations/{migration_id}/archive": Operation<"/orgs/{org}/migrations/{migration_id}/archive", "delete">; + /** + * @see https://docs.github.com/rest/migrations/orgs#unlock-an-organization-repository + */ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock": Operation<"/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-outside-collaborator-from-an-organization + */ + "DELETE /orgs/{org}/outside_collaborators/{username}": Operation<"/orgs/{org}/outside_collaborators/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-for-an-organization + */ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-version-for-an-organization + */ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-public-organization-membership-for-the-authenticated-user + */ + "DELETE /orgs/{org}/public_members/{username}": Operation<"/orgs/{org}/public_members/{username}", "delete">; + /** + * @see https://docs.github.com/rest/repos/rules#delete-organization-ruleset + */ + "DELETE /orgs/{org}/rulesets/{ruleset_id}": Operation<"/orgs/{org}/rulesets/{ruleset_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-a-security-manager-team + */ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}": Operation<"/orgs/{org}/security-managers/teams/{team_slug}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-team + */ + "DELETE /orgs/{org}/teams/{team_slug}": Operation<"/orgs/{org}/teams/{team_slug}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion-comment + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-team-discussion-comment-reaction + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-team-discussion-reaction + */ + "DELETE 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user + */ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}": Operation<"/orgs/{org}/teams/{team_slug}/memberships/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-a-project-from-a-team + */ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}": Operation<"/orgs/{org}/teams/{team_slug}/projects/{project_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#remove-a-repository-from-a-team + */ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": Operation<"/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#delete-a-project-card + */ + "DELETE /projects/columns/cards/{card_id}": Operation<"/projects/columns/cards/{card_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#delete-a-project-column + */ + "DELETE /projects/columns/{column_id}": Operation<"/projects/columns/{column_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#delete-a-project + */ + "DELETE /projects/{project_id}": Operation<"/projects/{project_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#remove-project-collaborator + */ + "DELETE /projects/{project_id}/collaborators/{username}": Operation<"/projects/{project_id}/collaborators/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-repository + */ + "DELETE /repos/{owner}/{repo}": Operation<"/repos/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-an-artifact + */ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}": Operation<"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}", "delete">; + /** + * @see https://docs.github.com/rest/actions/cache#delete-a-github-actions-cache-for-a-repository-using-a-cache-id + */ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}": Operation<"/repos/{owner}/{repo}/actions/caches/{cache_id}", "delete">; + /** + * @see https://docs.github.com/rest/actions/cache#delete-github-actions-caches-for-a-repository-using-a-cache-key + */ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}": Operation<"/repos/{owner}/{repo}/actions/caches", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-self-hosted-runner-from-a-repository + */ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-all-custom-labels-from-a-self-hosted-runner-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-custom-label-from-a-self-hosted-runner-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-workflow-run + */ + "DELETE 
/repos/{owner}/{repo}/actions/runs/{run_id}": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-workflow-run-logs + */ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/logs", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-repository-secret + */ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/actions/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/actions/variables#delete-a-repository-variable + */ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}": Operation<"/repos/{owner}/{repo}/actions/variables/{name}", "delete">; + /** + * @see https://docs.github.com/rest/repos/autolinks#delete-an-autolink-reference-from-a-repository + */ + "DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}": Operation<"/repos/{owner}/{repo}/autolinks/{autolink_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#disable-automated-security-fixes + */ + "DELETE /repos/{owner}/{repo}/automated-security-fixes": Operation<"/repos/{owner}/{repo}/automated-security-fixes", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#delete-branch-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#delete-admin-branch-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#delete-pull-request-review-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#delete-commit-signature-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#remove-status-check-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#remove-status-check-contexts + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#delete-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#remove-app-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "delete">; + /** + * @see 
https://docs.github.com/rest/branches/branch-protection#remove-team-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#remove-user-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "delete">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#delete-a-code-scanning-analysis-from-a-repository + */ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}": Operation<"/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-a-repository-secret + */ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#remove-a-repository-collaborator + */ + "DELETE /repos/{owner}/{repo}/collaborators/{username}": Operation<"/repos/{owner}/{repo}/collaborators/{username}", "delete">; + /** + * @see https://docs.github.com/rest/commits/comments#delete-a-commit-comment + */ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-a-commit-comment-reaction + */ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-file + */ + "DELETE /repos/{owner}/{repo}/contents/{path}": Operation<"/repos/{owner}/{repo}/contents/{path}", "delete">; + /** + * @see https://docs.github.com/rest/reference/dependabot#delete-a-repository-secret + */ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/deployments/deployments#delete-a-deployment + */ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}", "delete">; + /** + * @see https://docs.github.com/rest/deployments/environments#delete-an-environment + */ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}", "delete">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#delete-deployment-branch-policy + */ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}", "delete">; + /** + * @see https://docs.github.com/rest/deployments/protection-rules#disable-deployment-protection-rule + */ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/git#delete-a-reference + */ + "DELETE /repos/{owner}/{repo}/git/refs/{ref}": 
Operation<"/repos/{owner}/{repo}/git/refs/{ref}", "delete">; + /** + * @see https://docs.github.com/rest/webhooks/repos#delete-a-repository-webhook + */ + "DELETE /repos/{owner}/{repo}/hooks/{hook_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}", "delete">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#cancel-an-import + */ + "DELETE /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "delete">; + /** + * @see https://docs.github.com/rest/reference/interactions#remove-interaction-restrictions-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/interaction-limits": Operation<"/repos/{owner}/{repo}/interaction-limits", "delete">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#delete-a-repository-invitation + */ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}": Operation<"/repos/{owner}/{repo}/invitations/{invitation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#delete-an-issue-comment + */ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-an-issue-comment-reaction + */ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#remove-assignees-from-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/assignees", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#remove-all-labels-from-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#remove-a-label-from-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#unlock-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/lock", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-an-issue-reaction + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/deploy-keys#delete-a-deploy-key + */ + "DELETE /repos/{owner}/{repo}/keys/{key_id}": Operation<"/repos/{owner}/{repo}/keys/{key_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#delete-a-label + */ + "DELETE /repos/{owner}/{repo}/labels/{name}": Operation<"/repos/{owner}/{repo}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#disable-git-lfs-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/lfs": Operation<"/repos/{owner}/{repo}/lfs", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#delete-a-milestone + */ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}", "delete">; + /** + * @see https://docs.github.com/rest/pages#delete-a-github-pages-site + */ + "DELETE 
/repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "delete">; + /** + * @see https://docs.github.com/rest/reference/pulls#delete-a-review-comment-for-a-pull-request + */ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-a-pull-request-comment-reaction + */ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/pulls#remove-requested-reviewers-from-a-pull-request + */ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "delete">; + /** + * @see https://docs.github.com/rest/reference/pulls#delete-a-pending-review-for-a-pull-request + */ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-release-asset + */ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}": Operation<"/repos/{owner}/{repo}/releases/assets/{asset_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-release + */ + "DELETE /repos/{owner}/{repo}/releases/{release_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions/#delete-a-release-reaction + */ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/repos/rules#delete-repository-ruleset + */ + "DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}": Operation<"/repos/{owner}/{repo}/rulesets/{ruleset_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/activity#delete-a-repository-subscription + */ + "DELETE /repos/{owner}/{repo}/subscription": Operation<"/repos/{owner}/{repo}/subscription", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-tag-protection-state-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}": Operation<"/repos/{owner}/{repo}/tags/protection/{tag_protection_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#disable-vulnerability-alerts + */ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts": Operation<"/repos/{owner}/{repo}/vulnerability-alerts", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-an-environment-secret + */ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/actions/variables#delete-an-environment-variable + */ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/variables/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#delete-a-team-legacy + */ + "DELETE /teams/{team_id}": Operation<"/teams/{team_id}", "delete">; + /** + * @see 
https://docs.github.com/rest/reference/teams#delete-a-discussion-legacy + */ + "DELETE /teams/{team_id}/discussions/{discussion_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion-comment-legacy + */ + "DELETE /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-team-member-legacy + */ + "DELETE /teams/{team_id}/members/{username}": Operation<"/teams/{team_id}/members/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user-legacy + */ + "DELETE /teams/{team_id}/memberships/{username}": Operation<"/teams/{team_id}/memberships/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#remove-a-project-from-a-team-legacy + */ + "DELETE /teams/{team_id}/projects/{project_id}": Operation<"/teams/{team_id}/projects/{project_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#remove-a-repository-from-a-team-legacy + */ + "DELETE /teams/{team_id}/repos/{owner}/{repo}": Operation<"/teams/{team_id}/repos/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#unblock-a-user + */ + "DELETE /user/blocks/{username}": Operation<"/user/blocks/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-a-secret-for-the-authenticated-user + */ + "DELETE /user/codespaces/secrets/{secret_name}": Operation<"/user/codespaces/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#remove-a-selected-repository-from-a-user-secret + */ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}": Operation<"/user/codespaces/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-a-codespace-for-the-authenticated-user + */ + "DELETE /user/codespaces/{codespace_name}": Operation<"/user/codespaces/{codespace_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-an-email-address-for-the-authenticated-user + */ + "DELETE /user/emails": Operation<"/user/emails", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#unfollow-a-user + */ + "DELETE /user/following/{username}": Operation<"/user/following/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-a-gpg-key-for-the-authenticated-user + */ + "DELETE /user/gpg_keys/{gpg_key_id}": Operation<"/user/gpg_keys/{gpg_key_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#remove-a-repository-from-an-app-installation + */ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}": Operation<"/user/installations/{installation_id}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/interactions#remove-interaction-restrictions-from-your-public-repositories + */ + "DELETE /user/interaction-limits": Operation<"/user/interaction-limits", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-a-public-ssh-key-for-the-authenticated-user + */ + "DELETE /user/keys/{key_id}": Operation<"/user/keys/{key_id}", "delete">; + /** + * @see 
https://docs.github.com/rest/migrations/users#delete-a-user-migration-archive + */ + "DELETE /user/migrations/{migration_id}/archive": Operation<"/user/migrations/{migration_id}/archive", "delete">; + /** + * @see https://docs.github.com/rest/migrations/users#unlock-a-user-repository + */ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock": Operation<"/user/migrations/{migration_id}/repos/{repo_name}/lock", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-for-the-authenticated-user + */ + "DELETE /user/packages/{package_type}/{package_name}": Operation<"/user/packages/{package_type}/{package_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-version-for-the-authenticated-user + */ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/user/packages/{package_type}/{package_name}/versions/{package_version_id}", "delete">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#decline-a-repository-invitation + */ + "DELETE /user/repository_invitations/{invitation_id}": Operation<"/user/repository_invitations/{invitation_id}", "delete">; + /** + * @see https://docs.github.com/rest/users/social-accounts#delete-social-account-for-authenticated-user + */ + "DELETE /user/social_accounts": Operation<"/user/social_accounts", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-a-ssh-signing-key-for-the-authenticated-user + */ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}": Operation<"/user/ssh_signing_keys/{ssh_signing_key_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/activity#unstar-a-repository-for-the-authenticated-user + */ + "DELETE /user/starred/{owner}/{repo}": Operation<"/user/starred/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-for-a-user + */ + "DELETE /users/{username}/packages/{package_type}/{package_name}": Operation<"/users/{username}/packages/{package_type}/{package_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-version-for-a-user + */ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", "delete">; + /** + * @see https://docs.github.com/rest/overview/resources-in-the-rest-api#root-endpoint + */ + "GET /": Operation<"/", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-the-authenticated-app + */ + "GET /app": Operation<"/app", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-webhook-configuration-for-an-app + */ + "GET /app/hook/config": Operation<"/app/hook/config", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-deliveries-for-an-app-webhook + */ + "GET /app/hook/deliveries": Operation<"/app/hook/deliveries", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-delivery-for-an-app-webhook + */ + "GET /app/hook/deliveries/{delivery_id}": Operation<"/app/hook/deliveries/{delivery_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-installation-requests-for-the-authenticated-app + */ + "GET /app/installation-requests": Operation<"/app/installation-requests", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app + */ + "GET /app/installations": 
Operation<"/app/installations", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-an-installation-for-the-authenticated-app + */ + "GET /app/installations/{installation_id}": Operation<"/app/installations/{installation_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps/#get-an-app + */ + "GET /apps/{app_slug}": Operation<"/apps/{app_slug}", "get">; + /** + * @see https://docs.github.com/rest/reference/codes-of-conduct#get-all-codes-of-conduct + */ + "GET /codes_of_conduct": Operation<"/codes_of_conduct", "get">; + /** + * @see https://docs.github.com/rest/reference/codes-of-conduct#get-a-code-of-conduct + */ + "GET /codes_of_conduct/{key}": Operation<"/codes_of_conduct/{key}", "get">; + /** + * @see https://docs.github.com/rest/reference/emojis#get-emojis + */ + "GET /emojis": Operation<"/emojis", "get">; + /** + * @see https://docs.github.com/rest/dependabot/alerts#list-dependabot-alerts-for-an-enterprise + */ + "GET /enterprises/{enterprise}/dependabot/alerts": Operation<"/enterprises/{enterprise}/dependabot/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-an-enterprise + */ + "GET /enterprises/{enterprise}/secret-scanning/alerts": Operation<"/enterprises/{enterprise}/secret-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events + */ + "GET /events": Operation<"/events", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-feeds + */ + "GET /feeds": Operation<"/feeds", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gists-for-the-authenticated-user + */ + "GET /gists": Operation<"/gists", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-public-gists + */ + "GET /gists/public": Operation<"/gists/public", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-starred-gists + */ + "GET /gists/starred": Operation<"/gists/starred", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#get-a-gist + */ + "GET /gists/{gist_id}": Operation<"/gists/{gist_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-comments + */ + "GET /gists/{gist_id}/comments": Operation<"/gists/{gist_id}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#get-a-gist-comment + */ + "GET /gists/{gist_id}/comments/{comment_id}": Operation<"/gists/{gist_id}/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-commits + */ + "GET /gists/{gist_id}/commits": Operation<"/gists/{gist_id}/commits", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-forks + */ + "GET /gists/{gist_id}/forks": Operation<"/gists/{gist_id}/forks", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#check-if-a-gist-is-starred + */ + "GET /gists/{gist_id}/star": Operation<"/gists/{gist_id}/star", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#get-a-gist-revision + */ + "GET /gists/{gist_id}/{sha}": Operation<"/gists/{gist_id}/{sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/gitignore#get-all-gitignore-templates + */ + "GET /gitignore/templates": Operation<"/gitignore/templates", "get">; + /** + * @see https://docs.github.com/rest/reference/gitignore#get-a-gitignore-template + */ + "GET /gitignore/templates/{name}": Operation<"/gitignore/templates/{name}", "get">; + /** + * @see 
https://docs.github.com/rest/reference/apps#list-repositories-accessible-to-the-app-installation + */ + "GET /installation/repositories": Operation<"/installation/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issues-assigned-to-the-authenticated-user + */ + "GET /issues": Operation<"/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/licenses#get-all-commonly-used-licenses + */ + "GET /licenses": Operation<"/licenses", "get">; + /** + * @see https://docs.github.com/rest/reference/licenses#get-a-license + */ + "GET /licenses/{license}": Operation<"/licenses/{license}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-subscription-plan-for-an-account + */ + "GET /marketplace_listing/accounts/{account_id}": Operation<"/marketplace_listing/accounts/{account_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-plans + */ + "GET /marketplace_listing/plans": Operation<"/marketplace_listing/plans", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-accounts-for-a-plan + */ + "GET /marketplace_listing/plans/{plan_id}/accounts": Operation<"/marketplace_listing/plans/{plan_id}/accounts", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-subscription-plan-for-an-account-stubbed + */ + "GET /marketplace_listing/stubbed/accounts/{account_id}": Operation<"/marketplace_listing/stubbed/accounts/{account_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-plans-stubbed + */ + "GET /marketplace_listing/stubbed/plans": Operation<"/marketplace_listing/stubbed/plans", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-accounts-for-a-plan-stubbed + */ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts": Operation<"/marketplace_listing/stubbed/plans/{plan_id}/accounts", "get">; + /** + * @see https://docs.github.com/rest/reference/meta#get-github-meta-information + */ + "GET /meta": Operation<"/meta", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-for-a-network-of-repositories + */ + "GET /networks/{owner}/{repo}/events": Operation<"/networks/{owner}/{repo}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user + */ + "GET /notifications": Operation<"/notifications", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-a-thread + */ + "GET /notifications/threads/{thread_id}": Operation<"/notifications/threads/{thread_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-a-thread-subscription-for-the-authenticated-user + */ + "GET /notifications/threads/{thread_id}/subscription": Operation<"/notifications/threads/{thread_id}/subscription", "get">; + /** + * @see https://docs.github.com/rest/reference/meta#get-octocat + */ + "GET /octocat": Operation<"/octocat", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations + */ + "GET /organizations": Operation<"/organizations", "get">; + /** + * @see https://docs.github.com/rest/orgs/orgs#list-requests-to-access-organization-resources-with-fine-grained-personal-access-tokens + */ + "GET /organizations/{org}/personal-access-token-requests": Operation<"/organizations/{org}/personal-access-token-requests", "get">; + /** + * @see https://docs.github.com/rest/orgs/orgs#list-repositories-requested-to-be-accessed-by-a-fine-grained-personal-access-token + */ + "GET 
/organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories": Operation<"/organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/orgs/orgs#list-fine-grained-personal-access-tokens-with-access-to-organization-resources + */ + "GET /organizations/{org}/personal-access-tokens": Operation<"/organizations/{org}/personal-access-tokens", "get">; + /** + * @see https://docs.github.com/rest/orgs/orgs#list-repositories-a-fine-grained-personal-access-token-has-access-to + */ + "GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories": Operation<"/organizations/{org}/personal-access-tokens/{pat_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-in-organization + * @deprecated "org_id" is now "org" + */ + "GET /orgs/{org_id}/codespaces": Operation<"/orgs/{org}/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-an-organization + */ + "GET /orgs/{org}": Operation<"/orgs/{org}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-cache-usage-for-an-organization + */ + "GET /orgs/{org}/actions/cache/usage": Operation<"/orgs/{org}/actions/cache/usage", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repositories-with-github-actions-cache-usage-for-an-organization + */ + "GET /orgs/{org}/actions/cache/usage-by-repository": Operation<"/orgs/{org}/actions/cache/usage-by-repository", "get">; + /** + * @see https://docs.github.com/rest/actions/oidc#get-the-customization-template-for-an-oidc-subject-claim-for-an-organization + */ + "GET /orgs/{org}/actions/oidc/customization/sub": Operation<"/orgs/{org}/actions/oidc/customization/sub", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-permissions-for-an-organization + */ + "GET /orgs/{org}/actions/permissions": Operation<"/orgs/{org}/actions/permissions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-repositories-enabled-for-github-actions-in-an-organization + */ + "GET /orgs/{org}/actions/permissions/repositories": Operation<"/orgs/{org}/actions/permissions/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-allowed-actions-for-an-organization + */ + "GET /orgs/{org}/actions/permissions/selected-actions": Operation<"/orgs/{org}/actions/permissions/selected-actions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-default-workflow-permissions + */ + "GET /orgs/{org}/actions/permissions/workflow": Operation<"/orgs/{org}/actions/permissions/workflow", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-required-workflows + */ + "GET /orgs/{org}/actions/required_workflows": Operation<"/orgs/{org}/actions/required_workflows", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-required-workflow + */ + "GET /orgs/{org}/actions/required_workflows/{required_workflow_id}": Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-repositories-required-workflows + */ + "GET /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories": Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories", "get">; + /** + * @see 
https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-an-organization + */ + "GET /orgs/{org}/actions/runners": Operation<"/orgs/{org}/actions/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-runner-applications-for-an-organization + */ + "GET /orgs/{org}/actions/runners/downloads": Operation<"/orgs/{org}/actions/runners/downloads", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-for-an-organization + */ + "GET /orgs/{org}/actions/runners/{runner_id}": Operation<"/orgs/{org}/actions/runners/{runner_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-labels-for-a-self-hosted-runner-for-an-organization + */ + "GET /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-organization-secrets + */ + "GET /orgs/{org}/actions/secrets": Operation<"/orgs/{org}/actions/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-organization-public-key + */ + "GET /orgs/{org}/actions/secrets/public-key": Operation<"/orgs/{org}/actions/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-organization-secret + */ + "GET /orgs/{org}/actions/secrets/{secret_name}": Operation<"/orgs/{org}/actions/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#list-organization-variables + */ + "GET /orgs/{org}/actions/variables": Operation<"/orgs/{org}/actions/variables", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#get-an-organization-variable + */ + "GET /orgs/{org}/actions/variables/{name}": Operation<"/orgs/{org}/actions/variables/{name}", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#list-selected-repositories-for-an-organization-variable + */ + "GET /orgs/{org}/actions/variables/{name}/repositories": Operation<"/orgs/{org}/actions/variables/{name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-users-blocked-by-an-organization + */ + "GET /orgs/{org}/blocks": Operation<"/orgs/{org}/blocks", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#check-if-a-user-is-blocked-by-an-organization + */ + "GET /orgs/{org}/blocks/{username}": Operation<"/orgs/{org}/blocks/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-for-an-organization + */ + "GET /orgs/{org}/code-scanning/alerts": Operation<"/orgs/{org}/code-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-in-organization + */ + "GET /orgs/{org}/codespaces": Operation<"/orgs/{org}/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-organization-secrets + */ + "GET /orgs/{org}/codespaces/secrets": Operation<"/orgs/{org}/codespaces/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-an-organization-public-key + */ + "GET /orgs/{org}/codespaces/secrets/public-key": Operation<"/orgs/{org}/codespaces/secrets/public-key", "get">; + /** + * @see 
https://docs.github.com/rest/reference/codespaces#get-an-organization-secret + */ + "GET /orgs/{org}/codespaces/secrets/{secret_name}": Operation<"/orgs/{org}/codespaces/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/codespaces/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/dependabot/alerts#list-dependabot-alerts-for-an-organization + */ + "GET /orgs/{org}/dependabot/alerts": Operation<"/orgs/{org}/dependabot/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-organization-secrets + */ + "GET /orgs/{org}/dependabot/secrets": Operation<"/orgs/{org}/dependabot/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-an-organization-public-key + */ + "GET /orgs/{org}/dependabot/secrets/public-key": Operation<"/orgs/{org}/dependabot/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-an-organization-secret + */ + "GET /orgs/{org}/dependabot/secrets/{secret_name}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-docker-migration-conflicting-packages-for-organization + */ + "GET /orgs/{org}/docker/conflicts": Operation<"/orgs/{org}/docker/conflicts", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-organization-events + */ + "GET /orgs/{org}/events": Operation<"/orgs/{org}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-failed-organization-invitations + */ + "GET /orgs/{org}/failed_invitations": Operation<"/orgs/{org}/failed_invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-webhooks + */ + "GET /orgs/{org}/hooks": Operation<"/orgs/{org}/hooks", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-an-organization-webhook + */ + "GET /orgs/{org}/hooks/{hook_id}": Operation<"/orgs/{org}/hooks/{hook_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-a-webhook-configuration-for-an-organization + */ + "GET /orgs/{org}/hooks/{hook_id}/config": Operation<"/orgs/{org}/hooks/{hook_id}/config", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-deliveries-for-an-organization-webhook + */ + "GET /orgs/{org}/hooks/{hook_id}/deliveries": Operation<"/orgs/{org}/hooks/{hook_id}/deliveries", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-a-webhook-delivery-for-an-organization-webhook + */ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}": Operation<"/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-an-organization-installation-for-the-authenticated-app + */ + "GET /orgs/{org}/installation": Operation<"/orgs/{org}/installation", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-app-installations-for-an-organization + */ + "GET /orgs/{org}/installations": Operation<"/orgs/{org}/installations", "get">; + /** + * 
@see https://docs.github.com/rest/reference/interactions#get-interaction-restrictions-for-an-organization + */ + "GET /orgs/{org}/interaction-limits": Operation<"/orgs/{org}/interaction-limits", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-pending-organization-invitations + */ + "GET /orgs/{org}/invitations": Operation<"/orgs/{org}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-invitation-teams + */ + "GET /orgs/{org}/invitations/{invitation_id}/teams": Operation<"/orgs/{org}/invitations/{invitation_id}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-organization-issues-assigned-to-the-authenticated-user + */ + "GET /orgs/{org}/issues": Operation<"/orgs/{org}/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-members + */ + "GET /orgs/{org}/members": Operation<"/orgs/{org}/members", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#check-organization-membership-for-a-user + */ + "GET /orgs/{org}/members/{username}": Operation<"/orgs/{org}/members/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-codespaces-for-user-in-org + */ + "GET /orgs/{org}/members/{username}/codespaces": Operation<"/orgs/{org}/members/{username}/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user + */ + "GET /orgs/{org}/memberships/{username}": Operation<"/orgs/{org}/memberships/{username}", "get">; + /** + * @see https://docs.github.com/rest/migrations/orgs#list-organization-migrations + */ + "GET /orgs/{org}/migrations": Operation<"/orgs/{org}/migrations", "get">; + /** + * @see https://docs.github.com/rest/migrations/orgs#get-an-organization-migration-status + */ + "GET /orgs/{org}/migrations/{migration_id}": Operation<"/orgs/{org}/migrations/{migration_id}", "get">; + /** + * @see https://docs.github.com/rest/migrations/orgs#download-an-organization-migration-archive + */ + "GET /orgs/{org}/migrations/{migration_id}/archive": Operation<"/orgs/{org}/migrations/{migration_id}/archive", "get">; + /** + * @see https://docs.github.com/rest/migrations/orgs#list-repositories-in-an-organization-migration + */ + "GET /orgs/{org}/migrations/{migration_id}/repositories": Operation<"/orgs/{org}/migrations/{migration_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-outside-collaborators-for-an-organization + */ + "GET /orgs/{org}/outside_collaborators": Operation<"/orgs/{org}/outside_collaborators", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-an-organization + */ + "GET /orgs/{org}/packages": Operation<"/orgs/{org}/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-for-an-organization + */ + "GET /orgs/{org}/packages/{package_type}/{package_name}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}", "get">; + /** + * @see https://docs.github.com/rest/packages#get-all-package-versions-for-a-package-owned-by-an-organization + */ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-version-for-an-organization + */ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}": 
Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-organization-projects + */ + "GET /orgs/{org}/projects": Operation<"/orgs/{org}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-public-organization-members + */ + "GET /orgs/{org}/public_members": Operation<"/orgs/{org}/public_members", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#check-public-organization-membership-for-a-user + */ + "GET /orgs/{org}/public_members/{username}": Operation<"/orgs/{org}/public_members/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-organization-repositories + */ + "GET /orgs/{org}/repos": Operation<"/orgs/{org}/repos", "get">; + /** + * @see https://docs.github.com/rest/repos/rules#get-organization-rulesets + */ + "GET /orgs/{org}/rulesets": Operation<"/orgs/{org}/rulesets", "get">; + /** + * @see https://docs.github.com/rest/repos/rules#get-organization-ruleset + */ + "GET /orgs/{org}/rulesets/{ruleset_id}": Operation<"/orgs/{org}/rulesets/{ruleset_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-an-organization + */ + "GET /orgs/{org}/secret-scanning/alerts": Operation<"/orgs/{org}/secret-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-security-manager-teams + */ + "GET /orgs/{org}/security-managers": Operation<"/orgs/{org}/security-managers", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-actions-billing-for-an-organization + */ + "GET /orgs/{org}/settings/billing/actions": Operation<"/orgs/{org}/settings/billing/actions", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-packages-billing-for-an-organization + */ + "GET /orgs/{org}/settings/billing/packages": Operation<"/orgs/{org}/settings/billing/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-shared-storage-billing-for-an-organization + */ + "GET /orgs/{org}/settings/billing/shared-storage": Operation<"/orgs/{org}/settings/billing/shared-storage", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-teams + */ + "GET /orgs/{org}/teams": Operation<"/orgs/{org}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-team-by-name + */ + "GET /orgs/{org}/teams/{team_slug}": Operation<"/orgs/{org}/teams/{team_slug}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussions + */ + "GET /orgs/{org}/teams/{team_slug}/discussions": Operation<"/orgs/{org}/teams/{team_slug}/discussions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussion-comments + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion-comment + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", "get">; 
+ /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-pending-team-invitations + */ + "GET /orgs/{org}/teams/{team_slug}/invitations": Operation<"/orgs/{org}/teams/{team_slug}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-members + */ + "GET /orgs/{org}/teams/{team_slug}/members": Operation<"/orgs/{org}/teams/{team_slug}/members", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user + */ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}": Operation<"/orgs/{org}/teams/{team_slug}/memberships/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-projects + */ + "GET /orgs/{org}/teams/{team_slug}/projects": Operation<"/orgs/{org}/teams/{team_slug}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-project + */ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}": Operation<"/orgs/{org}/teams/{team_slug}/projects/{project_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-repositories + */ + "GET /orgs/{org}/teams/{team_slug}/repos": Operation<"/orgs/{org}/teams/{team_slug}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#check-team-permissions-for-a-repository + */ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": Operation<"/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-child-teams + */ + "GET /orgs/{org}/teams/{team_slug}/teams": Operation<"/orgs/{org}/teams/{team_slug}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-a-project-card + */ + "GET /projects/columns/cards/{card_id}": Operation<"/projects/columns/cards/{card_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-a-project-column + */ + "GET /projects/columns/{column_id}": Operation<"/projects/columns/{column_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-cards + */ + "GET /projects/columns/{column_id}/cards": Operation<"/projects/columns/{column_id}/cards", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-a-project + */ + "GET /projects/{project_id}": Operation<"/projects/{project_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-collaborators + */ + "GET /projects/{project_id}/collaborators": Operation<"/projects/{project_id}/collaborators", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-project-permission-for-a-user + */ + "GET /projects/{project_id}/collaborators/{username}/permission": Operation<"/projects/{project_id}/collaborators/{username}/permission", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-columns + */ + "GET 
/projects/{project_id}/columns": Operation<"/projects/{project_id}/columns", "get">; + /** + * @see https://docs.github.com/rest/reference/rate-limit#get-rate-limit-status-for-the-authenticated-user + */ + "GET /rate_limit": Operation<"/rate_limit", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-required-workflows + */ + "GET /repos/{org}/{repo}/actions/required_workflows": Operation<"/repos/{org}/{repo}/actions/required_workflows", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-repository-required-workflow + */ + "GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}": Operation<"/repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-repository-required-workflow-usage + */ + "GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing": Operation<"/repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-repository + */ + "GET /repos/{owner}/{repo}": Operation<"/repos/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-artifacts-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/artifacts": Operation<"/repos/{owner}/{repo}/actions/artifacts", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-artifact + */ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}": Operation<"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-an-artifact + */ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}": Operation<"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-cache-usage-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/cache/usage": Operation<"/repos/{owner}/{repo}/actions/cache/usage", "get">; + /** + * @see https://docs.github.com/rest/actions/cache#list-github-actions-caches-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/caches": Operation<"/repos/{owner}/{repo}/actions/caches", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-job-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}": Operation<"/repos/{owner}/{repo}/actions/jobs/{job_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-job-logs-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs": Operation<"/repos/{owner}/{repo}/actions/jobs/{job_id}/logs", "get">; + /** + * @see https://docs.github.com/rest/actions/oidc#get-the-customization-template-for-an-oidc-subject-claim-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/oidc/customization/sub": Operation<"/repos/{owner}/{repo}/actions/oidc/customization/sub", "get">; + /** + * @see https://docs.github.com/rest/actions/secrets#list-repository-organization-secrets + */ + "GET /repos/{owner}/{repo}/actions/organization-secrets": Operation<"/repos/{owner}/{repo}/actions/organization-secrets", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#list-repository-organization-variables + */ + "GET /repos/{owner}/{repo}/actions/organization-variables": Operation<"/repos/{owner}/{repo}/actions/organization-variables", 
"get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-permissions-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions": Operation<"/repos/{owner}/{repo}/actions/permissions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-workflow-access-level-to-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions/access": Operation<"/repos/{owner}/{repo}/actions/permissions/access", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-allowed-actions-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions": Operation<"/repos/{owner}/{repo}/actions/permissions/selected-actions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-default-workflow-permissions-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions/workflow": Operation<"/repos/{owner}/{repo}/actions/permissions/workflow", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-required-workflow-runs + */ + "GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs": Operation<"/repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners": Operation<"/repos/{owner}/{repo}/actions/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-runner-applications-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners/downloads": Operation<"/repos/{owner}/{repo}/actions/runners/downloads", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-labels-for-a-self-hosted-runner-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-runs-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runs": Operation<"/repos/{owner}/{repo}/actions/runs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-the-review-history-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/approvals", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-run-artifacts + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-workflow-run-attempt + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-jobs-for-a-workflow-run-attempt + */ + "GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-workflow-run-attempt-logs + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-jobs-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-workflow-run-logs + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/logs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-pending-deployments-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-workflow-run-usage + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/timing", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/actions/secrets": Operation<"/repos/{owner}/{repo}/actions/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-repository-public-key + */ + "GET /repos/{owner}/{repo}/actions/secrets/public-key": Operation<"/repos/{owner}/{repo}/actions/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-repository-secret + */ + "GET /repos/{owner}/{repo}/actions/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/actions/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#list-repository-variables + */ + "GET /repos/{owner}/{repo}/actions/variables": Operation<"/repos/{owner}/{repo}/actions/variables", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#get-a-repository-variable + */ + "GET /repos/{owner}/{repo}/actions/variables/{name}": Operation<"/repos/{owner}/{repo}/actions/variables/{name}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-workflows + */ + "GET /repos/{owner}/{repo}/actions/workflows": Operation<"/repos/{owner}/{repo}/actions/workflows", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-workflow + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-runs + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-workflow-usage + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-assignees + */ + "GET /repos/{owner}/{repo}/assignees": Operation<"/repos/{owner}/{repo}/assignees", "get">; + /** + * @see 
https://docs.github.com/rest/reference/issues#check-if-a-user-can-be-assigned + */ + "GET /repos/{owner}/{repo}/assignees/{assignee}": Operation<"/repos/{owner}/{repo}/assignees/{assignee}", "get">; + /** + * @see https://docs.github.com/rest/repos/autolinks#list-all-autolinks-of-a-repository + */ + "GET /repos/{owner}/{repo}/autolinks": Operation<"/repos/{owner}/{repo}/autolinks", "get">; + /** + * @see https://docs.github.com/rest/repos/autolinks#get-an-autolink-reference-of-a-repository + */ + "GET /repos/{owner}/{repo}/autolinks/{autolink_id}": Operation<"/repos/{owner}/{repo}/autolinks/{autolink_id}", "get">; + /** + * @see https://docs.github.com/rest/branches/branches#list-branches + */ + "GET /repos/{owner}/{repo}/branches": Operation<"/repos/{owner}/{repo}/branches", "get">; + /** + * @see https://docs.github.com/rest/branches/branches#get-a-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}": Operation<"/repos/{owner}/{repo}/branches/{branch}", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-branch-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-admin-branch-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-pull-request-review-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-commit-signature-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-status-checks-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-all-status-check-contexts + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-access-restrictions + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#list-apps-with-access-to-the-protected-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#list-teams-with-access-to-the-protected-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "get">; + /** + * @see 
https://docs.github.com/rest/branches/branch-protection#list-users-with-access-to-the-protected-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#get-a-check-run + */ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-run-annotations + */ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#get-a-check-suite + */ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}": Operation<"/repos/{owner}/{repo}/check-suites/{check_suite_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-runs-in-a-check-suite + */ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs": Operation<"/repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts": Operation<"/repos/{owner}/{repo}/code-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-code-scanning-alert + * @deprecated "alert_id" is now "alert_number" + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-code-scanning-alert + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-instances-of-a-code-scanning-alert + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-analyses-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/analyses": Operation<"/repos/{owner}/{repo}/code-scanning/analyses", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}": Operation<"/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-codeql-databases-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases": Operation<"/repos/{owner}/{repo}/code-scanning/codeql/databases", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-codeql-database-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}": Operation<"/repos/{owner}/{repo}/code-scanning/codeql/databases/{language}", "get">; + /** + * @see https://docs.github.com/rest/code-scanning#get-a-code-scanning-default-setup-configuration + */ + "GET /repos/{owner}/{repo}/code-scanning/default-setup": Operation<"/repos/{owner}/{repo}/code-scanning/default-setup", "get">; + /** + * @see 
https://docs.github.com/rest/reference/code-scanning#get-information-about-a-sarif-upload + */ + "GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}": Operation<"/repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-codeowners-errors + */ + "GET /repos/{owner}/{repo}/codeowners/errors": Operation<"/repos/{owner}/{repo}/codeowners/errors", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-codespaces-in-a-repository-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/codespaces": Operation<"/repos/{owner}/{repo}/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-devcontainers-in-a-repository-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/codespaces/devcontainers": Operation<"/repos/{owner}/{repo}/codespaces/devcontainers", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-available-machine-types-for-a-repository + */ + "GET /repos/{owner}/{repo}/codespaces/machines": Operation<"/repos/{owner}/{repo}/codespaces/machines", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#preview-attributes-for-a-new-codespace + */ + "GET /repos/{owner}/{repo}/codespaces/new": Operation<"/repos/{owner}/{repo}/codespaces/new", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/codespaces/secrets": Operation<"/repos/{owner}/{repo}/codespaces/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-repository-public-key + */ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key": Operation<"/repos/{owner}/{repo}/codespaces/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-repository-secret + */ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#list-repository-collaborators + */ + "GET /repos/{owner}/{repo}/collaborators": Operation<"/repos/{owner}/{repo}/collaborators", "get">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#check-if-a-user-is-a-repository-collaborator + */ + "GET /repos/{owner}/{repo}/collaborators/{username}": Operation<"/repos/{owner}/{repo}/collaborators/{username}", "get">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#get-repository-permissions-for-a-user + */ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission": Operation<"/repos/{owner}/{repo}/collaborators/{username}/permission", "get">; + /** + * @see https://docs.github.com/rest/commits/comments#list-commit-comments-for-a-repository + */ + "GET /repos/{owner}/{repo}/comments": Operation<"/repos/{owner}/{repo}/comments", "get">; + /** + * @see https://docs.github.com/rest/commits/comments#get-a-commit-comment + */ + "GET /repos/{owner}/{repo}/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-commit-comment + */ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/comments/{comment_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#list-commits + */ + "GET /repos/{owner}/{repo}/commits": Operation<"/repos/{owner}/{repo}/commits", "get">; 
+ /** + * @see https://docs.github.com/rest/commits/commits#list-branches-for-head-commit + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", "get">; + /** + * @see https://docs.github.com/rest/commits/comments#list-commit-comments + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/comments", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#list-pull-requests-associated-with-a-commit + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/pulls", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#get-a-commit + */ + "GET /repos/{owner}/{repo}/commits/{ref}": Operation<"/repos/{owner}/{repo}/commits/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-runs-for-a-git-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs": Operation<"/repos/{owner}/{repo}/commits/{ref}/check-runs", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-suites-for-a-git-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites": Operation<"/repos/{owner}/{repo}/commits/{ref}/check-suites", "get">; + /** + * @see https://docs.github.com/rest/commits/statuses#get-the-combined-status-for-a-specific-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/status": Operation<"/repos/{owner}/{repo}/commits/{ref}/status", "get">; + /** + * @see https://docs.github.com/rest/commits/statuses#list-commit-statuses-for-a-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses": Operation<"/repos/{owner}/{repo}/commits/{ref}/statuses", "get">; + /** + * @see https://docs.github.com/rest/metrics/community#get-community-profile-metrics + */ + "GET /repos/{owner}/{repo}/community/profile": Operation<"/repos/{owner}/{repo}/community/profile", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#compare-two-commits + */ + "GET /repos/{owner}/{repo}/compare/{basehead}": Operation<"/repos/{owner}/{repo}/compare/{basehead}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#compare-two-commits + */ + "GET /repos/{owner}/{repo}/compare/{base}...{head}": Operation<"/repos/{owner}/{repo}/compare/{base}...{head}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-repository-content + */ + "GET /repos/{owner}/{repo}/contents/{path}": Operation<"/repos/{owner}/{repo}/contents/{path}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-contributors + */ + "GET /repos/{owner}/{repo}/contributors": Operation<"/repos/{owner}/{repo}/contributors", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-dependabot-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/dependabot/alerts": Operation<"/repos/{owner}/{repo}/dependabot/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-a-dependabot-alert + */ + "GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/dependabot/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/dependabot/secrets": Operation<"/repos/{owner}/{repo}/dependabot/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-a-repository-public-key + 
*/ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key": Operation<"/repos/{owner}/{repo}/dependabot/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-a-repository-secret + */ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/dependency-graph#get-a-diff-of-the-dependencies-between-commits + */ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}": Operation<"/repos/{owner}/{repo}/dependency-graph/compare/{basehead}", "get">; + /** + * @see https://docs.github.com/rest/dependency-graph/sboms#export-a-software-bill-of-materials-sbom-for-a-repository + */ + "GET /repos/{owner}/{repo}/dependency-graph/sbom": Operation<"/repos/{owner}/{repo}/dependency-graph/sbom", "get">; + /** + * @see https://docs.github.com/rest/deployments/deployments#list-deployments + */ + "GET /repos/{owner}/{repo}/deployments": Operation<"/repos/{owner}/{repo}/deployments", "get">; + /** + * @see https://docs.github.com/rest/deployments/deployments#get-a-deployment + */ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}", "get">; + /** + * @see https://docs.github.com/rest/deployments/statuses#list-deployment-statuses + */ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "get">; + /** + * @see https://docs.github.com/rest/deployments/statuses#get-a-deployment-status + */ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}", "get">; + /** + * @see https://docs.github.com/rest/deployments/environments#list-environments + */ + "GET /repos/{owner}/{repo}/environments": Operation<"/repos/{owner}/{repo}/environments", "get">; + /** + * @see https://docs.github.com/rest/deployments/environments#get-an-environment + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}", "get">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#list-deployment-branch-policies + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", "get">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#get-deployment-branch-policy + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}", "get">; + /** + * @see https://docs.github.com/rest/deployments/protection-rules#get-all-deployment-protection-rules + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules", "get">; + /** + * @see https://docs.github.com/rest/deployments/protection-rules#list-custom-deployment-rule-integrations + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", "get">; + /** + * @see 
https://docs.github.com/rest/deployments/protection-rules#get-a-deployment-protection-rule + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repository-events + */ + "GET /repos/{owner}/{repo}/events": Operation<"/repos/{owner}/{repo}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-forks + */ + "GET /repos/{owner}/{repo}/forks": Operation<"/repos/{owner}/{repo}/forks", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-blob + */ + "GET /repos/{owner}/{repo}/git/blobs/{file_sha}": Operation<"/repos/{owner}/{repo}/git/blobs/{file_sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-commit + */ + "GET /repos/{owner}/{repo}/git/commits/{commit_sha}": Operation<"/repos/{owner}/{repo}/git/commits/{commit_sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#list-matching-references + */ + "GET /repos/{owner}/{repo}/git/matching-refs/{ref}": Operation<"/repos/{owner}/{repo}/git/matching-refs/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-reference + */ + "GET /repos/{owner}/{repo}/git/ref/{ref}": Operation<"/repos/{owner}/{repo}/git/ref/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-tag + */ + "GET /repos/{owner}/{repo}/git/tags/{tag_sha}": Operation<"/repos/{owner}/{repo}/git/tags/{tag_sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-tree + */ + "GET /repos/{owner}/{repo}/git/trees/{tree_sha}": Operation<"/repos/{owner}/{repo}/git/trees/{tree_sha}", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repos#list-repository-webhooks + */ + "GET /repos/{owner}/{repo}/hooks": Operation<"/repos/{owner}/{repo}/hooks", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repos#get-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repo-config#get-a-webhook-configuration-for-a-repository + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/config", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repo-deliveries#list-deliveries-for-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repo-deliveries#get-a-delivery-for-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}", "get">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#get-an-import-status + */ + "GET /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "get">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#get-commit-authors + */ + "GET /repos/{owner}/{repo}/import/authors": Operation<"/repos/{owner}/{repo}/import/authors", "get">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#get-large-files + */ + "GET /repos/{owner}/{repo}/import/large_files": Operation<"/repos/{owner}/{repo}/import/large_files", "get">; + /** + * @see 
https://docs.github.com/rest/reference/apps#get-a-repository-installation-for-the-authenticated-app + */ + "GET /repos/{owner}/{repo}/installation": Operation<"/repos/{owner}/{repo}/installation", "get">; + /** + * @see https://docs.github.com/rest/reference/interactions#get-interaction-restrictions-for-a-repository + */ + "GET /repos/{owner}/{repo}/interaction-limits": Operation<"/repos/{owner}/{repo}/interaction-limits", "get">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#list-repository-invitations + */ + "GET /repos/{owner}/{repo}/invitations": Operation<"/repos/{owner}/{repo}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-repository-issues + */ + "GET /repos/{owner}/{repo}/issues": Operation<"/repos/{owner}/{repo}/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-comments-for-a-repository + */ + "GET /repos/{owner}/{repo}/issues/comments": Operation<"/repos/{owner}/{repo}/issues/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-an-issue-comment + */ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue-comment + */ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-events-for-a-repository + */ + "GET /repos/{owner}/{repo}/issues/events": Operation<"/repos/{owner}/{repo}/issues/events", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-an-issue-event + */ + "GET /repos/{owner}/{repo}/issues/events/{event_id}": Operation<"/repos/{owner}/{repo}/issues/events/{event_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#check-if-a-user-can-be-assigned-to-a-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-comments + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-events + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/events": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-timeline-events-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/timeline", "get">; + /** + * @see 
https://docs.github.com/rest/deploy-keys#list-deploy-keys + */ + "GET /repos/{owner}/{repo}/keys": Operation<"/repos/{owner}/{repo}/keys", "get">; + /** + * @see https://docs.github.com/rest/deploy-keys#get-a-deploy-key + */ + "GET /repos/{owner}/{repo}/keys/{key_id}": Operation<"/repos/{owner}/{repo}/keys/{key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-a-repository + */ + "GET /repos/{owner}/{repo}/labels": Operation<"/repos/{owner}/{repo}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-a-label + */ + "GET /repos/{owner}/{repo}/labels/{name}": Operation<"/repos/{owner}/{repo}/labels/{name}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-languages + */ + "GET /repos/{owner}/{repo}/languages": Operation<"/repos/{owner}/{repo}/languages", "get">; + /** + * @see https://docs.github.com/rest/reference/licenses/#get-the-license-for-a-repository + */ + "GET /repos/{owner}/{repo}/license": Operation<"/repos/{owner}/{repo}/license", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-milestones + */ + "GET /repos/{owner}/{repo}/milestones": Operation<"/repos/{owner}/{repo}/milestones", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-a-milestone + */ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-issues-in-a-milestone + */ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/notifications": Operation<"/repos/{owner}/{repo}/notifications", "get">; + /** + * @see https://docs.github.com/rest/pages#get-a-github-pages-site + */ + "GET /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "get">; + /** + * @see https://docs.github.com/rest/pages#list-github-pages-builds + */ + "GET /repos/{owner}/{repo}/pages/builds": Operation<"/repos/{owner}/{repo}/pages/builds", "get">; + /** + * @see https://docs.github.com/rest/pages#get-latest-pages-build + */ + "GET /repos/{owner}/{repo}/pages/builds/latest": Operation<"/repos/{owner}/{repo}/pages/builds/latest", "get">; + /** + * @see https://docs.github.com/rest/pages#get-github-pages-build + */ + "GET /repos/{owner}/{repo}/pages/builds/{build_id}": Operation<"/repos/{owner}/{repo}/pages/builds/{build_id}", "get">; + /** + * @see https://docs.github.com/rest/pages#get-a-dns-health-check-for-github-pages + */ + "GET /repos/{owner}/{repo}/pages/health": Operation<"/repos/{owner}/{repo}/pages/health", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-repository-projects + */ + "GET /repos/{owner}/{repo}/projects": Operation<"/repos/{owner}/{repo}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests + */ + "GET /repos/{owner}/{repo}/pulls": Operation<"/repos/{owner}/{repo}/pulls", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-review-comments-in-a-repository + */ + "GET /repos/{owner}/{repo}/pulls/comments": Operation<"/repos/{owner}/{repo}/pulls/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-a-review-comment-for-a-pull-request + */ + "GET 
/repos/{owner}/{repo}/pulls/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-pull-request-review-comment + */ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-review-comments-on-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-commits-on-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/commits", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests-files + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/files", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#check-if-a-pull-request-has-been-merged + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/merge": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/merge", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-all-requested-reviewers-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-reviews-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-a-review-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-comments-for-a-pull-request-review + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-repository-readme + */ + "GET /repos/{owner}/{repo}/readme": Operation<"/repos/{owner}/{repo}/readme", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-repository-directory-readme + */ + "GET /repos/{owner}/{repo}/readme/{dir}": Operation<"/repos/{owner}/{repo}/readme/{dir}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-releases + */ + "GET /repos/{owner}/{repo}/releases": Operation<"/repos/{owner}/{repo}/releases", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-release-asset + */ + "GET /repos/{owner}/{repo}/releases/assets/{asset_id}": Operation<"/repos/{owner}/{repo}/releases/assets/{asset_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-the-latest-release + */ + "GET /repos/{owner}/{repo}/releases/latest": Operation<"/repos/{owner}/{repo}/releases/latest", "get">; + /** + * @see 
https://docs.github.com/rest/reference/repos#get-a-release-by-tag-name + */ + "GET /repos/{owner}/{repo}/releases/tags/{tag}": Operation<"/repos/{owner}/{repo}/releases/tags/{tag}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-release + */ + "GET /repos/{owner}/{repo}/releases/{release_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-release-assets + */ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets": Operation<"/repos/{owner}/{repo}/releases/{release_id}/assets", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-release + */ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions": Operation<"/repos/{owner}/{repo}/releases/{release_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/repos/rules#get-rules-for-a-branch + */ + "GET /repos/{owner}/{repo}/rules/branches/{branch}": Operation<"/repos/{owner}/{repo}/rules/branches/{branch}", "get">; + /** + * @see https://docs.github.com/rest/repos/rules#get-repository-rulesets + */ + "GET /repos/{owner}/{repo}/rulesets": Operation<"/repos/{owner}/{repo}/rulesets", "get">; + /** + * @see https://docs.github.com/rest/repos/rules#get-repository-ruleset + */ + "GET /repos/{owner}/{repo}/rulesets/{ruleset_id}": Operation<"/repos/{owner}/{repo}/rulesets/{ruleset_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#get-a-secret-scanning-alert + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-locations-for-a-secret-scanning-alert + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "get">; + /** + * @see https://docs.github.com/rest/security-advisories/repository-advisories#list-repository-security-advisories + */ + "GET /repos/{owner}/{repo}/security-advisories": Operation<"/repos/{owner}/{repo}/security-advisories", "get">; + /** + * @see https://docs.github.com/rest/security-advisories/repository-advisories#get-a-repository-security-advisory + */ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}": Operation<"/repos/{owner}/{repo}/security-advisories/{ghsa_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-stargazers + */ + "GET /repos/{owner}/{repo}/stargazers": Operation<"/repos/{owner}/{repo}/stargazers", "get">; + /** + * @see https://docs.github.com/rest/metrics/statistics#get-the-weekly-commit-activity + */ + "GET /repos/{owner}/{repo}/stats/code_frequency": Operation<"/repos/{owner}/{repo}/stats/code_frequency", "get">; + /** + * @see https://docs.github.com/rest/metrics/statistics#get-the-last-year-of-commit-activity + */ + "GET /repos/{owner}/{repo}/stats/commit_activity": Operation<"/repos/{owner}/{repo}/stats/commit_activity", "get">; + /** + * @see https://docs.github.com/rest/metrics/statistics#get-all-contributor-commit-activity + */ + "GET /repos/{owner}/{repo}/stats/contributors": Operation<"/repos/{owner}/{repo}/stats/contributors", "get">; + /** + * 
@see https://docs.github.com/rest/metrics/statistics#get-the-weekly-commit-count + */ + "GET /repos/{owner}/{repo}/stats/participation": Operation<"/repos/{owner}/{repo}/stats/participation", "get">; + /** + * @see https://docs.github.com/rest/metrics/statistics#get-the-hourly-commit-count-for-each-day + */ + "GET /repos/{owner}/{repo}/stats/punch_card": Operation<"/repos/{owner}/{repo}/stats/punch_card", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-watchers + */ + "GET /repos/{owner}/{repo}/subscribers": Operation<"/repos/{owner}/{repo}/subscribers", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-a-repository-subscription + */ + "GET /repos/{owner}/{repo}/subscription": Operation<"/repos/{owner}/{repo}/subscription", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-tags + */ + "GET /repos/{owner}/{repo}/tags": Operation<"/repos/{owner}/{repo}/tags", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-tag-protection-state-of-a-repository + */ + "GET /repos/{owner}/{repo}/tags/protection": Operation<"/repos/{owner}/{repo}/tags/protection", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#download-a-repository-archive + */ + "GET /repos/{owner}/{repo}/tarball/{ref}": Operation<"/repos/{owner}/{repo}/tarball/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-teams + */ + "GET /repos/{owner}/{repo}/teams": Operation<"/repos/{owner}/{repo}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-all-repository-topics + */ + "GET /repos/{owner}/{repo}/topics": Operation<"/repos/{owner}/{repo}/topics", "get">; + /** + * @see https://docs.github.com/rest/metrics/traffic#get-repository-clones + */ + "GET /repos/{owner}/{repo}/traffic/clones": Operation<"/repos/{owner}/{repo}/traffic/clones", "get">; + /** + * @see https://docs.github.com/rest/metrics/traffic#get-top-referral-paths + */ + "GET /repos/{owner}/{repo}/traffic/popular/paths": Operation<"/repos/{owner}/{repo}/traffic/popular/paths", "get">; + /** + * @see https://docs.github.com/rest/metrics/traffic#get-top-referral-sources + */ + "GET /repos/{owner}/{repo}/traffic/popular/referrers": Operation<"/repos/{owner}/{repo}/traffic/popular/referrers", "get">; + /** + * @see https://docs.github.com/rest/metrics/traffic#get-page-views + */ + "GET /repos/{owner}/{repo}/traffic/views": Operation<"/repos/{owner}/{repo}/traffic/views", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#check-if-vulnerability-alerts-are-enabled-for-a-repository + */ + "GET /repos/{owner}/{repo}/vulnerability-alerts": Operation<"/repos/{owner}/{repo}/vulnerability-alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#download-a-repository-archive + */ + "GET /repos/{owner}/{repo}/zipball/{ref}": Operation<"/repos/{owner}/{repo}/zipball/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-public-repositories + */ + "GET /repositories": Operation<"/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-environment-secrets + */ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-environment-public-key + */ + "GET 
/repositories/{repository_id}/environments/{environment_name}/secrets/public-key": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-environment-secret + */ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#list-environment-variables + */ + "GET /repositories/{repository_id}/environments/{environment_name}/variables": Operation<"/repositories/{repository_id}/environments/{environment_name}/variables", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#get-an-environment-variable + */ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/variables/{name}", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-code + */ + "GET /search/code": Operation<"/search/code", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-commits + */ + "GET /search/commits": Operation<"/search/commits", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-issues-and-pull-requests + */ + "GET /search/issues": Operation<"/search/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-labels + */ + "GET /search/labels": Operation<"/search/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-repositories + */ + "GET /search/repositories": Operation<"/search/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-topics + */ + "GET /search/topics": Operation<"/search/topics", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-users + */ + "GET /search/users": Operation<"/search/users", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#get-a-team-legacy + */ + "GET /teams/{team_id}": Operation<"/teams/{team_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussions-legacy + */ + "GET /teams/{team_id}/discussions": Operation<"/teams/{team_id}/discussions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussion-comments-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion-comment-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion-comment-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion-legacy + */ + "GET 
/teams/{team_id}/discussions/{discussion_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-pending-team-invitations-legacy + */ + "GET /teams/{team_id}/invitations": Operation<"/teams/{team_id}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-members-legacy + */ + "GET /teams/{team_id}/members": Operation<"/teams/{team_id}/members", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-team-member-legacy + */ + "GET /teams/{team_id}/members/{username}": Operation<"/teams/{team_id}/members/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user-legacy + */ + "GET /teams/{team_id}/memberships/{username}": Operation<"/teams/{team_id}/memberships/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#list-team-projects-legacy + */ + "GET /teams/{team_id}/projects": Operation<"/teams/{team_id}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#check-team-permissions-for-a-project-legacy + */ + "GET /teams/{team_id}/projects/{project_id}": Operation<"/teams/{team_id}/projects/{project_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#list-team-repositories-legacy + */ + "GET /teams/{team_id}/repos": Operation<"/teams/{team_id}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#check-team-permissions-for-a-repository-legacy + */ + "GET /teams/{team_id}/repos/{owner}/{repo}": Operation<"/teams/{team_id}/repos/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#list-child-teams-legacy + */ + "GET /teams/{team_id}/teams": Operation<"/teams/{team_id}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-the-authenticated-user + */ + "GET /user": Operation<"/user", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-users-blocked-by-the-authenticated-user + */ + "GET /user/blocks": Operation<"/user/blocks", "get">; + /** + * @see https://docs.github.com/rest/reference/users#check-if-a-user-is-blocked-by-the-authenticated-user + */ + "GET /user/blocks/{username}": Operation<"/user/blocks/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-codespaces-for-the-authenticated-user + */ + "GET /user/codespaces": Operation<"/user/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-secrets-for-the-authenticated-user + */ + "GET /user/codespaces/secrets": Operation<"/user/codespaces/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-public-key-for-the-authenticated-user + */ + "GET /user/codespaces/secrets/public-key": Operation<"/user/codespaces/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-secret-for-the-authenticated-user + */ + "GET /user/codespaces/secrets/{secret_name}": Operation<"/user/codespaces/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-a-user-secret + */ + "GET /user/codespaces/secrets/{secret_name}/repositories": Operation<"/user/codespaces/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-codespace-for-the-authenticated-user + */ + "GET /user/codespaces/{codespace_name}": 
Operation<"/user/codespaces/{codespace_name}", "get">; + /** + * @see https://docs.github.com/rest/codespaces/codespaces#get-details-about-a-codespace-export + */ + "GET /user/codespaces/{codespace_name}/exports/{export_id}": Operation<"/user/codespaces/{codespace_name}/exports/{export_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-machine-types-for-a-codespace + */ + "GET /user/codespaces/{codespace_name}/machines": Operation<"/user/codespaces/{codespace_name}/machines", "get">; + /** + * @see https://docs.github.com/rest/packages#list-docker-migration-conflicting-packages-for-authenticated-user + */ + "GET /user/docker/conflicts": Operation<"/user/docker/conflicts", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-email-addresses-for-the-authenticated-user + */ + "GET /user/emails": Operation<"/user/emails", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-followers-of-the-authenticated-user + */ + "GET /user/followers": Operation<"/user/followers", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-the-people-the-authenticated-user-follows + */ + "GET /user/following": Operation<"/user/following", "get">; + /** + * @see https://docs.github.com/rest/reference/users#check-if-a-person-is-followed-by-the-authenticated-user + */ + "GET /user/following/{username}": Operation<"/user/following/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-gpg-keys-for-the-authenticated-user + */ + "GET /user/gpg_keys": Operation<"/user/gpg_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-gpg-key-for-the-authenticated-user + */ + "GET /user/gpg_keys/{gpg_key_id}": Operation<"/user/gpg_keys/{gpg_key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-app-installations-accessible-to-the-user-access-token + */ + "GET /user/installations": Operation<"/user/installations", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-repositories-accessible-to-the-user-access-token + */ + "GET /user/installations/{installation_id}/repositories": Operation<"/user/installations/{installation_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/interactions#get-interaction-restrictions-for-your-public-repositories + */ + "GET /user/interaction-limits": Operation<"/user/interaction-limits", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-user-account-issues-assigned-to-the-authenticated-user + */ + "GET /user/issues": Operation<"/user/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-ssh-keys-for-the-authenticated-user + */ + "GET /user/keys": Operation<"/user/keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-public-ssh-key-for-the-authenticated-user + */ + "GET /user/keys/{key_id}": Operation<"/user/keys/{key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-subscriptions-for-the-authenticated-user + */ + "GET /user/marketplace_purchases": Operation<"/user/marketplace_purchases", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-subscriptions-for-the-authenticated-user-stubbed + */ + "GET /user/marketplace_purchases/stubbed": Operation<"/user/marketplace_purchases/stubbed", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-memberships-for-the-authenticated-user + */ + "GET /user/memberships/orgs": 
Operation<"/user/memberships/orgs", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-an-organization-membership-for-the-authenticated-user + */ + "GET /user/memberships/orgs/{org}": Operation<"/user/memberships/orgs/{org}", "get">; + /** + * @see https://docs.github.com/rest/migrations/users#list-user-migrations + */ + "GET /user/migrations": Operation<"/user/migrations", "get">; + /** + * @see https://docs.github.com/rest/migrations/users#get-a-user-migration-status + */ + "GET /user/migrations/{migration_id}": Operation<"/user/migrations/{migration_id}", "get">; + /** + * @see https://docs.github.com/rest/migrations/users#download-a-user-migration-archive + */ + "GET /user/migrations/{migration_id}/archive": Operation<"/user/migrations/{migration_id}/archive", "get">; + /** + * @see https://docs.github.com/rest/migrations/users#list-repositories-for-a-user-migration + */ + "GET /user/migrations/{migration_id}/repositories": Operation<"/user/migrations/{migration_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user + */ + "GET /user/orgs": Operation<"/user/orgs", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-the-authenticated-user + */ + "GET /user/packages": Operation<"/user/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-for-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}": Operation<"/user/packages/{package_type}/{package_name}", "get">; + /** + * @see https://docs.github.com/rest/packages#get-all-package-versions-for-a-package-owned-by-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}/versions": Operation<"/user/packages/{package_type}/{package_name}/versions", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-version-for-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/user/packages/{package_type}/{package_name}/versions/{package_version_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-email-addresses-for-the-authenticated-user + */ + "GET /user/public_emails": Operation<"/user/public_emails", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-the-authenticated-user + */ + "GET /user/repos": Operation<"/user/repos", "get">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#list-repository-invitations-for-the-authenticated-user + */ + "GET /user/repository_invitations": Operation<"/user/repository_invitations", "get">; + /** + * @see https://docs.github.com/rest/users/social-accounts#list-social-accounts-for-the-authenticated-user + */ + "GET /user/social_accounts": Operation<"/user/social_accounts", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-ssh-signing-keys-for-the-authenticated-user + */ + "GET /user/ssh_signing_keys": Operation<"/user/ssh_signing_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-ssh-signing-key-for-the-authenticated-user + */ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}": Operation<"/user/ssh_signing_keys/{ssh_signing_key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-starred-by-the-authenticated-user + */ + "GET /user/starred": Operation<"/user/starred", "get">; + /** + * @see 
https://docs.github.com/rest/reference/activity#check-if-a-repository-is-starred-by-the-authenticated-user + */ + "GET /user/starred/{owner}/{repo}": Operation<"/user/starred/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-watched-by-the-authenticated-user + */ + "GET /user/subscriptions": Operation<"/user/subscriptions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-teams-for-the-authenticated-user + */ + "GET /user/teams": Operation<"/user/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-users + */ + "GET /users": Operation<"/users", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-user + */ + "GET /users/{username}": Operation<"/users/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-docker-migration-conflicting-packages-for-user + */ + "GET /users/{username}/docker/conflicts": Operation<"/users/{username}/docker/conflicts", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-events-for-the-authenticated-user + */ + "GET /users/{username}/events": Operation<"/users/{username}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-organization-events-for-the-authenticated-user + */ + "GET /users/{username}/events/orgs/{org}": Operation<"/users/{username}/events/orgs/{org}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-for-a-user + */ + "GET /users/{username}/events/public": Operation<"/users/{username}/events/public", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-followers-of-a-user + */ + "GET /users/{username}/followers": Operation<"/users/{username}/followers", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-the-people-a-user-follows + */ + "GET /users/{username}/following": Operation<"/users/{username}/following", "get">; + /** + * @see https://docs.github.com/rest/reference/users#check-if-a-user-follows-another-user + */ + "GET /users/{username}/following/{target_user}": Operation<"/users/{username}/following/{target_user}", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gists-for-a-user + */ + "GET /users/{username}/gists": Operation<"/users/{username}/gists", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-gpg-keys-for-a-user + */ + "GET /users/{username}/gpg_keys": Operation<"/users/{username}/gpg_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-contextual-information-for-a-user + */ + "GET /users/{username}/hovercard": Operation<"/users/{username}/hovercard", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-user-installation-for-the-authenticated-app + */ + "GET /users/{username}/installation": Operation<"/users/{username}/installation", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-keys-for-a-user + */ + "GET /users/{username}/keys": Operation<"/users/{username}/keys", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-a-user + */ + "GET /users/{username}/orgs": Operation<"/users/{username}/orgs", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-user + */ + "GET /users/{username}/packages": Operation<"/users/{username}/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-for-a-user + */ + "GET 
/users/{username}/packages/{package_type}/{package_name}": Operation<"/users/{username}/packages/{package_type}/{package_name}", "get">; + /** + * @see https://docs.github.com/rest/packages#get-all-package-versions-for-a-package-owned-by-a-user + */ + "GET /users/{username}/packages/{package_type}/{package_name}/versions": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-version-for-a-user + */ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-user-projects + */ + "GET /users/{username}/projects": Operation<"/users/{username}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-events-received-by-the-authenticated-user + */ + "GET /users/{username}/received_events": Operation<"/users/{username}/received_events", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-received-by-a-user + */ + "GET /users/{username}/received_events/public": Operation<"/users/{username}/received_events/public", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-a-user + */ + "GET /users/{username}/repos": Operation<"/users/{username}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-actions-billing-for-a-user + */ + "GET /users/{username}/settings/billing/actions": Operation<"/users/{username}/settings/billing/actions", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-packages-billing-for-a-user + */ + "GET /users/{username}/settings/billing/packages": Operation<"/users/{username}/settings/billing/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-shared-storage-billing-for-a-user + */ + "GET /users/{username}/settings/billing/shared-storage": Operation<"/users/{username}/settings/billing/shared-storage", "get">; + /** + * @see https://docs.github.com/rest/users/social-accounts#list-social-accounts-for-a-user + */ + "GET /users/{username}/social_accounts": Operation<"/users/{username}/social_accounts", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-ssh-signing-keys-for-a-user + */ + "GET /users/{username}/ssh_signing_keys": Operation<"/users/{username}/ssh_signing_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-starred-by-a-user + */ + "GET /users/{username}/starred": Operation<"/users/{username}/starred", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-watched-by-a-user + */ + "GET /users/{username}/subscriptions": Operation<"/users/{username}/subscriptions", "get">; + /** + * @see https://docs.github.com/rest/reference/meta#get-all-api-versions + */ + "GET /versions": Operation<"/versions", "get">; + /** + * @see https://docs.github.com/rest/meta#get-the-zen-of-github + */ + "GET /zen": Operation<"/zen", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#update-a-webhook-configuration-for-an-app + */ + "PATCH /app/hook/config": Operation<"/app/hook/config", "patch">; + /** + * @see https://docs.github.com/rest/reference/apps#reset-a-token + */ + "PATCH /applications/{client_id}/token": Operation<"/applications/{client_id}/token", "patch">; 
+ /** + * @see https://docs.github.com/rest/reference/gists/#update-a-gist + */ + "PATCH /gists/{gist_id}": Operation<"/gists/{gist_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/gists#update-a-gist-comment + */ + "PATCH /gists/{gist_id}/comments/{comment_id}": Operation<"/gists/{gist_id}/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/activity#mark-a-thread-as-read + */ + "PATCH /notifications/threads/{thread_id}": Operation<"/notifications/threads/{thread_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-an-organization + */ + "PATCH /orgs/{org}": Operation<"/orgs/{org}", "patch">; + /** + * @see https://docs.github.com/rest/reference/actions#update-a-required-workflow + */ + "PATCH /orgs/{org}/actions/required_workflows/{required_workflow_id}": Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}", "patch">; + /** + * @see https://docs.github.com/rest/actions/variables#update-an-organization-variable + */ + "PATCH /orgs/{org}/actions/variables/{name}": Operation<"/orgs/{org}/actions/variables/{name}", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-an-organization-webhook + */ + "PATCH /orgs/{org}/hooks/{hook_id}": Operation<"/orgs/{org}/hooks/{hook_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-a-webhook-configuration-for-an-organization + */ + "PATCH /orgs/{org}/hooks/{hook_id}/config": Operation<"/orgs/{org}/hooks/{hook_id}/config", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-team + */ + "PATCH /orgs/{org}/teams/{team_slug}": Operation<"/orgs/{org}/teams/{team_slug}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion + */ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion-comment + */ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/projects#update-a-project-card + */ + "PATCH /projects/columns/cards/{card_id}": Operation<"/projects/columns/cards/{card_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/projects#update-a-project-column + */ + "PATCH /projects/columns/{column_id}": Operation<"/projects/columns/{column_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/projects#update-a-project + */ + "PATCH /projects/{project_id}": Operation<"/projects/{project_id}", "patch">; + /** + * @see https://docs.github.com/rest/repos/repos#update-a-repository + */ + "PATCH /repos/{owner}/{repo}": Operation<"/repos/{owner}/{repo}", "patch">; + /** + * @see https://docs.github.com/rest/actions/variables#update-a-repository-variable + */ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}": Operation<"/repos/{owner}/{repo}/actions/variables/{name}", "patch">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#update-pull-request-review-protection + */ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", "patch">; + /** + * @see 
https://docs.github.com/rest/branches/branch-protection#update-status-check-protection + */ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", "patch">; + /** + * @see https://docs.github.com/rest/reference/checks#update-a-check-run + */ + "PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites + */ + "PATCH /repos/{owner}/{repo}/check-suites/preferences": Operation<"/repos/{owner}/{repo}/check-suites/preferences", "patch">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#update-a-code-scanning-alert + */ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", "patch">; + /** + * @see https://docs.github.com/rest/code-scanning#update-a-code-scanning-default-setup-configuration + */ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup": Operation<"/repos/{owner}/{repo}/code-scanning/default-setup", "patch">; + /** + * @see https://docs.github.com/rest/commits/comments#update-a-commit-comment + */ + "PATCH /repos/{owner}/{repo}/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/dependabot#update-a-dependabot-alert + */ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/dependabot/alerts/{alert_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/git#update-a-reference + */ + "PATCH /repos/{owner}/{repo}/git/refs/{ref}": Operation<"/repos/{owner}/{repo}/git/refs/{ref}", "patch">; + /** + * @see https://docs.github.com/rest/webhooks/repos#update-a-repository-webhook + */ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}", "patch">; + /** + * @see https://docs.github.com/rest/webhooks/repo-config#update-a-webhook-configuration-for-a-repository + */ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/config", "patch">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#update-an-import + */ + "PATCH /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "patch">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author + */ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}": Operation<"/repos/{owner}/{repo}/import/authors/{author_id}", "patch">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference + */ + "PATCH /repos/{owner}/{repo}/import/lfs": Operation<"/repos/{owner}/{repo}/import/lfs", "patch">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#update-a-repository-invitation + */ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}": Operation<"/repos/{owner}/{repo}/invitations/{invitation_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-an-issue-comment + */ + "PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-an-issue + */ + "PATCH /repos/{owner}/{repo}/issues/{issue_number}": 
Operation<"/repos/{owner}/{repo}/issues/{issue_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-a-label + */ + "PATCH /repos/{owner}/{repo}/labels/{name}": Operation<"/repos/{owner}/{repo}/labels/{name}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-a-milestone + */ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/pulls#update-a-review-comment-for-a-pull-request + */ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/pulls/#update-a-pull-request + */ + "PATCH /repos/{owner}/{repo}/pulls/{pull_number}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-a-release-asset + */ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}": Operation<"/repos/{owner}/{repo}/releases/assets/{asset_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-a-release + */ + "PATCH /repos/{owner}/{repo}/releases/{release_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#update-a-secret-scanning-alert + */ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", "patch">; + /** + * @see https://docs.github.com/rest/security-advisories/repository-advisories#update-a-repository-security-advisory + */ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}": Operation<"/repos/{owner}/{repo}/security-advisories/{ghsa_id}", "patch">; + /** + * @see https://docs.github.com/rest/actions/variables#update-an-environment-variable + */ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/variables/{name}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams/#update-a-team-legacy + */ + "PATCH /teams/{team_id}": Operation<"/teams/{team_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion-legacy + */ + "PATCH /teams/{team_id}/discussions/{discussion_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion-comment-legacy + */ + "PATCH /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/users/#update-the-authenticated-user + */ + "PATCH /user": Operation<"/user", "patch">; + /** + * @see https://docs.github.com/rest/reference/codespaces#update-a-codespace-for-the-authenticated-user + */ + "PATCH /user/codespaces/{codespace_name}": Operation<"/user/codespaces/{codespace_name}", "patch">; + /** + * @see https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user + */ + "PATCH /user/email/visibility": Operation<"/user/email/visibility", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-an-organization-membership-for-the-authenticated-user + */ + "PATCH /user/memberships/orgs/{org}": 
Operation<"/user/memberships/orgs/{org}", "patch">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#accept-a-repository-invitation + */ + "PATCH /user/repository_invitations/{invitation_id}": Operation<"/user/repository_invitations/{invitation_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/apps#create-a-github-app-from-a-manifest + */ + "POST /app-manifests/{code}/conversions": Operation<"/app-manifests/{code}/conversions", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#redeliver-a-delivery-for-an-app-webhook + */ + "POST /app/hook/deliveries/{delivery_id}/attempts": Operation<"/app/hook/deliveries/{delivery_id}/attempts", "post">; + /** + * @see https://docs.github.com/rest/reference/apps/#create-an-installation-access-token-for-an-app + */ + "POST /app/installations/{installation_id}/access_tokens": Operation<"/app/installations/{installation_id}/access_tokens", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#check-a-token + */ + "POST /applications/{client_id}/token": Operation<"/applications/{client_id}/token", "post">; + /** + * @see https://docs.github.com/rest/apps/apps#create-a-scoped-access-token + */ + "POST /applications/{client_id}/token/scoped": Operation<"/applications/{client_id}/token/scoped", "post">; + /** + * @see https://docs.github.com/rest/reference/gists#create-a-gist + */ + "POST /gists": Operation<"/gists", "post">; + /** + * @see https://docs.github.com/rest/reference/gists#create-a-gist-comment + */ + "POST /gists/{gist_id}/comments": Operation<"/gists/{gist_id}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/gists#fork-a-gist + */ + "POST /gists/{gist_id}/forks": Operation<"/gists/{gist_id}/forks", "post">; + /** + * @see https://docs.github.com/rest/reference/markdown#render-a-markdown-document + */ + "POST /markdown": Operation<"/markdown", "post">; + /** + * @see https://docs.github.com/rest/reference/markdown#render-a-markdown-document-in-raw-mode + */ + "POST /markdown/raw": Operation<"/markdown/raw", "post">; + /** + * @see https://docs.github.com/rest/orgs/orgs#review-requests-to-access-organization-resources-with-a-fine-grained-personal-access-token + */ + "POST /organizations/{org}/personal-access-token-requests": Operation<"/organizations/{org}/personal-access-token-requests", "post">; + /** + * @see https://docs.github.com/rest/orgs/orgs#review-a-request-to-access-organization-resources-with-a-fine-grained-personal-access-token + */ + "POST /organizations/{org}/personal-access-token-requests/{pat_request_id}": Operation<"/organizations/{org}/personal-access-token-requests/{pat_request_id}", "post">; + /** + * @see https://docs.github.com/rest/orgs/orgs#update-the-access-to-organization-resources-via-fine-grained-personal-access-tokens + */ + "POST /organizations/{org}/personal-access-tokens": Operation<"/organizations/{org}/personal-access-tokens", "post">; + /** + * @see https://docs.github.com/rest/orgs/orgs#update-the-access-a-fine-grained-personal-access-token-has-to-organization-resources + */ + "POST /organizations/{org}/personal-access-tokens/{pat_id}": Operation<"/organizations/{org}/personal-access-tokens/{pat_id}", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-required-workflow + */ + "POST /orgs/{org}/actions/required_workflows": Operation<"/orgs/{org}/actions/required_workflows", "post">; + /** + * @see 
https://docs.github.com/rest/actions/self-hosted-runners#create-configuration-for-a-just-in-time-runner-for-an-organization + */ + "POST /orgs/{org}/actions/runners/generate-jitconfig": Operation<"/orgs/{org}/actions/runners/generate-jitconfig", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-registration-token-for-an-organization + */ + "POST /orgs/{org}/actions/runners/registration-token": Operation<"/orgs/{org}/actions/runners/registration-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-remove-token-for-an-organization + */ + "POST /orgs/{org}/actions/runners/remove-token": Operation<"/orgs/{org}/actions/runners/remove-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#add-custom-labels-to-a-self-hosted-runner-for-an-organization + */ + "POST /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "post">; + /** + * @see https://docs.github.com/rest/actions/variables#create-an-organization-variable + */ + "POST /orgs/{org}/actions/variables": Operation<"/orgs/{org}/actions/variables", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#set-codespaces-billing-users + */ + "POST /orgs/{org}/codespaces/billing/selected_users": Operation<"/orgs/{org}/codespaces/billing/selected_users", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#create-an-organization-webhook + */ + "POST /orgs/{org}/hooks": Operation<"/orgs/{org}/hooks", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#redeliver-a-delivery-for-an-organization-webhook + */ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": Operation<"/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#ping-an-organization-webhook + */ + "POST /orgs/{org}/hooks/{hook_id}/pings": Operation<"/orgs/{org}/hooks/{hook_id}/pings", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#create-an-organization-invitation + */ + "POST /orgs/{org}/invitations": Operation<"/orgs/{org}/invitations", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces + */ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop": Operation<"/orgs/{org}/members/{username}/codespaces/{codespace_name}/stop", "post">; + /** + * @see https://docs.github.com/rest/migrations/orgs#start-an-organization-migration + */ + "POST /orgs/{org}/migrations": Operation<"/orgs/{org}/migrations", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-for-an-organization + */ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-version-for-an-organization + */ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-an-organization-project + */ + "POST /orgs/{org}/projects": Operation<"/orgs/{org}/projects", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-an-organization-repository + */ + "POST /orgs/{org}/repos": Operation<"/orgs/{org}/repos", "post">; + /** + * @see 
https://docs.github.com/rest/repos/rules#create-organization-repository-ruleset + */ + "POST /orgs/{org}/rulesets": Operation<"/orgs/{org}/rulesets", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-team + */ + "POST /orgs/{org}/teams": Operation<"/orgs/{org}/teams", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion + */ + "POST /orgs/{org}/teams/{team_slug}/discussions": Operation<"/orgs/{org}/teams/{team_slug}/discussions", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion-comment + */ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion-comment + */ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion + */ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#enable-or-disable-security-product-on-all-org-repos + */ + "POST /orgs/{org}/{security_product}/{enablement}": Operation<"/orgs/{org}/{security_product}/{enablement}", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#move-a-project-card + */ + "POST /projects/columns/cards/{card_id}/moves": Operation<"/projects/columns/cards/{card_id}/moves", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-project-card + */ + "POST /projects/columns/{column_id}/cards": Operation<"/projects/columns/{column_id}/cards", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#move-a-project-column + */ + "POST /projects/columns/{column_id}/moves": Operation<"/projects/columns/{column_id}/moves", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-project-column + */ + "POST /projects/{project_id}/columns": Operation<"/projects/{project_id}/columns", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#re-run-job-for-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun": Operation<"/repos/{owner}/{repo}/actions/jobs/{job_id}/rerun", "post">; + /** + * @see https://docs.github.com/rest/actions/self-hosted-runners#create-configuration-for-a-just-in-time-runner-for-a-repository + */ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig": Operation<"/repos/{owner}/{repo}/actions/runners/generate-jitconfig", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-registration-token-for-a-repository + */ + "POST /repos/{owner}/{repo}/actions/runners/registration-token": Operation<"/repos/{owner}/{repo}/actions/runners/registration-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-remove-token-for-a-repository + */ + "POST /repos/{owner}/{repo}/actions/runners/remove-token": Operation<"/repos/{owner}/{repo}/actions/runners/remove-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#add-custom-labels-to-a-self-hosted-runner-for-a-repository + */ + 
"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#approve-a-workflow-run-for-a-fork-pull-request + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/approve", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#cancel-a-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/cancel", "post">; + /** + * @see https://docs.github.com/rest/actions/workflow-runs#review-custom-deployment-protection-rules-for-a-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#review-pending-deployments-for-a-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#re-run-a-workflow + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/rerun", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#re-run-workflow-failed-jobs + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs", "post">; + /** + * @see https://docs.github.com/rest/actions/variables#create-a-repository-variable + */ + "POST /repos/{owner}/{repo}/actions/variables": Operation<"/repos/{owner}/{repo}/actions/variables", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-workflow-dispatch-event + */ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches", "post">; + /** + * @see https://docs.github.com/rest/repos/autolinks#create-an-autolink-reference-for-a-repository + */ + "POST /repos/{owner}/{repo}/autolinks": Operation<"/repos/{owner}/{repo}/autolinks", "post">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#set-admin-branch-protection + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", "post">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#create-commit-signature-protection + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", "post">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#add-status-check-contexts + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "post">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#add-app-access-restrictions + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "post">; + /** + * @see 
https://docs.github.com/rest/branches/branch-protection#add-team-access-restrictions + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "post">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#add-user-access-restrictions + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "post">; + /** + * @see https://docs.github.com/rest/branches/branches#rename-a-branch + */ + "POST /repos/{owner}/{repo}/branches/{branch}/rename": Operation<"/repos/{owner}/{repo}/branches/{branch}/rename", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#create-a-check-run + */ + "POST /repos/{owner}/{repo}/check-runs": Operation<"/repos/{owner}/{repo}/check-runs", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#rerequest-a-check-run + */ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#create-a-check-suite + */ + "POST /repos/{owner}/{repo}/check-suites": Operation<"/repos/{owner}/{repo}/check-suites", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#rerequest-a-check-suite + */ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest": Operation<"/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", "post">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#upload-an-analysis-as-sarif-data + */ + "POST /repos/{owner}/{repo}/code-scanning/sarifs": Operation<"/repos/{owner}/{repo}/code-scanning/sarifs", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-a-codespace-in-a-repository + */ + "POST /repos/{owner}/{repo}/codespaces": Operation<"/repos/{owner}/{repo}/codespaces", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-commit-comment + */ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/comments/{comment_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/commits/comments#create-a-commit-comment + */ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/dependency-graph#create-a-snapshot-of-dependencies-for-a-repository + */ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots": Operation<"/repos/{owner}/{repo}/dependency-graph/snapshots", "post">; + /** + * @see https://docs.github.com/rest/deployments/deployments#create-a-deployment + */ + "POST /repos/{owner}/{repo}/deployments": Operation<"/repos/{owner}/{repo}/deployments", "post">; + /** + * @see https://docs.github.com/rest/deployments/statuses#create-a-deployment-status + */ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-repository-dispatch-event + */ + "POST /repos/{owner}/{repo}/dispatches": Operation<"/repos/{owner}/{repo}/dispatches", "post">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#create-deployment-branch-policy + */ + "POST 
/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", "post">; + /** + * @see https://docs.github.com/rest/deployments/deployment-protection-rules#create-a-deployment-protection-rule + */ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-fork + */ + "POST /repos/{owner}/{repo}/forks": Operation<"/repos/{owner}/{repo}/forks", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-blob + */ + "POST /repos/{owner}/{repo}/git/blobs": Operation<"/repos/{owner}/{repo}/git/blobs", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-commit + */ + "POST /repos/{owner}/{repo}/git/commits": Operation<"/repos/{owner}/{repo}/git/commits", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-reference + */ + "POST /repos/{owner}/{repo}/git/refs": Operation<"/repos/{owner}/{repo}/git/refs", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-tag-object + */ + "POST /repos/{owner}/{repo}/git/tags": Operation<"/repos/{owner}/{repo}/git/tags", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-tree + */ + "POST /repos/{owner}/{repo}/git/trees": Operation<"/repos/{owner}/{repo}/git/trees", "post">; + /** + * @see https://docs.github.com/rest/webhooks/repos#create-a-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks": Operation<"/repos/{owner}/{repo}/hooks", "post">; + /** + * @see https://docs.github.com/rest/webhooks/repo-deliveries#redeliver-a-delivery-for-a-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", "post">; + /** + * @see https://docs.github.com/rest/webhooks/repos#ping-a-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/pings": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/pings", "post">; + /** + * @see https://docs.github.com/rest/webhooks/repos#test-the-push-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/tests": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/tests", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-an-issue + */ + "POST /repos/{owner}/{repo}/issues": Operation<"/repos/{owner}/{repo}/issues", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-an-issue-comment + */ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#add-assignees-to-an-issue + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/assignees", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-an-issue-comment + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#add-labels-to-an-issue + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/labels": 
Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-an-issue + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/deploy-keys#create-a-deploy-key + */ + "POST /repos/{owner}/{repo}/keys": Operation<"/repos/{owner}/{repo}/keys", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-a-label + */ + "POST /repos/{owner}/{repo}/labels": Operation<"/repos/{owner}/{repo}/labels", "post">; + /** + * @see https://docs.github.com/rest/branches/branches#sync-a-fork-branch-with-the-upstream-repository + */ + "POST /repos/{owner}/{repo}/merge-upstream": Operation<"/repos/{owner}/{repo}/merge-upstream", "post">; + /** + * @see https://docs.github.com/rest/branches/branches#merge-a-branch + */ + "POST /repos/{owner}/{repo}/merges": Operation<"/repos/{owner}/{repo}/merges", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-a-milestone + */ + "POST /repos/{owner}/{repo}/milestones": Operation<"/repos/{owner}/{repo}/milestones", "post">; + /** + * @see https://docs.github.com/rest/pages#create-a-github-pages-site + */ + "POST /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "post">; + /** + * @see https://docs.github.com/rest/pages#request-a-github-pages-build + */ + "POST /repos/{owner}/{repo}/pages/builds": Operation<"/repos/{owner}/{repo}/pages/builds", "post">; + /** + * @see https://docs.github.com/rest/pages#create-a-github-pages-deployment + */ + "POST /repos/{owner}/{repo}/pages/deployment": Operation<"/repos/{owner}/{repo}/pages/deployment", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-repository-project + */ + "POST /repos/{owner}/{repo}/projects": Operation<"/repos/{owner}/{repo}/projects", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls": Operation<"/repos/{owner}/{repo}/pulls", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-pull-request-review-comment + */ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-a-codespace-from-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/codespaces", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-review-comment-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-reply-for-a-review-comment + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#request-reviewers-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-review-for-a-pull-request + */ + "POST 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#submit-a-review-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events", "post">; + /** + * @see https://docs.github.com/rest/releases/releases#create-a-release + */ + "POST /repos/{owner}/{repo}/releases": Operation<"/repos/{owner}/{repo}/releases", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#generate-release-notes + */ + "POST /repos/{owner}/{repo}/releases/generate-notes": Operation<"/repos/{owner}/{repo}/releases/generate-notes", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions/#create-reaction-for-a-release + */ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions": Operation<"/repos/{owner}/{repo}/releases/{release_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/repos/rules#create-repository-ruleset + */ + "POST /repos/{owner}/{repo}/rulesets": Operation<"/repos/{owner}/{repo}/rulesets", "post">; + /** + * @see https://docs.github.com/rest/security-advisories/repository-advisories#create-a-repository-security-advisory + */ + "POST /repos/{owner}/{repo}/security-advisories": Operation<"/repos/{owner}/{repo}/security-advisories", "post">; + /** + * @see https://docs.github.com/rest/security-advisories/repository-advisories#privately-report-a-security-vulnerability + */ + "POST /repos/{owner}/{repo}/security-advisories/reports": Operation<"/repos/{owner}/{repo}/security-advisories/reports", "post">; + /** + * @see https://docs.github.com/rest/commits/statuses#create-a-commit-status + */ + "POST /repos/{owner}/{repo}/statuses/{sha}": Operation<"/repos/{owner}/{repo}/statuses/{sha}", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-tag-protection-state-for-a-repository + */ + "POST /repos/{owner}/{repo}/tags/protection": Operation<"/repos/{owner}/{repo}/tags/protection", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#transfer-a-repository + */ + "POST /repos/{owner}/{repo}/transfer": Operation<"/repos/{owner}/{repo}/transfer", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-repository-using-a-template + */ + "POST /repos/{template_owner}/{template_repo}/generate": Operation<"/repos/{template_owner}/{template_repo}/generate", "post">; + /** + * @see https://docs.github.com/rest/actions/variables#create-an-environment-variable + */ + "POST /repositories/{repository_id}/environments/{environment_name}/variables": Operation<"/repositories/{repository_id}/environments/{environment_name}/variables", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion-legacy + */ + "POST /teams/{team_id}/discussions": Operation<"/teams/{team_id}/discussions", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion-comment-legacy + */ + "POST /teams/{team_id}/discussions/{discussion_number}/comments": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions/#create-reaction-for-a-team-discussion-comment-legacy + */ + "POST /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": 
Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions/#create-reaction-for-a-team-discussion-legacy + */ + "POST /teams/{team_id}/discussions/{discussion_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces": Operation<"/user/codespaces", "post">; + /** + * @see https://docs.github.com/rest/codespaces/codespaces#export-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces/{codespace_name}/exports": Operation<"/user/codespaces/{codespace_name}/exports", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces/codespaces#create-a-repository-from-an-unpublished-codespace + */ + "POST /user/codespaces/{codespace_name}/publish": Operation<"/user/codespaces/{codespace_name}/publish", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#start-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces/{codespace_name}/start": Operation<"/user/codespaces/{codespace_name}/start", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#stop-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces/{codespace_name}/stop": Operation<"/user/codespaces/{codespace_name}/stop", "post">; + /** + * @see https://docs.github.com/rest/reference/users#add-an-email-address-for-the-authenticated-user + */ + "POST /user/emails": Operation<"/user/emails", "post">; + /** + * @see https://docs.github.com/rest/reference/users#create-a-gpg-key-for-the-authenticated-user + */ + "POST /user/gpg_keys": Operation<"/user/gpg_keys", "post">; + /** + * @see https://docs.github.com/rest/reference/users#create-a-public-ssh-key-for-the-authenticated-user + */ + "POST /user/keys": Operation<"/user/keys", "post">; + /** + * @see https://docs.github.com/rest/migrations/users#start-a-user-migration + */ + "POST /user/migrations": Operation<"/user/migrations", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-for-the-authenticated-user + */ + "POST /user/packages/{package_type}/{package_name}/restore{?token}": Operation<"/user/packages/{package_type}/{package_name}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-version-for-the-authenticated-user + */ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": Operation<"/user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-user-project + */ + "POST /user/projects": Operation<"/user/projects", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user + */ + "POST /user/repos": Operation<"/user/repos", "post">; + /** + * @see https://docs.github.com/rest/users/social-accounts#add-social-account-for-authenticated-user + */ + "POST /user/social_accounts": Operation<"/user/social_accounts", "post">; + /** + * @see https://docs.github.com/rest/reference/users#create-an-ssh-signing-key-for-the-authenticated-user + */ + "POST /user/ssh_signing_keys": Operation<"/user/ssh_signing_keys", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-for-a-user + */ + "POST 
/users/{username}/packages/{package_type}/{package_name}/restore{?token}": Operation<"/users/{username}/packages/{package_type}/{package_name}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-version-for-a-user + */ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#upload-a-release-asset + */ + "POST {origin}/repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}": Operation<"/repos/{owner}/{repo}/releases/{release_id}/assets", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#suspend-an-app-installation + */ + "PUT /app/installations/{installation_id}/suspended": Operation<"/app/installations/{installation_id}/suspended", "put">; + /** + * @see https://docs.github.com/rest/reference/gists#star-a-gist + */ + "PUT /gists/{gist_id}/star": Operation<"/gists/{gist_id}/star", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#mark-notifications-as-read + */ + "PUT /notifications": Operation<"/notifications", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#set-a-thread-subscription + */ + "PUT /notifications/threads/{thread_id}/subscription": Operation<"/notifications/threads/{thread_id}/subscription", "put">; + /** + * @see https://docs.github.com/rest/actions/oidc#set-the-customization-template-for-an-oidc-subject-claim-for-an-organization + */ + "PUT /orgs/{org}/actions/oidc/customization/sub": Operation<"/orgs/{org}/actions/oidc/customization/sub", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-github-actions-permissions-for-an-organization + */ + "PUT /orgs/{org}/actions/permissions": Operation<"/orgs/{org}/actions/permissions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-selected-repositories-enabled-for-github-actions-in-an-organization + */ + "PUT /orgs/{org}/actions/permissions/repositories": Operation<"/orgs/{org}/actions/permissions/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#enable-a-selected-repository-for-github-actions-in-an-organization + */ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}": Operation<"/orgs/{org}/actions/permissions/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-allowed-actions-for-an-organization + */ + "PUT /orgs/{org}/actions/permissions/selected-actions": Operation<"/orgs/{org}/actions/permissions/selected-actions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-default-workflow-permissions + */ + "PUT /orgs/{org}/actions/permissions/workflow": Operation<"/orgs/{org}/actions/permissions/workflow", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-selected-repositories-for-a-required-workflow + */ + "PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories": Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#add-a-repository-to-selected-repositories-list-for-a-required-workflow + */ + "PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}": 
Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-custom-labels-for-a-self-hosted-runner-for-an-organization + */ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret + */ + "PUT /orgs/{org}/actions/secrets/{secret_name}": Operation<"/orgs/{org}/actions/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-selected-repositories-for-an-organization-secret + */ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#add-selected-repository-to-an-organization-secret + */ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/actions/variables#set-selected-repositories-for-an-organization-variable + */ + "PUT /orgs/{org}/actions/variables/{name}/repositories": Operation<"/orgs/{org}/actions/variables/{name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/actions/variables#add-selected-repository-to-an-organization-variable + */ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/variables/{name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#block-a-user-from-an-organization + */ + "PUT /orgs/{org}/blocks/{username}": Operation<"/orgs/{org}/blocks/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#set-codespaces-billing + */ + "PUT /orgs/{org}/codespaces/billing": Operation<"/orgs/{org}/codespaces/billing", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret + */ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}": Operation<"/orgs/{org}/codespaces/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-an-organization-secret + */ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/codespaces/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#add-selected-repository-to-an-organization-secret + */ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret + */ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#set-selected-repositories-for-an-organization-secret + */ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#add-selected-repository-to-an-organization-secret + */ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}": 
Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/interactions#set-interaction-restrictions-for-an-organization + */ + "PUT /orgs/{org}/interaction-limits": Operation<"/orgs/{org}/interaction-limits", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#set-organization-membership-for-a-user + */ + "PUT /orgs/{org}/memberships/{username}": Operation<"/orgs/{org}/memberships/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#convert-an-organization-member-to-outside-collaborator + */ + "PUT /orgs/{org}/outside_collaborators/{username}": Operation<"/orgs/{org}/outside_collaborators/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#set-public-organization-membership-for-the-authenticated-user + */ + "PUT /orgs/{org}/public_members/{username}": Operation<"/orgs/{org}/public_members/{username}", "put">; + /** + * @see https://docs.github.com/rest/repos/rules#update-organization-ruleset + */ + "PUT /orgs/{org}/rulesets/{ruleset_id}": Operation<"/orgs/{org}/rulesets/{ruleset_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#add-a-security-manager-team + */ + "PUT /orgs/{org}/security-managers/teams/{team_slug}": Operation<"/orgs/{org}/security-managers/teams/{team_slug}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user + */ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}": Operation<"/orgs/{org}/teams/{team_slug}/memberships/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-project-permissions + */ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}": Operation<"/orgs/{org}/teams/{team_slug}/projects/{project_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams/#add-or-update-team-repository-permissions + */ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": Operation<"/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", "put">; + /** + * @see https://docs.github.com/rest/reference/projects#add-project-collaborator + */ + "PUT /projects/{project_id}/collaborators/{username}": Operation<"/projects/{project_id}/collaborators/{username}", "put">; + /** + * @see https://docs.github.com/rest/actions/oidc#set-the-customization-template-for-an-oidc-subject-claim-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub": Operation<"/repos/{owner}/{repo}/actions/oidc/customization/sub", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-github-actions-permissions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions": Operation<"/repos/{owner}/{repo}/actions/permissions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-workflow-access-to-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions/access": Operation<"/repos/{owner}/{repo}/actions/permissions/access", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-allowed-actions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions": Operation<"/repos/{owner}/{repo}/actions/permissions/selected-actions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-default-workflow-permissions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow": Operation<"/repos/{owner}/{repo}/actions/permissions/workflow", 
"put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-custom-labels-for-a-self-hosted-runner-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#create-or-update-a-repository-secret + */ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/actions/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#disable-a-workflow + */ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#enable-a-workflow + */ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#enable-automated-security-fixes + */ + "PUT /repos/{owner}/{repo}/automated-security-fixes": Operation<"/repos/{owner}/{repo}/automated-security-fixes", "put">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#update-branch-protection + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection", "put">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#set-status-check-contexts + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "put">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#set-app-access-restrictions + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "put">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#set-team-access-restrictions + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "put">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#set-user-access-restrictions + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-or-update-a-repository-secret + */ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#add-a-repository-collaborator + */ + "PUT /repos/{owner}/{repo}/collaborators/{username}": Operation<"/repos/{owner}/{repo}/collaborators/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#create-or-update-file-contents + */ + "PUT /repos/{owner}/{repo}/contents/{path}": Operation<"/repos/{owner}/{repo}/contents/{path}", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#create-or-update-a-repository-secret + */ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", "put">; + /** + * @see 
https://docs.github.com/rest/deployments/environments#create-or-update-an-environment + */ + "PUT /repos/{owner}/{repo}/environments/{environment_name}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}", "put">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#update-deployment-branch-policy + */ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}", "put">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#start-an-import + */ + "PUT /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "put">; + /** + * @see https://docs.github.com/rest/reference/interactions#set-interaction-restrictions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/interaction-limits": Operation<"/repos/{owner}/{repo}/interaction-limits", "put">; + /** + * @see https://docs.github.com/rest/reference/issues#set-labels-for-an-issue + */ + "PUT /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "put">; + /** + * @see https://docs.github.com/rest/reference/issues#lock-an-issue + */ + "PUT /repos/{owner}/{repo}/issues/{issue_number}/lock": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/lock", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#enable-git-lfs-for-a-repository + */ + "PUT /repos/{owner}/{repo}/lfs": Operation<"/repos/{owner}/{repo}/lfs", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#mark-repository-notifications-as-read + */ + "PUT /repos/{owner}/{repo}/notifications": Operation<"/repos/{owner}/{repo}/notifications", "put">; + /** + * @see https://docs.github.com/rest/pages#update-information-about-a-github-pages-site + */ + "PUT /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#merge-a-pull-request + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/merge", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#update-a-review-for-a-pull-request + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#dismiss-a-review-for-a-pull-request + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#update-a-pull-request-branch + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/update-branch", "put">; + /** + * @see https://docs.github.com/rest/repos/rules#update-repository-ruleset + */ + "PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}": Operation<"/repos/{owner}/{repo}/rulesets/{ruleset_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#set-a-repository-subscription + */ + "PUT /repos/{owner}/{repo}/subscription": Operation<"/repos/{owner}/{repo}/subscription", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#replace-all-repository-topics + */ + "PUT /repos/{owner}/{repo}/topics": Operation<"/repos/{owner}/{repo}/topics", "put">; + /** + * 
@see https://docs.github.com/rest/reference/repos#enable-vulnerability-alerts + */ + "PUT /repos/{owner}/{repo}/vulnerability-alerts": Operation<"/repos/{owner}/{repo}/vulnerability-alerts", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#create-or-update-an-environment-secret + */ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-team-member-legacy + */ + "PUT /teams/{team_id}/members/{username}": Operation<"/teams/{team_id}/members/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user-legacy + */ + "PUT /teams/{team_id}/memberships/{username}": Operation<"/teams/{team_id}/memberships/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams/#add-or-update-team-project-permissions-legacy + */ + "PUT /teams/{team_id}/projects/{project_id}": Operation<"/teams/{team_id}/projects/{project_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-repository-permissions-legacy + */ + "PUT /teams/{team_id}/repos/{owner}/{repo}": Operation<"/teams/{team_id}/repos/{owner}/{repo}", "put">; + /** + * @see https://docs.github.com/rest/reference/users#block-a-user + */ + "PUT /user/blocks/{username}": Operation<"/user/blocks/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-or-update-a-secret-for-the-authenticated-user + */ + "PUT /user/codespaces/secrets/{secret_name}": Operation<"/user/codespaces/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-a-user-secret + */ + "PUT /user/codespaces/secrets/{secret_name}/repositories": Operation<"/user/codespaces/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#add-a-selected-repository-to-a-user-secret + */ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}": Operation<"/user/codespaces/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/users#follow-a-user + */ + "PUT /user/following/{username}": Operation<"/user/following/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/apps#add-a-repository-to-an-app-installation + */ + "PUT /user/installations/{installation_id}/repositories/{repository_id}": Operation<"/user/installations/{installation_id}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/interactions#set-interaction-restrictions-for-your-public-repositories + */ + "PUT /user/interaction-limits": Operation<"/user/interaction-limits", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#star-a-repository-for-the-authenticated-user + */ + "PUT /user/starred/{owner}/{repo}": Operation<"/user/starred/{owner}/{repo}", "put">; +} +export {}; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/index.d.ts b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/index.d.ts new file mode 100644 index 0000000..004ae9b --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/dist-types/index.d.ts @@ -0,0 +1,21 @@ +export * 
from "./AuthInterface"; +export * from "./EndpointDefaults"; +export * from "./EndpointInterface"; +export * from "./EndpointOptions"; +export * from "./Fetch"; +export * from "./OctokitResponse"; +export * from "./RequestError"; +export * from "./RequestHeaders"; +export * from "./RequestInterface"; +export * from "./RequestMethod"; +export * from "./RequestOptions"; +export * from "./RequestParameters"; +export * from "./RequestRequestOptions"; +export * from "./ResponseHeaders"; +export * from "./Route"; +export * from "./Signal"; +export * from "./StrategyInterface"; +export * from "./Url"; +export * from "./VERSION"; +export * from "./GetResponseTypeFromEndpointMethod"; +export * from "./generated/Endpoints"; diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/package.json b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/package.json new file mode 100644 index 0000000..e663b32 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types/package.json @@ -0,0 +1,46 @@ +{ + "name": "@octokit/types", + "version": "10.0.0", + "publishConfig": { + "access": "public" + }, + "description": "Shared TypeScript definitions for Octokit projects", + "dependencies": { + "@octokit/openapi-types": "^18.0.0" + }, + "repository": "github:octokit/types.ts", + "keywords": [ + "github", + "api", + "sdk", + "toolkit", + "typescript" + ], + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "devDependencies": { + "@octokit/tsconfig": "^1.0.2", + "@types/node": ">= 8", + "github-openapi-graphql-query": "^4.0.0", + "handlebars": "^4.7.6", + "json-schema-to-typescript": "^13.0.0", + "lodash.set": "^4.3.2", + "npm-run-all": "^4.1.5", + "pascal-case": "^3.1.1", + "prettier": "^2.0.0", + "semantic-release": "^21.0.0", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "sort-keys": "^4.2.0", + "string-to-jsdoc-comment": "^1.0.0", + "typedoc": "^0.24.0", + "typescript": "^5.0.0" + }, + "octokit": { + "openapi-version": "12.0.0" + }, + "files": [ + "dist-types/**" + ], + "types": "dist-types/index.d.ts", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/plugin-rest-endpoint-methods/package.json b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/package.json new file mode 100644 index 0000000..bf606d6 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-rest-endpoint-methods/package.json @@ -0,0 +1,60 @@ +{ + "name": "@octokit/plugin-rest-endpoint-methods", + "version": "7.2.3", + "description": "Octokit plugin adding one method for all of api.github.com REST API endpoints", + "repository": "github:octokit/plugin-rest-endpoint-methods.js", + "keywords": [ + "github", + "api", + "sdk", + "toolkit" + ], + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "dependencies": { + "@octokit/types": "^10.0.0" + }, + "devDependencies": { + "@gimenete/type-writer": "^0.1.5", + "@octokit/core": "^4.0.0", + "@octokit/tsconfig": "^2.0.0", + "@types/fetch-mock": "^7.3.1", + "@types/jest": "^29.0.0", + "@types/node": "^18.0.0", + "esbuild": "^0.18.0", + "fetch-mock": "^9.0.0", + "fs-extra": "^11.0.0", + "github-openapi-graphql-query": "^4.0.0", + "glob": "^10.2.6", + "jest": "^29.0.0", + "lodash.camelcase": "^4.3.0", + "lodash.set": "^4.3.2", + "lodash.upperfirst": "^4.3.1", + "mustache": "^4.0.0", + "npm-run-all": "^4.1.5", + "prettier": "2.8.8", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + 
"sort-keys": "^4.2.0", + "string-to-jsdoc-comment": "^1.0.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "peerDependencies": { + "@octokit/core": ">=3" + }, + "publishConfig": { + "access": "public" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "browser": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "module": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/plugin-retry/LICENSE b/dist/node_modules/@octokit/plugin-retry/LICENSE new file mode 100644 index 0000000..12d45d9 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/dist/node_modules/@octokit/plugin-retry/README.md b/dist/node_modules/@octokit/plugin-retry/README.md new file mode 100644 index 0000000..1b5eeb9 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/README.md @@ -0,0 +1,103 @@ +# plugin-retry.js + +> Retries requests for server 4xx/5xx responses except `400`, `401`, `403`, `404`, and `422`. + +[![@latest](https://img.shields.io/npm/v/@octokit/plugin-retry.svg)](https://www.npmjs.com/package/@octokit/plugin-retry) +[![Build Status](https://github.com/octokit/plugin-retry.js/workflows/Test/badge.svg)](https://github.com/octokit/plugin-retry.js/actions?workflow=Test) + +## Usage + + + + + + +
+Browsers + + +Load `@octokit/plugin-retry` and [`@octokit/core`](https://github.com/octokit/core.js) (or core-compatible module) directly from [cdn.skypack.dev](https://cdn.skypack.dev) + +```html + +``` + +
+Node + + +Install with `npm install @octokit/core @octokit/plugin-retry`. Optionally replace `@octokit/core` with a core-compatible module + +```js +const { Octokit } = require("@octokit/core"); +const { retry } = require("@octokit/plugin-retry"); +``` + +
+ +```js +const MyOctokit = Octokit.plugin(retry); +const octokit = new MyOctokit({ auth: "secret123" }); + +// retries request up to 3 times in case of a 500 response +octokit.request("/").catch((error) => { + if (error.request.request.retryCount) { + console.log( + `request failed after ${error.request.request.retryCount} retries` + ); + } + + console.error(error); +}); +``` + +To override the default `doNotRetry` list: + +```js +const octokit = new MyOctokit({ + auth: "secret123", + retry: { + doNotRetry: [ + /* List of HTTP 4xx/5xx status codes */ + ], + }, +}); +``` + +To override the number of retries: + +```js +const octokit = new MyOctokit({ + auth: "secret123", + request: { retries: 1 }, +}); +``` + +You can manually ask for retries for any request by passing `{ request: { retries: numRetries, retryAfter: delayInSeconds }}`. Note that the `doNotRetry` option from the constructor is ignored in this case, requests will be retried no matter their response code. + +```js +octokit + .request("/", { request: { retries: 1, retryAfter: 1 } }) + .catch((error) => { + if (error.request.request.retryCount) { + console.log( + `request failed after ${error.request.request.retryCount} retries` + ); + } + + console.error(error); + }); +``` + +Pass `{ retry: { enabled: false } }` to disable this plugin. + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/plugin-retry/dist-node/index.js b/dist/node_modules/@octokit/plugin-retry/dist-node/index.js new file mode 100644 index 0000000..18d2048 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/dist-node/index.js @@ -0,0 +1,116 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + VERSION: () => VERSION, + retry: () => retry +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/error-request.js +async function errorRequest(state, octokit, error, options) { + if (!error.request || !error.request.request) { + throw error; + } + if (error.status >= 400 && !state.doNotRetry.includes(error.status)) { + const retries = options.request.retries != null ? options.request.retries : state.retries; + const retryAfter = Math.pow((options.request.retryCount || 0) + 1, 2); + throw octokit.retry.retryRequest(error, retries, retryAfter); + } + throw error; +} + +// pkg/dist-src/wrap-request.js +var import_light = __toESM(require("bottleneck/light")); +var import_request_error = require("@octokit/request-error"); +async function wrapRequest(state, octokit, request, options) { + const limiter = new import_light.default(); + limiter.on("failed", function(error, info) { + const maxRetries = ~~error.request.request.retries; + const after = ~~error.request.request.retryAfter; + options.request.retryCount = info.retryCount + 1; + if (maxRetries > info.retryCount) { + return after * state.retryAfterBaseValue; + } + }); + return limiter.schedule( + requestWithGraphqlErrorHandling.bind(null, state, octokit, request), + options + ); +} +async function requestWithGraphqlErrorHandling(state, octokit, request, options) { + const response = await request(request, options); + if (response.data && response.data.errors && /Something went wrong while executing your query/.test( + response.data.errors[0].message + )) { + const error = new import_request_error.RequestError(response.data.errors[0].message, 500, { + request: options, + response + }); + return errorRequest(state, octokit, error, options); + } + return response; +} + +// pkg/dist-src/index.js +var VERSION = "4.1.6"; +function retry(octokit, octokitOptions) { + const state = Object.assign( + { + enabled: true, + retryAfterBaseValue: 1e3, + doNotRetry: [400, 401, 403, 404, 422], + retries: 3 + }, + octokitOptions.retry + ); + if (state.enabled) { + octokit.hook.error("request", errorRequest.bind(null, state, octokit)); + octokit.hook.wrap("request", wrapRequest.bind(null, state, octokit)); + } + return { + retry: { + retryRequest: (error, retries, retryAfter) => { + error.request.request = Object.assign({}, error.request.request, { + retries, + retryAfter + }); + return error; + } + } + }; +} +retry.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + VERSION, + retry +}); diff --git a/dist/node_modules/@octokit/plugin-retry/dist-node/index.js.map b/dist/node_modules/@octokit/plugin-retry/dist-node/index.js.map new file mode 100644 index 0000000..19c6617 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/error-request.js", "../dist-src/wrap-request.js"], + "sourcesContent": ["import { errorRequest } from \"./error-request\";\nimport { wrapRequest } from \"./wrap-request\";\nconst VERSION = \"4.1.6\";\nfunction retry(octokit, octokitOptions) {\n const state = Object.assign(\n {\n enabled: true,\n retryAfterBaseValue: 1e3,\n doNotRetry: [400, 401, 403, 404, 422],\n retries: 3\n },\n 
octokitOptions.retry\n );\n if (state.enabled) {\n octokit.hook.error(\"request\", errorRequest.bind(null, state, octokit));\n octokit.hook.wrap(\"request\", wrapRequest.bind(null, state, octokit));\n }\n return {\n retry: {\n retryRequest: (error, retries, retryAfter) => {\n error.request.request = Object.assign({}, error.request.request, {\n retries,\n retryAfter\n });\n return error;\n }\n }\n };\n}\nretry.VERSION = VERSION;\nexport {\n VERSION,\n retry\n};\n", "async function errorRequest(state, octokit, error, options) {\n if (!error.request || !error.request.request) {\n throw error;\n }\n if (error.status >= 400 && !state.doNotRetry.includes(error.status)) {\n const retries = options.request.retries != null ? options.request.retries : state.retries;\n const retryAfter = Math.pow((options.request.retryCount || 0) + 1, 2);\n throw octokit.retry.retryRequest(error, retries, retryAfter);\n }\n throw error;\n}\nexport {\n errorRequest\n};\n", "import Bottleneck from \"bottleneck/light\";\nimport { RequestError } from \"@octokit/request-error\";\nimport { errorRequest } from \"./error-request\";\nasync function wrapRequest(state, octokit, request, options) {\n const limiter = new Bottleneck();\n limiter.on(\"failed\", function(error, info) {\n const maxRetries = ~~error.request.request.retries;\n const after = ~~error.request.request.retryAfter;\n options.request.retryCount = info.retryCount + 1;\n if (maxRetries > info.retryCount) {\n return after * state.retryAfterBaseValue;\n }\n });\n return limiter.schedule(\n requestWithGraphqlErrorHandling.bind(null, state, octokit, request),\n options\n );\n}\nasync function requestWithGraphqlErrorHandling(state, octokit, request, options) {\n const response = await request(request, options);\n if (response.data && response.data.errors && /Something went wrong while executing your query/.test(\n response.data.errors[0].message\n )) {\n const error = new RequestError(response.data.errors[0].message, 500, {\n request: options,\n response\n });\n return errorRequest(state, octokit, error, options);\n }\n return response;\n}\nexport {\n wrapRequest\n};\n"], + "mappings": 
";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,eAAe,aAAa,OAAO,SAAS,OAAO,SAAS;AAC1D,MAAI,CAAC,MAAM,WAAW,CAAC,MAAM,QAAQ,SAAS;AAC5C,UAAM;AAAA,EACR;AACA,MAAI,MAAM,UAAU,OAAO,CAAC,MAAM,WAAW,SAAS,MAAM,MAAM,GAAG;AACnE,UAAM,UAAU,QAAQ,QAAQ,WAAW,OAAO,QAAQ,QAAQ,UAAU,MAAM;AAClF,UAAM,aAAa,KAAK,KAAK,QAAQ,QAAQ,cAAc,KAAK,GAAG,CAAC;AACpE,UAAM,QAAQ,MAAM,aAAa,OAAO,SAAS,UAAU;AAAA,EAC7D;AACA,QAAM;AACR;;;ACVA,mBAAuB;AACvB,2BAA6B;AAE7B,eAAe,YAAY,OAAO,SAAS,SAAS,SAAS;AAC3D,QAAM,UAAU,IAAI,aAAAA,QAAW;AAC/B,UAAQ,GAAG,UAAU,SAAS,OAAO,MAAM;AACzC,UAAM,aAAa,CAAC,CAAC,MAAM,QAAQ,QAAQ;AAC3C,UAAM,QAAQ,CAAC,CAAC,MAAM,QAAQ,QAAQ;AACtC,YAAQ,QAAQ,aAAa,KAAK,aAAa;AAC/C,QAAI,aAAa,KAAK,YAAY;AAChC,aAAO,QAAQ,MAAM;AAAA,IACvB;AAAA,EACF,CAAC;AACD,SAAO,QAAQ;AAAA,IACb,gCAAgC,KAAK,MAAM,OAAO,SAAS,OAAO;AAAA,IAClE;AAAA,EACF;AACF;AACA,eAAe,gCAAgC,OAAO,SAAS,SAAS,SAAS;AAC/E,QAAM,WAAW,MAAM,QAAQ,SAAS,OAAO;AAC/C,MAAI,SAAS,QAAQ,SAAS,KAAK,UAAU,kDAAkD;AAAA,IAC7F,SAAS,KAAK,OAAO,CAAC,EAAE;AAAA,EAC1B,GAAG;AACD,UAAM,QAAQ,IAAI,kCAAa,SAAS,KAAK,OAAO,CAAC,EAAE,SAAS,KAAK;AAAA,MACnE,SAAS;AAAA,MACT;AAAA,IACF,CAAC;AACD,WAAO,aAAa,OAAO,SAAS,OAAO,OAAO;AAAA,EACpD;AACA,SAAO;AACT;;;AF5BA,IAAM,UAAU;AAChB,SAAS,MAAM,SAAS,gBAAgB;AACtC,QAAM,QAAQ,OAAO;AAAA,IACnB;AAAA,MACE,SAAS;AAAA,MACT,qBAAqB;AAAA,MACrB,YAAY,CAAC,KAAK,KAAK,KAAK,KAAK,GAAG;AAAA,MACpC,SAAS;AAAA,IACX;AAAA,IACA,eAAe;AAAA,EACjB;AACA,MAAI,MAAM,SAAS;AACjB,YAAQ,KAAK,MAAM,WAAW,aAAa,KAAK,MAAM,OAAO,OAAO,CAAC;AACrE,YAAQ,KAAK,KAAK,WAAW,YAAY,KAAK,MAAM,OAAO,OAAO,CAAC;AAAA,EACrE;AACA,SAAO;AAAA,IACL,OAAO;AAAA,MACL,cAAc,CAAC,OAAO,SAAS,eAAe;AAC5C,cAAM,QAAQ,UAAU,OAAO,OAAO,CAAC,GAAG,MAAM,QAAQ,SAAS;AAAA,UAC/D;AAAA,UACA;AAAA,QACF,CAAC;AACD,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AACF;AACA,MAAM,UAAU;", + "names": ["Bottleneck"] +} diff --git a/dist/node_modules/@octokit/plugin-retry/dist-src/error-request.js b/dist/node_modules/@octokit/plugin-retry/dist-src/error-request.js new file mode 100644 index 0000000..3c156ba --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/dist-src/error-request.js @@ -0,0 +1,14 @@ +async function errorRequest(state, octokit, error, options) { + if (!error.request || !error.request.request) { + throw error; + } + if (error.status >= 400 && !state.doNotRetry.includes(error.status)) { + const retries = options.request.retries != null ? 
options.request.retries : state.retries; + const retryAfter = Math.pow((options.request.retryCount || 0) + 1, 2); + throw octokit.retry.retryRequest(error, retries, retryAfter); + } + throw error; +} +export { + errorRequest +}; diff --git a/dist/node_modules/@octokit/plugin-retry/dist-src/index.js b/dist/node_modules/@octokit/plugin-retry/dist-src/index.js new file mode 100644 index 0000000..a3e5b4a --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/dist-src/index.js @@ -0,0 +1,34 @@ +import { errorRequest } from "./error-request"; +import { wrapRequest } from "./wrap-request"; +const VERSION = "0.0.0-development"; +function retry(octokit, octokitOptions) { + const state = Object.assign( + { + enabled: true, + retryAfterBaseValue: 1e3, + doNotRetry: [400, 401, 403, 404, 422], + retries: 3 + }, + octokitOptions.retry + ); + if (state.enabled) { + octokit.hook.error("request", errorRequest.bind(null, state, octokit)); + octokit.hook.wrap("request", wrapRequest.bind(null, state, octokit)); + } + return { + retry: { + retryRequest: (error, retries, retryAfter) => { + error.request.request = Object.assign({}, error.request.request, { + retries, + retryAfter + }); + return error; + } + } + }; +} +retry.VERSION = VERSION; +export { + VERSION, + retry +}; diff --git a/dist/node_modules/@octokit/plugin-retry/dist-src/version.js b/dist/node_modules/@octokit/plugin-retry/dist-src/version.js new file mode 100644 index 0000000..3abd6f2 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "4.1.6"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/plugin-retry/dist-src/wrap-request.js b/dist/node_modules/@octokit/plugin-retry/dist-src/wrap-request.js new file mode 100644 index 0000000..ce46216 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/dist-src/wrap-request.js @@ -0,0 +1,34 @@ +import Bottleneck from "bottleneck/light"; +import { RequestError } from "@octokit/request-error"; +import { errorRequest } from "./error-request"; +async function wrapRequest(state, octokit, request, options) { + const limiter = new Bottleneck(); + limiter.on("failed", function(error, info) { + const maxRetries = ~~error.request.request.retries; + const after = ~~error.request.request.retryAfter; + options.request.retryCount = info.retryCount + 1; + if (maxRetries > info.retryCount) { + return after * state.retryAfterBaseValue; + } + }); + return limiter.schedule( + requestWithGraphqlErrorHandling.bind(null, state, octokit, request), + options + ); +} +async function requestWithGraphqlErrorHandling(state, octokit, request, options) { + const response = await request(request, options); + if (response.data && response.data.errors && /Something went wrong while executing your query/.test( + response.data.errors[0].message + )) { + const error = new RequestError(response.data.errors[0].message, 500, { + request: options, + response + }); + return errorRequest(state, octokit, error, options); + } + return response; +} +export { + wrapRequest +}; diff --git a/dist/node_modules/@octokit/plugin-retry/dist-types/error-request.d.ts b/dist/node_modules/@octokit/plugin-retry/dist-types/error-request.d.ts new file mode 100644 index 0000000..d7bdc98 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/dist-types/error-request.d.ts @@ -0,0 +1 @@ +export declare function errorRequest(state: any, octokit: any, error: any, options: any): Promise; diff --git a/dist/node_modules/@octokit/plugin-retry/dist-types/index.d.ts 
b/dist/node_modules/@octokit/plugin-retry/dist-types/index.d.ts new file mode 100644 index 0000000..015f694 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/dist-types/index.d.ts @@ -0,0 +1,11 @@ +import { Octokit } from "@octokit/core"; +import { RequestError } from "@octokit/request-error"; +export declare const VERSION = "0.0.0-development"; +export declare function retry(octokit: Octokit, octokitOptions: any): { + retry: { + retryRequest: (error: RequestError, retries: number, retryAfter: number) => RequestError; + }; +}; +export declare namespace retry { + var VERSION: string; +} diff --git a/dist/node_modules/@octokit/plugin-retry/dist-types/version.d.ts b/dist/node_modules/@octokit/plugin-retry/dist-types/version.d.ts new file mode 100644 index 0000000..ef9d17e --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "4.1.6"; diff --git a/dist/node_modules/@octokit/plugin-retry/dist-types/wrap-request.d.ts b/dist/node_modules/@octokit/plugin-retry/dist-types/wrap-request.d.ts new file mode 100644 index 0000000..6ab1bb6 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/dist-types/wrap-request.d.ts @@ -0,0 +1 @@ +export declare function wrapRequest(state: any, octokit: any, request: any, options: any): Promise; diff --git a/dist/node_modules/@octokit/plugin-retry/dist-web/index.js b/dist/node_modules/@octokit/plugin-retry/dist-web/index.js new file mode 100644 index 0000000..770cdc4 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/dist-web/index.js @@ -0,0 +1,78 @@ +// pkg/dist-src/error-request.js +async function errorRequest(state, octokit, error, options) { + if (!error.request || !error.request.request) { + throw error; + } + if (error.status >= 400 && !state.doNotRetry.includes(error.status)) { + const retries = options.request.retries != null ? 
options.request.retries : state.retries; + const retryAfter = Math.pow((options.request.retryCount || 0) + 1, 2); + throw octokit.retry.retryRequest(error, retries, retryAfter); + } + throw error; +} + +// pkg/dist-src/wrap-request.js +import Bottleneck from "bottleneck/light"; +import { RequestError } from "@octokit/request-error"; +async function wrapRequest(state, octokit, request, options) { + const limiter = new Bottleneck(); + limiter.on("failed", function(error, info) { + const maxRetries = ~~error.request.request.retries; + const after = ~~error.request.request.retryAfter; + options.request.retryCount = info.retryCount + 1; + if (maxRetries > info.retryCount) { + return after * state.retryAfterBaseValue; + } + }); + return limiter.schedule( + requestWithGraphqlErrorHandling.bind(null, state, octokit, request), + options + ); +} +async function requestWithGraphqlErrorHandling(state, octokit, request, options) { + const response = await request(request, options); + if (response.data && response.data.errors && /Something went wrong while executing your query/.test( + response.data.errors[0].message + )) { + const error = new RequestError(response.data.errors[0].message, 500, { + request: options, + response + }); + return errorRequest(state, octokit, error, options); + } + return response; +} + +// pkg/dist-src/index.js +var VERSION = "4.1.6"; +function retry(octokit, octokitOptions) { + const state = Object.assign( + { + enabled: true, + retryAfterBaseValue: 1e3, + doNotRetry: [400, 401, 403, 404, 422], + retries: 3 + }, + octokitOptions.retry + ); + if (state.enabled) { + octokit.hook.error("request", errorRequest.bind(null, state, octokit)); + octokit.hook.wrap("request", wrapRequest.bind(null, state, octokit)); + } + return { + retry: { + retryRequest: (error, retries, retryAfter) => { + error.request.request = Object.assign({}, error.request.request, { + retries, + retryAfter + }); + return error; + } + } + }; +} +retry.VERSION = VERSION; +export { + VERSION, + retry +}; diff --git a/dist/node_modules/@octokit/plugin-retry/dist-web/index.js.map b/dist/node_modules/@octokit/plugin-retry/dist-web/index.js.map new file mode 100644 index 0000000..848fc84 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/error-request.js", "../dist-src/wrap-request.js", "../dist-src/index.js"], + "sourcesContent": ["async function errorRequest(state, octokit, error, options) {\n if (!error.request || !error.request.request) {\n throw error;\n }\n if (error.status >= 400 && !state.doNotRetry.includes(error.status)) {\n const retries = options.request.retries != null ? 
options.request.retries : state.retries;\n const retryAfter = Math.pow((options.request.retryCount || 0) + 1, 2);\n throw octokit.retry.retryRequest(error, retries, retryAfter);\n }\n throw error;\n}\nexport {\n errorRequest\n};\n", "import Bottleneck from \"bottleneck/light\";\nimport { RequestError } from \"@octokit/request-error\";\nimport { errorRequest } from \"./error-request\";\nasync function wrapRequest(state, octokit, request, options) {\n const limiter = new Bottleneck();\n limiter.on(\"failed\", function(error, info) {\n const maxRetries = ~~error.request.request.retries;\n const after = ~~error.request.request.retryAfter;\n options.request.retryCount = info.retryCount + 1;\n if (maxRetries > info.retryCount) {\n return after * state.retryAfterBaseValue;\n }\n });\n return limiter.schedule(\n requestWithGraphqlErrorHandling.bind(null, state, octokit, request),\n options\n );\n}\nasync function requestWithGraphqlErrorHandling(state, octokit, request, options) {\n const response = await request(request, options);\n if (response.data && response.data.errors && /Something went wrong while executing your query/.test(\n response.data.errors[0].message\n )) {\n const error = new RequestError(response.data.errors[0].message, 500, {\n request: options,\n response\n });\n return errorRequest(state, octokit, error, options);\n }\n return response;\n}\nexport {\n wrapRequest\n};\n", "import { errorRequest } from \"./error-request\";\nimport { wrapRequest } from \"./wrap-request\";\nconst VERSION = \"4.1.6\";\nfunction retry(octokit, octokitOptions) {\n const state = Object.assign(\n {\n enabled: true,\n retryAfterBaseValue: 1e3,\n doNotRetry: [400, 401, 403, 404, 422],\n retries: 3\n },\n octokitOptions.retry\n );\n if (state.enabled) {\n octokit.hook.error(\"request\", errorRequest.bind(null, state, octokit));\n octokit.hook.wrap(\"request\", wrapRequest.bind(null, state, octokit));\n }\n return {\n retry: {\n retryRequest: (error, retries, retryAfter) => {\n error.request.request = Object.assign({}, error.request.request, {\n retries,\n retryAfter\n });\n return error;\n }\n }\n };\n}\nretry.VERSION = VERSION;\nexport {\n VERSION,\n retry\n};\n"], + "mappings": 
";AAAA,eAAe,aAAa,OAAO,SAAS,OAAO,SAAS;AAC1D,MAAI,CAAC,MAAM,WAAW,CAAC,MAAM,QAAQ,SAAS;AAC5C,UAAM;AAAA,EACR;AACA,MAAI,MAAM,UAAU,OAAO,CAAC,MAAM,WAAW,SAAS,MAAM,MAAM,GAAG;AACnE,UAAM,UAAU,QAAQ,QAAQ,WAAW,OAAO,QAAQ,QAAQ,UAAU,MAAM;AAClF,UAAM,aAAa,KAAK,KAAK,QAAQ,QAAQ,cAAc,KAAK,GAAG,CAAC;AACpE,UAAM,QAAQ,MAAM,aAAa,OAAO,SAAS,UAAU;AAAA,EAC7D;AACA,QAAM;AACR;;;ACVA,OAAO,gBAAgB;AACvB,SAAS,oBAAoB;AAE7B,eAAe,YAAY,OAAO,SAAS,SAAS,SAAS;AAC3D,QAAM,UAAU,IAAI,WAAW;AAC/B,UAAQ,GAAG,UAAU,SAAS,OAAO,MAAM;AACzC,UAAM,aAAa,CAAC,CAAC,MAAM,QAAQ,QAAQ;AAC3C,UAAM,QAAQ,CAAC,CAAC,MAAM,QAAQ,QAAQ;AACtC,YAAQ,QAAQ,aAAa,KAAK,aAAa;AAC/C,QAAI,aAAa,KAAK,YAAY;AAChC,aAAO,QAAQ,MAAM;AAAA,IACvB;AAAA,EACF,CAAC;AACD,SAAO,QAAQ;AAAA,IACb,gCAAgC,KAAK,MAAM,OAAO,SAAS,OAAO;AAAA,IAClE;AAAA,EACF;AACF;AACA,eAAe,gCAAgC,OAAO,SAAS,SAAS,SAAS;AAC/E,QAAM,WAAW,MAAM,QAAQ,SAAS,OAAO;AAC/C,MAAI,SAAS,QAAQ,SAAS,KAAK,UAAU,kDAAkD;AAAA,IAC7F,SAAS,KAAK,OAAO,CAAC,EAAE;AAAA,EAC1B,GAAG;AACD,UAAM,QAAQ,IAAI,aAAa,SAAS,KAAK,OAAO,CAAC,EAAE,SAAS,KAAK;AAAA,MACnE,SAAS;AAAA,MACT;AAAA,IACF,CAAC;AACD,WAAO,aAAa,OAAO,SAAS,OAAO,OAAO;AAAA,EACpD;AACA,SAAO;AACT;;;AC5BA,IAAM,UAAU;AAChB,SAAS,MAAM,SAAS,gBAAgB;AACtC,QAAM,QAAQ,OAAO;AAAA,IACnB;AAAA,MACE,SAAS;AAAA,MACT,qBAAqB;AAAA,MACrB,YAAY,CAAC,KAAK,KAAK,KAAK,KAAK,GAAG;AAAA,MACpC,SAAS;AAAA,IACX;AAAA,IACA,eAAe;AAAA,EACjB;AACA,MAAI,MAAM,SAAS;AACjB,YAAQ,KAAK,MAAM,WAAW,aAAa,KAAK,MAAM,OAAO,OAAO,CAAC;AACrE,YAAQ,KAAK,KAAK,WAAW,YAAY,KAAK,MAAM,OAAO,OAAO,CAAC;AAAA,EACrE;AACA,SAAO;AAAA,IACL,OAAO;AAAA,MACL,cAAc,CAAC,OAAO,SAAS,eAAe;AAC5C,cAAM,QAAQ,UAAU,OAAO,OAAO,CAAC,GAAG,MAAM,QAAQ,SAAS;AAAA,UAC/D;AAAA,UACA;AAAA,QACF,CAAC;AACD,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AACF;AACA,MAAM,UAAU;", + "names": [] +} diff --git a/dist/node_modules/@octokit/plugin-retry/package.json b/dist/node_modules/@octokit/plugin-retry/package.json new file mode 100644 index 0000000..efa481f --- /dev/null +++ b/dist/node_modules/@octokit/plugin-retry/package.json @@ -0,0 +1,46 @@ +{ + "name": "@octokit/plugin-retry", + "version": "4.1.6", + "publishConfig": { + "access": "public" + }, + "description": "Automatic retry plugin for octokit", + "repository": "github:octokit/plugin-retry.js", + "author": "Simon Grondin (http://github.com/SGrondin)", + "license": "MIT", + "dependencies": { + "@octokit/types": "^9.0.0", + "bottleneck": "^2.15.3" + }, + "peerDependencies": { + "@octokit/core": ">=3" + }, + "devDependencies": { + "@octokit/core": "^4.0.0", + "@octokit/request-error": "^3.0.0", + "@octokit/tsconfig": "^1.0.2", + "@types/fetch-mock": "^7.3.1", + "@types/jest": "^29.0.0", + "@types/node": "^18.0.0", + "esbuild": "^0.17.19", + "fetch-mock": "^9.0.0", + "glob": "^10.2.6", + "jest": "^29.0.0", + "prettier": "2.8.8", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "browser": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "module": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/plugin-throttling/LICENSE b/dist/node_modules/@octokit/plugin-throttling/LICENSE new file mode 100644 index 0000000..af5366d --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including 
without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/dist/node_modules/@octokit/plugin-throttling/README.md b/dist/node_modules/@octokit/plugin-throttling/README.md new file mode 100644 index 0000000..00d09d3 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/README.md @@ -0,0 +1,158 @@ +# plugin-throttling.js + +> Octokit plugin for GitHub’s recommended request throttling + +[![@latest](https://img.shields.io/npm/v/@octokit/plugin-throttling.svg)](https://www.npmjs.com/package/@octokit/plugin-throttling) +[![Build Status](https://github.com/octokit/plugin-throttling.js/workflows/Test/badge.svg)](https://github.com/octokit/plugin-throttling.js/actions?workflow=Test) + +Implements all [recommended best practices](https://docs.github.com/en/rest/guides/best-practices-for-integrators) to prevent hitting secondary rate limits. + +## Usage + + + + + + +
+Browsers + + +Load `@octokit/plugin-throttling` and [`@octokit/core`](https://github.com/octokit/core.js) (or core-compatible module) directly from [cdn.skypack.dev](https://cdn.skypack.dev) + +```html + +``` + +
+Node + + +Install with `npm install @octokit/core @octokit/plugin-throttling`. Optionally replace `@octokit/core` with a core-compatible module. + +**Note**: If you use it with `@octokit/rest` v16, install `@octokit/core` as a devDependency. This is only temporary and will no longer be necessary with `@octokit/rest` v17. + +```js +const { Octokit } = require("@octokit/core"); +const { throttling } = require("@octokit/plugin-throttling"); +``` + +
+ +The code below creates a "Hello, world!" issue on every repository in a given organization. Without the throttling plugin it would send many requests in parallel and would hit rate limits very quickly. But the `@octokit/plugin-throttling` slows down your requests according to the official guidelines, so you don't get blocked before your quota is exhausted. + +The `throttle.onSecondaryRateLimit` and `throttle.onRateLimit` options are required. Return `true` to automatically retry the request after `retryAfter` seconds. + +```js +const MyOctokit = Octokit.plugin(throttling); + +const octokit = new MyOctokit({ + auth: `secret123`, + throttle: { + onRateLimit: (retryAfter, options, octokit, retryCount) => { + octokit.log.warn( + `Request quota exhausted for request ${options.method} ${options.url}` + ); + + if (retryCount < 1) { + // only retries once + octokit.log.info(`Retrying after ${retryAfter} seconds!`); + return true; + } + }, + onSecondaryRateLimit: (retryAfter, options, octokit) => { + // does not retry, only logs a warning + octokit.log.warn( + `SecondaryRateLimit detected for request ${options.method} ${options.url}` + ); + }, + }, +}); + +async function createIssueOnAllRepos(org) { + const repos = await octokit.paginate( + octokit.repos.listForOrg.endpoint({ org }) + ); + return Promise.all( + repos.map(({ name }) => + octokit.issues.create({ + owner, + repo: name, + title: "Hello, world!", + }) + ) + ); +} +``` + +Pass `{ throttle: { enabled: false } }` to disable this plugin. + +### Clustering + +Enabling Clustering support ensures that your application will not go over rate limits **across Octokit instances and across Nodejs processes**. + +First install either `redis` or `ioredis`: + +``` +# NodeRedis (https://github.com/NodeRedis/node_redis) +npm install --save redis + +# or ioredis (https://github.com/luin/ioredis) +npm install --save ioredis +``` + +Then in your application: + +```js +const Bottleneck = require("bottleneck"); +const Redis = require("redis"); + +const client = Redis.createClient({ + /* options */ +}); +const connection = new Bottleneck.RedisConnection({ client }); +connection.on("error", err => console.error(err)); + +const octokit = new MyOctokit({ + auth: 'secret123' + throttle: { + onSecondaryRateLimit: (retryAfter, options, octokit) => { + /* ... */ + }, + onRateLimit: (retryAfter, options, octokit) => { + /* ... */ + }, + + // The Bottleneck connection object + connection, + + // A "throttling ID". All octokit instances with the same ID + // using the same Redis server will share the throttling. 
+ id: "my-super-app", + + // Otherwise the plugin uses a lighter version of Bottleneck without Redis support + Bottleneck + } +}); + +// To close the connection and allow your application to exit cleanly: +await connection.disconnect(); +``` + +To use the `ioredis` library instead: + +```js +const Redis = require("ioredis"); +const client = new Redis({ + /* options */ +}); +const connection = new Bottleneck.IORedisConnection({ client }); +connection.on("error", (err) => console.error(err)); +``` + +## LICENSE + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-node/index.js b/dist/node_modules/@octokit/plugin-throttling/dist-node/index.js new file mode 100644 index 0000000..2094ceb --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-node/index.js @@ -0,0 +1,236 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var BottleneckLight = _interopDefault(require('bottleneck/light')); + +const VERSION = "5.2.3"; + +const noop = () => Promise.resolve(); +// @ts-expect-error +function wrapRequest(state, request, options) { + return state.retryLimiter.schedule(doRequest, state, request, options); +} +// @ts-expect-error +async function doRequest(state, request, options) { + const isWrite = options.method !== "GET" && options.method !== "HEAD"; + const { + pathname + } = new URL(options.url, "http://github.test"); + const isSearch = options.method === "GET" && pathname.startsWith("/search/"); + const isGraphQL = pathname.startsWith("/graphql"); + const retryCount = ~~request.retryCount; + const jobOptions = retryCount > 0 ? { + priority: 0, + weight: 0 + } : {}; + if (state.clustering) { + // Remove a job from Redis if it has not completed or failed within 60s + // Examples: Node process terminated, client disconnected, etc. 
+ // @ts-expect-error + jobOptions.expiration = 1000 * 60; + } + // Guarantee at least 1000ms between writes + // GraphQL can also trigger writes + if (isWrite || isGraphQL) { + await state.write.key(state.id).schedule(jobOptions, noop); + } + // Guarantee at least 3000ms between requests that trigger notifications + if (isWrite && state.triggersNotification(pathname)) { + await state.notifications.key(state.id).schedule(jobOptions, noop); + } + // Guarantee at least 2000ms between search requests + if (isSearch) { + await state.search.key(state.id).schedule(jobOptions, noop); + } + const req = state.global.key(state.id).schedule(jobOptions, request, options); + if (isGraphQL) { + const res = await req; + if (res.data.errors != null && + // @ts-expect-error + res.data.errors.some(error => error.type === "RATE_LIMITED")) { + const error = Object.assign(new Error("GraphQL Rate Limit Exceeded"), { + response: res, + data: res.data + }); + throw error; + } + } + return req; +} + +var triggersNotificationPaths = ["/orgs/{org}/invitations", "/orgs/{org}/invitations/{invitation_id}", "/orgs/{org}/teams/{team_slug}/discussions", "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "/repos/{owner}/{repo}/collaborators/{username}", "/repos/{owner}/{repo}/commits/{commit_sha}/comments", "/repos/{owner}/{repo}/issues", "/repos/{owner}/{repo}/issues/{issue_number}/comments", "/repos/{owner}/{repo}/pulls", "/repos/{owner}/{repo}/pulls/{pull_number}/comments", "/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", "/repos/{owner}/{repo}/pulls/{pull_number}/merge", "/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "/repos/{owner}/{repo}/pulls/{pull_number}/reviews", "/repos/{owner}/{repo}/releases", "/teams/{team_id}/discussions", "/teams/{team_id}/discussions/{discussion_number}/comments"]; + +function routeMatcher(paths) { + // EXAMPLE. For the following paths: + /* [ + "/orgs/{org}/invitations", + "/repos/{owner}/{repo}/collaborators/{username}" + ] */ + const regexes = paths.map(path => path.split("/").map(c => c.startsWith("{") ? "(?:.+?)" : c).join("/")); + // 'regexes' would contain: + /* [ + '/orgs/(?:.+?)/invitations', + '/repos/(?:.+?)/(?:.+?)/collaborators/(?:.+?)' + ] */ + const regex = `^(?:${regexes.map(r => `(?:${r})`).join("|")})[^/]*$`; + // 'regex' would contain: + /* + ^(?:(?:\/orgs\/(?:.+?)\/invitations)|(?:\/repos\/(?:.+?)\/(?:.+?)\/collaborators\/(?:.+?)))[^\/]*$ + It may look scary, but paste it into https://www.debuggex.com/ + and it will make a lot more sense! + */ + return new RegExp(regex, "i"); +} + +// @ts-expect-error +// Workaround to allow tests to directly access the triggersNotification function. 
+const regex = routeMatcher(triggersNotificationPaths); +const triggersNotification = regex.test.bind(regex); +const groups = {}; +// @ts-expect-error +const createGroups = function (Bottleneck, common) { + groups.global = new Bottleneck.Group({ + id: "octokit-global", + maxConcurrent: 10, + ...common + }); + groups.search = new Bottleneck.Group({ + id: "octokit-search", + maxConcurrent: 1, + minTime: 2000, + ...common + }); + groups.write = new Bottleneck.Group({ + id: "octokit-write", + maxConcurrent: 1, + minTime: 1000, + ...common + }); + groups.notifications = new Bottleneck.Group({ + id: "octokit-notifications", + maxConcurrent: 1, + minTime: 3000, + ...common + }); +}; +function throttling(octokit, octokitOptions) { + const { + enabled = true, + Bottleneck = BottleneckLight, + id = "no-id", + timeout = 1000 * 60 * 2, + // Redis TTL: 2 minutes + connection + } = octokitOptions.throttle || {}; + if (!enabled) { + return {}; + } + const common = { + connection, + timeout + }; + if (groups.global == null) { + createGroups(Bottleneck, common); + } + if (octokitOptions.throttle && octokitOptions.throttle.minimalSecondaryRateRetryAfter) { + octokit.log.warn("[@octokit/plugin-throttling] `options.throttle.minimalSecondaryRateRetryAfter` is deprecated, please use `options.throttle.fallbackSecondaryRateRetryAfter` instead"); + octokitOptions.throttle.fallbackSecondaryRateRetryAfter = octokitOptions.throttle.minimalSecondaryRateRetryAfter; + delete octokitOptions.throttle.minimalSecondaryRateRetryAfter; + } + if (octokitOptions.throttle && octokitOptions.throttle.onAbuseLimit) { + octokit.log.warn("[@octokit/plugin-throttling] `onAbuseLimit()` is deprecated and will be removed in a future release of `@octokit/plugin-throttling`, please use the `onSecondaryRateLimit` handler instead"); + // @ts-ignore types don't allow for both properties to be set + octokitOptions.throttle.onSecondaryRateLimit = octokitOptions.throttle.onAbuseLimit; + // @ts-ignore + delete octokitOptions.throttle.onAbuseLimit; + } + const state = Object.assign({ + clustering: connection != null, + triggersNotification, + fallbackSecondaryRateRetryAfter: 60, + retryAfterBaseValue: 1000, + retryLimiter: new Bottleneck(), + id, + ...groups + }, octokitOptions.throttle); + if (typeof state.onSecondaryRateLimit !== "function" || typeof state.onRateLimit !== "function") { + throw new Error(`octokit/plugin-throttling error: + You must pass the onSecondaryRateLimit and onRateLimit error handlers. + See https://octokit.github.io/rest.js/#throttling + + const octokit = new Octokit({ + throttle: { + onSecondaryRateLimit: (retryAfter, options) => {/* ... */}, + onRateLimit: (retryAfter, options) => {/* ... 
*/} + } + }) + `); + } + const events = {}; + const emitter = new Bottleneck.Events(events); + // @ts-expect-error + events.on("secondary-limit", state.onSecondaryRateLimit); + // @ts-expect-error + events.on("rate-limit", state.onRateLimit); + // @ts-expect-error + events.on("error", e => octokit.log.warn("Error in throttling-plugin limit handler", e)); + // @ts-expect-error + state.retryLimiter.on("failed", async function (error, info) { + const [state, request, options] = info.args; + const { + pathname + } = new URL(options.url, "http://github.test"); + const shouldRetryGraphQL = pathname.startsWith("/graphql") && error.status !== 401; + if (!(shouldRetryGraphQL || error.status === 403)) { + return; + } + const retryCount = ~~request.retryCount; + request.retryCount = retryCount; + // backward compatibility + options.request.retryCount = retryCount; + const { + wantRetry, + retryAfter = 0 + } = await async function () { + if (/\bsecondary rate\b/i.test(error.message)) { + // The user has hit the secondary rate limit. (REST and GraphQL) + // https://docs.github.com/en/rest/overview/resources-in-the-rest-api#secondary-rate-limits + // The Retry-After header can sometimes be blank when hitting a secondary rate limit, + // but is always present after 2-3s, so make sure to set `retryAfter` to at least 5s by default. + const retryAfter = Number(error.response.headers["retry-after"]) || state.fallbackSecondaryRateRetryAfter; + const wantRetry = await emitter.trigger("secondary-limit", retryAfter, options, octokit, retryCount); + return { + wantRetry, + retryAfter + }; + } + if (error.response.headers != null && error.response.headers["x-ratelimit-remaining"] === "0") { + // The user has used all their allowed calls for the current time period (REST and GraphQL) + // https://docs.github.com/en/rest/reference/rate-limit (REST) + // https://docs.github.com/en/graphql/overview/resource-limitations#rate-limit (GraphQL) + const rateLimitReset = new Date(~~error.response.headers["x-ratelimit-reset"] * 1000).getTime(); + const retryAfter = Math.max(Math.ceil((rateLimitReset - Date.now()) / 1000), 0); + const wantRetry = await emitter.trigger("rate-limit", retryAfter, options, octokit, retryCount); + return { + wantRetry, + retryAfter + }; + } + return {}; + }(); + if (wantRetry) { + request.retryCount++; + return retryAfter * state.retryAfterBaseValue; + } + }); + octokit.hook.wrap("request", wrapRequest.bind(null, state)); + return {}; +} +throttling.VERSION = VERSION; +throttling.triggersNotification = triggersNotification; + +exports.throttling = throttling; +//# sourceMappingURL=index.js.map diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-node/index.js.map b/dist/node_modules/@octokit/plugin-throttling/dist-node/index.js.map new file mode 100644 index 0000000..0b1ddd1 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/wrap-request.js","../dist-src/generated/triggers-notification-paths.js","../dist-src/route-matcher.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"5.2.3\";\n","const noop = () => Promise.resolve();\n// @ts-expect-error\nexport function wrapRequest(state, request, options) {\n return state.retryLimiter.schedule(doRequest, state, request, options);\n}\n// @ts-expect-error\nasync function doRequest(state, request, options) {\n const isWrite = options.method !== \"GET\" && options.method !== \"HEAD\";\n const { 
pathname } = new URL(options.url, \"http://github.test\");\n const isSearch = options.method === \"GET\" && pathname.startsWith(\"/search/\");\n const isGraphQL = pathname.startsWith(\"/graphql\");\n const retryCount = ~~request.retryCount;\n const jobOptions = retryCount > 0 ? { priority: 0, weight: 0 } : {};\n if (state.clustering) {\n // Remove a job from Redis if it has not completed or failed within 60s\n // Examples: Node process terminated, client disconnected, etc.\n // @ts-expect-error\n jobOptions.expiration = 1000 * 60;\n }\n // Guarantee at least 1000ms between writes\n // GraphQL can also trigger writes\n if (isWrite || isGraphQL) {\n await state.write.key(state.id).schedule(jobOptions, noop);\n }\n // Guarantee at least 3000ms between requests that trigger notifications\n if (isWrite && state.triggersNotification(pathname)) {\n await state.notifications.key(state.id).schedule(jobOptions, noop);\n }\n // Guarantee at least 2000ms between search requests\n if (isSearch) {\n await state.search.key(state.id).schedule(jobOptions, noop);\n }\n const req = state.global.key(state.id).schedule(jobOptions, request, options);\n if (isGraphQL) {\n const res = await req;\n if (res.data.errors != null &&\n // @ts-expect-error\n res.data.errors.some((error) => error.type === \"RATE_LIMITED\")) {\n const error = Object.assign(new Error(\"GraphQL Rate Limit Exceeded\"), {\n response: res,\n data: res.data,\n });\n throw error;\n }\n }\n return req;\n}\n","export default [\n \"/orgs/{org}/invitations\",\n \"/orgs/{org}/invitations/{invitation_id}\",\n \"/orgs/{org}/teams/{team_slug}/discussions\",\n \"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"/repos/{owner}/{repo}/collaborators/{username}\",\n \"/repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"/repos/{owner}/{repo}/issues\",\n \"/repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"/repos/{owner}/{repo}/pulls\",\n \"/repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\",\n \"/repos/{owner}/{repo}/pulls/{pull_number}/merge\",\n \"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n \"/repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"/repos/{owner}/{repo}/releases\",\n \"/teams/{team_id}/discussions\",\n \"/teams/{team_id}/discussions/{discussion_number}/comments\",\n];\n","export function routeMatcher(paths) {\n // EXAMPLE. For the following paths:\n /* [\n \"/orgs/{org}/invitations\",\n \"/repos/{owner}/{repo}/collaborators/{username}\"\n ] */\n const regexes = paths.map((path) => path\n .split(\"/\")\n .map((c) => (c.startsWith(\"{\") ? 
\"(?:.+?)\" : c))\n .join(\"/\"));\n // 'regexes' would contain:\n /* [\n '/orgs/(?:.+?)/invitations',\n '/repos/(?:.+?)/(?:.+?)/collaborators/(?:.+?)'\n ] */\n const regex = `^(?:${regexes.map((r) => `(?:${r})`).join(\"|\")})[^/]*$`;\n // 'regex' would contain:\n /*\n ^(?:(?:\\/orgs\\/(?:.+?)\\/invitations)|(?:\\/repos\\/(?:.+?)\\/(?:.+?)\\/collaborators\\/(?:.+?)))[^\\/]*$\n \n It may look scary, but paste it into https://www.debuggex.com/\n and it will make a lot more sense!\n */\n return new RegExp(regex, \"i\");\n}\n","// @ts-expect-error\nimport BottleneckLight from \"bottleneck/light\";\nimport { VERSION } from \"./version\";\nimport { wrapRequest } from \"./wrap-request\";\nimport triggersNotificationPaths from \"./generated/triggers-notification-paths\";\nimport { routeMatcher } from \"./route-matcher\";\n// Workaround to allow tests to directly access the triggersNotification function.\nconst regex = routeMatcher(triggersNotificationPaths);\nconst triggersNotification = regex.test.bind(regex);\nconst groups = {};\n// @ts-expect-error\nconst createGroups = function (Bottleneck, common) {\n groups.global = new Bottleneck.Group({\n id: \"octokit-global\",\n maxConcurrent: 10,\n ...common,\n });\n groups.search = new Bottleneck.Group({\n id: \"octokit-search\",\n maxConcurrent: 1,\n minTime: 2000,\n ...common,\n });\n groups.write = new Bottleneck.Group({\n id: \"octokit-write\",\n maxConcurrent: 1,\n minTime: 1000,\n ...common,\n });\n groups.notifications = new Bottleneck.Group({\n id: \"octokit-notifications\",\n maxConcurrent: 1,\n minTime: 3000,\n ...common,\n });\n};\nexport function throttling(octokit, octokitOptions) {\n const { enabled = true, Bottleneck = BottleneckLight, id = \"no-id\", timeout = 1000 * 60 * 2, // Redis TTL: 2 minutes\n connection, } = octokitOptions.throttle || {};\n if (!enabled) {\n return {};\n }\n const common = { connection, timeout };\n if (groups.global == null) {\n createGroups(Bottleneck, common);\n }\n if (octokitOptions.throttle &&\n octokitOptions.throttle.minimalSecondaryRateRetryAfter) {\n octokit.log.warn(\"[@octokit/plugin-throttling] `options.throttle.minimalSecondaryRateRetryAfter` is deprecated, please use `options.throttle.fallbackSecondaryRateRetryAfter` instead\");\n octokitOptions.throttle.fallbackSecondaryRateRetryAfter =\n octokitOptions.throttle.minimalSecondaryRateRetryAfter;\n delete octokitOptions.throttle.minimalSecondaryRateRetryAfter;\n }\n if (octokitOptions.throttle && octokitOptions.throttle.onAbuseLimit) {\n octokit.log.warn(\"[@octokit/plugin-throttling] `onAbuseLimit()` is deprecated and will be removed in a future release of `@octokit/plugin-throttling`, please use the `onSecondaryRateLimit` handler instead\");\n // @ts-ignore types don't allow for both properties to be set\n octokitOptions.throttle.onSecondaryRateLimit =\n octokitOptions.throttle.onAbuseLimit;\n // @ts-ignore\n delete octokitOptions.throttle.onAbuseLimit;\n }\n const state = Object.assign({\n clustering: connection != null,\n triggersNotification,\n fallbackSecondaryRateRetryAfter: 60,\n retryAfterBaseValue: 1000,\n retryLimiter: new Bottleneck(),\n id,\n ...groups,\n }, octokitOptions.throttle);\n if (typeof state.onSecondaryRateLimit !== \"function\" ||\n typeof state.onRateLimit !== \"function\") {\n throw new Error(`octokit/plugin-throttling error:\n You must pass the onSecondaryRateLimit and onRateLimit error handlers.\n See https://octokit.github.io/rest.js/#throttling\n\n const octokit = new Octokit({\n throttle: {\n onSecondaryRateLimit: 
(retryAfter, options) => {/* ... */},\n onRateLimit: (retryAfter, options) => {/* ... */}\n }\n })\n `);\n }\n const events = {};\n const emitter = new Bottleneck.Events(events);\n // @ts-expect-error\n events.on(\"secondary-limit\", state.onSecondaryRateLimit);\n // @ts-expect-error\n events.on(\"rate-limit\", state.onRateLimit);\n // @ts-expect-error\n events.on(\"error\", (e) => octokit.log.warn(\"Error in throttling-plugin limit handler\", e));\n // @ts-expect-error\n state.retryLimiter.on(\"failed\", async function (error, info) {\n const [state, request, options] = info.args;\n const { pathname } = new URL(options.url, \"http://github.test\");\n const shouldRetryGraphQL = pathname.startsWith(\"/graphql\") && error.status !== 401;\n if (!(shouldRetryGraphQL || error.status === 403)) {\n return;\n }\n const retryCount = ~~request.retryCount;\n request.retryCount = retryCount;\n // backward compatibility\n options.request.retryCount = retryCount;\n const { wantRetry, retryAfter = 0 } = await (async function () {\n if (/\\bsecondary rate\\b/i.test(error.message)) {\n // The user has hit the secondary rate limit. (REST and GraphQL)\n // https://docs.github.com/en/rest/overview/resources-in-the-rest-api#secondary-rate-limits\n // The Retry-After header can sometimes be blank when hitting a secondary rate limit,\n // but is always present after 2-3s, so make sure to set `retryAfter` to at least 5s by default.\n const retryAfter = Number(error.response.headers[\"retry-after\"]) ||\n state.fallbackSecondaryRateRetryAfter;\n const wantRetry = await emitter.trigger(\"secondary-limit\", retryAfter, options, octokit, retryCount);\n return { wantRetry, retryAfter };\n }\n if (error.response.headers != null &&\n error.response.headers[\"x-ratelimit-remaining\"] === \"0\") {\n // The user has used all their allowed calls for the current time period (REST and GraphQL)\n // https://docs.github.com/en/rest/reference/rate-limit (REST)\n // https://docs.github.com/en/graphql/overview/resource-limitations#rate-limit (GraphQL)\n const rateLimitReset = new Date(~~error.response.headers[\"x-ratelimit-reset\"] * 1000).getTime();\n const retryAfter = Math.max(Math.ceil((rateLimitReset - Date.now()) / 1000), 0);\n const wantRetry = await emitter.trigger(\"rate-limit\", retryAfter, options, octokit, retryCount);\n return { wantRetry, retryAfter };\n }\n return {};\n })();\n if (wantRetry) {\n request.retryCount++;\n return retryAfter * state.retryAfterBaseValue;\n }\n });\n octokit.hook.wrap(\"request\", wrapRequest.bind(null, state));\n return {};\n}\nthrottling.VERSION = VERSION;\nthrottling.triggersNotification = 
triggersNotification;\n"],"names":["VERSION","noop","Promise","resolve","wrapRequest","state","request","options","retryLimiter","schedule","doRequest","isWrite","method","pathname","URL","url","isSearch","startsWith","isGraphQL","retryCount","jobOptions","priority","weight","clustering","expiration","write","key","id","triggersNotification","notifications","search","req","global","res","data","errors","some","error","type","Object","assign","Error","response","routeMatcher","paths","regexes","map","path","split","c","join","regex","r","RegExp","triggersNotificationPaths","test","bind","groups","createGroups","Bottleneck","common","Group","maxConcurrent","minTime","throttling","octokit","octokitOptions","enabled","BottleneckLight","timeout","connection","throttle","minimalSecondaryRateRetryAfter","log","warn","fallbackSecondaryRateRetryAfter","onAbuseLimit","onSecondaryRateLimit","retryAfterBaseValue","onRateLimit","events","emitter","Events","on","e","info","args","shouldRetryGraphQL","status","wantRetry","retryAfter","message","Number","headers","trigger","rateLimitReset","Date","getTime","Math","max","ceil","now","hook","wrap"],"mappings":";;;;;;;;AAAO,MAAMA,OAAO,GAAG,mBAAmB;;ACA1C,MAAMC,IAAI,GAAGA,MAAMC,OAAO,CAACC,OAAO,EAAE;AACpC;AACA,AAAO,SAASC,WAAWA,CAACC,KAAK,EAAEC,OAAO,EAAEC,OAAO,EAAE;EACjD,OAAOF,KAAK,CAACG,YAAY,CAACC,QAAQ,CAACC,SAAS,EAAEL,KAAK,EAAEC,OAAO,EAAEC,OAAO,CAAC;AAC1E;AACA;AACA,eAAeG,SAASA,CAACL,KAAK,EAAEC,OAAO,EAAEC,OAAO,EAAE;EAC9C,MAAMI,OAAO,GAAGJ,OAAO,CAACK,MAAM,KAAK,KAAK,IAAIL,OAAO,CAACK,MAAM,KAAK,MAAM;EACrE,MAAM;IAAEC;GAAU,GAAG,IAAIC,GAAG,CAACP,OAAO,CAACQ,GAAG,EAAE,oBAAoB,CAAC;EAC/D,MAAMC,QAAQ,GAAGT,OAAO,CAACK,MAAM,KAAK,KAAK,IAAIC,QAAQ,CAACI,UAAU,CAAC,UAAU,CAAC;EAC5E,MAAMC,SAAS,GAAGL,QAAQ,CAACI,UAAU,CAAC,UAAU,CAAC;EACjD,MAAME,UAAU,GAAG,CAAC,CAACb,OAAO,CAACa,UAAU;EACvC,MAAMC,UAAU,GAAGD,UAAU,GAAG,CAAC,GAAG;IAAEE,QAAQ,EAAE,CAAC;IAAEC,MAAM,EAAE;GAAG,GAAG,EAAE;EACnE,IAAIjB,KAAK,CAACkB,UAAU,EAAE;;;;IAIlBH,UAAU,CAACI,UAAU,GAAG,IAAI,GAAG,EAAE;;;;EAIrC,IAAIb,OAAO,IAAIO,SAAS,EAAE;IACtB,MAAMb,KAAK,CAACoB,KAAK,CAACC,GAAG,CAACrB,KAAK,CAACsB,EAAE,CAAC,CAAClB,QAAQ,CAACW,UAAU,EAAEnB,IAAI,CAAC;;;EAG9D,IAAIU,OAAO,IAAIN,KAAK,CAACuB,oBAAoB,CAACf,QAAQ,CAAC,EAAE;IACjD,MAAMR,KAAK,CAACwB,aAAa,CAACH,GAAG,CAACrB,KAAK,CAACsB,EAAE,CAAC,CAAClB,QAAQ,CAACW,UAAU,EAAEnB,IAAI,CAAC;;;EAGtE,IAAIe,QAAQ,EAAE;IACV,MAAMX,KAAK,CAACyB,MAAM,CAACJ,GAAG,CAACrB,KAAK,CAACsB,EAAE,CAAC,CAAClB,QAAQ,CAACW,UAAU,EAAEnB,IAAI,CAAC;;EAE/D,MAAM8B,GAAG,GAAG1B,KAAK,CAAC2B,MAAM,CAACN,GAAG,CAACrB,KAAK,CAACsB,EAAE,CAAC,CAAClB,QAAQ,CAACW,UAAU,EAAEd,OAAO,EAAEC,OAAO,CAAC;EAC7E,IAAIW,SAAS,EAAE;IACX,MAAMe,GAAG,GAAG,MAAMF,GAAG;IACrB,IAAIE,GAAG,CAACC,IAAI,CAACC,MAAM,IAAI,IAAI;;IAEvBF,GAAG,CAACC,IAAI,CAACC,MAAM,CAACC,IAAI,CAAEC,KAAK,IAAKA,KAAK,CAACC,IAAI,KAAK,cAAc,CAAC,EAAE;MAChE,MAAMD,KAAK,GAAGE,MAAM,CAACC,MAAM,CAAC,IAAIC,KAAK,CAAC,6BAA6B,CAAC,EAAE;QAClEC,QAAQ,EAAET,GAAG;QACbC,IAAI,EAAED,GAAG,CAACC;OACb,CAAC;MACF,MAAMG,KAAK;;;EAGnB,OAAON,GAAG;AACd;;AC9CA,gCAAe,CACX,yBAAyB,EACzB,yCAAyC,EACzC,2CAA2C,EAC3C,wEAAwE,EACxE,gDAAgD,EAChD,qDAAqD,EACrD,8BAA8B,EAC9B,sDAAsD,EACtD,6BAA6B,EAC7B,oDAAoD,EACpD,yEAAyE,EACzE,iDAAiD,EACjD,+DAA+D,EAC/D,mDAAmD,EACnD,gCAAgC,EAChC,8BAA8B,EAC9B,2DAA2D,CAC9D;;AClBM,SAASY,YAAYA,CAACC,KAAK,EAAE;;;AAGpC;AACA;AACA;EACI,MAAMC,OAAO,GAAGD,KAAK,CAACE,GAAG,CAAEC,IAAI,IAAKA,IAAI,CACnCC,KAAK,CAAC,GAAG,CAAC,CACVF,GAAG,CAAEG,CAAC,IAAMA,CAAC,CAAChC,UAAU,CAAC,GAAG,CAAC,GAAG,SAAS,GAAGgC,CAAE,CAAC,CAC/CC,IAAI,CAAC,GAAG,CAAC,CAAC;;;AAGnB;AACA;AACA;EACI,MAAMC,KAAK,GAAI,OAAMN,OAAO,CAACC,GAAG,CAAEM,CAAC,IAAM,MAAKA,CAAE,GAAE,CAAC,CAAC
F,IAAI,CAAC,GAAG,CAAE,SAAQ;;;AAG1E;AACA;AACA;AACA;EAEI,OAAO,IAAIG,MAAM,CAACF,KAAK,EAAE,GAAG,CAAC;AACjC;;ACxBA;AACA,AAKA;AACA,MAAMA,KAAK,GAAGR,YAAY,CAACW,yBAAyB,CAAC;AACrD,MAAM1B,oBAAoB,GAAGuB,KAAK,CAACI,IAAI,CAACC,IAAI,CAACL,KAAK,CAAC;AACnD,MAAMM,MAAM,GAAG,EAAE;AACjB;AACA,MAAMC,YAAY,GAAG,UAAUC,UAAU,EAAEC,MAAM,EAAE;EAC/CH,MAAM,CAACzB,MAAM,GAAG,IAAI2B,UAAU,CAACE,KAAK,CAAC;IACjClC,EAAE,EAAE,gBAAgB;IACpBmC,aAAa,EAAE,EAAE;IACjB,GAAGF;GACN,CAAC;EACFH,MAAM,CAAC3B,MAAM,GAAG,IAAI6B,UAAU,CAACE,KAAK,CAAC;IACjClC,EAAE,EAAE,gBAAgB;IACpBmC,aAAa,EAAE,CAAC;IAChBC,OAAO,EAAE,IAAI;IACb,GAAGH;GACN,CAAC;EACFH,MAAM,CAAChC,KAAK,GAAG,IAAIkC,UAAU,CAACE,KAAK,CAAC;IAChClC,EAAE,EAAE,eAAe;IACnBmC,aAAa,EAAE,CAAC;IAChBC,OAAO,EAAE,IAAI;IACb,GAAGH;GACN,CAAC;EACFH,MAAM,CAAC5B,aAAa,GAAG,IAAI8B,UAAU,CAACE,KAAK,CAAC;IACxClC,EAAE,EAAE,uBAAuB;IAC3BmC,aAAa,EAAE,CAAC;IAChBC,OAAO,EAAE,IAAI;IACb,GAAGH;GACN,CAAC;AACN,CAAC;AACD,AAAO,SAASI,UAAUA,CAACC,OAAO,EAAEC,cAAc,EAAE;EAChD,MAAM;IAAEC,OAAO,GAAG,IAAI;IAAER,UAAU,GAAGS,eAAe;IAAEzC,EAAE,GAAG,OAAO;IAAE0C,OAAO,GAAG,IAAI,GAAG,EAAE,GAAG,CAAC;;IAC3FC;GAAa,GAAGJ,cAAc,CAACK,QAAQ,IAAI,EAAE;EAC7C,IAAI,CAACJ,OAAO,EAAE;IACV,OAAO,EAAE;;EAEb,MAAMP,MAAM,GAAG;IAAEU,UAAU;IAAED;GAAS;EACtC,IAAIZ,MAAM,CAACzB,MAAM,IAAI,IAAI,EAAE;IACvB0B,YAAY,CAACC,UAAU,EAAEC,MAAM,CAAC;;EAEpC,IAAIM,cAAc,CAACK,QAAQ,IACvBL,cAAc,CAACK,QAAQ,CAACC,8BAA8B,EAAE;IACxDP,OAAO,CAACQ,GAAG,CAACC,IAAI,CAAC,qKAAqK,CAAC;IACvLR,cAAc,CAACK,QAAQ,CAACI,+BAA+B,GACnDT,cAAc,CAACK,QAAQ,CAACC,8BAA8B;IAC1D,OAAON,cAAc,CAACK,QAAQ,CAACC,8BAA8B;;EAEjE,IAAIN,cAAc,CAACK,QAAQ,IAAIL,cAAc,CAACK,QAAQ,CAACK,YAAY,EAAE;IACjEX,OAAO,CAACQ,GAAG,CAACC,IAAI,CAAC,4LAA4L,CAAC;;IAE9MR,cAAc,CAACK,QAAQ,CAACM,oBAAoB,GACxCX,cAAc,CAACK,QAAQ,CAACK,YAAY;;IAExC,OAAOV,cAAc,CAACK,QAAQ,CAACK,YAAY;;EAE/C,MAAMvE,KAAK,GAAGkC,MAAM,CAACC,MAAM,CAAC;IACxBjB,UAAU,EAAE+C,UAAU,IAAI,IAAI;IAC9B1C,oBAAoB;IACpB+C,+BAA+B,EAAE,EAAE;IACnCG,mBAAmB,EAAE,IAAI;IACzBtE,YAAY,EAAE,IAAImD,UAAU,EAAE;IAC9BhC,EAAE;IACF,GAAG8B;GACN,EAAES,cAAc,CAACK,QAAQ,CAAC;EAC3B,IAAI,OAAOlE,KAAK,CAACwE,oBAAoB,KAAK,UAAU,IAChD,OAAOxE,KAAK,CAAC0E,WAAW,KAAK,UAAU,EAAE;IACzC,MAAM,IAAItC,KAAK,CAAE;AACzB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAAK,CAAC;;EAEF,MAAMuC,MAAM,GAAG,EAAE;EACjB,MAAMC,OAAO,GAAG,IAAItB,UAAU,CAACuB,MAAM,CAACF,MAAM,CAAC;;EAE7CA,MAAM,CAACG,EAAE,CAAC,iBAAiB,EAAE9E,KAAK,CAACwE,oBAAoB,CAAC;;EAExDG,MAAM,CAACG,EAAE,CAAC,YAAY,EAAE9E,KAAK,CAAC0E,WAAW,CAAC;;EAE1CC,MAAM,CAACG,EAAE,CAAC,OAAO,EAAGC,CAAC,IAAKnB,OAAO,CAACQ,GAAG,CAACC,IAAI,CAAC,0CAA0C,EAAEU,CAAC,CAAC,CAAC;;EAE1F/E,KAAK,CAACG,YAAY,CAAC2E,EAAE,CAAC,QAAQ,EAAE,gBAAgB9C,KAAK,EAAEgD,IAAI,EAAE;IACzD,MAAM,CAAChF,KAAK,EAAEC,OAAO,EAAEC,OAAO,CAAC,GAAG8E,IAAI,CAACC,IAAI;IAC3C,MAAM;MAAEzE;KAAU,GAAG,IAAIC,GAAG,CAACP,OAAO,CAACQ,GAAG,EAAE,oBAAoB,CAAC;IAC/D,MAAMwE,kBAAkB,GAAG1E,QAAQ,CAACI,UAAU,CAAC,UAAU,CAAC,IAAIoB,KAAK,CAACmD,MAAM,KAAK,GAAG;IAClF,IAAI,EAAED,kBAAkB,IAAIlD,KAAK,CAACmD,MAAM,KAAK,GAAG,CAAC,EAAE;MAC/C;;IAEJ,MAAMrE,UAAU,GAAG,CAAC,CAACb,OAAO,CAACa,UAAU;IACvCb,OAAO,CAACa,UAAU,GAAGA,UAAU;;IAE/BZ,OAAO,CAACD,OAAO,CAACa,UAAU,GAAGA,UAAU;IACvC,MAAM;MAAEsE,SAAS;MAAEC,UAAU,GAAG;KAAG,GAAG,MAAO,kBAAkB;MAC3D,IAAI,qBAAqB,CAACnC,IAAI,CAAClB,KAAK,CAACsD,OAAO,CAAC,EAAE;;;;;QAK3C,MAAMD,UAAU,GAAGE,MAAM,CAACvD,KAAK,CAACK,QAAQ,CAACmD,OAAO,CAAC,aAAa,CAAC,CAAC,IAC5DxF,KAAK,CAACsE,+BAA+B;QACzC,MAAMc,SAAS,GAAG,MAAMR,OAAO,CAACa,OAAO,CAAC,iBAAiB,EAAEJ,UAAU,EAAEnF,OAAO,EAAE0D,OAAO,EAAE9C,UAAU,CAAC;QACpG,OAAO;UAAEsE,SAAS;UAAEC;SAAY;;MAEpC,IAAIrD,KAAK,CAACK,QAAQ,CAACmD,OAAO,IAAI,IAAI,IAC9BxD,KAAK,CAACK,QAAQ,CAACmD,OAAO,CAAC,uBAAuB,CAAC,KAAK,GAAG,EAAE;;;;QAIzD,MAAME,cAAc,GAAG,IAAIC,IAAI,
CAAC,CAAC,CAAC3D,KAAK,CAACK,QAAQ,CAACmD,OAAO,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,CAACI,OAAO,EAAE;QAC/F,MAAMP,UAAU,GAAGQ,IAAI,CAACC,GAAG,CAACD,IAAI,CAACE,IAAI,CAAC,CAACL,cAAc,GAAGC,IAAI,CAACK,GAAG,EAAE,IAAI,IAAI,CAAC,EAAE,CAAC,CAAC;QAC/E,MAAMZ,SAAS,GAAG,MAAMR,OAAO,CAACa,OAAO,CAAC,YAAY,EAAEJ,UAAU,EAAEnF,OAAO,EAAE0D,OAAO,EAAE9C,UAAU,CAAC;QAC/F,OAAO;UAAEsE,SAAS;UAAEC;SAAY;;MAEpC,OAAO,EAAE;KACZ,EAAG;IACJ,IAAID,SAAS,EAAE;MACXnF,OAAO,CAACa,UAAU,EAAE;MACpB,OAAOuE,UAAU,GAAGrF,KAAK,CAACyE,mBAAmB;;GAEpD,CAAC;EACFb,OAAO,CAACqC,IAAI,CAACC,IAAI,CAAC,SAAS,EAAEnG,WAAW,CAACoD,IAAI,CAAC,IAAI,EAAEnD,KAAK,CAAC,CAAC;EAC3D,OAAO,EAAE;AACb;AACA2D,UAAU,CAAChE,OAAO,GAAGA,OAAO;AAC5BgE,UAAU,CAACpC,oBAAoB,GAAGA,oBAAoB;;;;"} \ No newline at end of file diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-src/generated/triggers-notification-paths.js b/dist/node_modules/@octokit/plugin-throttling/dist-src/generated/triggers-notification-paths.js new file mode 100644 index 0000000..fc09d28 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-src/generated/triggers-notification-paths.js @@ -0,0 +1,19 @@ +export default [ + "/orgs/{org}/invitations", + "/orgs/{org}/invitations/{invitation_id}", + "/orgs/{org}/teams/{team_slug}/discussions", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "/repos/{owner}/{repo}/collaborators/{username}", + "/repos/{owner}/{repo}/commits/{commit_sha}/comments", + "/repos/{owner}/{repo}/issues", + "/repos/{owner}/{repo}/issues/{issue_number}/comments", + "/repos/{owner}/{repo}/pulls", + "/repos/{owner}/{repo}/pulls/{pull_number}/comments", + "/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", + "/repos/{owner}/{repo}/pulls/{pull_number}/merge", + "/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "/repos/{owner}/{repo}/releases", + "/teams/{team_id}/discussions", + "/teams/{team_id}/discussions/{discussion_number}/comments", +]; diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-src/index.js b/dist/node_modules/@octokit/plugin-throttling/dist-src/index.js new file mode 100644 index 0000000..49955d7 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-src/index.js @@ -0,0 +1,137 @@ +// @ts-expect-error +import BottleneckLight from "bottleneck/light"; +import { VERSION } from "./version"; +import { wrapRequest } from "./wrap-request"; +import triggersNotificationPaths from "./generated/triggers-notification-paths"; +import { routeMatcher } from "./route-matcher"; +// Workaround to allow tests to directly access the triggersNotification function. 
+const regex = routeMatcher(triggersNotificationPaths); +const triggersNotification = regex.test.bind(regex); +const groups = {}; +// @ts-expect-error +const createGroups = function (Bottleneck, common) { + groups.global = new Bottleneck.Group({ + id: "octokit-global", + maxConcurrent: 10, + ...common, + }); + groups.search = new Bottleneck.Group({ + id: "octokit-search", + maxConcurrent: 1, + minTime: 2000, + ...common, + }); + groups.write = new Bottleneck.Group({ + id: "octokit-write", + maxConcurrent: 1, + minTime: 1000, + ...common, + }); + groups.notifications = new Bottleneck.Group({ + id: "octokit-notifications", + maxConcurrent: 1, + minTime: 3000, + ...common, + }); +}; +export function throttling(octokit, octokitOptions) { + const { enabled = true, Bottleneck = BottleneckLight, id = "no-id", timeout = 1000 * 60 * 2, // Redis TTL: 2 minutes + connection, } = octokitOptions.throttle || {}; + if (!enabled) { + return {}; + } + const common = { connection, timeout }; + if (groups.global == null) { + createGroups(Bottleneck, common); + } + if (octokitOptions.throttle && + octokitOptions.throttle.minimalSecondaryRateRetryAfter) { + octokit.log.warn("[@octokit/plugin-throttling] `options.throttle.minimalSecondaryRateRetryAfter` is deprecated, please use `options.throttle.fallbackSecondaryRateRetryAfter` instead"); + octokitOptions.throttle.fallbackSecondaryRateRetryAfter = + octokitOptions.throttle.minimalSecondaryRateRetryAfter; + delete octokitOptions.throttle.minimalSecondaryRateRetryAfter; + } + if (octokitOptions.throttle && octokitOptions.throttle.onAbuseLimit) { + octokit.log.warn("[@octokit/plugin-throttling] `onAbuseLimit()` is deprecated and will be removed in a future release of `@octokit/plugin-throttling`, please use the `onSecondaryRateLimit` handler instead"); + // @ts-ignore types don't allow for both properties to be set + octokitOptions.throttle.onSecondaryRateLimit = + octokitOptions.throttle.onAbuseLimit; + // @ts-ignore + delete octokitOptions.throttle.onAbuseLimit; + } + const state = Object.assign({ + clustering: connection != null, + triggersNotification, + fallbackSecondaryRateRetryAfter: 60, + retryAfterBaseValue: 1000, + retryLimiter: new Bottleneck(), + id, + ...groups, + }, octokitOptions.throttle); + if (typeof state.onSecondaryRateLimit !== "function" || + typeof state.onRateLimit !== "function") { + throw new Error(`octokit/plugin-throttling error: + You must pass the onSecondaryRateLimit and onRateLimit error handlers. + See https://octokit.github.io/rest.js/#throttling + + const octokit = new Octokit({ + throttle: { + onSecondaryRateLimit: (retryAfter, options) => {/* ... */}, + onRateLimit: (retryAfter, options) => {/* ... 
*/} + } + }) + `); + } + const events = {}; + const emitter = new Bottleneck.Events(events); + // @ts-expect-error + events.on("secondary-limit", state.onSecondaryRateLimit); + // @ts-expect-error + events.on("rate-limit", state.onRateLimit); + // @ts-expect-error + events.on("error", (e) => octokit.log.warn("Error in throttling-plugin limit handler", e)); + // @ts-expect-error + state.retryLimiter.on("failed", async function (error, info) { + const [state, request, options] = info.args; + const { pathname } = new URL(options.url, "http://github.test"); + const shouldRetryGraphQL = pathname.startsWith("/graphql") && error.status !== 401; + if (!(shouldRetryGraphQL || error.status === 403)) { + return; + } + const retryCount = ~~request.retryCount; + request.retryCount = retryCount; + // backward compatibility + options.request.retryCount = retryCount; + const { wantRetry, retryAfter = 0 } = await (async function () { + if (/\bsecondary rate\b/i.test(error.message)) { + // The user has hit the secondary rate limit. (REST and GraphQL) + // https://docs.github.com/en/rest/overview/resources-in-the-rest-api#secondary-rate-limits + // The Retry-After header can sometimes be blank when hitting a secondary rate limit, + // but is always present after 2-3s, so make sure to set `retryAfter` to at least 5s by default. + const retryAfter = Number(error.response.headers["retry-after"]) || + state.fallbackSecondaryRateRetryAfter; + const wantRetry = await emitter.trigger("secondary-limit", retryAfter, options, octokit, retryCount); + return { wantRetry, retryAfter }; + } + if (error.response.headers != null && + error.response.headers["x-ratelimit-remaining"] === "0") { + // The user has used all their allowed calls for the current time period (REST and GraphQL) + // https://docs.github.com/en/rest/reference/rate-limit (REST) + // https://docs.github.com/en/graphql/overview/resource-limitations#rate-limit (GraphQL) + const rateLimitReset = new Date(~~error.response.headers["x-ratelimit-reset"] * 1000).getTime(); + const retryAfter = Math.max(Math.ceil((rateLimitReset - Date.now()) / 1000), 0); + const wantRetry = await emitter.trigger("rate-limit", retryAfter, options, octokit, retryCount); + return { wantRetry, retryAfter }; + } + return {}; + })(); + if (wantRetry) { + request.retryCount++; + return retryAfter * state.retryAfterBaseValue; + } + }); + octokit.hook.wrap("request", wrapRequest.bind(null, state)); + return {}; +} +throttling.VERSION = VERSION; +throttling.triggersNotification = triggersNotification; diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-src/route-matcher.js b/dist/node_modules/@octokit/plugin-throttling/dist-src/route-matcher.js new file mode 100644 index 0000000..95c5c2e --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-src/route-matcher.js @@ -0,0 +1,25 @@ +export function routeMatcher(paths) { + // EXAMPLE. For the following paths: + /* [ + "/orgs/{org}/invitations", + "/repos/{owner}/{repo}/collaborators/{username}" + ] */ + const regexes = paths.map((path) => path + .split("/") + .map((c) => (c.startsWith("{") ? 
"(?:.+?)" : c)) + .join("/")); + // 'regexes' would contain: + /* [ + '/orgs/(?:.+?)/invitations', + '/repos/(?:.+?)/(?:.+?)/collaborators/(?:.+?)' + ] */ + const regex = `^(?:${regexes.map((r) => `(?:${r})`).join("|")})[^/]*$`; + // 'regex' would contain: + /* + ^(?:(?:\/orgs\/(?:.+?)\/invitations)|(?:\/repos\/(?:.+?)\/(?:.+?)\/collaborators\/(?:.+?)))[^\/]*$ + + It may look scary, but paste it into https://www.debuggex.com/ + and it will make a lot more sense! + */ + return new RegExp(regex, "i"); +} diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-src/types.js b/dist/node_modules/@octokit/plugin-throttling/dist-src/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-src/types.js @@ -0,0 +1 @@ +export {}; diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-src/version.js b/dist/node_modules/@octokit/plugin-throttling/dist-src/version.js new file mode 100644 index 0000000..44aa1b5 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-src/version.js @@ -0,0 +1 @@ +export const VERSION = "5.2.3"; diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-src/wrap-request.js b/dist/node_modules/@octokit/plugin-throttling/dist-src/wrap-request.js new file mode 100644 index 0000000..11af1a7 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-src/wrap-request.js @@ -0,0 +1,47 @@ +const noop = () => Promise.resolve(); +// @ts-expect-error +export function wrapRequest(state, request, options) { + return state.retryLimiter.schedule(doRequest, state, request, options); +} +// @ts-expect-error +async function doRequest(state, request, options) { + const isWrite = options.method !== "GET" && options.method !== "HEAD"; + const { pathname } = new URL(options.url, "http://github.test"); + const isSearch = options.method === "GET" && pathname.startsWith("/search/"); + const isGraphQL = pathname.startsWith("/graphql"); + const retryCount = ~~request.retryCount; + const jobOptions = retryCount > 0 ? { priority: 0, weight: 0 } : {}; + if (state.clustering) { + // Remove a job from Redis if it has not completed or failed within 60s + // Examples: Node process terminated, client disconnected, etc. 
+ // @ts-expect-error + jobOptions.expiration = 1000 * 60; + } + // Guarantee at least 1000ms between writes + // GraphQL can also trigger writes + if (isWrite || isGraphQL) { + await state.write.key(state.id).schedule(jobOptions, noop); + } + // Guarantee at least 3000ms between requests that trigger notifications + if (isWrite && state.triggersNotification(pathname)) { + await state.notifications.key(state.id).schedule(jobOptions, noop); + } + // Guarantee at least 2000ms between search requests + if (isSearch) { + await state.search.key(state.id).schedule(jobOptions, noop); + } + const req = state.global.key(state.id).schedule(jobOptions, request, options); + if (isGraphQL) { + const res = await req; + if (res.data.errors != null && + // @ts-expect-error + res.data.errors.some((error) => error.type === "RATE_LIMITED")) { + const error = Object.assign(new Error("GraphQL Rate Limit Exceeded"), { + response: res, + data: res.data, + }); + throw error; + } + } + return req; +} diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-types/generated/triggers-notification-paths.d.ts b/dist/node_modules/@octokit/plugin-throttling/dist-types/generated/triggers-notification-paths.d.ts new file mode 100644 index 0000000..d451d2b --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-types/generated/triggers-notification-paths.d.ts @@ -0,0 +1,2 @@ +declare const _default: string[]; +export default _default; diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-types/index.d.ts b/dist/node_modules/@octokit/plugin-throttling/dist-types/index.d.ts new file mode 100644 index 0000000..03203a4 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Octokit } from "@octokit/core"; +import { OctokitOptions } from "@octokit/core/dist-types/types.d"; +import { ThrottlingOptions } from "./types"; +export declare function throttling(octokit: Octokit, octokitOptions: OctokitOptions): {}; +export declare namespace throttling { + var VERSION: string; + var triggersNotification: (string: string) => boolean; +} +declare module "@octokit/core/dist-types/types.d" { + interface OctokitOptions { + throttle?: ThrottlingOptions; + } +} diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-types/route-matcher.d.ts b/dist/node_modules/@octokit/plugin-throttling/dist-types/route-matcher.d.ts new file mode 100644 index 0000000..7047f2d --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-types/route-matcher.d.ts @@ -0,0 +1 @@ +export declare function routeMatcher(paths: string[]): RegExp; diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-types/types.d.ts b/dist/node_modules/@octokit/plugin-throttling/dist-types/types.d.ts new file mode 100644 index 0000000..60d2d05 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-types/types.d.ts @@ -0,0 +1,44 @@ +import { Octokit } from "@octokit/core"; +import Bottleneck from "bottleneck"; +type LimitHandler = (retryAfter: number, options: object, octokit: Octokit, retryCount: number) => void; +export type AbuseLimitHandler = { + /** + * @deprecated "[@octokit/plugin-throttling] `onAbuseLimit()` is deprecated and will be removed in a future release of `@octokit/plugin-throttling`, please use the `onSecondaryRateLimit` handler instead" + */ + onAbuseLimit: LimitHandler; + onSecondaryRateLimit?: never; +}; +export type SecondaryLimitHandler = { + /** + * @deprecated "[@octokit/plugin-throttling] `onAbuseLimit()` is deprecated and will be removed 
in a future release of `@octokit/plugin-throttling`, please use the `onSecondaryRateLimit` handler instead" + */ + onAbuseLimit?: never; + onSecondaryRateLimit: LimitHandler; +}; +export type ThrottlingOptionsBase = { + enabled?: boolean; + Bottleneck?: typeof Bottleneck; + id?: string; + timeout?: number; + connection?: Bottleneck.RedisConnection | Bottleneck.IORedisConnection; + /** + * @deprecated use `fallbackSecondaryRateRetryAfter` + */ + minimalSecondaryRateRetryAfter?: number; + fallbackSecondaryRateRetryAfter?: number; + retryAfterBaseValue?: number; + write?: Bottleneck.Group; + search?: Bottleneck.Group; + notifications?: Bottleneck.Group; + onRateLimit: LimitHandler; +}; +export type ThrottlingOptions = (ThrottlingOptionsBase & (AbuseLimitHandler | SecondaryLimitHandler)) | (Partial & { + enabled: false; +}); +export type Groups = { + global?: Bottleneck.Group; + write?: Bottleneck.Group; + search?: Bottleneck.Group; + notifications?: Bottleneck.Group; +}; +export {}; diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-types/version.d.ts b/dist/node_modules/@octokit/plugin-throttling/dist-types/version.d.ts new file mode 100644 index 0000000..11b9365 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "5.2.3"; diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-types/wrap-request.d.ts b/dist/node_modules/@octokit/plugin-throttling/dist-types/wrap-request.d.ts new file mode 100644 index 0000000..9b5bd25 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-types/wrap-request.d.ts @@ -0,0 +1 @@ +export declare function wrapRequest(state: any, request: any, options: any): any; diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-web/index.js b/dist/node_modules/@octokit/plugin-throttling/dist-web/index.js new file mode 100644 index 0000000..e6fbb98 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-web/index.js @@ -0,0 +1,233 @@ +import BottleneckLight from 'bottleneck/light'; + +const VERSION = "5.2.3"; + +const noop = () => Promise.resolve(); +// @ts-expect-error +function wrapRequest(state, request, options) { + return state.retryLimiter.schedule(doRequest, state, request, options); +} +// @ts-expect-error +async function doRequest(state, request, options) { + const isWrite = options.method !== "GET" && options.method !== "HEAD"; + const { pathname } = new URL(options.url, "http://github.test"); + const isSearch = options.method === "GET" && pathname.startsWith("/search/"); + const isGraphQL = pathname.startsWith("/graphql"); + const retryCount = ~~request.retryCount; + const jobOptions = retryCount > 0 ? { priority: 0, weight: 0 } : {}; + if (state.clustering) { + // Remove a job from Redis if it has not completed or failed within 60s + // Examples: Node process terminated, client disconnected, etc. 
+ // @ts-expect-error + jobOptions.expiration = 1000 * 60; + } + // Guarantee at least 1000ms between writes + // GraphQL can also trigger writes + if (isWrite || isGraphQL) { + await state.write.key(state.id).schedule(jobOptions, noop); + } + // Guarantee at least 3000ms between requests that trigger notifications + if (isWrite && state.triggersNotification(pathname)) { + await state.notifications.key(state.id).schedule(jobOptions, noop); + } + // Guarantee at least 2000ms between search requests + if (isSearch) { + await state.search.key(state.id).schedule(jobOptions, noop); + } + const req = state.global.key(state.id).schedule(jobOptions, request, options); + if (isGraphQL) { + const res = await req; + if (res.data.errors != null && + // @ts-expect-error + res.data.errors.some((error) => error.type === "RATE_LIMITED")) { + const error = Object.assign(new Error("GraphQL Rate Limit Exceeded"), { + response: res, + data: res.data, + }); + throw error; + } + } + return req; +} + +var triggersNotificationPaths = [ + "/orgs/{org}/invitations", + "/orgs/{org}/invitations/{invitation_id}", + "/orgs/{org}/teams/{team_slug}/discussions", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "/repos/{owner}/{repo}/collaborators/{username}", + "/repos/{owner}/{repo}/commits/{commit_sha}/comments", + "/repos/{owner}/{repo}/issues", + "/repos/{owner}/{repo}/issues/{issue_number}/comments", + "/repos/{owner}/{repo}/pulls", + "/repos/{owner}/{repo}/pulls/{pull_number}/comments", + "/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", + "/repos/{owner}/{repo}/pulls/{pull_number}/merge", + "/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "/repos/{owner}/{repo}/releases", + "/teams/{team_id}/discussions", + "/teams/{team_id}/discussions/{discussion_number}/comments", +]; + +function routeMatcher(paths) { + // EXAMPLE. For the following paths: + /* [ + "/orgs/{org}/invitations", + "/repos/{owner}/{repo}/collaborators/{username}" + ] */ + const regexes = paths.map((path) => path + .split("/") + .map((c) => (c.startsWith("{") ? "(?:.+?)" : c)) + .join("/")); + // 'regexes' would contain: + /* [ + '/orgs/(?:.+?)/invitations', + '/repos/(?:.+?)/(?:.+?)/collaborators/(?:.+?)' + ] */ + const regex = `^(?:${regexes.map((r) => `(?:${r})`).join("|")})[^/]*$`; + // 'regex' would contain: + /* + ^(?:(?:\/orgs\/(?:.+?)\/invitations)|(?:\/repos\/(?:.+?)\/(?:.+?)\/collaborators\/(?:.+?)))[^\/]*$ + + It may look scary, but paste it into https://www.debuggex.com/ + and it will make a lot more sense! + */ + return new RegExp(regex, "i"); +} + +// @ts-expect-error +// Workaround to allow tests to directly access the triggersNotification function. 
+const regex = routeMatcher(triggersNotificationPaths); +const triggersNotification = regex.test.bind(regex); +const groups = {}; +// @ts-expect-error +const createGroups = function (Bottleneck, common) { + groups.global = new Bottleneck.Group({ + id: "octokit-global", + maxConcurrent: 10, + ...common, + }); + groups.search = new Bottleneck.Group({ + id: "octokit-search", + maxConcurrent: 1, + minTime: 2000, + ...common, + }); + groups.write = new Bottleneck.Group({ + id: "octokit-write", + maxConcurrent: 1, + minTime: 1000, + ...common, + }); + groups.notifications = new Bottleneck.Group({ + id: "octokit-notifications", + maxConcurrent: 1, + minTime: 3000, + ...common, + }); +}; +function throttling(octokit, octokitOptions) { + const { enabled = true, Bottleneck = BottleneckLight, id = "no-id", timeout = 1000 * 60 * 2, // Redis TTL: 2 minutes + connection, } = octokitOptions.throttle || {}; + if (!enabled) { + return {}; + } + const common = { connection, timeout }; + if (groups.global == null) { + createGroups(Bottleneck, common); + } + if (octokitOptions.throttle && + octokitOptions.throttle.minimalSecondaryRateRetryAfter) { + octokit.log.warn("[@octokit/plugin-throttling] `options.throttle.minimalSecondaryRateRetryAfter` is deprecated, please use `options.throttle.fallbackSecondaryRateRetryAfter` instead"); + octokitOptions.throttle.fallbackSecondaryRateRetryAfter = + octokitOptions.throttle.minimalSecondaryRateRetryAfter; + delete octokitOptions.throttle.minimalSecondaryRateRetryAfter; + } + if (octokitOptions.throttle && octokitOptions.throttle.onAbuseLimit) { + octokit.log.warn("[@octokit/plugin-throttling] `onAbuseLimit()` is deprecated and will be removed in a future release of `@octokit/plugin-throttling`, please use the `onSecondaryRateLimit` handler instead"); + // @ts-ignore types don't allow for both properties to be set + octokitOptions.throttle.onSecondaryRateLimit = + octokitOptions.throttle.onAbuseLimit; + // @ts-ignore + delete octokitOptions.throttle.onAbuseLimit; + } + const state = Object.assign({ + clustering: connection != null, + triggersNotification, + fallbackSecondaryRateRetryAfter: 60, + retryAfterBaseValue: 1000, + retryLimiter: new Bottleneck(), + id, + ...groups, + }, octokitOptions.throttle); + if (typeof state.onSecondaryRateLimit !== "function" || + typeof state.onRateLimit !== "function") { + throw new Error(`octokit/plugin-throttling error: + You must pass the onSecondaryRateLimit and onRateLimit error handlers. + See https://octokit.github.io/rest.js/#throttling + + const octokit = new Octokit({ + throttle: { + onSecondaryRateLimit: (retryAfter, options) => {/* ... */}, + onRateLimit: (retryAfter, options) => {/* ... 
*/} + } + }) + `); + } + const events = {}; + const emitter = new Bottleneck.Events(events); + // @ts-expect-error + events.on("secondary-limit", state.onSecondaryRateLimit); + // @ts-expect-error + events.on("rate-limit", state.onRateLimit); + // @ts-expect-error + events.on("error", (e) => octokit.log.warn("Error in throttling-plugin limit handler", e)); + // @ts-expect-error + state.retryLimiter.on("failed", async function (error, info) { + const [state, request, options] = info.args; + const { pathname } = new URL(options.url, "http://github.test"); + const shouldRetryGraphQL = pathname.startsWith("/graphql") && error.status !== 401; + if (!(shouldRetryGraphQL || error.status === 403)) { + return; + } + const retryCount = ~~request.retryCount; + request.retryCount = retryCount; + // backward compatibility + options.request.retryCount = retryCount; + const { wantRetry, retryAfter = 0 } = await (async function () { + if (/\bsecondary rate\b/i.test(error.message)) { + // The user has hit the secondary rate limit. (REST and GraphQL) + // https://docs.github.com/en/rest/overview/resources-in-the-rest-api#secondary-rate-limits + // The Retry-After header can sometimes be blank when hitting a secondary rate limit, + // but is always present after 2-3s, so make sure to set `retryAfter` to at least 5s by default. + const retryAfter = Number(error.response.headers["retry-after"]) || + state.fallbackSecondaryRateRetryAfter; + const wantRetry = await emitter.trigger("secondary-limit", retryAfter, options, octokit, retryCount); + return { wantRetry, retryAfter }; + } + if (error.response.headers != null && + error.response.headers["x-ratelimit-remaining"] === "0") { + // The user has used all their allowed calls for the current time period (REST and GraphQL) + // https://docs.github.com/en/rest/reference/rate-limit (REST) + // https://docs.github.com/en/graphql/overview/resource-limitations#rate-limit (GraphQL) + const rateLimitReset = new Date(~~error.response.headers["x-ratelimit-reset"] * 1000).getTime(); + const retryAfter = Math.max(Math.ceil((rateLimitReset - Date.now()) / 1000), 0); + const wantRetry = await emitter.trigger("rate-limit", retryAfter, options, octokit, retryCount); + return { wantRetry, retryAfter }; + } + return {}; + })(); + if (wantRetry) { + request.retryCount++; + return retryAfter * state.retryAfterBaseValue; + } + }); + octokit.hook.wrap("request", wrapRequest.bind(null, state)); + return {}; +} +throttling.VERSION = VERSION; +throttling.triggersNotification = triggersNotification; + +export { throttling }; +//# sourceMappingURL=index.js.map diff --git a/dist/node_modules/@octokit/plugin-throttling/dist-web/index.js.map b/dist/node_modules/@octokit/plugin-throttling/dist-web/index.js.map new file mode 100644 index 0000000..d3adbc4 --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/version.js","../dist-src/wrap-request.js","../dist-src/generated/triggers-notification-paths.js","../dist-src/route-matcher.js","../dist-src/index.js"],"sourcesContent":["export const VERSION = \"5.2.3\";\n","const noop = () => Promise.resolve();\n// @ts-expect-error\nexport function wrapRequest(state, request, options) {\n return state.retryLimiter.schedule(doRequest, state, request, options);\n}\n// @ts-expect-error\nasync function doRequest(state, request, options) {\n const isWrite = options.method !== \"GET\" && options.method !== \"HEAD\";\n const { pathname } = new URL(options.url, 
\"http://github.test\");\n const isSearch = options.method === \"GET\" && pathname.startsWith(\"/search/\");\n const isGraphQL = pathname.startsWith(\"/graphql\");\n const retryCount = ~~request.retryCount;\n const jobOptions = retryCount > 0 ? { priority: 0, weight: 0 } : {};\n if (state.clustering) {\n // Remove a job from Redis if it has not completed or failed within 60s\n // Examples: Node process terminated, client disconnected, etc.\n // @ts-expect-error\n jobOptions.expiration = 1000 * 60;\n }\n // Guarantee at least 1000ms between writes\n // GraphQL can also trigger writes\n if (isWrite || isGraphQL) {\n await state.write.key(state.id).schedule(jobOptions, noop);\n }\n // Guarantee at least 3000ms between requests that trigger notifications\n if (isWrite && state.triggersNotification(pathname)) {\n await state.notifications.key(state.id).schedule(jobOptions, noop);\n }\n // Guarantee at least 2000ms between search requests\n if (isSearch) {\n await state.search.key(state.id).schedule(jobOptions, noop);\n }\n const req = state.global.key(state.id).schedule(jobOptions, request, options);\n if (isGraphQL) {\n const res = await req;\n if (res.data.errors != null &&\n // @ts-expect-error\n res.data.errors.some((error) => error.type === \"RATE_LIMITED\")) {\n const error = Object.assign(new Error(\"GraphQL Rate Limit Exceeded\"), {\n response: res,\n data: res.data,\n });\n throw error;\n }\n }\n return req;\n}\n","export default [\n \"/orgs/{org}/invitations\",\n \"/orgs/{org}/invitations/{invitation_id}\",\n \"/orgs/{org}/teams/{team_slug}/discussions\",\n \"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\",\n \"/repos/{owner}/{repo}/collaborators/{username}\",\n \"/repos/{owner}/{repo}/commits/{commit_sha}/comments\",\n \"/repos/{owner}/{repo}/issues\",\n \"/repos/{owner}/{repo}/issues/{issue_number}/comments\",\n \"/repos/{owner}/{repo}/pulls\",\n \"/repos/{owner}/{repo}/pulls/{pull_number}/comments\",\n \"/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\",\n \"/repos/{owner}/{repo}/pulls/{pull_number}/merge\",\n \"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\",\n \"/repos/{owner}/{repo}/pulls/{pull_number}/reviews\",\n \"/repos/{owner}/{repo}/releases\",\n \"/teams/{team_id}/discussions\",\n \"/teams/{team_id}/discussions/{discussion_number}/comments\",\n];\n","export function routeMatcher(paths) {\n // EXAMPLE. For the following paths:\n /* [\n \"/orgs/{org}/invitations\",\n \"/repos/{owner}/{repo}/collaborators/{username}\"\n ] */\n const regexes = paths.map((path) => path\n .split(\"/\")\n .map((c) => (c.startsWith(\"{\") ? 
\"(?:.+?)\" : c))\n .join(\"/\"));\n // 'regexes' would contain:\n /* [\n '/orgs/(?:.+?)/invitations',\n '/repos/(?:.+?)/(?:.+?)/collaborators/(?:.+?)'\n ] */\n const regex = `^(?:${regexes.map((r) => `(?:${r})`).join(\"|\")})[^/]*$`;\n // 'regex' would contain:\n /*\n ^(?:(?:\\/orgs\\/(?:.+?)\\/invitations)|(?:\\/repos\\/(?:.+?)\\/(?:.+?)\\/collaborators\\/(?:.+?)))[^\\/]*$\n \n It may look scary, but paste it into https://www.debuggex.com/\n and it will make a lot more sense!\n */\n return new RegExp(regex, \"i\");\n}\n","// @ts-expect-error\nimport BottleneckLight from \"bottleneck/light\";\nimport { VERSION } from \"./version\";\nimport { wrapRequest } from \"./wrap-request\";\nimport triggersNotificationPaths from \"./generated/triggers-notification-paths\";\nimport { routeMatcher } from \"./route-matcher\";\n// Workaround to allow tests to directly access the triggersNotification function.\nconst regex = routeMatcher(triggersNotificationPaths);\nconst triggersNotification = regex.test.bind(regex);\nconst groups = {};\n// @ts-expect-error\nconst createGroups = function (Bottleneck, common) {\n groups.global = new Bottleneck.Group({\n id: \"octokit-global\",\n maxConcurrent: 10,\n ...common,\n });\n groups.search = new Bottleneck.Group({\n id: \"octokit-search\",\n maxConcurrent: 1,\n minTime: 2000,\n ...common,\n });\n groups.write = new Bottleneck.Group({\n id: \"octokit-write\",\n maxConcurrent: 1,\n minTime: 1000,\n ...common,\n });\n groups.notifications = new Bottleneck.Group({\n id: \"octokit-notifications\",\n maxConcurrent: 1,\n minTime: 3000,\n ...common,\n });\n};\nexport function throttling(octokit, octokitOptions) {\n const { enabled = true, Bottleneck = BottleneckLight, id = \"no-id\", timeout = 1000 * 60 * 2, // Redis TTL: 2 minutes\n connection, } = octokitOptions.throttle || {};\n if (!enabled) {\n return {};\n }\n const common = { connection, timeout };\n if (groups.global == null) {\n createGroups(Bottleneck, common);\n }\n if (octokitOptions.throttle &&\n octokitOptions.throttle.minimalSecondaryRateRetryAfter) {\n octokit.log.warn(\"[@octokit/plugin-throttling] `options.throttle.minimalSecondaryRateRetryAfter` is deprecated, please use `options.throttle.fallbackSecondaryRateRetryAfter` instead\");\n octokitOptions.throttle.fallbackSecondaryRateRetryAfter =\n octokitOptions.throttle.minimalSecondaryRateRetryAfter;\n delete octokitOptions.throttle.minimalSecondaryRateRetryAfter;\n }\n if (octokitOptions.throttle && octokitOptions.throttle.onAbuseLimit) {\n octokit.log.warn(\"[@octokit/plugin-throttling] `onAbuseLimit()` is deprecated and will be removed in a future release of `@octokit/plugin-throttling`, please use the `onSecondaryRateLimit` handler instead\");\n // @ts-ignore types don't allow for both properties to be set\n octokitOptions.throttle.onSecondaryRateLimit =\n octokitOptions.throttle.onAbuseLimit;\n // @ts-ignore\n delete octokitOptions.throttle.onAbuseLimit;\n }\n const state = Object.assign({\n clustering: connection != null,\n triggersNotification,\n fallbackSecondaryRateRetryAfter: 60,\n retryAfterBaseValue: 1000,\n retryLimiter: new Bottleneck(),\n id,\n ...groups,\n }, octokitOptions.throttle);\n if (typeof state.onSecondaryRateLimit !== \"function\" ||\n typeof state.onRateLimit !== \"function\") {\n throw new Error(`octokit/plugin-throttling error:\n You must pass the onSecondaryRateLimit and onRateLimit error handlers.\n See https://octokit.github.io/rest.js/#throttling\n\n const octokit = new Octokit({\n throttle: {\n onSecondaryRateLimit: 
(retryAfter, options) => {/* ... */},\n onRateLimit: (retryAfter, options) => {/* ... */}\n }\n })\n `);\n }\n const events = {};\n const emitter = new Bottleneck.Events(events);\n // @ts-expect-error\n events.on(\"secondary-limit\", state.onSecondaryRateLimit);\n // @ts-expect-error\n events.on(\"rate-limit\", state.onRateLimit);\n // @ts-expect-error\n events.on(\"error\", (e) => octokit.log.warn(\"Error in throttling-plugin limit handler\", e));\n // @ts-expect-error\n state.retryLimiter.on(\"failed\", async function (error, info) {\n const [state, request, options] = info.args;\n const { pathname } = new URL(options.url, \"http://github.test\");\n const shouldRetryGraphQL = pathname.startsWith(\"/graphql\") && error.status !== 401;\n if (!(shouldRetryGraphQL || error.status === 403)) {\n return;\n }\n const retryCount = ~~request.retryCount;\n request.retryCount = retryCount;\n // backward compatibility\n options.request.retryCount = retryCount;\n const { wantRetry, retryAfter = 0 } = await (async function () {\n if (/\\bsecondary rate\\b/i.test(error.message)) {\n // The user has hit the secondary rate limit. (REST and GraphQL)\n // https://docs.github.com/en/rest/overview/resources-in-the-rest-api#secondary-rate-limits\n // The Retry-After header can sometimes be blank when hitting a secondary rate limit,\n // but is always present after 2-3s, so make sure to set `retryAfter` to at least 5s by default.\n const retryAfter = Number(error.response.headers[\"retry-after\"]) ||\n state.fallbackSecondaryRateRetryAfter;\n const wantRetry = await emitter.trigger(\"secondary-limit\", retryAfter, options, octokit, retryCount);\n return { wantRetry, retryAfter };\n }\n if (error.response.headers != null &&\n error.response.headers[\"x-ratelimit-remaining\"] === \"0\") {\n // The user has used all their allowed calls for the current time period (REST and GraphQL)\n // https://docs.github.com/en/rest/reference/rate-limit (REST)\n // https://docs.github.com/en/graphql/overview/resource-limitations#rate-limit (GraphQL)\n const rateLimitReset = new Date(~~error.response.headers[\"x-ratelimit-reset\"] * 1000).getTime();\n const retryAfter = Math.max(Math.ceil((rateLimitReset - Date.now()) / 1000), 0);\n const wantRetry = await emitter.trigger(\"rate-limit\", retryAfter, options, octokit, retryCount);\n return { wantRetry, retryAfter };\n }\n return {};\n })();\n if (wantRetry) {\n request.retryCount++;\n return retryAfter * state.retryAfterBaseValue;\n }\n });\n octokit.hook.wrap(\"request\", wrapRequest.bind(null, state));\n return {};\n}\nthrottling.VERSION = VERSION;\nthrottling.triggersNotification = 
triggersNotification;\n"],"names":[],"mappings":";;AAAO,MAAM,OAAO,GAAG,mBAAmB;;ACA1C,MAAM,IAAI,GAAG,MAAM,OAAO,CAAC,OAAO,EAAE,CAAC;AACrC;AACA,AAAO,SAAS,WAAW,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,EAAE;AACrD,IAAI,OAAO,KAAK,CAAC,YAAY,CAAC,QAAQ,CAAC,SAAS,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;AAC3E,CAAC;AACD;AACA,eAAe,SAAS,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,EAAE;AAClD,IAAI,MAAM,OAAO,GAAG,OAAO,CAAC,MAAM,KAAK,KAAK,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,CAAC;AAC1E,IAAI,MAAM,EAAE,QAAQ,EAAE,GAAG,IAAI,GAAG,CAAC,OAAO,CAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;AACpE,IAAI,MAAM,QAAQ,GAAG,OAAO,CAAC,MAAM,KAAK,KAAK,IAAI,QAAQ,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;AACjF,IAAI,MAAM,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;AACtD,IAAI,MAAM,UAAU,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC;AAC5C,IAAI,MAAM,UAAU,GAAG,UAAU,GAAG,CAAC,GAAG,EAAE,QAAQ,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC;AACxE,IAAI,IAAI,KAAK,CAAC,UAAU,EAAE;AAC1B;AACA;AACA;AACA,QAAQ,UAAU,CAAC,UAAU,GAAG,IAAI,GAAG,EAAE,CAAC;AAC1C,KAAK;AACL;AACA;AACA,IAAI,IAAI,OAAO,IAAI,SAAS,EAAE;AAC9B,QAAQ,MAAM,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;AACnE,KAAK;AACL;AACA,IAAI,IAAI,OAAO,IAAI,KAAK,CAAC,oBAAoB,CAAC,QAAQ,CAAC,EAAE;AACzD,QAAQ,MAAM,KAAK,CAAC,aAAa,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;AAC3E,KAAK;AACL;AACA,IAAI,IAAI,QAAQ,EAAE;AAClB,QAAQ,MAAM,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;AACpE,KAAK;AACL,IAAI,MAAM,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,UAAU,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;AAClF,IAAI,IAAI,SAAS,EAAE;AACnB,QAAQ,MAAM,GAAG,GAAG,MAAM,GAAG,CAAC;AAC9B,QAAQ,IAAI,GAAG,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI;AACnC;AACA,YAAY,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,CAAC,IAAI,KAAK,cAAc,CAAC,EAAE;AAC5E,YAAY,MAAM,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAE;AAClF,gBAAgB,QAAQ,EAAE,GAAG;AAC7B,gBAAgB,IAAI,EAAE,GAAG,CAAC,IAAI;AAC9B,aAAa,CAAC,CAAC;AACf,YAAY,MAAM,KAAK,CAAC;AACxB,SAAS;AACT,KAAK;AACL,IAAI,OAAO,GAAG,CAAC;AACf,CAAC;;AC9CD,gCAAe;AACf,IAAI,yBAAyB;AAC7B,IAAI,yCAAyC;AAC7C,IAAI,2CAA2C;AAC/C,IAAI,wEAAwE;AAC5E,IAAI,gDAAgD;AACpD,IAAI,qDAAqD;AACzD,IAAI,8BAA8B;AAClC,IAAI,sDAAsD;AAC1D,IAAI,6BAA6B;AACjC,IAAI,oDAAoD;AACxD,IAAI,yEAAyE;AAC7E,IAAI,iDAAiD;AACrD,IAAI,+DAA+D;AACnE,IAAI,mDAAmD;AACvD,IAAI,gCAAgC;AACpC,IAAI,8BAA8B;AAClC,IAAI,2DAA2D;AAC/D,CAAC,CAAC;;AClBK,SAAS,YAAY,CAAC,KAAK,EAAE;AACpC;AACA;AACA;AACA;AACA;AACA,IAAI,MAAM,OAAO,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,IAAI;AAC5C,SAAS,KAAK,CAAC,GAAG,CAAC;AACnB,SAAS,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,SAAS,GAAG,CAAC,CAAC,CAAC;AACxD,SAAS,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AACpB;AACA;AACA;AACA;AACA;AACA,IAAI,MAAM,KAAK,GAAG,CAAC,IAAI,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC;AAC3E;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAI,OAAO,IAAI,MAAM,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;AAClC,CAAC;;ACxBD;AACA,AAKA;AACA,MAAM,KAAK,GAAG,YAAY,CAAC,yBAAyB,CAAC,CAAC;AACtD,MAAM,oBAAoB,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACpD,MAAM,MAAM,GAAG,EAAE,CAAC;AAClB;AACA,MAAM,YAAY,GAAG,UAAU,UAAU,EAAE,MAAM,EAAE;AACnD,IAAI,MAAM,CAAC,MAAM,GAAG,IAAI,UAAU,CAAC,KAAK,CAAC;AACzC,QAAQ,EAAE,EAAE,gBAAgB;AAC5B,QAAQ,aAAa,EAAE,EAAE;AACzB,QAAQ,GAAG,MAAM;AACjB,KAAK,CAAC,CAAC;AACP,IAAI,MAAM,CAAC,MAAM,GAAG,IAAI,UAAU,CAAC,KAAK,CAAC;AACzC,QAAQ,EAAE,EAAE,gBAAgB;AAC5B,QAAQ,aAAa,EAAE,CAAC;AACxB,QAAQ,OAAO,EAAE,IAAI;AACrB,QAAQ,GAAG,MAAM;AACjB,KAAK,CAAC,CAAC;AACP,IAAI,MAAM,CAAC,KAAK,GAAG,IAAI,UAAU,CAAC,KAAK,CAAC;AACxC,Q
AAQ,EAAE,EAAE,eAAe;AAC3B,QAAQ,aAAa,EAAE,CAAC;AACxB,QAAQ,OAAO,EAAE,IAAI;AACrB,QAAQ,GAAG,MAAM;AACjB,KAAK,CAAC,CAAC;AACP,IAAI,MAAM,CAAC,aAAa,GAAG,IAAI,UAAU,CAAC,KAAK,CAAC;AAChD,QAAQ,EAAE,EAAE,uBAAuB;AACnC,QAAQ,aAAa,EAAE,CAAC;AACxB,QAAQ,OAAO,EAAE,IAAI;AACrB,QAAQ,GAAG,MAAM;AACjB,KAAK,CAAC,CAAC;AACP,CAAC,CAAC;AACF,AAAO,SAAS,UAAU,CAAC,OAAO,EAAE,cAAc,EAAE;AACpD,IAAI,MAAM,EAAE,OAAO,GAAG,IAAI,EAAE,UAAU,GAAG,eAAe,EAAE,EAAE,GAAG,OAAO,EAAE,OAAO,GAAG,IAAI,GAAG,EAAE,GAAG,CAAC;AAC/F,IAAI,UAAU,GAAG,GAAG,cAAc,CAAC,QAAQ,IAAI,EAAE,CAAC;AAClD,IAAI,IAAI,CAAC,OAAO,EAAE;AAClB,QAAQ,OAAO,EAAE,CAAC;AAClB,KAAK;AACL,IAAI,MAAM,MAAM,GAAG,EAAE,UAAU,EAAE,OAAO,EAAE,CAAC;AAC3C,IAAI,IAAI,MAAM,CAAC,MAAM,IAAI,IAAI,EAAE;AAC/B,QAAQ,YAAY,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;AACzC,KAAK;AACL,IAAI,IAAI,cAAc,CAAC,QAAQ;AAC/B,QAAQ,cAAc,CAAC,QAAQ,CAAC,8BAA8B,EAAE;AAChE,QAAQ,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,qKAAqK,CAAC,CAAC;AAChM,QAAQ,cAAc,CAAC,QAAQ,CAAC,+BAA+B;AAC/D,YAAY,cAAc,CAAC,QAAQ,CAAC,8BAA8B,CAAC;AACnE,QAAQ,OAAO,cAAc,CAAC,QAAQ,CAAC,8BAA8B,CAAC;AACtE,KAAK;AACL,IAAI,IAAI,cAAc,CAAC,QAAQ,IAAI,cAAc,CAAC,QAAQ,CAAC,YAAY,EAAE;AACzE,QAAQ,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,4LAA4L,CAAC,CAAC;AACvN;AACA,QAAQ,cAAc,CAAC,QAAQ,CAAC,oBAAoB;AACpD,YAAY,cAAc,CAAC,QAAQ,CAAC,YAAY,CAAC;AACjD;AACA,QAAQ,OAAO,cAAc,CAAC,QAAQ,CAAC,YAAY,CAAC;AACpD,KAAK;AACL,IAAI,MAAM,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC;AAChC,QAAQ,UAAU,EAAE,UAAU,IAAI,IAAI;AACtC,QAAQ,oBAAoB;AAC5B,QAAQ,+BAA+B,EAAE,EAAE;AAC3C,QAAQ,mBAAmB,EAAE,IAAI;AACjC,QAAQ,YAAY,EAAE,IAAI,UAAU,EAAE;AACtC,QAAQ,EAAE;AACV,QAAQ,GAAG,MAAM;AACjB,KAAK,EAAE,cAAc,CAAC,QAAQ,CAAC,CAAC;AAChC,IAAI,IAAI,OAAO,KAAK,CAAC,oBAAoB,KAAK,UAAU;AACxD,QAAQ,OAAO,KAAK,CAAC,WAAW,KAAK,UAAU,EAAE;AACjD,QAAQ,MAAM,IAAI,KAAK,CAAC,CAAC;AACzB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAI,CAAC,CAAC,CAAC;AACP,KAAK;AACL,IAAI,MAAM,MAAM,GAAG,EAAE,CAAC;AACtB,IAAI,MAAM,OAAO,GAAG,IAAI,UAAU,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;AAClD;AACA,IAAI,MAAM,CAAC,EAAE,CAAC,iBAAiB,EAAE,KAAK,CAAC,oBAAoB,CAAC,CAAC;AAC7D;AACA,IAAI,MAAM,CAAC,EAAE,CAAC,YAAY,EAAE,KAAK,CAAC,WAAW,CAAC,CAAC;AAC/C;AACA,IAAI,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC,KAAK,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,0CAA0C,EAAE,CAAC,CAAC,CAAC,CAAC;AAC/F;AACA,IAAI,KAAK,CAAC,YAAY,CAAC,EAAE,CAAC,QAAQ,EAAE,gBAAgB,KAAK,EAAE,IAAI,EAAE;AACjE,QAAQ,MAAM,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC;AACpD,QAAQ,MAAM,EAAE,QAAQ,EAAE,GAAG,IAAI,GAAG,CAAC,OAAO,CAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;AACxE,QAAQ,MAAM,kBAAkB,GAAG,QAAQ,CAAC,UAAU,CAAC,UAAU,CAAC,IAAI,KAAK,CAAC,MAAM,KAAK,GAAG,CAAC;AAC3F,QAAQ,IAAI,EAAE,kBAAkB,IAAI,KAAK,CAAC,MAAM,KAAK,GAAG,CAAC,EAAE;AAC3D,YAAY,OAAO;AACnB,SAAS;AACT,QAAQ,MAAM,UAAU,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC;AAChD,QAAQ,OAAO,CAAC,UAAU,GAAG,UAAU,CAAC;AACxC;AACA,QAAQ,OAAO,CAAC,OAAO,CAAC,UAAU,GAAG,UAAU,CAAC;AAChD,QAAQ,MAAM,EAAE,SAAS,EAAE,UAAU,GAAG,CAAC,EAAE,GAAG,MAAM,CAAC,kBAAkB;AACvE,YAAY,IAAI,qBAAqB,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,EAAE;AAC3D;AACA;AACA;AACA;AACA,gBAAgB,MAAM,UAAU,GAAG,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC;AAChF,oBAAoB,KAAK,CAAC,+BAA+B,CAAC;AAC1D,gBAAgB,MAAM,SAAS,GAAG,MAAM,OAAO,CAAC,OAAO,CAAC,iBAAiB,EAAE,UAAU,EAAE,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;AACrH,gBAAgB,OAAO,EAAE,SAAS,EAAE,UAAU,EAAE,CAAC;AACjD,aAAa;AACb,YAAY,IAAI,KAAK,CAAC,QAAQ,CAAC,OAAO,IAAI,IAAI;AAC9C,gBAAgB,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,uBAAuB,CAAC,KAAK,GAAG,EAAE;AACzE;AACA;AACA;AACA,gBAAgB,MAAM,cAAc,GAAG,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,CAAC,OAAO,EAAE,CAAC;AAChH,gBAAgB,MAAM,UAAU,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,cAAc,GAAG,IAAI,CAAC,GAAG,EAAE,IAAI,IAAI,CAAC,EAAE,
CAAC,CAAC,CAAC;AAChG,gBAAgB,MAAM,SAAS,GAAG,MAAM,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,UAAU,EAAE,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;AAChH,gBAAgB,OAAO,EAAE,SAAS,EAAE,UAAU,EAAE,CAAC;AACjD,aAAa;AACb,YAAY,OAAO,EAAE,CAAC;AACtB,SAAS,GAAG,CAAC;AACb,QAAQ,IAAI,SAAS,EAAE;AACvB,YAAY,OAAO,CAAC,UAAU,EAAE,CAAC;AACjC,YAAY,OAAO,UAAU,GAAG,KAAK,CAAC,mBAAmB,CAAC;AAC1D,SAAS;AACT,KAAK,CAAC,CAAC;AACP,IAAI,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,WAAW,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC;AAChE,IAAI,OAAO,EAAE,CAAC;AACd,CAAC;AACD,UAAU,CAAC,OAAO,GAAG,OAAO,CAAC;AAC7B,UAAU,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;;;;"} \ No newline at end of file diff --git a/dist/node_modules/@octokit/plugin-throttling/package.json b/dist/node_modules/@octokit/plugin-throttling/package.json new file mode 100644 index 0000000..4ac425d --- /dev/null +++ b/dist/node_modules/@octokit/plugin-throttling/package.json @@ -0,0 +1,49 @@ +{ + "name": "@octokit/plugin-throttling", + "description": "Octokit plugin for GitHub's recommended request throttling", + "version": "5.2.3", + "license": "MIT", + "files": [ + "dist-*/**", + "bin/**" + ], + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "pika": true, + "sideEffects": false, + "repository": "github:octokit/plugin-throttling.js", + "dependencies": { + "@octokit/types": "^9.0.0", + "bottleneck": "^2.15.3" + }, + "peerDependencies": { + "@octokit/core": "^4.0.0" + }, + "devDependencies": { + "@octokit/core": "^4.0.0", + "@octokit/request-error": "^3.0.0", + "@pika/pack": "^0.3.7", + "@pika/plugin-build-node": "^0.9.1", + "@pika/plugin-build-web": "^0.9.1", + "@pika/plugin-ts-standard-pkg": "^0.9.1", + "@types/fetch-mock": "^7.3.1", + "@types/jest": "^29.0.0", + "@types/node": "^18.0.0", + "fetch-mock": "^9.0.0", + "github-openapi-graphql-query": "^4.0.0", + "jest": "^29.0.0", + "npm-run-all": "^4.1.5", + "prettier": "2.8.8", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "engines": { + "node": ">= 14" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/dist/node_modules/@octokit/request-error/LICENSE b/dist/node_modules/@octokit/request-error/LICENSE new file mode 100644 index 0000000..ef2c18e --- /dev/null +++ b/dist/node_modules/@octokit/request-error/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
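For orientation, here is a minimal consumer-side sketch (not part of the vendored files in this diff) of how the @octokit/plugin-throttling build above is registered on an Octokit instance. The Octokit.plugin wiring and the handler signature (retryAfter, options, octokit, retryCount) follow the plugin sources above; the auth token and log messages are placeholders.

```js
// Illustrative sketch only: wiring the vendored throttling plugin into @octokit/core.
const { Octokit } = require("@octokit/core");
const { throttling } = require("@octokit/plugin-throttling");

const ThrottledOctokit = Octokit.plugin(throttling);

const octokit = new ThrottledOctokit({
  auth: "personal-access-token", // placeholder
  throttle: {
    onRateLimit: (retryAfter, options, octokit, retryCount) => {
      octokit.log.warn(`Rate limit hit for ${options.method} ${options.url}`);
      // Returning true asks the plugin to retry after `retryAfter` seconds.
      return retryCount < 1;
    },
    onSecondaryRateLimit: (retryAfter, options, octokit) => {
      // Secondary rate limits are not retried here; a falsy return skips the retry.
      octokit.log.warn(`Secondary rate limit hit for ${options.method} ${options.url}`);
    },
  },
});
```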
diff --git a/dist/node_modules/@octokit/request-error/README.md b/dist/node_modules/@octokit/request-error/README.md new file mode 100644 index 0000000..343c456 --- /dev/null +++ b/dist/node_modules/@octokit/request-error/README.md @@ -0,0 +1,71 @@ +# http-error.js + +> Error class for Octokit request errors + +[![@latest](https://img.shields.io/npm/v/@octokit/request-error.svg)](https://www.npmjs.com/package/@octokit/request-error) +[![Build Status](https://github.com/octokit/request-error.js/workflows/Test/badge.svg)](https://github.com/octokit/request-error.js/actions?query=workflow%3ATest) + +## Usage +
+Browsers + +Load @octokit/request-error directly from cdn.skypack.dev + +```html +<script type="module"> +import { RequestError } from "https://cdn.skypack.dev/@octokit/request-error"; +</script> +``` + +
+Node + +Install with npm install @octokit/request-error + +```js +const { RequestError } = require("@octokit/request-error"); +// or: import { RequestError } from "@octokit/request-error"; +``` + +
+ +```js +const error = new RequestError("Oops", 500, { + request: { + method: "POST", + url: "https://api.github.com/foo", + body: { + bar: "baz", + }, + headers: { + authorization: "token secret123", + }, + }, + response: { + status: 500, + url: "https://api.github.com/foo", + headers: { + "x-github-request-id": "1:2:3:4", + }, + data: { + foo: "bar", + }, + }, +}); + +error.message; // Oops +error.status; // 500 +error.request; // { method, url, headers, body } +error.response; // { url, status, headers, data } +``` + +## LICENSE + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/request-error/dist-node/index.js b/dist/node_modules/@octokit/request-error/dist-node/index.js new file mode 100644 index 0000000..c676580 --- /dev/null +++ b/dist/node_modules/@octokit/request-error/dist-node/index.js @@ -0,0 +1,65 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var deprecation = require('deprecation'); +var once = _interopDefault(require('once')); + +const logOnceCode = once(deprecation => console.warn(deprecation)); +const logOnceHeaders = once(deprecation => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); + // Maintains proper stack trace (only available on V8) + /* istanbul ignore next */ + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + // redact request credentials without mutating original request options + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + }); + } + requestCopy.url = requestCopy.url + // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") + // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + // deprecations + Object.defineProperty(this, "code", { + get() { + logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + } + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + } + }); + } +} + +exports.RequestError = RequestError; +//# sourceMappingURL=index.js.map diff --git a/dist/node_modules/@octokit/request-error/dist-node/index.js.map b/dist/node_modules/@octokit/request-error/dist-node/index.js.map new file mode 100644 index 0000000..5381b78 --- /dev/null +++ 
b/dist/node_modules/@octokit/request-error/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/index.js"],"sourcesContent":["import { Deprecation } from \"deprecation\";\nimport once from \"once\";\nconst logOnceCode = once((deprecation) => console.warn(deprecation));\nconst logOnceHeaders = once((deprecation) => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\nexport class RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message);\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n // redact request credentials without mutating original request options\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\"),\n });\n }\n requestCopy.url = requestCopy.url\n // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\")\n // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n // deprecations\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n },\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n },\n });\n 
}\n}\n"],"names":["logOnceCode","once","deprecation","console","warn","logOnceHeaders","RequestError","Error","constructor","message","statusCode","options","captureStackTrace","name","status","headers","response","requestCopy","Object","assign","request","authorization","replace","url","defineProperty","get","Deprecation"],"mappings":";;;;;;;;;AAEA,MAAMA,WAAW,GAAGC,IAAI,CAAEC,WAAW,IAAKC,OAAO,CAACC,IAAI,CAACF,WAAW,CAAC,CAAC;AACpE,MAAMG,cAAc,GAAGJ,IAAI,CAAEC,WAAW,IAAKC,OAAO,CAACC,IAAI,CAACF,WAAW,CAAC,CAAC;AACvE;AACA;AACA;AACO,MAAMI,YAAY,SAASC,KAAK,CAAC;EACpCC,WAAW,CAACC,OAAO,EAAEC,UAAU,EAAEC,OAAO,EAAE;IACtC,KAAK,CAACF,OAAO,CAAC;;;IAGd,IAAIF,KAAK,CAACK,iBAAiB,EAAE;MACzBL,KAAK,CAACK,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAACJ,WAAW,CAAC;;IAEnD,IAAI,CAACK,IAAI,GAAG,WAAW;IACvB,IAAI,CAACC,MAAM,GAAGJ,UAAU;IACxB,IAAIK,OAAO;IACX,IAAI,SAAS,IAAIJ,OAAO,IAAI,OAAOA,OAAO,CAACI,OAAO,KAAK,WAAW,EAAE;MAChEA,OAAO,GAAGJ,OAAO,CAACI,OAAO;;IAE7B,IAAI,UAAU,IAAIJ,OAAO,EAAE;MACvB,IAAI,CAACK,QAAQ,GAAGL,OAAO,CAACK,QAAQ;MAChCD,OAAO,GAAGJ,OAAO,CAACK,QAAQ,CAACD,OAAO;;;IAGtC,MAAME,WAAW,GAAGC,MAAM,CAACC,MAAM,CAAC,EAAE,EAAER,OAAO,CAACS,OAAO,CAAC;IACtD,IAAIT,OAAO,CAACS,OAAO,CAACL,OAAO,CAACM,aAAa,EAAE;MACvCJ,WAAW,CAACF,OAAO,GAAGG,MAAM,CAACC,MAAM,CAAC,EAAE,EAAER,OAAO,CAACS,OAAO,CAACL,OAAO,EAAE;QAC7DM,aAAa,EAAEV,OAAO,CAACS,OAAO,CAACL,OAAO,CAACM,aAAa,CAACC,OAAO,CAAC,MAAM,EAAE,aAAa;OACrF,CAAC;;IAENL,WAAW,CAACM,GAAG,GAAGN,WAAW,CAACM;;;KAGzBD,OAAO,CAAC,sBAAsB,EAAE,0BAA0B;;;KAG1DA,OAAO,CAAC,qBAAqB,EAAE,yBAAyB,CAAC;IAC9D,IAAI,CAACF,OAAO,GAAGH,WAAW;;IAE1BC,MAAM,CAACM,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE;MAChCC,GAAG,GAAG;QACFzB,WAAW,CAAC,IAAI0B,uBAAW,CAAC,0EAA0E,CAAC,CAAC;QACxG,OAAOhB,UAAU;;KAExB,CAAC;IACFQ,MAAM,CAACM,cAAc,CAAC,IAAI,EAAE,SAAS,EAAE;MACnCC,GAAG,GAAG;QACFpB,cAAc,CAAC,IAAIqB,uBAAW,CAAC,uFAAuF,CAAC,CAAC;QACxH,OAAOX,OAAO,IAAI,EAAE;;KAE3B,CAAC;;AAEV;;;;"} \ No newline at end of file diff --git a/dist/node_modules/@octokit/request-error/dist-src/index.js b/dist/node_modules/@octokit/request-error/dist-src/index.js new file mode 100644 index 0000000..5eb1927 --- /dev/null +++ b/dist/node_modules/@octokit/request-error/dist-src/index.js @@ -0,0 +1,55 @@ +import { Deprecation } from "deprecation"; +import once from "once"; +const logOnceCode = once((deprecation) => console.warn(deprecation)); +const logOnceHeaders = once((deprecation) => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ +export class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); + // Maintains proper stack trace (only available on V8) + /* istanbul ignore next */ + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + // redact request credentials without mutating original request options + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]"), + }); + } + requestCopy.url = requestCopy.url + // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see 
https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") + // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + // deprecations + Object.defineProperty(this, "code", { + get() { + logOnceCode(new Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + }, + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + }, + }); + } +} diff --git a/dist/node_modules/@octokit/request-error/dist-src/types.js b/dist/node_modules/@octokit/request-error/dist-src/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/dist/node_modules/@octokit/request-error/dist-src/types.js @@ -0,0 +1 @@ +export {}; diff --git a/dist/node_modules/@octokit/request-error/dist-types/index.d.ts b/dist/node_modules/@octokit/request-error/dist-types/index.d.ts new file mode 100644 index 0000000..d6e089c --- /dev/null +++ b/dist/node_modules/@octokit/request-error/dist-types/index.d.ts @@ -0,0 +1,33 @@ +import { RequestOptions, ResponseHeaders, OctokitResponse } from "@octokit/types"; +import { RequestErrorOptions } from "./types"; +/** + * Error with extra properties to help with debugging + */ +export declare class RequestError extends Error { + name: "HttpError"; + /** + * http status code + */ + status: number; + /** + * http status code + * + * @deprecated `error.code` is deprecated in favor of `error.status` + */ + code: number; + /** + * Request options that lead to the error. 
+ */ + request: RequestOptions; + /** + * error response headers + * + * @deprecated `error.headers` is deprecated in favor of `error.response.headers` + */ + headers: ResponseHeaders; + /** + * Response object if a response was received + */ + response?: OctokitResponse; + constructor(message: string, statusCode: number, options: RequestErrorOptions); +} diff --git a/dist/node_modules/@octokit/request-error/dist-types/types.d.ts b/dist/node_modules/@octokit/request-error/dist-types/types.d.ts new file mode 100644 index 0000000..e5e1fab --- /dev/null +++ b/dist/node_modules/@octokit/request-error/dist-types/types.d.ts @@ -0,0 +1,9 @@ +import { RequestOptions, ResponseHeaders, OctokitResponse } from "@octokit/types"; +export type RequestErrorOptions = { + /** @deprecated set `response` instead */ + headers?: ResponseHeaders; + request: RequestOptions; +} | { + response: OctokitResponse; + request: RequestOptions; +}; diff --git a/dist/node_modules/@octokit/request-error/dist-web/index.js b/dist/node_modules/@octokit/request-error/dist-web/index.js new file mode 100644 index 0000000..0fb64be --- /dev/null +++ b/dist/node_modules/@octokit/request-error/dist-web/index.js @@ -0,0 +1,59 @@ +import { Deprecation } from 'deprecation'; +import once from 'once'; + +const logOnceCode = once((deprecation) => console.warn(deprecation)); +const logOnceHeaders = once((deprecation) => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); + // Maintains proper stack trace (only available on V8) + /* istanbul ignore next */ + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + // redact request credentials without mutating original request options + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]"), + }); + } + requestCopy.url = requestCopy.url + // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") + // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + // deprecations + Object.defineProperty(this, "code", { + get() { + logOnceCode(new Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + }, + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + }, + }); + } +} + +export { RequestError }; +//# sourceMappingURL=index.js.map diff --git a/dist/node_modules/@octokit/request-error/dist-web/index.js.map 
b/dist/node_modules/@octokit/request-error/dist-web/index.js.map new file mode 100644 index 0000000..78f677f --- /dev/null +++ b/dist/node_modules/@octokit/request-error/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/index.js"],"sourcesContent":["import { Deprecation } from \"deprecation\";\nimport once from \"once\";\nconst logOnceCode = once((deprecation) => console.warn(deprecation));\nconst logOnceHeaders = once((deprecation) => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\nexport class RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message);\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n // redact request credentials without mutating original request options\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\"),\n });\n }\n requestCopy.url = requestCopy.url\n // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\")\n // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n // deprecations\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n },\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n },\n });\n 
}\n}\n"],"names":[],"mappings":";;;AAEA,MAAM,WAAW,GAAG,IAAI,CAAC,CAAC,WAAW,KAAK,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;AACrE,MAAM,cAAc,GAAG,IAAI,CAAC,CAAC,WAAW,KAAK,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;AACxE;AACA;AACA;AACO,MAAM,YAAY,SAAS,KAAK,CAAC;AACxC,IAAI,WAAW,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,EAAE;AAC9C,QAAQ,KAAK,CAAC,OAAO,CAAC,CAAC;AACvB;AACA;AACA,QAAQ,IAAI,KAAK,CAAC,iBAAiB,EAAE;AACrC,YAAY,KAAK,CAAC,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;AAC5D,SAAS;AACT,QAAQ,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;AAChC,QAAQ,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;AACjC,QAAQ,IAAI,OAAO,CAAC;AACpB,QAAQ,IAAI,SAAS,IAAI,OAAO,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,WAAW,EAAE;AAC5E,YAAY,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AACtC,SAAS;AACT,QAAQ,IAAI,UAAU,IAAI,OAAO,EAAE;AACnC,YAAY,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;AAC7C,YAAY,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC;AAC/C,SAAS;AACT;AACA,QAAQ,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;AAC/D,QAAQ,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,aAAa,EAAE;AACnD,YAAY,WAAW,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,OAAO,EAAE;AAC7E,gBAAgB,aAAa,EAAE,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC,OAAO,CAAC,MAAM,EAAE,aAAa,CAAC;AACnG,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,WAAW,CAAC,GAAG,GAAG,WAAW,CAAC,GAAG;AACzC;AACA;AACA,aAAa,OAAO,CAAC,sBAAsB,EAAE,0BAA0B,CAAC;AACxE;AACA;AACA,aAAa,OAAO,CAAC,qBAAqB,EAAE,yBAAyB,CAAC,CAAC;AACvE,QAAQ,IAAI,CAAC,OAAO,GAAG,WAAW,CAAC;AACnC;AACA,QAAQ,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE;AAC5C,YAAY,GAAG,GAAG;AAClB,gBAAgB,WAAW,CAAC,IAAI,WAAW,CAAC,0EAA0E,CAAC,CAAC,CAAC;AACzH,gBAAgB,OAAO,UAAU,CAAC;AAClC,aAAa;AACb,SAAS,CAAC,CAAC;AACX,QAAQ,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,SAAS,EAAE;AAC/C,YAAY,GAAG,GAAG;AAClB,gBAAgB,cAAc,CAAC,IAAI,WAAW,CAAC,uFAAuF,CAAC,CAAC,CAAC;AACzI,gBAAgB,OAAO,OAAO,IAAI,EAAE,CAAC;AACrC,aAAa;AACb,SAAS,CAAC,CAAC;AACX,KAAK;AACL;;;;"} \ No newline at end of file diff --git a/dist/node_modules/@octokit/request-error/package.json b/dist/node_modules/@octokit/request-error/package.json new file mode 100644 index 0000000..2215caa --- /dev/null +++ b/dist/node_modules/@octokit/request-error/package.json @@ -0,0 +1,49 @@ +{ + "name": "@octokit/request-error", + "description": "Error class for Octokit request errors", + "version": "3.0.3", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "pika": true, + "sideEffects": false, + "keywords": [ + "octokit", + "github", + "api", + "error" + ], + "repository": "github:octokit/request-error.js", + "dependencies": { + "@octokit/types": "^9.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "devDependencies": { + "@pika/pack": "^0.3.7", + "@pika/plugin-build-node": "^0.9.0", + "@pika/plugin-build-web": "^0.9.0", + "@pika/plugin-bundle-web": "^0.9.0", + "@pika/plugin-ts-standard-pkg": "^0.9.0", + "@types/jest": "^29.0.0", + "@types/node": "^18.0.0", + "@types/once": "^1.4.0", + "jest": "^29.0.0", + "pika-plugin-unpkg-field": "^1.1.0", + "prettier": "2.8.3", + "ts-jest": "^29.0.0", + "typescript": "^4.0.0" + }, + "engines": { + "node": ">= 14" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/dist/node_modules/@octokit/request/LICENSE b/dist/node_modules/@octokit/request/LICENSE new file mode 100644 index 0000000..af5366d --- /dev/null +++ b/dist/node_modules/@octokit/request/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a 
copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/dist/node_modules/@octokit/request/README.md b/dist/node_modules/@octokit/request/README.md new file mode 100644 index 0000000..934138d --- /dev/null +++ b/dist/node_modules/@octokit/request/README.md @@ -0,0 +1,553 @@ +# request.js + +> Send parameterized requests to GitHub’s APIs with sensible defaults in browsers and Node + +[![@latest](https://img.shields.io/npm/v/@octokit/request.svg)](https://www.npmjs.com/package/@octokit/request) +[![Build Status](https://github.com/octokit/request.js/workflows/Test/badge.svg)](https://github.com/octokit/request.js/actions?query=workflow%3ATest+branch%3Amain) + +`@octokit/request` is a request library for browsers & node that makes it easier +to interact with [GitHub’s REST API](https://developer.github.com/v3/) and +[GitHub’s GraphQL API](https://developer.github.com/v4/guides/forming-calls/#the-graphql-endpoint). + +It uses [`@octokit/endpoint`](https://github.com/octokit/endpoint.js) to parse +the passed options and sends the request using [fetch](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) +([node-fetch](https://github.com/bitinn/node-fetch) when the runtime has no native `fetch` API). + + + + + +- [Features](#features) +- [Usage](#usage) + - [REST API example](#rest-api-example) + - [GraphQL example](#graphql-example) + - [Alternative: pass `method` & `url` as part of options](#alternative-pass-method--url-as-part-of-options) +- [Authentication](#authentication) +- [request()](#request) +- [`request.defaults()`](#requestdefaults) +- [`request.endpoint`](#requestendpoint) +- [Special cases](#special-cases) + - [The `data` parameter – set request body directly](#the-data-parameter-%E2%80%93-set-request-body-directly) + - [Set parameters for both the URL/query and the request body](#set-parameters-for-both-the-urlquery-and-the-request-body) +- [LICENSE](#license) + + + +## Features + +🤩 1:1 mapping of REST API endpoint documentation, e.g. [Add labels to an issue](https://developer.github.com/v3/issues/labels/#add-labels-to-an-issue) becomes + +```js +request("POST /repos/{owner}/{repo}/issues/{number}/labels", { + mediaType: { + previews: ["symmetra"], + }, + owner: "octokit", + repo: "request.js", + number: 1, + labels: ["🐛 bug"], +}); +``` + +👶 [Small bundle size](https://bundlephobia.com/result?p=@octokit/request@5.0.3) (\<4kb minified + gzipped) + +😎 [Authenticate](#authentication) with any of [GitHubs Authentication Strategies](https://github.com/octokit/auth.js). 
+
+👍 Sensible defaults
+
+- `baseUrl`: `https://api.github.com`
+- `headers.accept`: `application/vnd.github.v3+json`
+- `headers.agent`: `octokit-request.js/<current version> <OS information>`, e.g. `octokit-request.js/1.2.3 Node.js/10.15.0 (macOS Mojave; x64)`
+
+👌 Simple to test: mock requests by passing a custom fetch method.
+
+🧐 Simple to debug: Sets `error.request` to request options causing the error (with redacted credentials).
+
+## Usage
+
+**Browsers**
+
+Load `@octokit/request` directly from esm.sh
+
+```html
+<script type="module">
+  import { request } from "https://esm.sh/@octokit/request";
+</script>
+```
+
+**Node**
+
+Install with `npm install @octokit/request`
+
+```js
+const { request } = require("@octokit/request");
+// or: import { request } from "@octokit/request";
+```
+
+ +### REST API example + +```js +// Following GitHub docs formatting: +// https://developer.github.com/v3/repos/#list-organization-repositories +const result = await request("GET /orgs/{org}/repos", { + headers: { + authorization: "token 0000000000000000000000000000000000000001", + }, + org: "octokit", + type: "private", +}); + +console.log(`${result.data.length} repos found.`); +``` + +### GraphQL example + +For GraphQL request we recommend using [`@octokit/graphql`](https://github.com/octokit/graphql.js#readme) + +```js +const result = await request("POST /graphql", { + headers: { + authorization: "token 0000000000000000000000000000000000000001", + }, + query: `query ($login: String!) { + organization(login: $login) { + repositories(privacy: PRIVATE) { + totalCount + } + } + }`, + variables: { + login: "octokit", + }, +}); +``` + +### Alternative: pass `method` & `url` as part of options + +Alternatively, pass in a method and a url + +```js +const result = await request({ + method: "GET", + url: "/orgs/{org}/repos", + headers: { + authorization: "token 0000000000000000000000000000000000000001", + }, + org: "octokit", + type: "private", +}); +``` + +## Authentication + +The simplest way to authenticate a request is to set the `Authorization` header directly, e.g. to a [personal access token](https://github.com/settings/tokens/). + +```js +const requestWithAuth = request.defaults({ + headers: { + authorization: "token 0000000000000000000000000000000000000001", + }, +}); +const result = await requestWithAuth("GET /user"); +``` + +For more complex authentication strategies such as GitHub Apps or Basic, we recommend the according authentication library exported by [`@octokit/auth`](https://github.com/octokit/auth.js). + +```js +const { createAppAuth } = require("@octokit/auth-app"); +const auth = createAppAuth({ + appId: process.env.APP_ID, + privateKey: process.env.PRIVATE_KEY, + installationId: 123, +}); +const requestWithAuth = request.defaults({ + request: { + hook: auth.hook, + }, + mediaType: { + previews: ["machine-man"], + }, +}); + +const { data: app } = await requestWithAuth("GET /app"); +const { data: app } = await requestWithAuth( + "POST /repos/{owner}/{repo}/issues", + { + owner: "octocat", + repo: "hello-world", + title: "Hello from the engine room", + } +); +``` + +## request() + +`request(route, options)` or `request(options)`. + +**Options** + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| name | type | description |
+| --- | --- | --- |
+| `route` | String | **Required**. If `route` is set it has to be a string consisting of the request method and URL, e.g. `GET /orgs/{org}` |
+| `options.baseUrl` | String | The base URL that `route` or `url` will be prefixed with, if they use relative paths. Defaults to `https://api.github.com`. |
+| `options.headers` | Object | Custom headers. Passed headers are merged with defaults: `headers['user-agent']` defaults to `octokit-rest.js/1.2.3` (where `1.2.3` is the released version), `headers['accept']` defaults to `application/vnd.github.v3+json`. Use `options.mediaType.{format,previews}` to request API previews and custom media types. |
+| `options.mediaType.format` | String | Media type param, such as `raw`, `html`, or `full`. See Media Types. |
+| `options.mediaType.previews` | Array of strings | Name of previews, such as `mercy`, `symmetra`, or `scarlet-witch`. See API Previews. |
+| `options.method` | String | Any supported http verb, case insensitive. Defaults to `GET`. |
+| `options.url` | String | **Required**. A path or full URL which may contain `:variable` or `{variable}` placeholders, e.g. `/orgs/{org}/repos`. The url is parsed using url-template. |
+| `options.data` | Any | Set request body directly instead of setting it to JSON based on additional parameters. See "The `data` parameter" below. |
+| `options.request.agent` | http(s).Agent instance | Node only. Useful for custom proxy, certificate, or dns lookup. |
+| `options.request.fetch` | Function | Custom replacement for built-in fetch method. Useful for testing or request hooks. |
+| `options.request.hook` | Function | Function with the signature `hook(request, endpointOptions)`, where `endpointOptions` are the parsed options as returned by `endpoint.merge()`, and `request` is `request()`. This option works great in conjunction with before-after-hook. |
+| `options.request.signal` | new AbortController().signal | Use an `AbortController` instance to cancel a request. In node you can only cancel streamed requests. |
+| `options.request.log` | object | Used for internal logging. Defaults to `console`. |
+| `options.request.timeout` | Number | Node only. Request/response timeout in ms, it resets on redirect. 0 to disable (OS limit applies). `options.request.signal` is recommended instead. |
+
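+For example, a minimal sketch combining several of the options above in one call; the endpoint, owner, repo and path values are placeholders:
+
+```js
+// Cancelling is optional; AbortController is global in modern Node and browsers.
+const controller = new AbortController();
+
+const result = await request("GET /repos/{owner}/{repo}/contents/{path}", {
+  owner: "octocat",             // fills the {owner} placeholder in the url
+  repo: "hello-world",          // fills {repo}
+  path: "README.md",            // fills {path}
+  mediaType: { format: "raw" }, // request the raw media type for the file contents
+  request: {
+    signal: controller.signal,  // lets the caller cancel the request
+    log: console,               // where deprecation warnings are logged
+  },
+});
+```
+
+Any additional keys that are not URL placeholders are sent as query parameters for a `GET` request, as described next.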
+
+All other options except `options.request.*` will be passed depending on the `method` and `url` options.
+
+1. If the option key is a placeholder in the `url`, it will be used as replacement. For example, if the passed options are `{url: '/orgs/{org}/repos', org: 'foo'}` the returned `options.url` is `https://api.github.com/orgs/foo/repos`
+2. If the `method` is `GET` or `HEAD`, the option is passed as query parameter
+3. Otherwise the parameter is passed in the request body as JSON key.
+
+**Result**
+
+`request` returns a promise. If the request was successful, the promise resolves with an object containing 4 keys:
+
+| key | type | description |
+| --- | --- | --- |
+| `status` | Integer | Response status |
+| `url` | String | URL of response. If a request results in redirects, this is the final URL. You can send a HEAD request to retrieve it without loading the full response body. |
+| `headers` | Object | All response headers |
+| `data` | Any | The response body as returned from server. If the response is JSON then it will be parsed into an object |
+
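+For instance, a successful call could be inspected roughly like this (the endpoint and the logged header are only examples):
+
+```js
+const { status, url, headers, data } = await request("GET /orgs/{org}/repos", {
+  org: "octokit",
+});
+
+console.log(status);                           // e.g. 200
+console.log(url);                              // final URL after any redirects
+console.log(headers["x-ratelimit-remaining"]); // response header names are lower-cased
+console.log(data.length);                      // JSON responses are parsed, here an array of repos
+```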
+ +If an error occurs, the promise is rejected with an `error` object containing 3 keys to help with debugging: + +- `error.status` The http response status code +- `error.request` The request options such as `method`, `url` and `data` +- `error.response` The http response object with `url`, `headers`, and `data` + +If the error is due to an `AbortSignal` being used, the resulting `AbortError` is bubbled up to the caller. + +## `request.defaults()` + +Override or set default options. Example: + +```js +const myrequest = require("@octokit/request").defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api/v3", + headers: { + "user-agent": "myApp/1.2.3", + authorization: `token 0000000000000000000000000000000000000001`, + }, + org: "my-project", + per_page: 100, +}); + +myrequest(`GET /orgs/{org}/repos`); +``` + +You can call `.defaults()` again on the returned method, the defaults will cascade. + +```js +const myProjectRequest = request.defaults({ + baseUrl: "https://github-enterprise.acme-inc.com/api/v3", + headers: { + "user-agent": "myApp/1.2.3", + }, + org: "my-project", +}); +const myProjectRequestWithAuth = myProjectRequest.defaults({ + headers: { + authorization: `token 0000000000000000000000000000000000000001`, + }, +}); +``` + +`myProjectRequest` now defaults the `baseUrl`, `headers['user-agent']`, +`org` and `headers['authorization']` on top of `headers['accept']` that is set +by the global default. + +## `request.endpoint` + +See https://github.com/octokit/endpoint.js. Example + +```js +const options = request.endpoint("GET /orgs/{org}/repos", { + org: "my-project", + type: "private", +}); + +// { +// method: 'GET', +// url: 'https://api.github.com/orgs/my-project/repos?type=private', +// headers: { +// accept: 'application/vnd.github.v3+json', +// authorization: 'token 0000000000000000000000000000000000000001', +// 'user-agent': 'octokit/endpoint.js v1.2.3' +// } +// } +``` + +All of the [`@octokit/endpoint`](https://github.com/octokit/endpoint.js) API can be used: + +- [`octokitRequest.endpoint()`](#endpoint) +- [`octokitRequest.endpoint.defaults()`](#endpointdefaults) +- [`octokitRequest.endpoint.merge()`](#endpointdefaults) +- [`octokitRequest.endpoint.parse()`](#endpointmerge) + +## Special cases + + + +### The `data` parameter – set request body directly + +Some endpoints such as [Render a Markdown document in raw mode](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode) don’t have parameters that are sent as request body keys, instead the request body needs to be set directly. In these cases, set the `data` parameter. + +```js +const response = await request("POST /markdown/raw", { + data: "Hello world github/linguist#1 **cool**, and #1!", + headers: { + accept: "text/html;charset=utf-8", + "content-type": "text/plain", + }, +}); + +// Request is sent as +// +// { +// method: 'post', +// url: 'https://api.github.com/markdown/raw', +// headers: { +// accept: 'text/html;charset=utf-8', +// 'content-type': 'text/plain', +// 'user-agent': userAgent +// }, +// body: 'Hello world github/linguist#1 **cool**, and #1!' +// } +// +// not as +// +// { +// ... +// body: '{"data": "Hello world github/linguist#1 **cool**, and #1!"}' +// } +``` + +### Set parameters for both the URL/query and the request body + +There are API endpoints that accept both query parameters as well as a body. 
In that case you need to add the query parameters as templates to `options.url`, as defined in the [RFC 6570 URI Template specification](https://tools.ietf.org/html/rfc6570). + +Example + +```js +request( + "POST https://uploads.github.com/repos/octocat/Hello-World/releases/1/assets{?name,label}", + { + name: "example.zip", + label: "short description", + headers: { + "content-type": "text/plain", + "content-length": 14, + authorization: `token 0000000000000000000000000000000000000001`, + }, + data: "Hello, world!", + } +); +``` + +## LICENSE + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/request/dist-node/index.js b/dist/node_modules/@octokit/request/dist-node/index.js new file mode 100644 index 0000000..56df5de --- /dev/null +++ b/dist/node_modules/@octokit/request/dist-node/index.js @@ -0,0 +1,207 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + request: () => request +}); +module.exports = __toCommonJS(dist_src_exports); +var import_endpoint = require("@octokit/endpoint"); +var import_universal_user_agent = require("universal-user-agent"); + +// pkg/dist-src/version.js +var VERSION = "6.2.8"; + +// pkg/dist-src/fetch-wrapper.js +var import_is_plain_object = require("is-plain-object"); +var import_node_fetch = __toESM(require("node-fetch")); +var import_request_error = require("@octokit/request-error"); + +// pkg/dist-src/get-buffer-response.js +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +// pkg/dist-src/fetch-wrapper.js +function fetchWrapper(requestOptions) { + const log = requestOptions.request && requestOptions.request.log ? 
requestOptions.request.log : console; + if ((0, import_is_plain_object.isPlainObject)(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + const fetch = requestOptions.request && requestOptions.request.fetch || globalThis.fetch || /* istanbul ignore next */ + import_node_fetch.default; + return fetch( + requestOptions.url, + Object.assign( + { + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. + ...requestOptions.body && { duplex: "half" } + }, + // `requestOptions.request.agent` type is incompatible + // see https://github.com/octokit/types.ts/pull/264 + requestOptions.request + ) + ).then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` + ); + } + if (status === 204 || status === 205) { + return; + } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + throw new import_request_error.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, + request: requestOptions + }); + } + if (status === 304) { + throw new import_request_error.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new import_request_error.RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + return getResponseData(response); + }).then((data) => { + return { + status, + url, + headers, + data + }; + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + throw new import_request_error.RequestError(error.message, 500, { + request: requestOptions + }); + }); +} +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json(); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + return data.message; + } + return `Unknown error: ${JSON.stringify(data)}`; +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldEndpoint, newDefaults) { + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); + if 
(!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint2.parse(endpointOptions)); + } + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); + }; + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + return endpointOptions.request.hook(request2, endpointOptions); + }; + return Object.assign(newApi, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); +} + +// pkg/dist-src/index.js +var request = withDefaults(import_endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + } +}); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + request +}); diff --git a/dist/node_modules/@octokit/request/dist-node/index.js.map b/dist/node_modules/@octokit/request/dist-node/index.js.map new file mode 100644 index 0000000..d730700 --- /dev/null +++ b/dist/node_modules/@octokit/request/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js", "../dist-src/fetch-wrapper.js", "../dist-src/get-buffer-response.js", "../dist-src/with-defaults.js"], + "sourcesContent": ["import { endpoint } from \"@octokit/endpoint\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport withDefaults from \"./with-defaults\";\nconst request = withDefaults(endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${getUserAgent()}`\n }\n});\nexport {\n request\n};\n", "const VERSION = \"6.2.8\";\nexport {\n VERSION\n};\n", "import { isPlainObject } from \"is-plain-object\";\nimport nodeFetch, {} from \"node-fetch\";\nimport { RequestError } from \"@octokit/request-error\";\nimport getBuffer from \"./get-buffer-response\";\nfunction fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;\n if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || globalThis.fetch || /* istanbul ignore next */\n nodeFetch;\n return fetch(\n requestOptions.url,\n Object.assign(\n {\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect,\n // duplex must be set if request.body is ReadableStream or Async Iterables.\n // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex.\n ...requestOptions.body && { duplex: \"half\" }\n },\n // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request\n )\n ).then(async (response) => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(\n `[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : \"\"}`\n );\n }\n if (status === 204 || status === 205) {\n return;\n }\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: void 0\n },\n request: requestOptions\n });\n }\n if (status === 304) {\n throw new RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n return getResponseData(response);\n }).then((data) => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch((error) => {\n if (error instanceof RequestError)\n throw error;\n else if (error.name === \"AbortError\")\n throw error;\n throw new RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBuffer(response);\n}\nfunction toErrorMessage(data) {\n if (typeof data === \"string\")\n return data;\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n return data.message;\n }\n return `Unknown error: ${JSON.stringify(data)}`;\n}\nexport {\n fetchWrapper as default\n};\n", "function getBufferResponse(response) {\n return response.arrayBuffer();\n}\nexport {\n getBufferResponse as default\n};\n", "import fetchWrapper from \"./fetch-wrapper\";\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n const newApi = function(route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n const request = (route2, parameters2) => {\n return fetchWrapper(\n endpoint.parse(endpoint.merge(route2, parameters2))\n );\n };\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\nexport {\n withDefaults as default\n};\n"], + "mappings": 
";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAyB;AACzB,kCAA6B;;;ACD7B,IAAM,UAAU;;;ACAhB,6BAA8B;AAC9B,wBAA0B;AAC1B,2BAA6B;;;ACF7B,SAAS,kBAAkB,UAAU;AACnC,SAAO,SAAS,YAAY;AAC9B;;;ADEA,SAAS,aAAa,gBAAgB;AACpC,QAAM,MAAM,eAAe,WAAW,eAAe,QAAQ,MAAM,eAAe,QAAQ,MAAM;AAChG,UAAI,sCAAc,eAAe,IAAI,KAAK,MAAM,QAAQ,eAAe,IAAI,GAAG;AAC5E,mBAAe,OAAO,KAAK,UAAU,eAAe,IAAI;AAAA,EAC1D;AACA,MAAI,UAAU,CAAC;AACf,MAAI;AACJ,MAAI;AACJ,QAAM,QAAQ,eAAe,WAAW,eAAe,QAAQ,SAAS,WAAW;AAAA,EACnF,kBAAAA;AACA,SAAO;AAAA,IACL,eAAe;AAAA,IACf,OAAO;AAAA,MACL;AAAA,QACE,QAAQ,eAAe;AAAA,QACvB,MAAM,eAAe;AAAA,QACrB,SAAS,eAAe;AAAA,QACxB,UAAU,eAAe;AAAA;AAAA;AAAA,QAGzB,GAAG,eAAe,QAAQ,EAAE,QAAQ,OAAO;AAAA,MAC7C;AAAA;AAAA;AAAA,MAGA,eAAe;AAAA,IACjB;AAAA,EACF,EAAE,KAAK,OAAO,aAAa;AACzB,UAAM,SAAS;AACf,aAAS,SAAS;AAClB,eAAW,eAAe,SAAS,SAAS;AAC1C,cAAQ,YAAY,CAAC,CAAC,IAAI,YAAY,CAAC;AAAA,IACzC;AACA,QAAI,iBAAiB,SAAS;AAC5B,YAAM,UAAU,QAAQ,QAAQ,QAAQ,KAAK,MAAM,8BAA8B;AACjF,YAAM,kBAAkB,WAAW,QAAQ,IAAI;AAC/C,UAAI;AAAA,QACF,uBAAuB,eAAe,UAAU,eAAe,wDAAwD,QAAQ,SAAS,kBAAkB,SAAS,oBAAoB;AAAA,MACzL;AAAA,IACF;AACA,QAAI,WAAW,OAAO,WAAW,KAAK;AACpC;AAAA,IACF;AACA,QAAI,eAAe,WAAW,QAAQ;AACpC,UAAI,SAAS,KAAK;AAChB;AAAA,MACF;AACA,YAAM,IAAI,kCAAa,SAAS,YAAY,QAAQ;AAAA,QAClD,UAAU;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA,MAAM;AAAA,QACR;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AACA,QAAI,WAAW,KAAK;AAClB,YAAM,IAAI,kCAAa,gBAAgB,QAAQ;AAAA,QAC7C,UAAU;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA,MAAM,MAAM,gBAAgB,QAAQ;AAAA,QACtC;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AACA,QAAI,UAAU,KAAK;AACjB,YAAM,OAAO,MAAM,gBAAgB,QAAQ;AAC3C,YAAM,QAAQ,IAAI,kCAAa,eAAe,IAAI,GAAG,QAAQ;AAAA,QAC3D,UAAU;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AACD,YAAM;AAAA,IACR;AACA,WAAO,gBAAgB,QAAQ;AAAA,EACjC,CAAC,EAAE,KAAK,CAAC,SAAS;AAChB,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC,EAAE,MAAM,CAAC,UAAU;AAClB,QAAI,iBAAiB;AACnB,YAAM;AAAA,aACC,MAAM,SAAS;AACtB,YAAM;AACR,UAAM,IAAI,kCAAa,MAAM,SAAS,KAAK;AAAA,MACzC,SAAS;AAAA,IACX,CAAC;AAAA,EACH,CAAC;AACH;AACA,eAAe,gBAAgB,UAAU;AACvC,QAAM,cAAc,SAAS,QAAQ,IAAI,cAAc;AACvD,MAAI,oBAAoB,KAAK,WAAW,GAAG;AACzC,WAAO,SAAS,KAAK;AAAA,EACvB;AACA,MAAI,CAAC,eAAe,yBAAyB,KAAK,WAAW,GAAG;AAC9D,WAAO,SAAS,KAAK;AAAA,EACvB;AACA,SAAO,kBAAU,QAAQ;AAC3B;AACA,SAAS,eAAe,MAAM;AAC5B,MAAI,OAAO,SAAS;AAClB,WAAO;AACT,MAAI,aAAa,MAAM;AACrB,QAAI,MAAM,QAAQ,KAAK,MAAM,GAAG;AAC9B,aAAO,GAAG,KAAK,YAAY,KAAK,OAAO,IAAI,KAAK,SAAS,EAAE,KAAK,IAAI;AAAA,IACtE;AACA,WAAO,KAAK;AAAA,EACd;AACA,SAAO,kBAAkB,KAAK,UAAU,IAAI;AAC9C;;;AEzHA,SAAS,aAAa,aAAa,aAAa;AAC9C,QAAMC,YAAW,YAAY,SAAS,WAAW;AACjD,QAAM,SAAS,SAAS,OAAO,YAAY;AACzC,UAAM,kBAAkBA,UAAS,MAAM,OAAO,UAAU;AACxD,QAAI,CAAC,gBAAgB,WAAW,CAAC,gBAAgB,QAAQ,MAAM;AAC7D,aAAO,aAAaA,UAAS,MAAM,eAAe,CAAC;AAAA,IACrD;AACA,UAAMC,WAAU,CAAC,QAAQ,gBAAgB;AACvC,aAAO;AAAA,QACLD,UAAS,MAAMA,UAAS,MAAM,QAAQ,WAAW,CAAC;AAAA,MACpD;AAAA,IACF;AACA,WAAO,OAAOC,UAAS;AAAA,MACrB,UAAAD;AAAA,MACA,UAAU,aAAa,KAAK,MAAMA,SAAQ;AAAA,IAC5C,CAAC;AACD,WAAO,gBAAgB,QAAQ,KAAKC,UAAS,eAAe;AAAA,EAC9D;AACA,SAAO,OAAO,OAAO,QAAQ;AAAA,IAC3B,UAAAD;AAAA,IACA,UAAU,aAAa,KAAK,MAAMA,SAAQ;AAAA,EAC5C,CAAC;AACH;;;AJnBA,IAAM,UAAU,aAAa,0BAAU;AAAA,EACrC,SAAS;AAAA,IACP,cAAc,sBAAsB,eAAW,0CAAa;AAAA,EAC9D;AACF,CAAC;", + "names": ["nodeFetch", "endpoint", "request"] +} diff --git a/dist/node_modules/@octokit/request/dist-src/fetch-wrapper.js b/dist/node_modules/@octokit/request/dist-src/fetch-wrapper.js new file mode 100644 index 0000000..49dee58 --- /dev/null +++ b/dist/node_modules/@octokit/request/dist-src/fetch-wrapper.js @@ -0,0 +1,126 @@ +import { isPlainObject } from "is-plain-object"; +import 
nodeFetch, {} from "node-fetch"; +import { RequestError } from "@octokit/request-error"; +import getBuffer from "./get-buffer-response"; +function fetchWrapper(requestOptions) { + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + const fetch = requestOptions.request && requestOptions.request.fetch || globalThis.fetch || /* istanbul ignore next */ + nodeFetch; + return fetch( + requestOptions.url, + Object.assign( + { + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. + ...requestOptions.body && { duplex: "half" } + }, + // `requestOptions.request.agent` type is incompatible + // see https://github.com/octokit/types.ts/pull/264 + requestOptions.request + ) + ).then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` + ); + } + if (status === 204 || status === 205) { + return; + } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + throw new RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, + request: requestOptions + }); + } + if (status === 304) { + throw new RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + return getResponseData(response); + }).then((data) => { + return { + status, + url, + headers, + data + }; + }).catch((error) => { + if (error instanceof RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + throw new RequestError(error.message, 500, { + request: requestOptions + }); + }); +} +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json(); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBuffer(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + return data.message; + } + return `Unknown error: ${JSON.stringify(data)}`; +} +export { + fetchWrapper as default +}; diff --git a/dist/node_modules/@octokit/request/dist-src/get-buffer-response.js 
b/dist/node_modules/@octokit/request/dist-src/get-buffer-response.js new file mode 100644 index 0000000..792c895 --- /dev/null +++ b/dist/node_modules/@octokit/request/dist-src/get-buffer-response.js @@ -0,0 +1,6 @@ +function getBufferResponse(response) { + return response.arrayBuffer(); +} +export { + getBufferResponse as default +}; diff --git a/dist/node_modules/@octokit/request/dist-src/index.js b/dist/node_modules/@octokit/request/dist-src/index.js new file mode 100644 index 0000000..1b3e12a --- /dev/null +++ b/dist/node_modules/@octokit/request/dist-src/index.js @@ -0,0 +1,12 @@ +import { endpoint } from "@octokit/endpoint"; +import { getUserAgent } from "universal-user-agent"; +import { VERSION } from "./version"; +import withDefaults from "./with-defaults"; +const request = withDefaults(endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${getUserAgent()}` + } +}); +export { + request +}; diff --git a/dist/node_modules/@octokit/request/dist-src/version.js b/dist/node_modules/@octokit/request/dist-src/version.js new file mode 100644 index 0000000..842090e --- /dev/null +++ b/dist/node_modules/@octokit/request/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "6.2.8"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/request/dist-src/with-defaults.js b/dist/node_modules/@octokit/request/dist-src/with-defaults.js new file mode 100644 index 0000000..0ee34b5 --- /dev/null +++ b/dist/node_modules/@octokit/request/dist-src/with-defaults.js @@ -0,0 +1,27 @@ +import fetchWrapper from "./fetch-wrapper"; +function withDefaults(oldEndpoint, newDefaults) { + const endpoint = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint.parse(endpointOptions)); + } + const request = (route2, parameters2) => { + return fetchWrapper( + endpoint.parse(endpoint.merge(route2, parameters2)) + ); + }; + Object.assign(request, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); + return endpointOptions.request.hook(request, endpointOptions); + }; + return Object.assign(newApi, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); +} +export { + withDefaults as default +}; diff --git a/dist/node_modules/@octokit/request/dist-types/fetch-wrapper.d.ts b/dist/node_modules/@octokit/request/dist-types/fetch-wrapper.d.ts new file mode 100644 index 0000000..630e0b4 --- /dev/null +++ b/dist/node_modules/@octokit/request/dist-types/fetch-wrapper.d.ts @@ -0,0 +1,11 @@ +import type { EndpointInterface } from "@octokit/types"; +export default function fetchWrapper(requestOptions: ReturnType & { + redirect?: "error" | "follow" | "manual"; +}): Promise<{ + status: number; + url: string; + headers: { + [header: string]: string; + }; + data: any; +}>; diff --git a/dist/node_modules/@octokit/request/dist-types/get-buffer-response.d.ts b/dist/node_modules/@octokit/request/dist-types/get-buffer-response.d.ts new file mode 100644 index 0000000..0ddc1b9 --- /dev/null +++ b/dist/node_modules/@octokit/request/dist-types/get-buffer-response.d.ts @@ -0,0 +1,2 @@ +import type { Response } from "node-fetch"; +export default function getBufferResponse(response: Response): Promise; diff --git a/dist/node_modules/@octokit/request/dist-types/index.d.ts b/dist/node_modules/@octokit/request/dist-types/index.d.ts new file mode 100644 index 0000000..1030809 --- /dev/null +++ 
b/dist/node_modules/@octokit/request/dist-types/index.d.ts @@ -0,0 +1 @@ +export declare const request: import("@octokit/types").RequestInterface; diff --git a/dist/node_modules/@octokit/request/dist-types/version.d.ts b/dist/node_modules/@octokit/request/dist-types/version.d.ts new file mode 100644 index 0000000..897ad85 --- /dev/null +++ b/dist/node_modules/@octokit/request/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "6.2.8"; diff --git a/dist/node_modules/@octokit/request/dist-types/with-defaults.d.ts b/dist/node_modules/@octokit/request/dist-types/with-defaults.d.ts new file mode 100644 index 0000000..4300682 --- /dev/null +++ b/dist/node_modules/@octokit/request/dist-types/with-defaults.d.ts @@ -0,0 +1,2 @@ +import type { EndpointInterface, RequestInterface, RequestParameters } from "@octokit/types"; +export default function withDefaults(oldEndpoint: EndpointInterface, newDefaults: RequestParameters): RequestInterface; diff --git a/dist/node_modules/@octokit/request/dist-web/index.js b/dist/node_modules/@octokit/request/dist-web/index.js new file mode 100644 index 0000000..43a785e --- /dev/null +++ b/dist/node_modules/@octokit/request/dist-web/index.js @@ -0,0 +1,172 @@ +// pkg/dist-src/index.js +import { endpoint } from "@octokit/endpoint"; +import { getUserAgent } from "universal-user-agent"; + +// pkg/dist-src/version.js +var VERSION = "6.2.8"; + +// pkg/dist-src/fetch-wrapper.js +import { isPlainObject } from "is-plain-object"; +import nodeFetch, {} from "node-fetch"; +import { RequestError } from "@octokit/request-error"; + +// pkg/dist-src/get-buffer-response.js +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +// pkg/dist-src/fetch-wrapper.js +function fetchWrapper(requestOptions) { + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + const fetch = requestOptions.request && requestOptions.request.fetch || globalThis.fetch || /* istanbul ignore next */ + nodeFetch; + return fetch( + requestOptions.url, + Object.assign( + { + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. + ...requestOptions.body && { duplex: "half" } + }, + // `requestOptions.request.agent` type is incompatible + // see https://github.com/octokit/types.ts/pull/264 + requestOptions.request + ) + ).then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : ""}` + ); + } + if (status === 204 || status === 205) { + return; + } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + throw new RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, + request: requestOptions + }); + } + if (status === 304) { + throw new RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + return getResponseData(response); + }).then((data) => { + return { + status, + url, + headers, + data + }; + }).catch((error) => { + if (error instanceof RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + throw new RequestError(error.message, 500, { + request: requestOptions + }); + }); +} +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json(); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + return data.message; + } + return `Unknown error: ${JSON.stringify(data)}`; +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldEndpoint, newDefaults) { + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint2.parse(endpointOptions)); + } + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); + }; + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + return endpointOptions.request.hook(request2, endpointOptions); + }; + return Object.assign(newApi, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); +} + +// pkg/dist-src/index.js +var request = withDefaults(endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${getUserAgent()}` + } +}); +export { + request +}; diff --git a/dist/node_modules/@octokit/request/dist-web/index.js.map b/dist/node_modules/@octokit/request/dist-web/index.js.map new file mode 100644 index 0000000..4d5acef --- /dev/null +++ b/dist/node_modules/@octokit/request/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/version.js", "../dist-src/fetch-wrapper.js", "../dist-src/get-buffer-response.js", "../dist-src/with-defaults.js"], + "sourcesContent": ["import { endpoint } from \"@octokit/endpoint\";\nimport { getUserAgent } from \"universal-user-agent\";\nimport { VERSION } from \"./version\";\nimport withDefaults from \"./with-defaults\";\nconst request = withDefaults(endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${getUserAgent()}`\n }\n});\nexport {\n request\n};\n", "const VERSION = 
\"6.2.8\";\nexport {\n VERSION\n};\n", "import { isPlainObject } from \"is-plain-object\";\nimport nodeFetch, {} from \"node-fetch\";\nimport { RequestError } from \"@octokit/request-error\";\nimport getBuffer from \"./get-buffer-response\";\nfunction fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;\n if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || globalThis.fetch || /* istanbul ignore next */\n nodeFetch;\n return fetch(\n requestOptions.url,\n Object.assign(\n {\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect,\n // duplex must be set if request.body is ReadableStream or Async Iterables.\n // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex.\n ...requestOptions.body && { duplex: \"half\" }\n },\n // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request\n )\n ).then(async (response) => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(\n `[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`\n );\n }\n if (status === 204 || status === 205) {\n return;\n }\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: void 0\n },\n request: requestOptions\n });\n }\n if (status === 304) {\n throw new RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n return getResponseData(response);\n }).then((data) => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch((error) => {\n if (error instanceof RequestError)\n throw error;\n else if (error.name === \"AbortError\")\n throw error;\n throw new RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBuffer(response);\n}\nfunction toErrorMessage(data) {\n if (typeof data === \"string\")\n return data;\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n return data.message;\n }\n return `Unknown error: ${JSON.stringify(data)}`;\n}\nexport {\n fetchWrapper as default\n};\n", "function 
getBufferResponse(response) {\n return response.arrayBuffer();\n}\nexport {\n getBufferResponse as default\n};\n", "import fetchWrapper from \"./fetch-wrapper\";\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n const newApi = function(route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n const request = (route2, parameters2) => {\n return fetchWrapper(\n endpoint.parse(endpoint.merge(route2, parameters2))\n );\n };\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\nexport {\n withDefaults as default\n};\n"], + "mappings": ";AAAA,SAAS,gBAAgB;AACzB,SAAS,oBAAoB;;;ACD7B,IAAM,UAAU;;;ACAhB,SAAS,qBAAqB;AAC9B,OAAO,mBAAmB;AAC1B,SAAS,oBAAoB;;;ACF7B,SAAS,kBAAkB,UAAU;AACnC,SAAO,SAAS,YAAY;AAC9B;;;ADEA,SAAS,aAAa,gBAAgB;AACpC,QAAM,MAAM,eAAe,WAAW,eAAe,QAAQ,MAAM,eAAe,QAAQ,MAAM;AAChG,MAAI,cAAc,eAAe,IAAI,KAAK,MAAM,QAAQ,eAAe,IAAI,GAAG;AAC5E,mBAAe,OAAO,KAAK,UAAU,eAAe,IAAI;AAAA,EAC1D;AACA,MAAI,UAAU,CAAC;AACf,MAAI;AACJ,MAAI;AACJ,QAAM,QAAQ,eAAe,WAAW,eAAe,QAAQ,SAAS,WAAW;AAAA,EACnF;AACA,SAAO;AAAA,IACL,eAAe;AAAA,IACf,OAAO;AAAA,MACL;AAAA,QACE,QAAQ,eAAe;AAAA,QACvB,MAAM,eAAe;AAAA,QACrB,SAAS,eAAe;AAAA,QACxB,UAAU,eAAe;AAAA;AAAA;AAAA,QAGzB,GAAG,eAAe,QAAQ,EAAE,QAAQ,OAAO;AAAA,MAC7C;AAAA;AAAA;AAAA,MAGA,eAAe;AAAA,IACjB;AAAA,EACF,EAAE,KAAK,OAAO,aAAa;AACzB,UAAM,SAAS;AACf,aAAS,SAAS;AAClB,eAAW,eAAe,SAAS,SAAS;AAC1C,cAAQ,YAAY,CAAC,CAAC,IAAI,YAAY,CAAC;AAAA,IACzC;AACA,QAAI,iBAAiB,SAAS;AAC5B,YAAM,UAAU,QAAQ,QAAQ,QAAQ,KAAK,MAAM,8BAA8B;AACjF,YAAM,kBAAkB,WAAW,QAAQ,IAAI;AAC/C,UAAI;AAAA,QACF,uBAAuB,eAAe,UAAU,eAAe,wDAAwD,QAAQ,SAAS,kBAAkB,SAAS,oBAAoB;AAAA,MACzL;AAAA,IACF;AACA,QAAI,WAAW,OAAO,WAAW,KAAK;AACpC;AAAA,IACF;AACA,QAAI,eAAe,WAAW,QAAQ;AACpC,UAAI,SAAS,KAAK;AAChB;AAAA,MACF;AACA,YAAM,IAAI,aAAa,SAAS,YAAY,QAAQ;AAAA,QAClD,UAAU;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA,MAAM;AAAA,QACR;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AACA,QAAI,WAAW,KAAK;AAClB,YAAM,IAAI,aAAa,gBAAgB,QAAQ;AAAA,QAC7C,UAAU;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA,MAAM,MAAM,gBAAgB,QAAQ;AAAA,QACtC;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AACA,QAAI,UAAU,KAAK;AACjB,YAAM,OAAO,MAAM,gBAAgB,QAAQ;AAC3C,YAAM,QAAQ,IAAI,aAAa,eAAe,IAAI,GAAG,QAAQ;AAAA,QAC3D,UAAU;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,QACA,SAAS;AAAA,MACX,CAAC;AACD,YAAM;AAAA,IACR;AACA,WAAO,gBAAgB,QAAQ;AAAA,EACjC,CAAC,EAAE,KAAK,CAAC,SAAS;AAChB,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,CAAC,EAAE,MAAM,CAAC,UAAU;AAClB,QAAI,iBAAiB;AACnB,YAAM;AAAA,aACC,MAAM,SAAS;AACtB,YAAM;AACR,UAAM,IAAI,aAAa,MAAM,SAAS,KAAK;AAAA,MACzC,SAAS;AAAA,IACX,CAAC;AAAA,EACH,CAAC;AACH;AACA,eAAe,gBAAgB,UAAU;AACvC,QAAM,cAAc,SAAS,QAAQ,IAAI,cAAc;AACvD,MAAI,oBAAoB,KAAK,WAAW,GAAG;AACzC,WAAO,SAAS,KAAK;AAAA,EACvB;AACA,MAAI,CAAC,eAAe,yBAAyB,KAAK,WAAW,GAAG;AAC9D,WAAO,SAAS,KAAK;AAAA,EACvB;AACA,SAAO,kBAAU,QAAQ;AAC3B;AACA,SAAS,eAAe,MAAM;AAC5B,MAAI,OAAO,SAAS;AAClB,WAAO;AACT,MAAI,aAAa,MAAM;AACrB,QAAI,MAAM,QAAQ,KAAK,MAAM,GAAG;AAC9B,aAAO,GAAG,KAAK,YAAY,KAAK,OAAO,IAAI,KAAK,SAAS,EAAE,KAAK,IAAI;AAAA,IACtE;AACA,WAAO,KAAK;AAAA,EACd;AACA,SAAO,kBAAkB,KAAK,UAAU,IAAI;AAC9C;;;AEzHA,SAAS,aAAa,aAAa,aAAa;AAC9C,QAAMA,YAAW,YAAY,SAAS,WAAW;AACjD,QAAM,SAAS,SAAS,OAAO,YAAY;AACzC,UAAM,kBAAkBA,UAAS,MAAM,OAAO,UAAU;AACxD,QAAI,CAAC,gBAAgB,WAAW,CAAC
,gBAAgB,QAAQ,MAAM;AAC7D,aAAO,aAAaA,UAAS,MAAM,eAAe,CAAC;AAAA,IACrD;AACA,UAAMC,WAAU,CAAC,QAAQ,gBAAgB;AACvC,aAAO;AAAA,QACLD,UAAS,MAAMA,UAAS,MAAM,QAAQ,WAAW,CAAC;AAAA,MACpD;AAAA,IACF;AACA,WAAO,OAAOC,UAAS;AAAA,MACrB,UAAAD;AAAA,MACA,UAAU,aAAa,KAAK,MAAMA,SAAQ;AAAA,IAC5C,CAAC;AACD,WAAO,gBAAgB,QAAQ,KAAKC,UAAS,eAAe;AAAA,EAC9D;AACA,SAAO,OAAO,OAAO,QAAQ;AAAA,IAC3B,UAAAD;AAAA,IACA,UAAU,aAAa,KAAK,MAAMA,SAAQ;AAAA,EAC5C,CAAC;AACH;;;AJnBA,IAAM,UAAU,aAAa,UAAU;AAAA,EACrC,SAAS;AAAA,IACP,cAAc,sBAAsB,WAAW,aAAa;AAAA,EAC9D;AACF,CAAC;", + "names": ["endpoint", "request"] +} diff --git a/dist/node_modules/@octokit/request/package.json b/dist/node_modules/@octokit/request/package.json new file mode 100644 index 0000000..787976e --- /dev/null +++ b/dist/node_modules/@octokit/request/package.json @@ -0,0 +1,57 @@ +{ + "name": "@octokit/request", + "version": "6.2.8", + "publishConfig": { + "access": "public" + }, + "description": "Send parameterized requests to GitHub's APIs with sensible defaults in browsers and Node", + "repository": "github:octokit/request.js", + "keywords": [ + "octokit", + "github", + "api", + "request" + ], + "author": "Gregor Martynus (https://github.com/gr2m)", + "license": "MIT", + "dependencies": { + "@octokit/endpoint": "^7.0.0", + "@octokit/request-error": "^3.0.0", + "@octokit/types": "^9.0.0", + "is-plain-object": "^5.0.0", + "node-fetch": "^2.6.7", + "universal-user-agent": "^6.0.0" + }, + "devDependencies": { + "@octokit/auth-app": "^4.0.0", + "@octokit/tsconfig": "^2.0.0", + "@types/fetch-mock": "^7.2.4", + "@types/jest": "^29.0.0", + "@types/lolex": "^5.1.0", + "@types/node": "^18.0.0", + "@types/node-fetch": "^2.3.3", + "@types/once": "^1.4.0", + "esbuild": "^0.18.0", + "fetch-mock": "^9.3.1", + "glob": "^10.2.4", + "jest": "^29.0.0", + "lolex": "^6.0.0", + "prettier": "2.8.8", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "string-to-arraybuffer": "^1.0.2", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "browser": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "module": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/tsconfig/LICENSE b/dist/node_modules/@octokit/tsconfig/LICENSE new file mode 100644 index 0000000..d7d5927 --- /dev/null +++ b/dist/node_modules/@octokit/tsconfig/LICENSE @@ -0,0 +1,7 @@ +MIT License Copyright (c) 2020 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
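For orientation before the type-definition files that follow, here is a minimal sketch of how the `request` export assembled above by `withDefaults(endpoint, ...)` in `dist-web/index.js` is typically consumed. The route, the `octokit/request.js` owner/repo values, and the `GITHUB_TOKEN` environment variable are illustrative assumptions for this sketch, not values taken from this diff.

```ts
// Minimal usage sketch (assumed example values, not part of the vendored files).
import { request } from "@octokit/request";

// `request.defaults()` is the `withDefaults` chain shown in dist-web/index.js:
// it returns a new request function with merged endpoint defaults.
const authenticatedRequest = request.defaults({
  headers: {
    authorization: `token ${process.env.GITHUB_TOKEN}`,
  },
});

async function listIssues() {
  // The route string and parameters are parsed by @octokit/endpoint and the
  // resulting options are sent through the fetchWrapper defined earlier.
  const response = await authenticatedRequest(
    "GET /repos/{owner}/{repo}/issues",
    { owner: "octokit", repo: "request.js", per_page: 5 }
  );
  console.log(response.status, response.data.length);
}
```

Because `withDefaults` re-binds `defaults` on every function it returns, defaults can be layered repeatedly without mutating the base `request`.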
diff --git a/dist/node_modules/@octokit/tsconfig/README.md b/dist/node_modules/@octokit/tsconfig/README.md new file mode 100644 index 0000000..a9b2844 --- /dev/null +++ b/dist/node_modules/@octokit/tsconfig/README.md @@ -0,0 +1,31 @@ +# tsconfig + +> TypeScript configuration for Octokit packages + +[![@latest](https://img.shields.io/npm/v/@octokit/tsconfig.svg)](https://www.npmjs.com/package/@octokit/tsconfig) +[![Build Status](https://github.com/octokit/tsconfig/workflows/Test/badge.svg)](https://github.com/octokit/tsconfig/actions?query=workflow%3ATest+branch%3Amain) +[![Dependabot Status](https://api.dependabot.com/badges/status?host=github&repo=octokit/tsconfig)](https://dependabot.com/) + +## Usage + +Install package with `npm` or `yarn` + +``` +npm install --save-dev @octokit/tsconfig +``` + +Then create `tsconfig.json` file with the following content + +```js +{ + "extends": "@octokit/tsconfig" +} +``` + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/tsconfig/package.json b/dist/node_modules/@octokit/tsconfig/package.json new file mode 100644 index 0000000..09c6ff1 --- /dev/null +++ b/dist/node_modules/@octokit/tsconfig/package.json @@ -0,0 +1,28 @@ +{ + "name": "@octokit/tsconfig", + "publishConfig": { + "access": "public" + }, + "version": "1.0.2", + "description": "TypeScript configuration for Octokit packages", + "scripts": { + "test": "node -p 'require(\".\")'" + }, + "main": "tsconfig.json", + "files": [ + "tsconfig.json" + ], + "repository": "https://github.com/octokit/tsconfig", + "author": "Gregor Martynus (https://dev.to/gr2m)", + "license": "MIT", + "release": { + "branches": [ + "main" + ] + }, + "renovate": { + "extends": [ + "github>octokit/.github" + ] + } +} diff --git a/dist/node_modules/@octokit/tsconfig/tsconfig.json b/dist/node_modules/@octokit/tsconfig/tsconfig.json new file mode 100644 index 0000000..334346c --- /dev/null +++ b/dist/node_modules/@octokit/tsconfig/tsconfig.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "module": "esnext", + "moduleResolution": "node", + "noUnusedLocals": true, + "noUnusedParameters": true, + "strict": true, + "target": "es2020", + "resolveJsonModule": true + } +} diff --git a/dist/node_modules/@octokit/types/LICENSE b/dist/node_modules/@octokit/types/LICENSE new file mode 100644 index 0000000..57bee5f --- /dev/null +++ b/dist/node_modules/@octokit/types/LICENSE @@ -0,0 +1,7 @@ +MIT License Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/dist/node_modules/@octokit/types/README.md b/dist/node_modules/@octokit/types/README.md new file mode 100644 index 0000000..c48ce42 --- /dev/null +++ b/dist/node_modules/@octokit/types/README.md @@ -0,0 +1,65 @@ +# types.ts + +> Shared TypeScript definitions for Octokit projects + +[![@latest](https://img.shields.io/npm/v/@octokit/types.svg)](https://www.npmjs.com/package/@octokit/types) +[![Build Status](https://github.com/octokit/types.ts/workflows/Test/badge.svg)](https://github.com/octokit/types.ts/actions?workflow=Test) + + + +- [Usage](#usage) +- [Examples](#examples) + - [Get parameter and response data types for a REST API endpoint](#get-parameter-and-response-data-types-for-a-rest-api-endpoint) + - [Get response types from endpoint methods](#get-response-types-from-endpoint-methods) +- [Contributing](#contributing) +- [License](#license) + + + +## Usage + +See all exported types at https://octokit.github.io/types.ts + +## Examples + +### Get parameter and response data types for a REST API endpoint + +```ts +import { Endpoints } from "@octokit/types"; + +type listUserReposParameters = + Endpoints["GET /repos/{owner}/{repo}"]["parameters"]; +type listUserReposResponse = Endpoints["GET /repos/{owner}/{repo}"]["response"]; + +async function listRepos( + options: listUserReposParameters +): listUserReposResponse["data"] { + // ... +} +``` + +### Get response types from endpoint methods + +```ts +import { + GetResponseTypeFromEndpointMethod, + GetResponseDataTypeFromEndpointMethod, +} from "@octokit/types"; +import { Octokit } from "@octokit/rest"; + +const octokit = new Octokit(); +type CreateLabelResponseType = GetResponseTypeFromEndpointMethod< + typeof octokit.issues.createLabel +>; +type CreateLabelResponseDataType = GetResponseDataTypeFromEndpointMethod< + typeof octokit.issues.createLabel +>; +``` + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/types/dist-types/AuthInterface.d.ts b/dist/node_modules/@octokit/types/dist-types/AuthInterface.d.ts new file mode 100644 index 0000000..8b39d61 --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/AuthInterface.d.ts @@ -0,0 +1,31 @@ +import { EndpointOptions } from "./EndpointOptions"; +import { OctokitResponse } from "./OctokitResponse"; +import { RequestInterface } from "./RequestInterface"; +import { RequestParameters } from "./RequestParameters"; +import { Route } from "./Route"; +/** + * Interface to implement complex authentication strategies for Octokit. + * An object Implementing the AuthInterface can directly be passed as the + * `auth` option in the Octokit constructor. + * + * For the official implementations of the most common authentication + * strategies, see https://github.com/octokit/auth.js + */ +export interface AuthInterface { + (...args: AuthOptions): Promise; + hook: { + /** + * Sends a request using the passed `request` instance + * + * @param {object} endpoint Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. 
+ */ + (request: RequestInterface, options: EndpointOptions): Promise>; + /** + * Sends a request using the passed `request` instance + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (request: RequestInterface, route: Route, parameters?: RequestParameters): Promise>; + }; +} diff --git a/dist/node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts b/dist/node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts new file mode 100644 index 0000000..a04ad60 --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts @@ -0,0 +1,21 @@ +import { RequestHeaders } from "./RequestHeaders"; +import { RequestMethod } from "./RequestMethod"; +import { RequestParameters } from "./RequestParameters"; +import { Url } from "./Url"; +/** + * The `.endpoint()` method is guaranteed to set all keys defined by RequestParameters + * as well as the method property. + */ +export type EndpointDefaults = RequestParameters & { + baseUrl: Url; + method: RequestMethod; + url?: Url; + headers: RequestHeaders & { + accept: string; + "user-agent": string; + }; + mediaType: { + format: string; + previews: string[]; + }; +}; diff --git a/dist/node_modules/@octokit/types/dist-types/EndpointInterface.d.ts b/dist/node_modules/@octokit/types/dist-types/EndpointInterface.d.ts new file mode 100644 index 0000000..d7b4009 --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/EndpointInterface.d.ts @@ -0,0 +1,65 @@ +import { EndpointDefaults } from "./EndpointDefaults"; +import { RequestOptions } from "./RequestOptions"; +import { RequestParameters } from "./RequestParameters"; +import { Route } from "./Route"; +import { Endpoints } from "./generated/Endpoints"; +export interface EndpointInterface { + /** + * Transforms a GitHub REST API endpoint into generic request options + * + * @param {object} endpoint Must set `url` unless it's set defaults. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (options: O & { + method?: string; + } & ("url" extends keyof D ? { + url?: string; + } : { + url: string; + })): RequestOptions & Pick; + /** + * Transforms a GitHub REST API endpoint into generic request options + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (route: keyof Endpoints | R, parameters?: P): (R extends keyof Endpoints ? Endpoints[R]["request"] : RequestOptions) & Pick; + /** + * Object with current default route and parameters + */ + DEFAULTS: D & EndpointDefaults; + /** + * Returns a new `endpoint` interface with new defaults + */ + defaults: (newDefaults: O) => EndpointInterface; + merge: { + /** + * Merges current endpoint defaults with passed route and parameters, + * without transforming them into request options. + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + * + */ + (route: keyof Endpoints | R, parameters?: P): D & (R extends keyof Endpoints ? 
Endpoints[R]["request"] & Endpoints[R]["parameters"] : EndpointDefaults) & P; + /** + * Merges current endpoint defaults with passed route and parameters, + * without transforming them into request options. + * + * @param {object} endpoint Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ +
<P extends RequestParameters = RequestParameters>
(options: P): EndpointDefaults & D & P; + /** + * Returns current default options. + * + * @deprecated use endpoint.DEFAULTS instead + */ + (): D & EndpointDefaults; + }; + /** + * Stateless method to turn endpoint options into request options. + * Calling `endpoint(options)` is the same as calling `endpoint.parse(endpoint.merge(options))`. + * + * @param {object} options `method`, `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + parse: (options: O) => RequestOptions & Pick; +} diff --git a/dist/node_modules/@octokit/types/dist-types/EndpointOptions.d.ts b/dist/node_modules/@octokit/types/dist-types/EndpointOptions.d.ts new file mode 100644 index 0000000..8eccf5e --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/EndpointOptions.d.ts @@ -0,0 +1,7 @@ +import { RequestMethod } from "./RequestMethod"; +import { Url } from "./Url"; +import { RequestParameters } from "./RequestParameters"; +export type EndpointOptions = RequestParameters & { + method: RequestMethod; + url: Url; +}; diff --git a/dist/node_modules/@octokit/types/dist-types/Fetch.d.ts b/dist/node_modules/@octokit/types/dist-types/Fetch.d.ts new file mode 100644 index 0000000..983c79b --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/Fetch.d.ts @@ -0,0 +1,4 @@ +/** + * Browser's fetch method (or compatible such as fetch-mock) + */ +export type Fetch = any; diff --git a/dist/node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts b/dist/node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts new file mode 100644 index 0000000..2daaf34 --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts @@ -0,0 +1,5 @@ +type Unwrap = T extends Promise ? 
U : T; +type AnyFunction = (...args: any[]) => any; +export type GetResponseTypeFromEndpointMethod = Unwrap>; +export type GetResponseDataTypeFromEndpointMethod = Unwrap>["data"]; +export {}; diff --git a/dist/node_modules/@octokit/types/dist-types/OctokitResponse.d.ts b/dist/node_modules/@octokit/types/dist-types/OctokitResponse.d.ts new file mode 100644 index 0000000..8686e7f --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/OctokitResponse.d.ts @@ -0,0 +1,17 @@ +import { ResponseHeaders } from "./ResponseHeaders"; +import { Url } from "./Url"; +export type OctokitResponse = { + headers: ResponseHeaders; + /** + * http response code + */ + status: S; + /** + * URL of response after all redirects + */ + url: Url; + /** + * Response data as documented in the REST API reference documentation at https://docs.github.com/rest/reference + */ + data: T; +}; diff --git a/dist/node_modules/@octokit/types/dist-types/RequestError.d.ts b/dist/node_modules/@octokit/types/dist-types/RequestError.d.ts new file mode 100644 index 0000000..4608392 --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/RequestError.d.ts @@ -0,0 +1,11 @@ +export type RequestError = { + name: string; + status: number; + documentation_url: string; + errors?: Array<{ + resource: string; + code: string; + field: string; + message?: string; + }>; +}; diff --git a/dist/node_modules/@octokit/types/dist-types/RequestHeaders.d.ts b/dist/node_modules/@octokit/types/dist-types/RequestHeaders.d.ts new file mode 100644 index 0000000..4231159 --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/RequestHeaders.d.ts @@ -0,0 +1,15 @@ +export type RequestHeaders = { + /** + * Avoid setting `headers.accept`, use `mediaType.{format|previews}` option instead. + */ + accept?: string; + /** + * Use `authorization` to send authenticated request, remember `token ` / `bearer ` prefixes. Example: `token 1234567890abcdef1234567890abcdef12345678` + */ + authorization?: string; + /** + * `user-agent` is set do a default and can be overwritten as needed. + */ + "user-agent"?: string; + [header: string]: string | number | undefined; +}; diff --git a/dist/node_modules/@octokit/types/dist-types/RequestInterface.d.ts b/dist/node_modules/@octokit/types/dist-types/RequestInterface.d.ts new file mode 100644 index 0000000..851811f --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/RequestInterface.d.ts @@ -0,0 +1,34 @@ +import { EndpointInterface } from "./EndpointInterface"; +import { OctokitResponse } from "./OctokitResponse"; +import { RequestParameters } from "./RequestParameters"; +import { Route } from "./Route"; +import { Endpoints } from "./generated/Endpoints"; +export interface RequestInterface { + /** + * Sends a request based on endpoint options + * + * @param {object} endpoint Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (options: O & { + method?: string; + } & ("url" extends keyof D ? { + url?: string; + } : { + url: string; + })): Promise>; + /** + * Sends a request based on endpoint options + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (route: keyof Endpoints | R, options?: R extends keyof Endpoints ? Endpoints[R]["parameters"] & RequestParameters : RequestParameters): R extends keyof Endpoints ? 
Promise : Promise>; + /** + * Returns a new `request` with updated route and parameters + */ + defaults: (newDefaults: O) => RequestInterface; + /** + * Octokit endpoint API, see {@link https://github.com/octokit/endpoint.js|@octokit/endpoint} + */ + endpoint: EndpointInterface; +} diff --git a/dist/node_modules/@octokit/types/dist-types/RequestMethod.d.ts b/dist/node_modules/@octokit/types/dist-types/RequestMethod.d.ts new file mode 100644 index 0000000..4cdfe61 --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/RequestMethod.d.ts @@ -0,0 +1,4 @@ +/** + * HTTP Verb supported by GitHub's REST API + */ +export type RequestMethod = "DELETE" | "GET" | "HEAD" | "PATCH" | "POST" | "PUT"; diff --git a/dist/node_modules/@octokit/types/dist-types/RequestOptions.d.ts b/dist/node_modules/@octokit/types/dist-types/RequestOptions.d.ts new file mode 100644 index 0000000..f83f5ab --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/RequestOptions.d.ts @@ -0,0 +1,14 @@ +import { RequestHeaders } from "./RequestHeaders"; +import { RequestMethod } from "./RequestMethod"; +import { RequestRequestOptions } from "./RequestRequestOptions"; +import { Url } from "./Url"; +/** + * Generic request options as they are returned by the `endpoint()` method + */ +export type RequestOptions = { + method: RequestMethod; + url: Url; + headers: RequestHeaders; + body?: any; + request?: RequestRequestOptions; +}; diff --git a/dist/node_modules/@octokit/types/dist-types/RequestParameters.d.ts b/dist/node_modules/@octokit/types/dist-types/RequestParameters.d.ts new file mode 100644 index 0000000..bf51f85 --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/RequestParameters.d.ts @@ -0,0 +1,45 @@ +import { RequestRequestOptions } from "./RequestRequestOptions"; +import { RequestHeaders } from "./RequestHeaders"; +import { Url } from "./Url"; +/** + * Parameters that can be passed into `request(route, parameters)` or `endpoint(route, parameters)` methods + */ +export type RequestParameters = { + /** + * Base URL to be used when a relative URL is passed, such as `/orgs/{org}`. + * If `baseUrl` is `https://enterprise.acme-inc.com/api/v3`, then the request + * will be sent to `https://enterprise.acme-inc.com/api/v3/orgs/{org}`. + */ + baseUrl?: Url; + /** + * HTTP headers. Use lowercase keys. + */ + headers?: RequestHeaders; + /** + * Media type options, see {@link https://developer.github.com/v3/media/|GitHub Developer Guide} + */ + mediaType?: { + /** + * `json` by default. Can be `raw`, `text`, `html`, `full`, `diff`, `patch`, `sha`, `base64`. Depending on endpoint + */ + format?: string; + /** + * Custom media type names of {@link https://developer.github.com/v3/media/|API Previews} without the `-preview` suffix. + * Example for single preview: `['squirrel-girl']`. + * Example for multiple previews: `['squirrel-girl', 'mister-fantastic']`. + */ + previews?: string[]; + }; + /** + * Pass custom meta information for the request. The `request` object will be returned as is. + */ + request?: RequestRequestOptions; + /** + * Any additional parameter will be passed as follows + * 1. URL parameter if `':parameter'` or `{parameter}` is part of `url` + * 2. Query parameter if `method` is `'GET'` or `'HEAD'` + * 3. Request body if `parameter` is `'data'` + * 4. 
JSON in the request body in the form of `body[parameter]` unless `parameter` key is `'data'` + */ + [parameter: string]: unknown; +}; diff --git a/dist/node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts b/dist/node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts new file mode 100644 index 0000000..b875b7e --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts @@ -0,0 +1,26 @@ +import { Fetch } from "./Fetch"; +import { Signal } from "./Signal"; +/** + * Octokit-specific request options which are ignored for the actual request, but can be used by Octokit or plugins to manipulate how the request is sent or how a response is handled + */ +export type RequestRequestOptions = { + /** + * Node only. Useful for custom proxy, certificate, or dns lookup. + * + * @see https://nodejs.org/api/http.html#http_class_http_agent + */ + agent?: unknown; + /** + * Custom replacement for built-in fetch method. Useful for testing or request hooks. + */ + fetch?: Fetch; + /** + * Use an `AbortController` instance to cancel a request. In node you can only cancel streamed requests. + */ + signal?: Signal; + /** + * Node only. Request/response timeout in ms, it resets on redirect. 0 to disable (OS limit applies). `options.request.signal` is recommended instead. + */ + timeout?: number; + [option: string]: any; +}; diff --git a/dist/node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts b/dist/node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts new file mode 100644 index 0000000..ff7af38 --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts @@ -0,0 +1,20 @@ +export type ResponseHeaders = { + "cache-control"?: string; + "content-length"?: number; + "content-type"?: string; + date?: string; + etag?: string; + "last-modified"?: string; + link?: string; + location?: string; + server?: string; + status?: string; + vary?: string; + "x-github-mediatype"?: string; + "x-github-request-id"?: string; + "x-oauth-scopes"?: string; + "x-ratelimit-limit"?: string; + "x-ratelimit-remaining"?: string; + "x-ratelimit-reset"?: string; + [header: string]: string | number | undefined; +}; diff --git a/dist/node_modules/@octokit/types/dist-types/Route.d.ts b/dist/node_modules/@octokit/types/dist-types/Route.d.ts new file mode 100644 index 0000000..808991e --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/Route.d.ts @@ -0,0 +1,4 @@ +/** + * String consisting of an optional HTTP method and relative path or absolute URL. 
Examples: `'/orgs/{org}'`, `'PUT /orgs/{org}'`, `GET https://example.com/foo/bar` + */ +export type Route = string; diff --git a/dist/node_modules/@octokit/types/dist-types/Signal.d.ts b/dist/node_modules/@octokit/types/dist-types/Signal.d.ts new file mode 100644 index 0000000..bdf9700 --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/Signal.d.ts @@ -0,0 +1,6 @@ +/** + * Abort signal + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ +export type Signal = any; diff --git a/dist/node_modules/@octokit/types/dist-types/StrategyInterface.d.ts b/dist/node_modules/@octokit/types/dist-types/StrategyInterface.d.ts new file mode 100644 index 0000000..405cbd2 --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/StrategyInterface.d.ts @@ -0,0 +1,4 @@ +import { AuthInterface } from "./AuthInterface"; +export interface StrategyInterface { + (...args: StrategyOptions): AuthInterface; +} diff --git a/dist/node_modules/@octokit/types/dist-types/Url.d.ts b/dist/node_modules/@octokit/types/dist-types/Url.d.ts new file mode 100644 index 0000000..521f5ad --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/Url.d.ts @@ -0,0 +1,4 @@ +/** + * Relative or absolute URL. Examples: `'/orgs/{org}'`, `https://example.com/foo/bar` + */ +export type Url = string; diff --git a/dist/node_modules/@octokit/types/dist-types/VERSION.d.ts b/dist/node_modules/@octokit/types/dist-types/VERSION.d.ts new file mode 100644 index 0000000..9721fe0 --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/VERSION.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "9.3.2"; diff --git a/dist/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts b/dist/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts new file mode 100644 index 0000000..5871b3e --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts @@ -0,0 +1,3639 @@ +import { paths } from "@octokit/openapi-types"; +import { OctokitResponse } from "../OctokitResponse"; +import { RequestHeaders } from "../RequestHeaders"; +import { RequestRequestOptions } from "../RequestRequestOptions"; +type UnionToIntersection = (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never; +type ExtractParameters = "parameters" extends keyof T ? UnionToIntersection<{ + [K in keyof T["parameters"]]-?: T["parameters"][K]; +}[keyof T["parameters"]]> : {}; +type ExtractRequestBody = "requestBody" extends keyof T ? "content" extends keyof T["requestBody"] ? "application/json" extends keyof T["requestBody"]["content"] ? T["requestBody"]["content"]["application/json"] : { + data: { + [K in keyof T["requestBody"]["content"]]: T["requestBody"]["content"][K]; + }[keyof T["requestBody"]["content"]]; +} : "application/json" extends keyof T["requestBody"] ? T["requestBody"]["application/json"] : { + data: { + [K in keyof T["requestBody"]]: T["requestBody"][K]; + }[keyof T["requestBody"]]; +} : {}; +type ToOctokitParameters = ExtractParameters & ExtractRequestBody>; +type RequiredPreview = T extends string ? { + mediaType: { + previews: [T, ...string[]]; + }; +} : {}; +type Operation = { + parameters: ToOctokitParameters & RequiredPreview; + request: { + method: Method extends keyof MethodsMap ? 
MethodsMap[Method] : never; + url: Url; + headers: RequestHeaders; + request: RequestRequestOptions; + }; + response: ExtractOctokitResponse; +}; +type MethodsMap = { + delete: "DELETE"; + get: "GET"; + patch: "PATCH"; + post: "POST"; + put: "PUT"; +}; +type SuccessStatuses = 200 | 201 | 202 | 204; +type RedirectStatuses = 301 | 302; +type EmptyResponseStatuses = 201 | 204; +type KnownJsonResponseTypes = "application/json" | "application/scim+json" | "text/html"; +type SuccessResponseDataType = { + [K in SuccessStatuses & keyof Responses]: GetContentKeyIfPresent extends never ? never : OctokitResponse, K>; +}[SuccessStatuses & keyof Responses]; +type RedirectResponseDataType = { + [K in RedirectStatuses & keyof Responses]: OctokitResponse; +}[RedirectStatuses & keyof Responses]; +type EmptyResponseDataType = { + [K in EmptyResponseStatuses & keyof Responses]: OctokitResponse; +}[EmptyResponseStatuses & keyof Responses]; +type GetContentKeyIfPresent = "content" extends keyof T ? DataType : DataType; +type DataType = { + [K in KnownJsonResponseTypes & keyof T]: T[K]; +}[KnownJsonResponseTypes & keyof T]; +type ExtractOctokitResponse = "responses" extends keyof R ? SuccessResponseDataType extends never ? RedirectResponseDataType extends never ? EmptyResponseDataType : RedirectResponseDataType : SuccessResponseDataType : unknown; +export interface Endpoints { + /** + * @see https://docs.github.com/rest/reference/apps#delete-an-installation-for-the-authenticated-app + */ + "DELETE /app/installations/{installation_id}": Operation<"/app/installations/{installation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#unsuspend-an-app-installation + */ + "DELETE /app/installations/{installation_id}/suspended": Operation<"/app/installations/{installation_id}/suspended", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#delete-an-app-authorization + */ + "DELETE /applications/{client_id}/grant": Operation<"/applications/{client_id}/grant", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#delete-an-app-token + */ + "DELETE /applications/{client_id}/token": Operation<"/applications/{client_id}/token", "delete">; + /** + * @see https://docs.github.com/rest/reference/gists#delete-a-gist + */ + "DELETE /gists/{gist_id}": Operation<"/gists/{gist_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/gists#delete-a-gist-comment + */ + "DELETE /gists/{gist_id}/comments/{comment_id}": Operation<"/gists/{gist_id}/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/gists#unstar-a-gist + */ + "DELETE /gists/{gist_id}/star": Operation<"/gists/{gist_id}/star", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#revoke-an-installation-access-token + */ + "DELETE /installation/token": Operation<"/installation/token", "delete">; + /** + * @see https://docs.github.com/rest/reference/activity#delete-a-thread-subscription + */ + "DELETE /notifications/threads/{thread_id}/subscription": Operation<"/notifications/threads/{thread_id}/subscription", "delete">; + /** + * @see https://docs.github.com/rest/orgs/orgs/#delete-an-organization + */ + "DELETE /orgs/{org}": Operation<"/orgs/{org}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#disable-a-selected-repository-for-github-actions-in-an-organization + */ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}": Operation<"/orgs/{org}/actions/permissions/repositories/{repository_id}", "delete">; + 
/** + * @see https://docs.github.com/rest/reference/actions#delete-a-required-workflow + */ + "DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}": Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-repository-from-selected-repositories-list-for-a-required-workflow + */ + "DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-self-hosted-runner-from-an-organization + */ + "DELETE /orgs/{org}/actions/runners/{runner_id}": Operation<"/orgs/{org}/actions/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-all-custom-labels-from-a-self-hosted-runner-for-an-organization + */ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-custom-label-from-a-self-hosted-runner-for-an-organization + */ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-an-organization-secret + */ + "DELETE /orgs/{org}/actions/secrets/{secret_name}": Operation<"/orgs/{org}/actions/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret + */ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/actions/variables#delete-an-organization-variable + */ + "DELETE /orgs/{org}/actions/variables/{name}": Operation<"/orgs/{org}/actions/variables/{name}", "delete">; + /** + * @see https://docs.github.com/rest/actions/variables#remove-selected-repository-from-an-organization-variable + */ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/variables/{name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#unblock-a-user-from-an-organization + */ + "DELETE /orgs/{org}/blocks/{username}": Operation<"/orgs/{org}/blocks/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-codespaces-billing-users + */ + "DELETE /orgs/{org}/codespaces/billing/selected_users": Operation<"/orgs/{org}/codespaces/billing/selected_users", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-an-organization-secret + */ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}": Operation<"/orgs/{org}/codespaces/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#remove-selected-repository-from-an-organization-secret + */ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/dependabot#delete-an-organization-secret + */ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}": 
Operation<"/orgs/{org}/dependabot/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/dependabot#remove-selected-repository-from-an-organization-secret + */ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#delete-an-organization-webhook + */ + "DELETE /orgs/{org}/hooks/{hook_id}": Operation<"/orgs/{org}/hooks/{hook_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/interactions#remove-interaction-restrictions-for-an-organization + */ + "DELETE /orgs/{org}/interaction-limits": Operation<"/orgs/{org}/interaction-limits", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#cancel-an-organization-invitation + */ + "DELETE /orgs/{org}/invitations/{invitation_id}": Operation<"/orgs/{org}/invitations/{invitation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-an-organization-member + */ + "DELETE /orgs/{org}/members/{username}": Operation<"/orgs/{org}/members/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces + */ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}": Operation<"/orgs/{org}/members/{username}/codespaces/{codespace_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-organization-membership-for-a-user + */ + "DELETE /orgs/{org}/memberships/{username}": Operation<"/orgs/{org}/memberships/{username}", "delete">; + /** + * @see https://docs.github.com/rest/migrations/orgs#delete-an-organization-migration-archive + */ + "DELETE /orgs/{org}/migrations/{migration_id}/archive": Operation<"/orgs/{org}/migrations/{migration_id}/archive", "delete">; + /** + * @see https://docs.github.com/rest/migrations/orgs#unlock-an-organization-repository + */ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock": Operation<"/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-outside-collaborator-from-an-organization + */ + "DELETE /orgs/{org}/outside_collaborators/{username}": Operation<"/orgs/{org}/outside_collaborators/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-for-an-organization + */ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-version-for-an-organization + */ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-public-organization-membership-for-the-authenticated-user + */ + "DELETE /orgs/{org}/public_members/{username}": Operation<"/orgs/{org}/public_members/{username}", "delete">; + /** + * @see https://docs.github.com/rest/repos/rules#delete-organization-ruleset + */ + "DELETE /orgs/{org}/rulesets/{ruleset_id}": Operation<"/orgs/{org}/rulesets/{ruleset_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-a-security-manager-team + */ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}": 
Operation<"/orgs/{org}/security-managers/teams/{team_slug}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-team + */ + "DELETE /orgs/{org}/teams/{team_slug}": Operation<"/orgs/{org}/teams/{team_slug}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion-comment + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-team-discussion-comment-reaction + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-team-discussion-reaction + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user + */ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}": Operation<"/orgs/{org}/teams/{team_slug}/memberships/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-a-project-from-a-team + */ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}": Operation<"/orgs/{org}/teams/{team_slug}/projects/{project_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#remove-a-repository-from-a-team + */ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": Operation<"/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#delete-a-project-card + */ + "DELETE /projects/columns/cards/{card_id}": Operation<"/projects/columns/cards/{card_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#delete-a-project-column + */ + "DELETE /projects/columns/{column_id}": Operation<"/projects/columns/{column_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#delete-a-project + */ + "DELETE /projects/{project_id}": Operation<"/projects/{project_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#remove-project-collaborator + */ + "DELETE /projects/{project_id}/collaborators/{username}": Operation<"/projects/{project_id}/collaborators/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-repository + */ + "DELETE /repos/{owner}/{repo}": Operation<"/repos/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-an-artifact + */ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}": Operation<"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}", "delete">; + /** + * @see https://docs.github.com/rest/actions/cache#delete-a-github-actions-cache-for-a-repository-using-a-cache-id + */ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}": 
Operation<"/repos/{owner}/{repo}/actions/caches/{cache_id}", "delete">; + /** + * @see https://docs.github.com/rest/actions/cache#delete-github-actions-caches-for-a-repository-using-a-cache-key + */ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}": Operation<"/repos/{owner}/{repo}/actions/caches", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-self-hosted-runner-from-a-repository + */ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-all-custom-labels-from-a-self-hosted-runner-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-custom-label-from-a-self-hosted-runner-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-workflow-run + */ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-workflow-run-logs + */ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/logs", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-repository-secret + */ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/actions/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/actions/variables#delete-a-repository-variable + */ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}": Operation<"/repos/{owner}/{repo}/actions/variables/{name}", "delete">; + /** + * @see https://docs.github.com/rest/repos/autolinks#delete-an-autolink-reference-from-a-repository + */ + "DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}": Operation<"/repos/{owner}/{repo}/autolinks/{autolink_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#disable-automated-security-fixes + */ + "DELETE /repos/{owner}/{repo}/automated-security-fixes": Operation<"/repos/{owner}/{repo}/automated-security-fixes", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#delete-branch-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#delete-admin-branch-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#delete-pull-request-review-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#delete-commit-signature-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": 
Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#remove-status-check-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#remove-status-check-contexts + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#delete-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#remove-app-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#remove-team-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "delete">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#remove-user-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "delete">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#delete-a-code-scanning-analysis-from-a-repository + */ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}": Operation<"/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-a-repository-secret + */ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#remove-a-repository-collaborator + */ + "DELETE /repos/{owner}/{repo}/collaborators/{username}": Operation<"/repos/{owner}/{repo}/collaborators/{username}", "delete">; + /** + * @see https://docs.github.com/rest/commits/comments#delete-a-commit-comment + */ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-a-commit-comment-reaction + */ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-file + */ + "DELETE /repos/{owner}/{repo}/contents/{path}": Operation<"/repos/{owner}/{repo}/contents/{path}", "delete">; + /** + * @see https://docs.github.com/rest/reference/dependabot#delete-a-repository-secret + */ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", "delete">; + /** + * @see 
https://docs.github.com/rest/deployments/deployments#delete-a-deployment + */ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}", "delete">; + /** + * @see https://docs.github.com/rest/deployments/environments#delete-an-environment + */ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}", "delete">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#delete-deployment-branch-policy + */ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}", "delete">; + /** + * @see https://docs.github.com/rest/deployments/protection-rules#disable-deployment-protection-rule + */ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/git#delete-a-reference + */ + "DELETE /repos/{owner}/{repo}/git/refs/{ref}": Operation<"/repos/{owner}/{repo}/git/refs/{ref}", "delete">; + /** + * @see https://docs.github.com/rest/webhooks/repos#delete-a-repository-webhook + */ + "DELETE /repos/{owner}/{repo}/hooks/{hook_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}", "delete">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#cancel-an-import + */ + "DELETE /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "delete">; + /** + * @see https://docs.github.com/rest/reference/interactions#remove-interaction-restrictions-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/interaction-limits": Operation<"/repos/{owner}/{repo}/interaction-limits", "delete">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#delete-a-repository-invitation + */ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}": Operation<"/repos/{owner}/{repo}/invitations/{invitation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#delete-an-issue-comment + */ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-an-issue-comment-reaction + */ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#remove-assignees-from-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/assignees", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#remove-all-labels-from-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#remove-a-label-from-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#unlock-an-issue + */ + "DELETE 
/repos/{owner}/{repo}/issues/{issue_number}/lock": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/lock", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-an-issue-reaction + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/deploy-keys#delete-a-deploy-key + */ + "DELETE /repos/{owner}/{repo}/keys/{key_id}": Operation<"/repos/{owner}/{repo}/keys/{key_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#delete-a-label + */ + "DELETE /repos/{owner}/{repo}/labels/{name}": Operation<"/repos/{owner}/{repo}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#disable-git-lfs-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/lfs": Operation<"/repos/{owner}/{repo}/lfs", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#delete-a-milestone + */ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}", "delete">; + /** + * @see https://docs.github.com/rest/pages#delete-a-github-pages-site + */ + "DELETE /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "delete">; + /** + * @see https://docs.github.com/rest/reference/pulls#delete-a-review-comment-for-a-pull-request + */ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-a-pull-request-comment-reaction + */ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/pulls#remove-requested-reviewers-from-a-pull-request + */ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "delete">; + /** + * @see https://docs.github.com/rest/reference/pulls#delete-a-pending-review-for-a-pull-request + */ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-release-asset + */ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}": Operation<"/repos/{owner}/{repo}/releases/assets/{asset_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-release + */ + "DELETE /repos/{owner}/{repo}/releases/{release_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions/#delete-a-release-reaction + */ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/repos/rules#delete-repository-ruleset + */ + "DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}": Operation<"/repos/{owner}/{repo}/rulesets/{ruleset_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/activity#delete-a-repository-subscription + */ + "DELETE /repos/{owner}/{repo}/subscription": Operation<"/repos/{owner}/{repo}/subscription", "delete">; + /** + * 
@see https://docs.github.com/rest/reference/repos#delete-tag-protection-state-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}": Operation<"/repos/{owner}/{repo}/tags/protection/{tag_protection_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#disable-vulnerability-alerts + */ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts": Operation<"/repos/{owner}/{repo}/vulnerability-alerts", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-an-environment-secret + */ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/actions/variables#delete-an-environment-variable + */ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/variables/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#delete-a-team-legacy + */ + "DELETE /teams/{team_id}": Operation<"/teams/{team_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion-legacy + */ + "DELETE /teams/{team_id}/discussions/{discussion_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion-comment-legacy + */ + "DELETE /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-team-member-legacy + */ + "DELETE /teams/{team_id}/members/{username}": Operation<"/teams/{team_id}/members/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user-legacy + */ + "DELETE /teams/{team_id}/memberships/{username}": Operation<"/teams/{team_id}/memberships/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#remove-a-project-from-a-team-legacy + */ + "DELETE /teams/{team_id}/projects/{project_id}": Operation<"/teams/{team_id}/projects/{project_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#remove-a-repository-from-a-team-legacy + */ + "DELETE /teams/{team_id}/repos/{owner}/{repo}": Operation<"/teams/{team_id}/repos/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#unblock-a-user + */ + "DELETE /user/blocks/{username}": Operation<"/user/blocks/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-a-secret-for-the-authenticated-user + */ + "DELETE /user/codespaces/secrets/{secret_name}": Operation<"/user/codespaces/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#remove-a-selected-repository-from-a-user-secret + */ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}": Operation<"/user/codespaces/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-a-codespace-for-the-authenticated-user + */ + "DELETE /user/codespaces/{codespace_name}": Operation<"/user/codespaces/{codespace_name}", "delete">; + /** + * @see 
https://docs.github.com/rest/reference/users#delete-an-email-address-for-the-authenticated-user + */ + "DELETE /user/emails": Operation<"/user/emails", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#unfollow-a-user + */ + "DELETE /user/following/{username}": Operation<"/user/following/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-a-gpg-key-for-the-authenticated-user + */ + "DELETE /user/gpg_keys/{gpg_key_id}": Operation<"/user/gpg_keys/{gpg_key_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#remove-a-repository-from-an-app-installation + */ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}": Operation<"/user/installations/{installation_id}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/interactions#remove-interaction-restrictions-from-your-public-repositories + */ + "DELETE /user/interaction-limits": Operation<"/user/interaction-limits", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-a-public-ssh-key-for-the-authenticated-user + */ + "DELETE /user/keys/{key_id}": Operation<"/user/keys/{key_id}", "delete">; + /** + * @see https://docs.github.com/rest/migrations/users#delete-a-user-migration-archive + */ + "DELETE /user/migrations/{migration_id}/archive": Operation<"/user/migrations/{migration_id}/archive", "delete">; + /** + * @see https://docs.github.com/rest/migrations/users#unlock-a-user-repository + */ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock": Operation<"/user/migrations/{migration_id}/repos/{repo_name}/lock", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-for-the-authenticated-user + */ + "DELETE /user/packages/{package_type}/{package_name}": Operation<"/user/packages/{package_type}/{package_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-version-for-the-authenticated-user + */ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/user/packages/{package_type}/{package_name}/versions/{package_version_id}", "delete">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#decline-a-repository-invitation + */ + "DELETE /user/repository_invitations/{invitation_id}": Operation<"/user/repository_invitations/{invitation_id}", "delete">; + /** + * @see https://docs.github.com/rest/users/social-accounts#delete-social-account-for-authenticated-user + */ + "DELETE /user/social_accounts": Operation<"/user/social_accounts", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-a-ssh-signing-key-for-the-authenticated-user + */ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}": Operation<"/user/ssh_signing_keys/{ssh_signing_key_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/activity#unstar-a-repository-for-the-authenticated-user + */ + "DELETE /user/starred/{owner}/{repo}": Operation<"/user/starred/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-for-a-user + */ + "DELETE /users/{username}/packages/{package_type}/{package_name}": Operation<"/users/{username}/packages/{package_type}/{package_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-version-for-a-user + */ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}": 
Operation<"/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", "delete">; + /** + * @see https://docs.github.com/rest/overview/resources-in-the-rest-api#root-endpoint + */ + "GET /": Operation<"/", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-the-authenticated-app + */ + "GET /app": Operation<"/app", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-webhook-configuration-for-an-app + */ + "GET /app/hook/config": Operation<"/app/hook/config", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-deliveries-for-an-app-webhook + */ + "GET /app/hook/deliveries": Operation<"/app/hook/deliveries", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-delivery-for-an-app-webhook + */ + "GET /app/hook/deliveries/{delivery_id}": Operation<"/app/hook/deliveries/{delivery_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-installation-requests-for-the-authenticated-app + */ + "GET /app/installation-requests": Operation<"/app/installation-requests", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app + */ + "GET /app/installations": Operation<"/app/installations", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-an-installation-for-the-authenticated-app + */ + "GET /app/installations/{installation_id}": Operation<"/app/installations/{installation_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps/#get-an-app + */ + "GET /apps/{app_slug}": Operation<"/apps/{app_slug}", "get">; + /** + * @see https://docs.github.com/rest/reference/codes-of-conduct#get-all-codes-of-conduct + */ + "GET /codes_of_conduct": Operation<"/codes_of_conduct", "get">; + /** + * @see https://docs.github.com/rest/reference/codes-of-conduct#get-a-code-of-conduct + */ + "GET /codes_of_conduct/{key}": Operation<"/codes_of_conduct/{key}", "get">; + /** + * @see https://docs.github.com/rest/reference/emojis#get-emojis + */ + "GET /emojis": Operation<"/emojis", "get">; + /** + * @see https://docs.github.com/rest/dependabot/alerts#list-dependabot-alerts-for-an-enterprise + */ + "GET /enterprises/{enterprise}/dependabot/alerts": Operation<"/enterprises/{enterprise}/dependabot/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-an-enterprise + */ + "GET /enterprises/{enterprise}/secret-scanning/alerts": Operation<"/enterprises/{enterprise}/secret-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events + */ + "GET /events": Operation<"/events", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-feeds + */ + "GET /feeds": Operation<"/feeds", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gists-for-the-authenticated-user + */ + "GET /gists": Operation<"/gists", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-public-gists + */ + "GET /gists/public": Operation<"/gists/public", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-starred-gists + */ + "GET /gists/starred": Operation<"/gists/starred", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#get-a-gist + */ + "GET /gists/{gist_id}": Operation<"/gists/{gist_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-comments + */ + "GET /gists/{gist_id}/comments": 
Operation<"/gists/{gist_id}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#get-a-gist-comment + */ + "GET /gists/{gist_id}/comments/{comment_id}": Operation<"/gists/{gist_id}/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-commits + */ + "GET /gists/{gist_id}/commits": Operation<"/gists/{gist_id}/commits", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-forks + */ + "GET /gists/{gist_id}/forks": Operation<"/gists/{gist_id}/forks", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#check-if-a-gist-is-starred + */ + "GET /gists/{gist_id}/star": Operation<"/gists/{gist_id}/star", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#get-a-gist-revision + */ + "GET /gists/{gist_id}/{sha}": Operation<"/gists/{gist_id}/{sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/gitignore#get-all-gitignore-templates + */ + "GET /gitignore/templates": Operation<"/gitignore/templates", "get">; + /** + * @see https://docs.github.com/rest/reference/gitignore#get-a-gitignore-template + */ + "GET /gitignore/templates/{name}": Operation<"/gitignore/templates/{name}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-repositories-accessible-to-the-app-installation + */ + "GET /installation/repositories": Operation<"/installation/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issues-assigned-to-the-authenticated-user + */ + "GET /issues": Operation<"/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/licenses#get-all-commonly-used-licenses + */ + "GET /licenses": Operation<"/licenses", "get">; + /** + * @see https://docs.github.com/rest/reference/licenses#get-a-license + */ + "GET /licenses/{license}": Operation<"/licenses/{license}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-subscription-plan-for-an-account + */ + "GET /marketplace_listing/accounts/{account_id}": Operation<"/marketplace_listing/accounts/{account_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-plans + */ + "GET /marketplace_listing/plans": Operation<"/marketplace_listing/plans", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-accounts-for-a-plan + */ + "GET /marketplace_listing/plans/{plan_id}/accounts": Operation<"/marketplace_listing/plans/{plan_id}/accounts", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-subscription-plan-for-an-account-stubbed + */ + "GET /marketplace_listing/stubbed/accounts/{account_id}": Operation<"/marketplace_listing/stubbed/accounts/{account_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-plans-stubbed + */ + "GET /marketplace_listing/stubbed/plans": Operation<"/marketplace_listing/stubbed/plans", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-accounts-for-a-plan-stubbed + */ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts": Operation<"/marketplace_listing/stubbed/plans/{plan_id}/accounts", "get">; + /** + * @see https://docs.github.com/rest/reference/meta#get-github-meta-information + */ + "GET /meta": Operation<"/meta", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-for-a-network-of-repositories + */ + "GET /networks/{owner}/{repo}/events": Operation<"/networks/{owner}/{repo}/events", "get">; + /** + * @see 
https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user + */ + "GET /notifications": Operation<"/notifications", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-a-thread + */ + "GET /notifications/threads/{thread_id}": Operation<"/notifications/threads/{thread_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-a-thread-subscription-for-the-authenticated-user + */ + "GET /notifications/threads/{thread_id}/subscription": Operation<"/notifications/threads/{thread_id}/subscription", "get">; + /** + * @see https://docs.github.com/rest/reference/meta#get-octocat + */ + "GET /octocat": Operation<"/octocat", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations + */ + "GET /organizations": Operation<"/organizations", "get">; + /** + * @see https://docs.github.com/rest/orgs/orgs#list-requests-to-access-organization-resources-with-fine-grained-personal-access-tokens + */ + "GET /organizations/{org}/personal-access-token-requests": Operation<"/organizations/{org}/personal-access-token-requests", "get">; + /** + * @see https://docs.github.com/rest/orgs/orgs#list-repositories-requested-to-be-accessed-by-a-fine-grained-personal-access-token + */ + "GET /organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories": Operation<"/organizations/{org}/personal-access-token-requests/{pat_request_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/orgs/orgs#list-fine-grained-personal-access-tokens-with-access-to-organization-resources + */ + "GET /organizations/{org}/personal-access-tokens": Operation<"/organizations/{org}/personal-access-tokens", "get">; + /** + * @see https://docs.github.com/rest/orgs/orgs#list-repositories-a-fine-grained-personal-access-token-has-access-to + */ + "GET /organizations/{org}/personal-access-tokens/{pat_id}/repositories": Operation<"/organizations/{org}/personal-access-tokens/{pat_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-in-organization + * @deprecated "org_id" is now "org" + */ + "GET /orgs/{org_id}/codespaces": Operation<"/orgs/{org}/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-an-organization + */ + "GET /orgs/{org}": Operation<"/orgs/{org}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-cache-usage-for-an-organization + */ + "GET /orgs/{org}/actions/cache/usage": Operation<"/orgs/{org}/actions/cache/usage", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repositories-with-github-actions-cache-usage-for-an-organization + */ + "GET /orgs/{org}/actions/cache/usage-by-repository": Operation<"/orgs/{org}/actions/cache/usage-by-repository", "get">; + /** + * @see https://docs.github.com/rest/actions/oidc#get-the-customization-template-for-an-oidc-subject-claim-for-an-organization + */ + "GET /orgs/{org}/actions/oidc/customization/sub": Operation<"/orgs/{org}/actions/oidc/customization/sub", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-permissions-for-an-organization + */ + "GET /orgs/{org}/actions/permissions": Operation<"/orgs/{org}/actions/permissions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-repositories-enabled-for-github-actions-in-an-organization + */ + "GET /orgs/{org}/actions/permissions/repositories": Operation<"/orgs/{org}/actions/permissions/repositories", 
"get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-allowed-actions-for-an-organization + */ + "GET /orgs/{org}/actions/permissions/selected-actions": Operation<"/orgs/{org}/actions/permissions/selected-actions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-default-workflow-permissions + */ + "GET /orgs/{org}/actions/permissions/workflow": Operation<"/orgs/{org}/actions/permissions/workflow", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-required-workflows + */ + "GET /orgs/{org}/actions/required_workflows": Operation<"/orgs/{org}/actions/required_workflows", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-required-workflow + */ + "GET /orgs/{org}/actions/required_workflows/{required_workflow_id}": Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-repositories-required-workflows + */ + "GET /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories": Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-an-organization + */ + "GET /orgs/{org}/actions/runners": Operation<"/orgs/{org}/actions/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-runner-applications-for-an-organization + */ + "GET /orgs/{org}/actions/runners/downloads": Operation<"/orgs/{org}/actions/runners/downloads", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-for-an-organization + */ + "GET /orgs/{org}/actions/runners/{runner_id}": Operation<"/orgs/{org}/actions/runners/{runner_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-labels-for-a-self-hosted-runner-for-an-organization + */ + "GET /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-organization-secrets + */ + "GET /orgs/{org}/actions/secrets": Operation<"/orgs/{org}/actions/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-organization-public-key + */ + "GET /orgs/{org}/actions/secrets/public-key": Operation<"/orgs/{org}/actions/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-organization-secret + */ + "GET /orgs/{org}/actions/secrets/{secret_name}": Operation<"/orgs/{org}/actions/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#list-organization-variables + */ + "GET /orgs/{org}/actions/variables": Operation<"/orgs/{org}/actions/variables", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#get-an-organization-variable + */ + "GET /orgs/{org}/actions/variables/{name}": Operation<"/orgs/{org}/actions/variables/{name}", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#list-selected-repositories-for-an-organization-variable + */ + "GET /orgs/{org}/actions/variables/{name}/repositories": 
Operation<"/orgs/{org}/actions/variables/{name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-users-blocked-by-an-organization + */ + "GET /orgs/{org}/blocks": Operation<"/orgs/{org}/blocks", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#check-if-a-user-is-blocked-by-an-organization + */ + "GET /orgs/{org}/blocks/{username}": Operation<"/orgs/{org}/blocks/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-for-an-organization + */ + "GET /orgs/{org}/code-scanning/alerts": Operation<"/orgs/{org}/code-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-in-organization + */ + "GET /orgs/{org}/codespaces": Operation<"/orgs/{org}/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-organization-secrets + */ + "GET /orgs/{org}/codespaces/secrets": Operation<"/orgs/{org}/codespaces/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-an-organization-public-key + */ + "GET /orgs/{org}/codespaces/secrets/public-key": Operation<"/orgs/{org}/codespaces/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-an-organization-secret + */ + "GET /orgs/{org}/codespaces/secrets/{secret_name}": Operation<"/orgs/{org}/codespaces/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/codespaces/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/dependabot/alerts#list-dependabot-alerts-for-an-organization + */ + "GET /orgs/{org}/dependabot/alerts": Operation<"/orgs/{org}/dependabot/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-organization-secrets + */ + "GET /orgs/{org}/dependabot/secrets": Operation<"/orgs/{org}/dependabot/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-an-organization-public-key + */ + "GET /orgs/{org}/dependabot/secrets/public-key": Operation<"/orgs/{org}/dependabot/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-an-organization-secret + */ + "GET /orgs/{org}/dependabot/secrets/{secret_name}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-docker-migration-conflicting-packages-for-organization + */ + "GET /orgs/{org}/docker/conflicts": Operation<"/orgs/{org}/docker/conflicts", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-organization-events + */ + "GET /orgs/{org}/events": Operation<"/orgs/{org}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-failed-organization-invitations + */ + "GET /orgs/{org}/failed_invitations": Operation<"/orgs/{org}/failed_invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-webhooks + */ + "GET /orgs/{org}/hooks": Operation<"/orgs/{org}/hooks", "get">; + /** + * @see 
https://docs.github.com/rest/reference/orgs#get-an-organization-webhook + */ + "GET /orgs/{org}/hooks/{hook_id}": Operation<"/orgs/{org}/hooks/{hook_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-a-webhook-configuration-for-an-organization + */ + "GET /orgs/{org}/hooks/{hook_id}/config": Operation<"/orgs/{org}/hooks/{hook_id}/config", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-deliveries-for-an-organization-webhook + */ + "GET /orgs/{org}/hooks/{hook_id}/deliveries": Operation<"/orgs/{org}/hooks/{hook_id}/deliveries", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-a-webhook-delivery-for-an-organization-webhook + */ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}": Operation<"/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-an-organization-installation-for-the-authenticated-app + */ + "GET /orgs/{org}/installation": Operation<"/orgs/{org}/installation", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-app-installations-for-an-organization + */ + "GET /orgs/{org}/installations": Operation<"/orgs/{org}/installations", "get">; + /** + * @see https://docs.github.com/rest/reference/interactions#get-interaction-restrictions-for-an-organization + */ + "GET /orgs/{org}/interaction-limits": Operation<"/orgs/{org}/interaction-limits", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-pending-organization-invitations + */ + "GET /orgs/{org}/invitations": Operation<"/orgs/{org}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-invitation-teams + */ + "GET /orgs/{org}/invitations/{invitation_id}/teams": Operation<"/orgs/{org}/invitations/{invitation_id}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-organization-issues-assigned-to-the-authenticated-user + */ + "GET /orgs/{org}/issues": Operation<"/orgs/{org}/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-members + */ + "GET /orgs/{org}/members": Operation<"/orgs/{org}/members", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#check-organization-membership-for-a-user + */ + "GET /orgs/{org}/members/{username}": Operation<"/orgs/{org}/members/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-codespaces-for-user-in-org + */ + "GET /orgs/{org}/members/{username}/codespaces": Operation<"/orgs/{org}/members/{username}/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user + */ + "GET /orgs/{org}/memberships/{username}": Operation<"/orgs/{org}/memberships/{username}", "get">; + /** + * @see https://docs.github.com/rest/migrations/orgs#list-organization-migrations + */ + "GET /orgs/{org}/migrations": Operation<"/orgs/{org}/migrations", "get">; + /** + * @see https://docs.github.com/rest/migrations/orgs#get-an-organization-migration-status + */ + "GET /orgs/{org}/migrations/{migration_id}": Operation<"/orgs/{org}/migrations/{migration_id}", "get">; + /** + * @see https://docs.github.com/rest/migrations/orgs#download-an-organization-migration-archive + */ + "GET /orgs/{org}/migrations/{migration_id}/archive": Operation<"/orgs/{org}/migrations/{migration_id}/archive", "get">; + /** + * @see https://docs.github.com/rest/migrations/orgs#list-repositories-in-an-organization-migration + */ + "GET 
/orgs/{org}/migrations/{migration_id}/repositories": Operation<"/orgs/{org}/migrations/{migration_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-outside-collaborators-for-an-organization + */ + "GET /orgs/{org}/outside_collaborators": Operation<"/orgs/{org}/outside_collaborators", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-an-organization + */ + "GET /orgs/{org}/packages": Operation<"/orgs/{org}/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-for-an-organization + */ + "GET /orgs/{org}/packages/{package_type}/{package_name}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}", "get">; + /** + * @see https://docs.github.com/rest/packages#get-all-package-versions-for-a-package-owned-by-an-organization + */ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-version-for-an-organization + */ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-organization-projects + */ + "GET /orgs/{org}/projects": Operation<"/orgs/{org}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-public-organization-members + */ + "GET /orgs/{org}/public_members": Operation<"/orgs/{org}/public_members", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#check-public-organization-membership-for-a-user + */ + "GET /orgs/{org}/public_members/{username}": Operation<"/orgs/{org}/public_members/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-organization-repositories + */ + "GET /orgs/{org}/repos": Operation<"/orgs/{org}/repos", "get">; + /** + * @see https://docs.github.com/rest/repos/rules#get-organization-rulesets + */ + "GET /orgs/{org}/rulesets": Operation<"/orgs/{org}/rulesets", "get">; + /** + * @see https://docs.github.com/rest/repos/rules#get-organization-ruleset + */ + "GET /orgs/{org}/rulesets/{ruleset_id}": Operation<"/orgs/{org}/rulesets/{ruleset_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-an-organization + */ + "GET /orgs/{org}/secret-scanning/alerts": Operation<"/orgs/{org}/secret-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-security-manager-teams + */ + "GET /orgs/{org}/security-managers": Operation<"/orgs/{org}/security-managers", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-actions-billing-for-an-organization + */ + "GET /orgs/{org}/settings/billing/actions": Operation<"/orgs/{org}/settings/billing/actions", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-packages-billing-for-an-organization + */ + "GET /orgs/{org}/settings/billing/packages": Operation<"/orgs/{org}/settings/billing/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-shared-storage-billing-for-an-organization + */ + "GET /orgs/{org}/settings/billing/shared-storage": Operation<"/orgs/{org}/settings/billing/shared-storage", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-teams + */ + "GET 
/orgs/{org}/teams": Operation<"/orgs/{org}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-team-by-name + */ + "GET /orgs/{org}/teams/{team_slug}": Operation<"/orgs/{org}/teams/{team_slug}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussions + */ + "GET /orgs/{org}/teams/{team_slug}/discussions": Operation<"/orgs/{org}/teams/{team_slug}/discussions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussion-comments + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion-comment + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-pending-team-invitations + */ + "GET /orgs/{org}/teams/{team_slug}/invitations": Operation<"/orgs/{org}/teams/{team_slug}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-members + */ + "GET /orgs/{org}/teams/{team_slug}/members": Operation<"/orgs/{org}/teams/{team_slug}/members", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user + */ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}": Operation<"/orgs/{org}/teams/{team_slug}/memberships/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-projects + */ + "GET /orgs/{org}/teams/{team_slug}/projects": Operation<"/orgs/{org}/teams/{team_slug}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-project + */ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}": Operation<"/orgs/{org}/teams/{team_slug}/projects/{project_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-repositories + */ + "GET /orgs/{org}/teams/{team_slug}/repos": Operation<"/orgs/{org}/teams/{team_slug}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#check-team-permissions-for-a-repository + */ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": Operation<"/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-child-teams + */ + "GET /orgs/{org}/teams/{team_slug}/teams": Operation<"/orgs/{org}/teams/{team_slug}/teams", "get">; + /** + * @see 
https://docs.github.com/rest/reference/projects#get-a-project-card + */ + "GET /projects/columns/cards/{card_id}": Operation<"/projects/columns/cards/{card_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-a-project-column + */ + "GET /projects/columns/{column_id}": Operation<"/projects/columns/{column_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-cards + */ + "GET /projects/columns/{column_id}/cards": Operation<"/projects/columns/{column_id}/cards", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-a-project + */ + "GET /projects/{project_id}": Operation<"/projects/{project_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-collaborators + */ + "GET /projects/{project_id}/collaborators": Operation<"/projects/{project_id}/collaborators", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-project-permission-for-a-user + */ + "GET /projects/{project_id}/collaborators/{username}/permission": Operation<"/projects/{project_id}/collaborators/{username}/permission", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-columns + */ + "GET /projects/{project_id}/columns": Operation<"/projects/{project_id}/columns", "get">; + /** + * @see https://docs.github.com/rest/reference/rate-limit#get-rate-limit-status-for-the-authenticated-user + */ + "GET /rate_limit": Operation<"/rate_limit", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-required-workflows + */ + "GET /repos/{org}/{repo}/actions/required_workflows": Operation<"/repos/{org}/{repo}/actions/required_workflows", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-repository-required-workflow + */ + "GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}": Operation<"/repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-repository-required-workflow-usage + */ + "GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing": Operation<"/repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-repository + */ + "GET /repos/{owner}/{repo}": Operation<"/repos/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-artifacts-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/artifacts": Operation<"/repos/{owner}/{repo}/actions/artifacts", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-artifact + */ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}": Operation<"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-an-artifact + */ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}": Operation<"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-cache-usage-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/cache/usage": Operation<"/repos/{owner}/{repo}/actions/cache/usage", "get">; + /** + * @see https://docs.github.com/rest/actions/cache#list-github-actions-caches-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/caches": 
Operation<"/repos/{owner}/{repo}/actions/caches", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-job-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}": Operation<"/repos/{owner}/{repo}/actions/jobs/{job_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-job-logs-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs": Operation<"/repos/{owner}/{repo}/actions/jobs/{job_id}/logs", "get">; + /** + * @see https://docs.github.com/rest/actions/oidc#get-the-customization-template-for-an-oidc-subject-claim-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/oidc/customization/sub": Operation<"/repos/{owner}/{repo}/actions/oidc/customization/sub", "get">; + /** + * @see https://docs.github.com/rest/actions/secrets#list-repository-organization-secrets + */ + "GET /repos/{owner}/{repo}/actions/organization-secrets": Operation<"/repos/{owner}/{repo}/actions/organization-secrets", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#list-repository-organization-variables + */ + "GET /repos/{owner}/{repo}/actions/organization-variables": Operation<"/repos/{owner}/{repo}/actions/organization-variables", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-permissions-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions": Operation<"/repos/{owner}/{repo}/actions/permissions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-workflow-access-level-to-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions/access": Operation<"/repos/{owner}/{repo}/actions/permissions/access", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-allowed-actions-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions": Operation<"/repos/{owner}/{repo}/actions/permissions/selected-actions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-default-workflow-permissions-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions/workflow": Operation<"/repos/{owner}/{repo}/actions/permissions/workflow", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-required-workflow-runs + */ + "GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs": Operation<"/repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners": Operation<"/repos/{owner}/{repo}/actions/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-runner-applications-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners/downloads": Operation<"/repos/{owner}/{repo}/actions/runners/downloads", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-labels-for-a-self-hosted-runner-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "get">; + /** + * @see 
https://docs.github.com/rest/reference/actions#list-workflow-runs-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runs": Operation<"/repos/{owner}/{repo}/actions/runs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-the-review-history-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/approvals", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-run-artifacts + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-workflow-run-attempt + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-jobs-for-a-workflow-run-attempt + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-workflow-run-attempt-logs + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-jobs-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-workflow-run-logs + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/logs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-pending-deployments-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-workflow-run-usage + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/timing", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/actions/secrets": Operation<"/repos/{owner}/{repo}/actions/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-repository-public-key + */ + "GET /repos/{owner}/{repo}/actions/secrets/public-key": Operation<"/repos/{owner}/{repo}/actions/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-repository-secret + */ + "GET /repos/{owner}/{repo}/actions/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/actions/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#list-repository-variables + */ + "GET /repos/{owner}/{repo}/actions/variables": Operation<"/repos/{owner}/{repo}/actions/variables", "get">; + /** + * @see 
https://docs.github.com/rest/actions/variables#get-a-repository-variable + */ + "GET /repos/{owner}/{repo}/actions/variables/{name}": Operation<"/repos/{owner}/{repo}/actions/variables/{name}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-workflows + */ + "GET /repos/{owner}/{repo}/actions/workflows": Operation<"/repos/{owner}/{repo}/actions/workflows", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-workflow + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-runs + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-workflow-usage + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-assignees + */ + "GET /repos/{owner}/{repo}/assignees": Operation<"/repos/{owner}/{repo}/assignees", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#check-if-a-user-can-be-assigned + */ + "GET /repos/{owner}/{repo}/assignees/{assignee}": Operation<"/repos/{owner}/{repo}/assignees/{assignee}", "get">; + /** + * @see https://docs.github.com/rest/repos/autolinks#list-all-autolinks-of-a-repository + */ + "GET /repos/{owner}/{repo}/autolinks": Operation<"/repos/{owner}/{repo}/autolinks", "get">; + /** + * @see https://docs.github.com/rest/repos/autolinks#get-an-autolink-reference-of-a-repository + */ + "GET /repos/{owner}/{repo}/autolinks/{autolink_id}": Operation<"/repos/{owner}/{repo}/autolinks/{autolink_id}", "get">; + /** + * @see https://docs.github.com/rest/branches/branches#list-branches + */ + "GET /repos/{owner}/{repo}/branches": Operation<"/repos/{owner}/{repo}/branches", "get">; + /** + * @see https://docs.github.com/rest/branches/branches#get-a-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}": Operation<"/repos/{owner}/{repo}/branches/{branch}", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-branch-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-admin-branch-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-pull-request-review-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-commit-signature-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-status-checks-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": 
Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-all-status-check-contexts + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#get-access-restrictions + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#list-apps-with-access-to-the-protected-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#list-teams-with-access-to-the-protected-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "get">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#list-users-with-access-to-the-protected-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#get-a-check-run + */ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-run-annotations + */ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#get-a-check-suite + */ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}": Operation<"/repos/{owner}/{repo}/check-suites/{check_suite_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-runs-in-a-check-suite + */ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs": Operation<"/repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts": Operation<"/repos/{owner}/{repo}/code-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-code-scanning-alert + * @deprecated "alert_id" is now "alert_number" + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-code-scanning-alert + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-instances-of-a-code-scanning-alert + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "get">; + /** + * @see 
https://docs.github.com/rest/reference/code-scanning#list-code-scanning-analyses-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/analyses": Operation<"/repos/{owner}/{repo}/code-scanning/analyses", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}": Operation<"/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-codeql-databases-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases": Operation<"/repos/{owner}/{repo}/code-scanning/codeql/databases", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-codeql-database-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}": Operation<"/repos/{owner}/{repo}/code-scanning/codeql/databases/{language}", "get">; + /** + * @see https://docs.github.com/rest/code-scanning#get-a-code-scanning-default-setup-configuration + */ + "GET /repos/{owner}/{repo}/code-scanning/default-setup": Operation<"/repos/{owner}/{repo}/code-scanning/default-setup", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-information-about-a-sarif-upload + */ + "GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}": Operation<"/repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-codeowners-errors + */ + "GET /repos/{owner}/{repo}/codeowners/errors": Operation<"/repos/{owner}/{repo}/codeowners/errors", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-codespaces-in-a-repository-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/codespaces": Operation<"/repos/{owner}/{repo}/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-devcontainers-in-a-repository-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/codespaces/devcontainers": Operation<"/repos/{owner}/{repo}/codespaces/devcontainers", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-available-machine-types-for-a-repository + */ + "GET /repos/{owner}/{repo}/codespaces/machines": Operation<"/repos/{owner}/{repo}/codespaces/machines", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#preview-attributes-for-a-new-codespace + */ + "GET /repos/{owner}/{repo}/codespaces/new": Operation<"/repos/{owner}/{repo}/codespaces/new", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/codespaces/secrets": Operation<"/repos/{owner}/{repo}/codespaces/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-repository-public-key + */ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key": Operation<"/repos/{owner}/{repo}/codespaces/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-repository-secret + */ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#list-repository-collaborators + */ + "GET /repos/{owner}/{repo}/collaborators": Operation<"/repos/{owner}/{repo}/collaborators", "get">; + /** + * @see 
https://docs.github.com/rest/collaborators/collaborators#check-if-a-user-is-a-repository-collaborator + */ + "GET /repos/{owner}/{repo}/collaborators/{username}": Operation<"/repos/{owner}/{repo}/collaborators/{username}", "get">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#get-repository-permissions-for-a-user + */ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission": Operation<"/repos/{owner}/{repo}/collaborators/{username}/permission", "get">; + /** + * @see https://docs.github.com/rest/commits/comments#list-commit-comments-for-a-repository + */ + "GET /repos/{owner}/{repo}/comments": Operation<"/repos/{owner}/{repo}/comments", "get">; + /** + * @see https://docs.github.com/rest/commits/comments#get-a-commit-comment + */ + "GET /repos/{owner}/{repo}/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-commit-comment + */ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/comments/{comment_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#list-commits + */ + "GET /repos/{owner}/{repo}/commits": Operation<"/repos/{owner}/{repo}/commits", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#list-branches-for-head-commit + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", "get">; + /** + * @see https://docs.github.com/rest/commits/comments#list-commit-comments + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/comments", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#list-pull-requests-associated-with-a-commit + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/pulls", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#get-a-commit + */ + "GET /repos/{owner}/{repo}/commits/{ref}": Operation<"/repos/{owner}/{repo}/commits/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-runs-for-a-git-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs": Operation<"/repos/{owner}/{repo}/commits/{ref}/check-runs", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-suites-for-a-git-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites": Operation<"/repos/{owner}/{repo}/commits/{ref}/check-suites", "get">; + /** + * @see https://docs.github.com/rest/commits/statuses#get-the-combined-status-for-a-specific-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/status": Operation<"/repos/{owner}/{repo}/commits/{ref}/status", "get">; + /** + * @see https://docs.github.com/rest/commits/statuses#list-commit-statuses-for-a-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses": Operation<"/repos/{owner}/{repo}/commits/{ref}/statuses", "get">; + /** + * @see https://docs.github.com/rest/metrics/community#get-community-profile-metrics + */ + "GET /repos/{owner}/{repo}/community/profile": Operation<"/repos/{owner}/{repo}/community/profile", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#compare-two-commits + */ + "GET /repos/{owner}/{repo}/compare/{basehead}": Operation<"/repos/{owner}/{repo}/compare/{basehead}", "get">; + /** + * @see 
https://docs.github.com/rest/reference/repos#compare-two-commits + */ + "GET /repos/{owner}/{repo}/compare/{base}...{head}": Operation<"/repos/{owner}/{repo}/compare/{base}...{head}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-repository-content + */ + "GET /repos/{owner}/{repo}/contents/{path}": Operation<"/repos/{owner}/{repo}/contents/{path}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-contributors + */ + "GET /repos/{owner}/{repo}/contributors": Operation<"/repos/{owner}/{repo}/contributors", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-dependabot-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/dependabot/alerts": Operation<"/repos/{owner}/{repo}/dependabot/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-a-dependabot-alert + */ + "GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/dependabot/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/dependabot/secrets": Operation<"/repos/{owner}/{repo}/dependabot/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-a-repository-public-key + */ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key": Operation<"/repos/{owner}/{repo}/dependabot/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-a-repository-secret + */ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/dependency-graph#get-a-diff-of-the-dependencies-between-commits + */ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}": Operation<"/repos/{owner}/{repo}/dependency-graph/compare/{basehead}", "get">; + /** + * @see https://docs.github.com/rest/dependency-graph/sboms#export-a-software-bill-of-materials-sbom-for-a-repository + */ + "GET /repos/{owner}/{repo}/dependency-graph/sbom": Operation<"/repos/{owner}/{repo}/dependency-graph/sbom", "get">; + /** + * @see https://docs.github.com/rest/deployments/deployments#list-deployments + */ + "GET /repos/{owner}/{repo}/deployments": Operation<"/repos/{owner}/{repo}/deployments", "get">; + /** + * @see https://docs.github.com/rest/deployments/deployments#get-a-deployment + */ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}", "get">; + /** + * @see https://docs.github.com/rest/deployments/statuses#list-deployment-statuses + */ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "get">; + /** + * @see https://docs.github.com/rest/deployments/statuses#get-a-deployment-status + */ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}", "get">; + /** + * @see https://docs.github.com/rest/deployments/environments#list-environments + */ + "GET /repos/{owner}/{repo}/environments": Operation<"/repos/{owner}/{repo}/environments", "get">; + /** + * @see https://docs.github.com/rest/deployments/environments#get-an-environment + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}", 
"get">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#list-deployment-branch-policies + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", "get">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#get-deployment-branch-policy + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}", "get">; + /** + * @see https://docs.github.com/rest/deployments/protection-rules#get-all-deployment-protection-rules + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules", "get">; + /** + * @see https://docs.github.com/rest/deployments/protection-rules#list-custom-deployment-rule-integrations + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", "get">; + /** + * @see https://docs.github.com/rest/deployments/protection-rules#get-a-deployment-protection-rule + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repository-events + */ + "GET /repos/{owner}/{repo}/events": Operation<"/repos/{owner}/{repo}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-forks + */ + "GET /repos/{owner}/{repo}/forks": Operation<"/repos/{owner}/{repo}/forks", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-blob + */ + "GET /repos/{owner}/{repo}/git/blobs/{file_sha}": Operation<"/repos/{owner}/{repo}/git/blobs/{file_sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-commit + */ + "GET /repos/{owner}/{repo}/git/commits/{commit_sha}": Operation<"/repos/{owner}/{repo}/git/commits/{commit_sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#list-matching-references + */ + "GET /repos/{owner}/{repo}/git/matching-refs/{ref}": Operation<"/repos/{owner}/{repo}/git/matching-refs/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-reference + */ + "GET /repos/{owner}/{repo}/git/ref/{ref}": Operation<"/repos/{owner}/{repo}/git/ref/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-tag + */ + "GET /repos/{owner}/{repo}/git/tags/{tag_sha}": Operation<"/repos/{owner}/{repo}/git/tags/{tag_sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-tree + */ + "GET /repos/{owner}/{repo}/git/trees/{tree_sha}": Operation<"/repos/{owner}/{repo}/git/trees/{tree_sha}", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repos#list-repository-webhooks + */ + "GET /repos/{owner}/{repo}/hooks": Operation<"/repos/{owner}/{repo}/hooks", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repos#get-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}", "get">; + /** + * @see 
https://docs.github.com/rest/webhooks/repo-config#get-a-webhook-configuration-for-a-repository + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/config", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repo-deliveries#list-deliveries-for-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repo-deliveries#get-a-delivery-for-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}", "get">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#get-an-import-status + */ + "GET /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "get">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#get-commit-authors + */ + "GET /repos/{owner}/{repo}/import/authors": Operation<"/repos/{owner}/{repo}/import/authors", "get">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#get-large-files + */ + "GET /repos/{owner}/{repo}/import/large_files": Operation<"/repos/{owner}/{repo}/import/large_files", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-repository-installation-for-the-authenticated-app + */ + "GET /repos/{owner}/{repo}/installation": Operation<"/repos/{owner}/{repo}/installation", "get">; + /** + * @see https://docs.github.com/rest/reference/interactions#get-interaction-restrictions-for-a-repository + */ + "GET /repos/{owner}/{repo}/interaction-limits": Operation<"/repos/{owner}/{repo}/interaction-limits", "get">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#list-repository-invitations + */ + "GET /repos/{owner}/{repo}/invitations": Operation<"/repos/{owner}/{repo}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-repository-issues + */ + "GET /repos/{owner}/{repo}/issues": Operation<"/repos/{owner}/{repo}/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-comments-for-a-repository + */ + "GET /repos/{owner}/{repo}/issues/comments": Operation<"/repos/{owner}/{repo}/issues/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-an-issue-comment + */ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue-comment + */ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-events-for-a-repository + */ + "GET /repos/{owner}/{repo}/issues/events": Operation<"/repos/{owner}/{repo}/issues/events", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-an-issue-event + */ + "GET /repos/{owner}/{repo}/issues/events/{event_id}": Operation<"/repos/{owner}/{repo}/issues/events/{event_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#check-if-a-user-can-be-assigned-to-a-issue + */ + 
"GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-comments + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-events + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/events": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-timeline-events-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/timeline", "get">; + /** + * @see https://docs.github.com/rest/deploy-keys#list-deploy-keys + */ + "GET /repos/{owner}/{repo}/keys": Operation<"/repos/{owner}/{repo}/keys", "get">; + /** + * @see https://docs.github.com/rest/deploy-keys#get-a-deploy-key + */ + "GET /repos/{owner}/{repo}/keys/{key_id}": Operation<"/repos/{owner}/{repo}/keys/{key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-a-repository + */ + "GET /repos/{owner}/{repo}/labels": Operation<"/repos/{owner}/{repo}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-a-label + */ + "GET /repos/{owner}/{repo}/labels/{name}": Operation<"/repos/{owner}/{repo}/labels/{name}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-languages + */ + "GET /repos/{owner}/{repo}/languages": Operation<"/repos/{owner}/{repo}/languages", "get">; + /** + * @see https://docs.github.com/rest/reference/licenses/#get-the-license-for-a-repository + */ + "GET /repos/{owner}/{repo}/license": Operation<"/repos/{owner}/{repo}/license", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-milestones + */ + "GET /repos/{owner}/{repo}/milestones": Operation<"/repos/{owner}/{repo}/milestones", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-a-milestone + */ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-issues-in-a-milestone + */ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/notifications": Operation<"/repos/{owner}/{repo}/notifications", "get">; + /** + * @see https://docs.github.com/rest/pages#get-a-github-pages-site + */ + "GET /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "get">; + /** + * @see https://docs.github.com/rest/pages#list-github-pages-builds + */ + "GET /repos/{owner}/{repo}/pages/builds": 
Operation<"/repos/{owner}/{repo}/pages/builds", "get">; + /** + * @see https://docs.github.com/rest/pages#get-latest-pages-build + */ + "GET /repos/{owner}/{repo}/pages/builds/latest": Operation<"/repos/{owner}/{repo}/pages/builds/latest", "get">; + /** + * @see https://docs.github.com/rest/pages#get-github-pages-build + */ + "GET /repos/{owner}/{repo}/pages/builds/{build_id}": Operation<"/repos/{owner}/{repo}/pages/builds/{build_id}", "get">; + /** + * @see https://docs.github.com/rest/pages#get-a-dns-health-check-for-github-pages + */ + "GET /repos/{owner}/{repo}/pages/health": Operation<"/repos/{owner}/{repo}/pages/health", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-repository-projects + */ + "GET /repos/{owner}/{repo}/projects": Operation<"/repos/{owner}/{repo}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests + */ + "GET /repos/{owner}/{repo}/pulls": Operation<"/repos/{owner}/{repo}/pulls", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-review-comments-in-a-repository + */ + "GET /repos/{owner}/{repo}/pulls/comments": Operation<"/repos/{owner}/{repo}/pulls/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-a-review-comment-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-pull-request-review-comment + */ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-review-comments-on-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-commits-on-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/commits", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests-files + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/files", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#check-if-a-pull-request-has-been-merged + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/merge": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/merge", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-all-requested-reviewers-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-reviews-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-a-review-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", 
"get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-comments-for-a-pull-request-review + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-repository-readme + */ + "GET /repos/{owner}/{repo}/readme": Operation<"/repos/{owner}/{repo}/readme", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-repository-directory-readme + */ + "GET /repos/{owner}/{repo}/readme/{dir}": Operation<"/repos/{owner}/{repo}/readme/{dir}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-releases + */ + "GET /repos/{owner}/{repo}/releases": Operation<"/repos/{owner}/{repo}/releases", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-release-asset + */ + "GET /repos/{owner}/{repo}/releases/assets/{asset_id}": Operation<"/repos/{owner}/{repo}/releases/assets/{asset_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-the-latest-release + */ + "GET /repos/{owner}/{repo}/releases/latest": Operation<"/repos/{owner}/{repo}/releases/latest", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-release-by-tag-name + */ + "GET /repos/{owner}/{repo}/releases/tags/{tag}": Operation<"/repos/{owner}/{repo}/releases/tags/{tag}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-release + */ + "GET /repos/{owner}/{repo}/releases/{release_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-release-assets + */ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets": Operation<"/repos/{owner}/{repo}/releases/{release_id}/assets", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-release + */ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions": Operation<"/repos/{owner}/{repo}/releases/{release_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/repos/rules#get-rules-for-a-branch + */ + "GET /repos/{owner}/{repo}/rules/branches/{branch}": Operation<"/repos/{owner}/{repo}/rules/branches/{branch}", "get">; + /** + * @see https://docs.github.com/rest/repos/rules#get-repository-rulesets + */ + "GET /repos/{owner}/{repo}/rulesets": Operation<"/repos/{owner}/{repo}/rulesets", "get">; + /** + * @see https://docs.github.com/rest/repos/rules#get-repository-ruleset + */ + "GET /repos/{owner}/{repo}/rulesets/{ruleset_id}": Operation<"/repos/{owner}/{repo}/rulesets/{ruleset_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#get-a-secret-scanning-alert + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-locations-for-a-secret-scanning-alert + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "get">; + /** + * @see 
https://docs.github.com/rest/security-advisories/repository-advisories#list-repository-security-advisories + */ + "GET /repos/{owner}/{repo}/security-advisories": Operation<"/repos/{owner}/{repo}/security-advisories", "get">; + /** + * @see https://docs.github.com/rest/security-advisories/repository-advisories#get-a-repository-security-advisory + */ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}": Operation<"/repos/{owner}/{repo}/security-advisories/{ghsa_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-stargazers + */ + "GET /repos/{owner}/{repo}/stargazers": Operation<"/repos/{owner}/{repo}/stargazers", "get">; + /** + * @see https://docs.github.com/rest/metrics/statistics#get-the-weekly-commit-activity + */ + "GET /repos/{owner}/{repo}/stats/code_frequency": Operation<"/repos/{owner}/{repo}/stats/code_frequency", "get">; + /** + * @see https://docs.github.com/rest/metrics/statistics#get-the-last-year-of-commit-activity + */ + "GET /repos/{owner}/{repo}/stats/commit_activity": Operation<"/repos/{owner}/{repo}/stats/commit_activity", "get">; + /** + * @see https://docs.github.com/rest/metrics/statistics#get-all-contributor-commit-activity + */ + "GET /repos/{owner}/{repo}/stats/contributors": Operation<"/repos/{owner}/{repo}/stats/contributors", "get">; + /** + * @see https://docs.github.com/rest/metrics/statistics#get-the-weekly-commit-count + */ + "GET /repos/{owner}/{repo}/stats/participation": Operation<"/repos/{owner}/{repo}/stats/participation", "get">; + /** + * @see https://docs.github.com/rest/metrics/statistics#get-the-hourly-commit-count-for-each-day + */ + "GET /repos/{owner}/{repo}/stats/punch_card": Operation<"/repos/{owner}/{repo}/stats/punch_card", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-watchers + */ + "GET /repos/{owner}/{repo}/subscribers": Operation<"/repos/{owner}/{repo}/subscribers", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-a-repository-subscription + */ + "GET /repos/{owner}/{repo}/subscription": Operation<"/repos/{owner}/{repo}/subscription", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-tags + */ + "GET /repos/{owner}/{repo}/tags": Operation<"/repos/{owner}/{repo}/tags", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-tag-protection-state-of-a-repository + */ + "GET /repos/{owner}/{repo}/tags/protection": Operation<"/repos/{owner}/{repo}/tags/protection", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#download-a-repository-archive + */ + "GET /repos/{owner}/{repo}/tarball/{ref}": Operation<"/repos/{owner}/{repo}/tarball/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-teams + */ + "GET /repos/{owner}/{repo}/teams": Operation<"/repos/{owner}/{repo}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-all-repository-topics + */ + "GET /repos/{owner}/{repo}/topics": Operation<"/repos/{owner}/{repo}/topics", "get">; + /** + * @see https://docs.github.com/rest/metrics/traffic#get-repository-clones + */ + "GET /repos/{owner}/{repo}/traffic/clones": Operation<"/repos/{owner}/{repo}/traffic/clones", "get">; + /** + * @see https://docs.github.com/rest/metrics/traffic#get-top-referral-paths + */ + "GET /repos/{owner}/{repo}/traffic/popular/paths": Operation<"/repos/{owner}/{repo}/traffic/popular/paths", "get">; + /** + * @see https://docs.github.com/rest/metrics/traffic#get-top-referral-sources + */ + "GET 
/repos/{owner}/{repo}/traffic/popular/referrers": Operation<"/repos/{owner}/{repo}/traffic/popular/referrers", "get">; + /** + * @see https://docs.github.com/rest/metrics/traffic#get-page-views + */ + "GET /repos/{owner}/{repo}/traffic/views": Operation<"/repos/{owner}/{repo}/traffic/views", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#check-if-vulnerability-alerts-are-enabled-for-a-repository + */ + "GET /repos/{owner}/{repo}/vulnerability-alerts": Operation<"/repos/{owner}/{repo}/vulnerability-alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#download-a-repository-archive + */ + "GET /repos/{owner}/{repo}/zipball/{ref}": Operation<"/repos/{owner}/{repo}/zipball/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-public-repositories + */ + "GET /repositories": Operation<"/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-environment-secrets + */ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-environment-public-key + */ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-environment-secret + */ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#list-environment-variables + */ + "GET /repositories/{repository_id}/environments/{environment_name}/variables": Operation<"/repositories/{repository_id}/environments/{environment_name}/variables", "get">; + /** + * @see https://docs.github.com/rest/actions/variables#get-an-environment-variable + */ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/variables/{name}", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-code + */ + "GET /search/code": Operation<"/search/code", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-commits + */ + "GET /search/commits": Operation<"/search/commits", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-issues-and-pull-requests + */ + "GET /search/issues": Operation<"/search/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-labels + */ + "GET /search/labels": Operation<"/search/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-repositories + */ + "GET /search/repositories": Operation<"/search/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-topics + */ + "GET /search/topics": Operation<"/search/topics", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-users + */ + "GET /search/users": Operation<"/search/users", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#get-a-team-legacy + */ + "GET /teams/{team_id}": Operation<"/teams/{team_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussions-legacy + */ + "GET 
/teams/{team_id}/discussions": Operation<"/teams/{team_id}/discussions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussion-comments-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion-comment-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion-comment-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-pending-team-invitations-legacy + */ + "GET /teams/{team_id}/invitations": Operation<"/teams/{team_id}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-members-legacy + */ + "GET /teams/{team_id}/members": Operation<"/teams/{team_id}/members", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-team-member-legacy + */ + "GET /teams/{team_id}/members/{username}": Operation<"/teams/{team_id}/members/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user-legacy + */ + "GET /teams/{team_id}/memberships/{username}": Operation<"/teams/{team_id}/memberships/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#list-team-projects-legacy + */ + "GET /teams/{team_id}/projects": Operation<"/teams/{team_id}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#check-team-permissions-for-a-project-legacy + */ + "GET /teams/{team_id}/projects/{project_id}": Operation<"/teams/{team_id}/projects/{project_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#list-team-repositories-legacy + */ + "GET /teams/{team_id}/repos": Operation<"/teams/{team_id}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#check-team-permissions-for-a-repository-legacy + */ + "GET /teams/{team_id}/repos/{owner}/{repo}": Operation<"/teams/{team_id}/repos/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#list-child-teams-legacy + */ + "GET /teams/{team_id}/teams": Operation<"/teams/{team_id}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-the-authenticated-user + */ + "GET /user": Operation<"/user", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-users-blocked-by-the-authenticated-user + */ + "GET /user/blocks": Operation<"/user/blocks", "get">; + /** + * @see https://docs.github.com/rest/reference/users#check-if-a-user-is-blocked-by-the-authenticated-user + */ + "GET 
/user/blocks/{username}": Operation<"/user/blocks/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-codespaces-for-the-authenticated-user + */ + "GET /user/codespaces": Operation<"/user/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-secrets-for-the-authenticated-user + */ + "GET /user/codespaces/secrets": Operation<"/user/codespaces/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-public-key-for-the-authenticated-user + */ + "GET /user/codespaces/secrets/public-key": Operation<"/user/codespaces/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-secret-for-the-authenticated-user + */ + "GET /user/codespaces/secrets/{secret_name}": Operation<"/user/codespaces/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-a-user-secret + */ + "GET /user/codespaces/secrets/{secret_name}/repositories": Operation<"/user/codespaces/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-codespace-for-the-authenticated-user + */ + "GET /user/codespaces/{codespace_name}": Operation<"/user/codespaces/{codespace_name}", "get">; + /** + * @see https://docs.github.com/rest/codespaces/codespaces#get-details-about-a-codespace-export + */ + "GET /user/codespaces/{codespace_name}/exports/{export_id}": Operation<"/user/codespaces/{codespace_name}/exports/{export_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-machine-types-for-a-codespace + */ + "GET /user/codespaces/{codespace_name}/machines": Operation<"/user/codespaces/{codespace_name}/machines", "get">; + /** + * @see https://docs.github.com/rest/packages#list-docker-migration-conflicting-packages-for-authenticated-user + */ + "GET /user/docker/conflicts": Operation<"/user/docker/conflicts", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-email-addresses-for-the-authenticated-user + */ + "GET /user/emails": Operation<"/user/emails", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-followers-of-the-authenticated-user + */ + "GET /user/followers": Operation<"/user/followers", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-the-people-the-authenticated-user-follows + */ + "GET /user/following": Operation<"/user/following", "get">; + /** + * @see https://docs.github.com/rest/reference/users#check-if-a-person-is-followed-by-the-authenticated-user + */ + "GET /user/following/{username}": Operation<"/user/following/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-gpg-keys-for-the-authenticated-user + */ + "GET /user/gpg_keys": Operation<"/user/gpg_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-gpg-key-for-the-authenticated-user + */ + "GET /user/gpg_keys/{gpg_key_id}": Operation<"/user/gpg_keys/{gpg_key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-app-installations-accessible-to-the-user-access-token + */ + "GET /user/installations": Operation<"/user/installations", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-repositories-accessible-to-the-user-access-token + */ + "GET /user/installations/{installation_id}/repositories": Operation<"/user/installations/{installation_id}/repositories", "get">; + /** + * @see 
https://docs.github.com/rest/reference/interactions#get-interaction-restrictions-for-your-public-repositories + */ + "GET /user/interaction-limits": Operation<"/user/interaction-limits", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-user-account-issues-assigned-to-the-authenticated-user + */ + "GET /user/issues": Operation<"/user/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-ssh-keys-for-the-authenticated-user + */ + "GET /user/keys": Operation<"/user/keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-public-ssh-key-for-the-authenticated-user + */ + "GET /user/keys/{key_id}": Operation<"/user/keys/{key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-subscriptions-for-the-authenticated-user + */ + "GET /user/marketplace_purchases": Operation<"/user/marketplace_purchases", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-subscriptions-for-the-authenticated-user-stubbed + */ + "GET /user/marketplace_purchases/stubbed": Operation<"/user/marketplace_purchases/stubbed", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-memberships-for-the-authenticated-user + */ + "GET /user/memberships/orgs": Operation<"/user/memberships/orgs", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-an-organization-membership-for-the-authenticated-user + */ + "GET /user/memberships/orgs/{org}": Operation<"/user/memberships/orgs/{org}", "get">; + /** + * @see https://docs.github.com/rest/migrations/users#list-user-migrations + */ + "GET /user/migrations": Operation<"/user/migrations", "get">; + /** + * @see https://docs.github.com/rest/migrations/users#get-a-user-migration-status + */ + "GET /user/migrations/{migration_id}": Operation<"/user/migrations/{migration_id}", "get">; + /** + * @see https://docs.github.com/rest/migrations/users#download-a-user-migration-archive + */ + "GET /user/migrations/{migration_id}/archive": Operation<"/user/migrations/{migration_id}/archive", "get">; + /** + * @see https://docs.github.com/rest/migrations/users#list-repositories-for-a-user-migration + */ + "GET /user/migrations/{migration_id}/repositories": Operation<"/user/migrations/{migration_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user + */ + "GET /user/orgs": Operation<"/user/orgs", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-the-authenticated-user + */ + "GET /user/packages": Operation<"/user/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-for-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}": Operation<"/user/packages/{package_type}/{package_name}", "get">; + /** + * @see https://docs.github.com/rest/packages#get-all-package-versions-for-a-package-owned-by-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}/versions": Operation<"/user/packages/{package_type}/{package_name}/versions", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-version-for-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/user/packages/{package_type}/{package_name}/versions/{package_version_id}", "get">; + /** + * @see 
https://docs.github.com/rest/reference/users#list-public-email-addresses-for-the-authenticated-user + */ + "GET /user/public_emails": Operation<"/user/public_emails", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-the-authenticated-user + */ + "GET /user/repos": Operation<"/user/repos", "get">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#list-repository-invitations-for-the-authenticated-user + */ + "GET /user/repository_invitations": Operation<"/user/repository_invitations", "get">; + /** + * @see https://docs.github.com/rest/users/social-accounts#list-social-accounts-for-the-authenticated-user + */ + "GET /user/social_accounts": Operation<"/user/social_accounts", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-ssh-signing-keys-for-the-authenticated-user + */ + "GET /user/ssh_signing_keys": Operation<"/user/ssh_signing_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-ssh-signing-key-for-the-authenticated-user + */ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}": Operation<"/user/ssh_signing_keys/{ssh_signing_key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-starred-by-the-authenticated-user + */ + "GET /user/starred": Operation<"/user/starred", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#check-if-a-repository-is-starred-by-the-authenticated-user + */ + "GET /user/starred/{owner}/{repo}": Operation<"/user/starred/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-watched-by-the-authenticated-user + */ + "GET /user/subscriptions": Operation<"/user/subscriptions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-teams-for-the-authenticated-user + */ + "GET /user/teams": Operation<"/user/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-users + */ + "GET /users": Operation<"/users", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-user + */ + "GET /users/{username}": Operation<"/users/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-docker-migration-conflicting-packages-for-user + */ + "GET /users/{username}/docker/conflicts": Operation<"/users/{username}/docker/conflicts", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-events-for-the-authenticated-user + */ + "GET /users/{username}/events": Operation<"/users/{username}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-organization-events-for-the-authenticated-user + */ + "GET /users/{username}/events/orgs/{org}": Operation<"/users/{username}/events/orgs/{org}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-for-a-user + */ + "GET /users/{username}/events/public": Operation<"/users/{username}/events/public", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-followers-of-a-user + */ + "GET /users/{username}/followers": Operation<"/users/{username}/followers", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-the-people-a-user-follows + */ + "GET /users/{username}/following": Operation<"/users/{username}/following", "get">; + /** + * @see https://docs.github.com/rest/reference/users#check-if-a-user-follows-another-user + */ + "GET /users/{username}/following/{target_user}": 
Operation<"/users/{username}/following/{target_user}", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gists-for-a-user + */ + "GET /users/{username}/gists": Operation<"/users/{username}/gists", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-gpg-keys-for-a-user + */ + "GET /users/{username}/gpg_keys": Operation<"/users/{username}/gpg_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-contextual-information-for-a-user + */ + "GET /users/{username}/hovercard": Operation<"/users/{username}/hovercard", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-user-installation-for-the-authenticated-app + */ + "GET /users/{username}/installation": Operation<"/users/{username}/installation", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-keys-for-a-user + */ + "GET /users/{username}/keys": Operation<"/users/{username}/keys", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-a-user + */ + "GET /users/{username}/orgs": Operation<"/users/{username}/orgs", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-user + */ + "GET /users/{username}/packages": Operation<"/users/{username}/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-for-a-user + */ + "GET /users/{username}/packages/{package_type}/{package_name}": Operation<"/users/{username}/packages/{package_type}/{package_name}", "get">; + /** + * @see https://docs.github.com/rest/packages#get-all-package-versions-for-a-package-owned-by-a-user + */ + "GET /users/{username}/packages/{package_type}/{package_name}/versions": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-version-for-a-user + */ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-user-projects + */ + "GET /users/{username}/projects": Operation<"/users/{username}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-events-received-by-the-authenticated-user + */ + "GET /users/{username}/received_events": Operation<"/users/{username}/received_events", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-received-by-a-user + */ + "GET /users/{username}/received_events/public": Operation<"/users/{username}/received_events/public", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-a-user + */ + "GET /users/{username}/repos": Operation<"/users/{username}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-actions-billing-for-a-user + */ + "GET /users/{username}/settings/billing/actions": Operation<"/users/{username}/settings/billing/actions", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-packages-billing-for-a-user + */ + "GET /users/{username}/settings/billing/packages": Operation<"/users/{username}/settings/billing/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-shared-storage-billing-for-a-user + */ + "GET /users/{username}/settings/billing/shared-storage": 
Operation<"/users/{username}/settings/billing/shared-storage", "get">; + /** + * @see https://docs.github.com/rest/users/social-accounts#list-social-accounts-for-a-user + */ + "GET /users/{username}/social_accounts": Operation<"/users/{username}/social_accounts", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-ssh-signing-keys-for-a-user + */ + "GET /users/{username}/ssh_signing_keys": Operation<"/users/{username}/ssh_signing_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-starred-by-a-user + */ + "GET /users/{username}/starred": Operation<"/users/{username}/starred", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-watched-by-a-user + */ + "GET /users/{username}/subscriptions": Operation<"/users/{username}/subscriptions", "get">; + /** + * @see https://docs.github.com/rest/reference/meta#get-all-api-versions + */ + "GET /versions": Operation<"/versions", "get">; + /** + * @see https://docs.github.com/rest/meta#get-the-zen-of-github + */ + "GET /zen": Operation<"/zen", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#update-a-webhook-configuration-for-an-app + */ + "PATCH /app/hook/config": Operation<"/app/hook/config", "patch">; + /** + * @see https://docs.github.com/rest/reference/apps#reset-a-token + */ + "PATCH /applications/{client_id}/token": Operation<"/applications/{client_id}/token", "patch">; + /** + * @see https://docs.github.com/rest/reference/gists/#update-a-gist + */ + "PATCH /gists/{gist_id}": Operation<"/gists/{gist_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/gists#update-a-gist-comment + */ + "PATCH /gists/{gist_id}/comments/{comment_id}": Operation<"/gists/{gist_id}/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/activity#mark-a-thread-as-read + */ + "PATCH /notifications/threads/{thread_id}": Operation<"/notifications/threads/{thread_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-an-organization + */ + "PATCH /orgs/{org}": Operation<"/orgs/{org}", "patch">; + /** + * @see https://docs.github.com/rest/reference/actions#update-a-required-workflow + */ + "PATCH /orgs/{org}/actions/required_workflows/{required_workflow_id}": Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}", "patch">; + /** + * @see https://docs.github.com/rest/actions/variables#update-an-organization-variable + */ + "PATCH /orgs/{org}/actions/variables/{name}": Operation<"/orgs/{org}/actions/variables/{name}", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-an-organization-webhook + */ + "PATCH /orgs/{org}/hooks/{hook_id}": Operation<"/orgs/{org}/hooks/{hook_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-a-webhook-configuration-for-an-organization + */ + "PATCH /orgs/{org}/hooks/{hook_id}/config": Operation<"/orgs/{org}/hooks/{hook_id}/config", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-team + */ + "PATCH /orgs/{org}/teams/{team_slug}": Operation<"/orgs/{org}/teams/{team_slug}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion + */ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion-comment + */ + "PATCH 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/projects#update-a-project-card + */ + "PATCH /projects/columns/cards/{card_id}": Operation<"/projects/columns/cards/{card_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/projects#update-a-project-column + */ + "PATCH /projects/columns/{column_id}": Operation<"/projects/columns/{column_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/projects#update-a-project + */ + "PATCH /projects/{project_id}": Operation<"/projects/{project_id}", "patch">; + /** + * @see https://docs.github.com/rest/repos/repos#update-a-repository + */ + "PATCH /repos/{owner}/{repo}": Operation<"/repos/{owner}/{repo}", "patch">; + /** + * @see https://docs.github.com/rest/actions/variables#update-a-repository-variable + */ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}": Operation<"/repos/{owner}/{repo}/actions/variables/{name}", "patch">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#update-pull-request-review-protection + */ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", "patch">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#update-status-check-protection + */ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", "patch">; + /** + * @see https://docs.github.com/rest/reference/checks#update-a-check-run + */ + "PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites + */ + "PATCH /repos/{owner}/{repo}/check-suites/preferences": Operation<"/repos/{owner}/{repo}/check-suites/preferences", "patch">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#update-a-code-scanning-alert + */ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", "patch">; + /** + * @see https://docs.github.com/rest/code-scanning#update-a-code-scanning-default-setup-configuration + */ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup": Operation<"/repos/{owner}/{repo}/code-scanning/default-setup", "patch">; + /** + * @see https://docs.github.com/rest/commits/comments#update-a-commit-comment + */ + "PATCH /repos/{owner}/{repo}/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/dependabot#update-a-dependabot-alert + */ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/dependabot/alerts/{alert_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/git#update-a-reference + */ + "PATCH /repos/{owner}/{repo}/git/refs/{ref}": Operation<"/repos/{owner}/{repo}/git/refs/{ref}", "patch">; + /** + * @see https://docs.github.com/rest/webhooks/repos#update-a-repository-webhook + */ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}", "patch">; + /** + * @see 
https://docs.github.com/rest/webhooks/repo-config#update-a-webhook-configuration-for-a-repository + */ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/config", "patch">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#update-an-import + */ + "PATCH /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "patch">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author + */ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}": Operation<"/repos/{owner}/{repo}/import/authors/{author_id}", "patch">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference + */ + "PATCH /repos/{owner}/{repo}/import/lfs": Operation<"/repos/{owner}/{repo}/import/lfs", "patch">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#update-a-repository-invitation + */ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}": Operation<"/repos/{owner}/{repo}/invitations/{invitation_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-an-issue-comment + */ + "PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-an-issue + */ + "PATCH /repos/{owner}/{repo}/issues/{issue_number}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-a-label + */ + "PATCH /repos/{owner}/{repo}/labels/{name}": Operation<"/repos/{owner}/{repo}/labels/{name}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-a-milestone + */ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/pulls#update-a-review-comment-for-a-pull-request + */ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/pulls/#update-a-pull-request + */ + "PATCH /repos/{owner}/{repo}/pulls/{pull_number}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-a-release-asset + */ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}": Operation<"/repos/{owner}/{repo}/releases/assets/{asset_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-a-release + */ + "PATCH /repos/{owner}/{repo}/releases/{release_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#update-a-secret-scanning-alert + */ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", "patch">; + /** + * @see https://docs.github.com/rest/security-advisories/repository-advisories#update-a-repository-security-advisory + */ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}": Operation<"/repos/{owner}/{repo}/security-advisories/{ghsa_id}", "patch">; + /** + * @see https://docs.github.com/rest/actions/variables#update-an-environment-variable + */ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}": 
Operation<"/repositories/{repository_id}/environments/{environment_name}/variables/{name}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams/#update-a-team-legacy + */ + "PATCH /teams/{team_id}": Operation<"/teams/{team_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion-legacy + */ + "PATCH /teams/{team_id}/discussions/{discussion_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion-comment-legacy + */ + "PATCH /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/users/#update-the-authenticated-user + */ + "PATCH /user": Operation<"/user", "patch">; + /** + * @see https://docs.github.com/rest/reference/codespaces#update-a-codespace-for-the-authenticated-user + */ + "PATCH /user/codespaces/{codespace_name}": Operation<"/user/codespaces/{codespace_name}", "patch">; + /** + * @see https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user + */ + "PATCH /user/email/visibility": Operation<"/user/email/visibility", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-an-organization-membership-for-the-authenticated-user + */ + "PATCH /user/memberships/orgs/{org}": Operation<"/user/memberships/orgs/{org}", "patch">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#accept-a-repository-invitation + */ + "PATCH /user/repository_invitations/{invitation_id}": Operation<"/user/repository_invitations/{invitation_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/apps#create-a-github-app-from-a-manifest + */ + "POST /app-manifests/{code}/conversions": Operation<"/app-manifests/{code}/conversions", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#redeliver-a-delivery-for-an-app-webhook + */ + "POST /app/hook/deliveries/{delivery_id}/attempts": Operation<"/app/hook/deliveries/{delivery_id}/attempts", "post">; + /** + * @see https://docs.github.com/rest/reference/apps/#create-an-installation-access-token-for-an-app + */ + "POST /app/installations/{installation_id}/access_tokens": Operation<"/app/installations/{installation_id}/access_tokens", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#check-a-token + */ + "POST /applications/{client_id}/token": Operation<"/applications/{client_id}/token", "post">; + /** + * @see https://docs.github.com/rest/apps/apps#create-a-scoped-access-token + */ + "POST /applications/{client_id}/token/scoped": Operation<"/applications/{client_id}/token/scoped", "post">; + /** + * @see https://docs.github.com/rest/reference/gists#create-a-gist + */ + "POST /gists": Operation<"/gists", "post">; + /** + * @see https://docs.github.com/rest/reference/gists#create-a-gist-comment + */ + "POST /gists/{gist_id}/comments": Operation<"/gists/{gist_id}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/gists#fork-a-gist + */ + "POST /gists/{gist_id}/forks": Operation<"/gists/{gist_id}/forks", "post">; + /** + * @see https://docs.github.com/rest/reference/markdown#render-a-markdown-document + */ + "POST /markdown": Operation<"/markdown", "post">; + /** + * @see https://docs.github.com/rest/reference/markdown#render-a-markdown-document-in-raw-mode + */ + "POST /markdown/raw": 
Operation<"/markdown/raw", "post">; + /** + * @see https://docs.github.com/rest/orgs/orgs#review-requests-to-access-organization-resources-with-a-fine-grained-personal-access-token + */ + "POST /organizations/{org}/personal-access-token-requests": Operation<"/organizations/{org}/personal-access-token-requests", "post">; + /** + * @see https://docs.github.com/rest/orgs/orgs#review-a-request-to-access-organization-resources-with-a-fine-grained-personal-access-token + */ + "POST /organizations/{org}/personal-access-token-requests/{pat_request_id}": Operation<"/organizations/{org}/personal-access-token-requests/{pat_request_id}", "post">; + /** + * @see https://docs.github.com/rest/orgs/orgs#update-the-access-to-organization-resources-via-fine-grained-personal-access-tokens + */ + "POST /organizations/{org}/personal-access-tokens": Operation<"/organizations/{org}/personal-access-tokens", "post">; + /** + * @see https://docs.github.com/rest/orgs/orgs#update-the-access-a-fine-grained-personal-access-token-has-to-organization-resources + */ + "POST /organizations/{org}/personal-access-tokens/{pat_id}": Operation<"/organizations/{org}/personal-access-tokens/{pat_id}", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-required-workflow + */ + "POST /orgs/{org}/actions/required_workflows": Operation<"/orgs/{org}/actions/required_workflows", "post">; + /** + * @see https://docs.github.com/rest/actions/self-hosted-runners#create-configuration-for-a-just-in-time-runner-for-an-organization + */ + "POST /orgs/{org}/actions/runners/generate-jitconfig": Operation<"/orgs/{org}/actions/runners/generate-jitconfig", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-registration-token-for-an-organization + */ + "POST /orgs/{org}/actions/runners/registration-token": Operation<"/orgs/{org}/actions/runners/registration-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-remove-token-for-an-organization + */ + "POST /orgs/{org}/actions/runners/remove-token": Operation<"/orgs/{org}/actions/runners/remove-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#add-custom-labels-to-a-self-hosted-runner-for-an-organization + */ + "POST /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "post">; + /** + * @see https://docs.github.com/rest/actions/variables#create-an-organization-variable + */ + "POST /orgs/{org}/actions/variables": Operation<"/orgs/{org}/actions/variables", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#set-codespaces-billing-users + */ + "POST /orgs/{org}/codespaces/billing/selected_users": Operation<"/orgs/{org}/codespaces/billing/selected_users", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#create-an-organization-webhook + */ + "POST /orgs/{org}/hooks": Operation<"/orgs/{org}/hooks", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#redeliver-a-delivery-for-an-organization-webhook + */ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": Operation<"/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#ping-an-organization-webhook + */ + "POST /orgs/{org}/hooks/{hook_id}/pings": Operation<"/orgs/{org}/hooks/{hook_id}/pings", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#create-an-organization-invitation + */ + "POST /orgs/{org}/invitations": 
Operation<"/orgs/{org}/invitations", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces + */ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop": Operation<"/orgs/{org}/members/{username}/codespaces/{codespace_name}/stop", "post">; + /** + * @see https://docs.github.com/rest/migrations/orgs#start-an-organization-migration + */ + "POST /orgs/{org}/migrations": Operation<"/orgs/{org}/migrations", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-for-an-organization + */ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-version-for-an-organization + */ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-an-organization-project + */ + "POST /orgs/{org}/projects": Operation<"/orgs/{org}/projects", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-an-organization-repository + */ + "POST /orgs/{org}/repos": Operation<"/orgs/{org}/repos", "post">; + /** + * @see https://docs.github.com/rest/repos/rules#create-organization-repository-ruleset + */ + "POST /orgs/{org}/rulesets": Operation<"/orgs/{org}/rulesets", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-team + */ + "POST /orgs/{org}/teams": Operation<"/orgs/{org}/teams", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion + */ + "POST /orgs/{org}/teams/{team_slug}/discussions": Operation<"/orgs/{org}/teams/{team_slug}/discussions", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion-comment + */ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion-comment + */ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion + */ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#enable-or-disable-security-product-on-all-org-repos + */ + "POST /orgs/{org}/{security_product}/{enablement}": Operation<"/orgs/{org}/{security_product}/{enablement}", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#move-a-project-card + */ + "POST /projects/columns/cards/{card_id}/moves": Operation<"/projects/columns/cards/{card_id}/moves", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-project-card + */ + "POST /projects/columns/{column_id}/cards": Operation<"/projects/columns/{column_id}/cards", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#move-a-project-column + */ + "POST 
/projects/columns/{column_id}/moves": Operation<"/projects/columns/{column_id}/moves", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-project-column + */ + "POST /projects/{project_id}/columns": Operation<"/projects/{project_id}/columns", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#re-run-job-for-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun": Operation<"/repos/{owner}/{repo}/actions/jobs/{job_id}/rerun", "post">; + /** + * @see https://docs.github.com/rest/actions/self-hosted-runners#create-configuration-for-a-just-in-time-runner-for-a-repository + */ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig": Operation<"/repos/{owner}/{repo}/actions/runners/generate-jitconfig", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-registration-token-for-a-repository + */ + "POST /repos/{owner}/{repo}/actions/runners/registration-token": Operation<"/repos/{owner}/{repo}/actions/runners/registration-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-remove-token-for-a-repository + */ + "POST /repos/{owner}/{repo}/actions/runners/remove-token": Operation<"/repos/{owner}/{repo}/actions/runners/remove-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#add-custom-labels-to-a-self-hosted-runner-for-a-repository + */ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#approve-a-workflow-run-for-a-fork-pull-request + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/approve", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#cancel-a-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/cancel", "post">; + /** + * @see https://docs.github.com/rest/actions/workflow-runs#review-custom-deployment-protection-rules-for-a-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#review-pending-deployments-for-a-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#re-run-a-workflow + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/rerun", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#re-run-workflow-failed-jobs + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs", "post">; + /** + * @see https://docs.github.com/rest/actions/variables#create-a-repository-variable + */ + "POST /repos/{owner}/{repo}/actions/variables": Operation<"/repos/{owner}/{repo}/actions/variables", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-workflow-dispatch-event + */ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches", 
"post">; + /** + * @see https://docs.github.com/rest/repos/autolinks#create-an-autolink-reference-for-a-repository + */ + "POST /repos/{owner}/{repo}/autolinks": Operation<"/repos/{owner}/{repo}/autolinks", "post">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#set-admin-branch-protection + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", "post">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#create-commit-signature-protection + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", "post">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#add-status-check-contexts + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "post">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#add-app-access-restrictions + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "post">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#add-team-access-restrictions + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "post">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#add-user-access-restrictions + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "post">; + /** + * @see https://docs.github.com/rest/branches/branches#rename-a-branch + */ + "POST /repos/{owner}/{repo}/branches/{branch}/rename": Operation<"/repos/{owner}/{repo}/branches/{branch}/rename", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#create-a-check-run + */ + "POST /repos/{owner}/{repo}/check-runs": Operation<"/repos/{owner}/{repo}/check-runs", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#rerequest-a-check-run + */ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#create-a-check-suite + */ + "POST /repos/{owner}/{repo}/check-suites": Operation<"/repos/{owner}/{repo}/check-suites", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#rerequest-a-check-suite + */ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest": Operation<"/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", "post">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#upload-an-analysis-as-sarif-data + */ + "POST /repos/{owner}/{repo}/code-scanning/sarifs": Operation<"/repos/{owner}/{repo}/code-scanning/sarifs", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-a-codespace-in-a-repository + */ + "POST /repos/{owner}/{repo}/codespaces": Operation<"/repos/{owner}/{repo}/codespaces", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-commit-comment + */ + "POST 
/repos/{owner}/{repo}/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/comments/{comment_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/commits/comments#create-a-commit-comment + */ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/dependency-graph#create-a-snapshot-of-dependencies-for-a-repository + */ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots": Operation<"/repos/{owner}/{repo}/dependency-graph/snapshots", "post">; + /** + * @see https://docs.github.com/rest/deployments/deployments#create-a-deployment + */ + "POST /repos/{owner}/{repo}/deployments": Operation<"/repos/{owner}/{repo}/deployments", "post">; + /** + * @see https://docs.github.com/rest/deployments/statuses#create-a-deployment-status + */ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-repository-dispatch-event + */ + "POST /repos/{owner}/{repo}/dispatches": Operation<"/repos/{owner}/{repo}/dispatches", "post">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#create-deployment-branch-policy + */ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", "post">; + /** + * @see https://docs.github.com/rest/deployments/deployment-protection-rules#create-a-deployment-protection-rule + */ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-fork + */ + "POST /repos/{owner}/{repo}/forks": Operation<"/repos/{owner}/{repo}/forks", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-blob + */ + "POST /repos/{owner}/{repo}/git/blobs": Operation<"/repos/{owner}/{repo}/git/blobs", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-commit + */ + "POST /repos/{owner}/{repo}/git/commits": Operation<"/repos/{owner}/{repo}/git/commits", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-reference + */ + "POST /repos/{owner}/{repo}/git/refs": Operation<"/repos/{owner}/{repo}/git/refs", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-tag-object + */ + "POST /repos/{owner}/{repo}/git/tags": Operation<"/repos/{owner}/{repo}/git/tags", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-tree + */ + "POST /repos/{owner}/{repo}/git/trees": Operation<"/repos/{owner}/{repo}/git/trees", "post">; + /** + * @see https://docs.github.com/rest/webhooks/repos#create-a-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks": Operation<"/repos/{owner}/{repo}/hooks", "post">; + /** + * @see https://docs.github.com/rest/webhooks/repo-deliveries#redeliver-a-delivery-for-a-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", "post">; + /** + * @see https://docs.github.com/rest/webhooks/repos#ping-a-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/pings": 
Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/pings", "post">; + /** + * @see https://docs.github.com/rest/webhooks/repos#test-the-push-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/tests": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/tests", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-an-issue + */ + "POST /repos/{owner}/{repo}/issues": Operation<"/repos/{owner}/{repo}/issues", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-an-issue-comment + */ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#add-assignees-to-an-issue + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/assignees", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-an-issue-comment + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#add-labels-to-an-issue + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-an-issue + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/deploy-keys#create-a-deploy-key + */ + "POST /repos/{owner}/{repo}/keys": Operation<"/repos/{owner}/{repo}/keys", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-a-label + */ + "POST /repos/{owner}/{repo}/labels": Operation<"/repos/{owner}/{repo}/labels", "post">; + /** + * @see https://docs.github.com/rest/branches/branches#sync-a-fork-branch-with-the-upstream-repository + */ + "POST /repos/{owner}/{repo}/merge-upstream": Operation<"/repos/{owner}/{repo}/merge-upstream", "post">; + /** + * @see https://docs.github.com/rest/branches/branches#merge-a-branch + */ + "POST /repos/{owner}/{repo}/merges": Operation<"/repos/{owner}/{repo}/merges", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-a-milestone + */ + "POST /repos/{owner}/{repo}/milestones": Operation<"/repos/{owner}/{repo}/milestones", "post">; + /** + * @see https://docs.github.com/rest/pages#create-a-github-pages-site + */ + "POST /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "post">; + /** + * @see https://docs.github.com/rest/pages#request-a-github-pages-build + */ + "POST /repos/{owner}/{repo}/pages/builds": Operation<"/repos/{owner}/{repo}/pages/builds", "post">; + /** + * @see https://docs.github.com/rest/pages#create-a-github-pages-deployment + */ + "POST /repos/{owner}/{repo}/pages/deployment": Operation<"/repos/{owner}/{repo}/pages/deployment", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-repository-project + */ + "POST /repos/{owner}/{repo}/projects": Operation<"/repos/{owner}/{repo}/projects", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls": Operation<"/repos/{owner}/{repo}/pulls", "post">; + /** + * @see 
https://docs.github.com/rest/reference/reactions#create-reaction-for-a-pull-request-review-comment + */ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-a-codespace-from-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/codespaces", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-review-comment-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-reply-for-a-review-comment + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#request-reviewers-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-review-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#submit-a-review-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events", "post">; + /** + * @see https://docs.github.com/rest/releases/releases#create-a-release + */ + "POST /repos/{owner}/{repo}/releases": Operation<"/repos/{owner}/{repo}/releases", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#generate-release-notes + */ + "POST /repos/{owner}/{repo}/releases/generate-notes": Operation<"/repos/{owner}/{repo}/releases/generate-notes", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions/#create-reaction-for-a-release + */ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions": Operation<"/repos/{owner}/{repo}/releases/{release_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/repos/rules#create-repository-ruleset + */ + "POST /repos/{owner}/{repo}/rulesets": Operation<"/repos/{owner}/{repo}/rulesets", "post">; + /** + * @see https://docs.github.com/rest/security-advisories/repository-advisories#create-a-repository-security-advisory + */ + "POST /repos/{owner}/{repo}/security-advisories": Operation<"/repos/{owner}/{repo}/security-advisories", "post">; + /** + * @see https://docs.github.com/rest/security-advisories/repository-advisories#privately-report-a-security-vulnerability + */ + "POST /repos/{owner}/{repo}/security-advisories/reports": Operation<"/repos/{owner}/{repo}/security-advisories/reports", "post">; + /** + * @see https://docs.github.com/rest/commits/statuses#create-a-commit-status + */ + "POST /repos/{owner}/{repo}/statuses/{sha}": Operation<"/repos/{owner}/{repo}/statuses/{sha}", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-tag-protection-state-for-a-repository + */ + "POST /repos/{owner}/{repo}/tags/protection": Operation<"/repos/{owner}/{repo}/tags/protection", "post">; + /** + * @see 
https://docs.github.com/rest/reference/repos#transfer-a-repository + */ + "POST /repos/{owner}/{repo}/transfer": Operation<"/repos/{owner}/{repo}/transfer", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-repository-using-a-template + */ + "POST /repos/{template_owner}/{template_repo}/generate": Operation<"/repos/{template_owner}/{template_repo}/generate", "post">; + /** + * @see https://docs.github.com/rest/actions/variables#create-an-environment-variable + */ + "POST /repositories/{repository_id}/environments/{environment_name}/variables": Operation<"/repositories/{repository_id}/environments/{environment_name}/variables", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion-legacy + */ + "POST /teams/{team_id}/discussions": Operation<"/teams/{team_id}/discussions", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion-comment-legacy + */ + "POST /teams/{team_id}/discussions/{discussion_number}/comments": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions/#create-reaction-for-a-team-discussion-comment-legacy + */ + "POST /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions/#create-reaction-for-a-team-discussion-legacy + */ + "POST /teams/{team_id}/discussions/{discussion_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces": Operation<"/user/codespaces", "post">; + /** + * @see https://docs.github.com/rest/codespaces/codespaces#export-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces/{codespace_name}/exports": Operation<"/user/codespaces/{codespace_name}/exports", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces/codespaces#create-a-repository-from-an-unpublished-codespace + */ + "POST /user/codespaces/{codespace_name}/publish": Operation<"/user/codespaces/{codespace_name}/publish", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#start-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces/{codespace_name}/start": Operation<"/user/codespaces/{codespace_name}/start", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#stop-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces/{codespace_name}/stop": Operation<"/user/codespaces/{codespace_name}/stop", "post">; + /** + * @see https://docs.github.com/rest/reference/users#add-an-email-address-for-the-authenticated-user + */ + "POST /user/emails": Operation<"/user/emails", "post">; + /** + * @see https://docs.github.com/rest/reference/users#create-a-gpg-key-for-the-authenticated-user + */ + "POST /user/gpg_keys": Operation<"/user/gpg_keys", "post">; + /** + * @see https://docs.github.com/rest/reference/users#create-a-public-ssh-key-for-the-authenticated-user + */ + "POST /user/keys": Operation<"/user/keys", "post">; + /** + * @see https://docs.github.com/rest/migrations/users#start-a-user-migration + */ + "POST /user/migrations": Operation<"/user/migrations", "post">; + /** + * @see 
https://docs.github.com/rest/reference/packages#restore-a-package-for-the-authenticated-user + */ + "POST /user/packages/{package_type}/{package_name}/restore{?token}": Operation<"/user/packages/{package_type}/{package_name}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-version-for-the-authenticated-user + */ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": Operation<"/user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-user-project + */ + "POST /user/projects": Operation<"/user/projects", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user + */ + "POST /user/repos": Operation<"/user/repos", "post">; + /** + * @see https://docs.github.com/rest/users/social-accounts#add-social-account-for-authenticated-user + */ + "POST /user/social_accounts": Operation<"/user/social_accounts", "post">; + /** + * @see https://docs.github.com/rest/reference/users#create-an-ssh-signing-key-for-the-authenticated-user + */ + "POST /user/ssh_signing_keys": Operation<"/user/ssh_signing_keys", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-for-a-user + */ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}": Operation<"/users/{username}/packages/{package_type}/{package_name}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-version-for-a-user + */ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#upload-a-release-asset + */ + "POST {origin}/repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}": Operation<"/repos/{owner}/{repo}/releases/{release_id}/assets", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#suspend-an-app-installation + */ + "PUT /app/installations/{installation_id}/suspended": Operation<"/app/installations/{installation_id}/suspended", "put">; + /** + * @see https://docs.github.com/rest/reference/gists#star-a-gist + */ + "PUT /gists/{gist_id}/star": Operation<"/gists/{gist_id}/star", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#mark-notifications-as-read + */ + "PUT /notifications": Operation<"/notifications", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#set-a-thread-subscription + */ + "PUT /notifications/threads/{thread_id}/subscription": Operation<"/notifications/threads/{thread_id}/subscription", "put">; + /** + * @see https://docs.github.com/rest/actions/oidc#set-the-customization-template-for-an-oidc-subject-claim-for-an-organization + */ + "PUT /orgs/{org}/actions/oidc/customization/sub": Operation<"/orgs/{org}/actions/oidc/customization/sub", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-github-actions-permissions-for-an-organization + */ + "PUT /orgs/{org}/actions/permissions": Operation<"/orgs/{org}/actions/permissions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-selected-repositories-enabled-for-github-actions-in-an-organization + */ + "PUT /orgs/{org}/actions/permissions/repositories": 
Operation<"/orgs/{org}/actions/permissions/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#enable-a-selected-repository-for-github-actions-in-an-organization + */ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}": Operation<"/orgs/{org}/actions/permissions/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-allowed-actions-for-an-organization + */ + "PUT /orgs/{org}/actions/permissions/selected-actions": Operation<"/orgs/{org}/actions/permissions/selected-actions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-default-workflow-permissions + */ + "PUT /orgs/{org}/actions/permissions/workflow": Operation<"/orgs/{org}/actions/permissions/workflow", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-selected-repositories-for-a-required-workflow + */ + "PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories": Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#add-a-repository-to-selected-repositories-list-for-a-required-workflow + */ + "PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-custom-labels-for-a-self-hosted-runner-for-an-organization + */ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret + */ + "PUT /orgs/{org}/actions/secrets/{secret_name}": Operation<"/orgs/{org}/actions/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-selected-repositories-for-an-organization-secret + */ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#add-selected-repository-to-an-organization-secret + */ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/actions/variables#set-selected-repositories-for-an-organization-variable + */ + "PUT /orgs/{org}/actions/variables/{name}/repositories": Operation<"/orgs/{org}/actions/variables/{name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/actions/variables#add-selected-repository-to-an-organization-variable + */ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/variables/{name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#block-a-user-from-an-organization + */ + "PUT /orgs/{org}/blocks/{username}": Operation<"/orgs/{org}/blocks/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#set-codespaces-billing + */ + "PUT /orgs/{org}/codespaces/billing": Operation<"/orgs/{org}/codespaces/billing", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret + */ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}": 
Operation<"/orgs/{org}/codespaces/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-an-organization-secret + */ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/codespaces/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#add-selected-repository-to-an-organization-secret + */ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret + */ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#set-selected-repositories-for-an-organization-secret + */ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#add-selected-repository-to-an-organization-secret + */ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/interactions#set-interaction-restrictions-for-an-organization + */ + "PUT /orgs/{org}/interaction-limits": Operation<"/orgs/{org}/interaction-limits", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#set-organization-membership-for-a-user + */ + "PUT /orgs/{org}/memberships/{username}": Operation<"/orgs/{org}/memberships/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#convert-an-organization-member-to-outside-collaborator + */ + "PUT /orgs/{org}/outside_collaborators/{username}": Operation<"/orgs/{org}/outside_collaborators/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#set-public-organization-membership-for-the-authenticated-user + */ + "PUT /orgs/{org}/public_members/{username}": Operation<"/orgs/{org}/public_members/{username}", "put">; + /** + * @see https://docs.github.com/rest/repos/rules#update-organization-ruleset + */ + "PUT /orgs/{org}/rulesets/{ruleset_id}": Operation<"/orgs/{org}/rulesets/{ruleset_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#add-a-security-manager-team + */ + "PUT /orgs/{org}/security-managers/teams/{team_slug}": Operation<"/orgs/{org}/security-managers/teams/{team_slug}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user + */ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}": Operation<"/orgs/{org}/teams/{team_slug}/memberships/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-project-permissions + */ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}": Operation<"/orgs/{org}/teams/{team_slug}/projects/{project_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams/#add-or-update-team-repository-permissions + */ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": Operation<"/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", "put">; + /** + * @see https://docs.github.com/rest/reference/projects#add-project-collaborator + */ + "PUT 
/projects/{project_id}/collaborators/{username}": Operation<"/projects/{project_id}/collaborators/{username}", "put">; + /** + * @see https://docs.github.com/rest/actions/oidc#set-the-customization-template-for-an-oidc-subject-claim-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub": Operation<"/repos/{owner}/{repo}/actions/oidc/customization/sub", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-github-actions-permissions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions": Operation<"/repos/{owner}/{repo}/actions/permissions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-workflow-access-to-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions/access": Operation<"/repos/{owner}/{repo}/actions/permissions/access", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-allowed-actions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions": Operation<"/repos/{owner}/{repo}/actions/permissions/selected-actions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-default-workflow-permissions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow": Operation<"/repos/{owner}/{repo}/actions/permissions/workflow", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-custom-labels-for-a-self-hosted-runner-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#create-or-update-a-repository-secret + */ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/actions/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#disable-a-workflow + */ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#enable-a-workflow + */ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#enable-automated-security-fixes + */ + "PUT /repos/{owner}/{repo}/automated-security-fixes": Operation<"/repos/{owner}/{repo}/automated-security-fixes", "put">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#update-branch-protection + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection", "put">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#set-status-check-contexts + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "put">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#set-app-access-restrictions + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "put">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#set-team-access-restrictions + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": 
Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "put">; + /** + * @see https://docs.github.com/rest/branches/branch-protection#set-user-access-restrictions + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-or-update-a-repository-secret + */ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#add-a-repository-collaborator + */ + "PUT /repos/{owner}/{repo}/collaborators/{username}": Operation<"/repos/{owner}/{repo}/collaborators/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#create-or-update-file-contents + */ + "PUT /repos/{owner}/{repo}/contents/{path}": Operation<"/repos/{owner}/{repo}/contents/{path}", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#create-or-update-a-repository-secret + */ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/deployments/environments#create-or-update-an-environment + */ + "PUT /repos/{owner}/{repo}/environments/{environment_name}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}", "put">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#update-deployment-branch-policy + */ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}", "put">; + /** + * @see https://docs.github.com/rest/migrations/source-imports#start-an-import + */ + "PUT /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "put">; + /** + * @see https://docs.github.com/rest/reference/interactions#set-interaction-restrictions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/interaction-limits": Operation<"/repos/{owner}/{repo}/interaction-limits", "put">; + /** + * @see https://docs.github.com/rest/reference/issues#set-labels-for-an-issue + */ + "PUT /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "put">; + /** + * @see https://docs.github.com/rest/reference/issues#lock-an-issue + */ + "PUT /repos/{owner}/{repo}/issues/{issue_number}/lock": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/lock", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#enable-git-lfs-for-a-repository + */ + "PUT /repos/{owner}/{repo}/lfs": Operation<"/repos/{owner}/{repo}/lfs", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#mark-repository-notifications-as-read + */ + "PUT /repos/{owner}/{repo}/notifications": Operation<"/repos/{owner}/{repo}/notifications", "put">; + /** + * @see https://docs.github.com/rest/pages#update-information-about-a-github-pages-site + */ + "PUT /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#merge-a-pull-request + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/merge", "put">; + /** + * @see 
https://docs.github.com/rest/reference/pulls#update-a-review-for-a-pull-request + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#dismiss-a-review-for-a-pull-request + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#update-a-pull-request-branch + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/update-branch", "put">; + /** + * @see https://docs.github.com/rest/repos/rules#update-repository-ruleset + */ + "PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}": Operation<"/repos/{owner}/{repo}/rulesets/{ruleset_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#set-a-repository-subscription + */ + "PUT /repos/{owner}/{repo}/subscription": Operation<"/repos/{owner}/{repo}/subscription", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#replace-all-repository-topics + */ + "PUT /repos/{owner}/{repo}/topics": Operation<"/repos/{owner}/{repo}/topics", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#enable-vulnerability-alerts + */ + "PUT /repos/{owner}/{repo}/vulnerability-alerts": Operation<"/repos/{owner}/{repo}/vulnerability-alerts", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#create-or-update-an-environment-secret + */ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-team-member-legacy + */ + "PUT /teams/{team_id}/members/{username}": Operation<"/teams/{team_id}/members/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user-legacy + */ + "PUT /teams/{team_id}/memberships/{username}": Operation<"/teams/{team_id}/memberships/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams/#add-or-update-team-project-permissions-legacy + */ + "PUT /teams/{team_id}/projects/{project_id}": Operation<"/teams/{team_id}/projects/{project_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-repository-permissions-legacy + */ + "PUT /teams/{team_id}/repos/{owner}/{repo}": Operation<"/teams/{team_id}/repos/{owner}/{repo}", "put">; + /** + * @see https://docs.github.com/rest/reference/users#block-a-user + */ + "PUT /user/blocks/{username}": Operation<"/user/blocks/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-or-update-a-secret-for-the-authenticated-user + */ + "PUT /user/codespaces/secrets/{secret_name}": Operation<"/user/codespaces/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-a-user-secret + */ + "PUT /user/codespaces/secrets/{secret_name}/repositories": Operation<"/user/codespaces/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#add-a-selected-repository-to-a-user-secret + */ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}": 
Operation<"/user/codespaces/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/users#follow-a-user + */ + "PUT /user/following/{username}": Operation<"/user/following/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/apps#add-a-repository-to-an-app-installation + */ + "PUT /user/installations/{installation_id}/repositories/{repository_id}": Operation<"/user/installations/{installation_id}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/interactions#set-interaction-restrictions-for-your-public-repositories + */ + "PUT /user/interaction-limits": Operation<"/user/interaction-limits", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#star-a-repository-for-the-authenticated-user + */ + "PUT /user/starred/{owner}/{repo}": Operation<"/user/starred/{owner}/{repo}", "put">; +} +export {}; diff --git a/dist/node_modules/@octokit/types/dist-types/index.d.ts b/dist/node_modules/@octokit/types/dist-types/index.d.ts new file mode 100644 index 0000000..004ae9b --- /dev/null +++ b/dist/node_modules/@octokit/types/dist-types/index.d.ts @@ -0,0 +1,21 @@ +export * from "./AuthInterface"; +export * from "./EndpointDefaults"; +export * from "./EndpointInterface"; +export * from "./EndpointOptions"; +export * from "./Fetch"; +export * from "./OctokitResponse"; +export * from "./RequestError"; +export * from "./RequestHeaders"; +export * from "./RequestInterface"; +export * from "./RequestMethod"; +export * from "./RequestOptions"; +export * from "./RequestParameters"; +export * from "./RequestRequestOptions"; +export * from "./ResponseHeaders"; +export * from "./Route"; +export * from "./Signal"; +export * from "./StrategyInterface"; +export * from "./Url"; +export * from "./VERSION"; +export * from "./GetResponseTypeFromEndpointMethod"; +export * from "./generated/Endpoints"; diff --git a/dist/node_modules/@octokit/types/package.json b/dist/node_modules/@octokit/types/package.json new file mode 100644 index 0000000..a3095ef --- /dev/null +++ b/dist/node_modules/@octokit/types/package.json @@ -0,0 +1,46 @@ +{ + "name": "@octokit/types", + "version": "9.3.2", + "publishConfig": { + "access": "public" + }, + "description": "Shared TypeScript definitions for Octokit projects", + "dependencies": { + "@octokit/openapi-types": "^18.0.0" + }, + "repository": "github:octokit/types.ts", + "keywords": [ + "github", + "api", + "sdk", + "toolkit", + "typescript" + ], + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "devDependencies": { + "@octokit/tsconfig": "^1.0.2", + "@types/node": ">= 8", + "github-openapi-graphql-query": "^4.0.0", + "handlebars": "^4.7.6", + "json-schema-to-typescript": "^13.0.0", + "lodash.set": "^4.3.2", + "npm-run-all": "^4.1.5", + "pascal-case": "^3.1.1", + "prettier": "^2.0.0", + "semantic-release": "^21.0.0", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "sort-keys": "^4.2.0", + "string-to-jsdoc-comment": "^1.0.0", + "typedoc": "^0.24.0", + "typescript": "^5.0.0" + }, + "octokit": { + "openapi-version": "12.0.0" + }, + "files": [ + "dist-types/**" + ], + "types": "dist-types/index.d.ts", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/webhooks-methods/LICENSE b/dist/node_modules/@octokit/webhooks-methods/LICENSE new file mode 100644 index 0000000..57049d0 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/LICENSE @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2021 
Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/dist/node_modules/@octokit/webhooks-methods/README.md b/dist/node_modules/@octokit/webhooks-methods/README.md new file mode 100644 index 0000000..b0df807 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/README.md @@ -0,0 +1,274 @@ +# webhooks-methods.js + +> Methods to handle GitHub Webhook requests + +[![@latest](https://img.shields.io/npm/v/@octokit/webhooks-methods.svg)](https://www.npmjs.com/package/@octokit/webhooks-methods) +[![Build Status](https://github.com/octokit/webhooks-methods.js/workflows/Test/badge.svg)](https://github.com/octokit/webhooks-methods.js/actions?query=workflow%3ATest+branch%3Amain) + +

+Table of contents + + + +- [usage](#usage) +- [Methods](#methods) + - [`sign()`](#sign) + - [`verify()`](#verify) + - [`verifyWithFallback()`](#verifyWithFallback) +- [Contributing](#contributing) +- [License](#license) + + + +
+ +## usage + + + + + + +
+
+Browsers
+
+🚧 `@octokit/webhooks-methods` is not meant to be used in browsers. The webhook secret is a sensitive credential that must not be exposed to users.
+
+Load `@octokit/webhooks-methods` directly from [cdn.skypack.dev](https://cdn.skypack.dev)
+
+```html
+<script type="module">
+  import { sign, verify, verifyWithFallback } from "https://cdn.skypack.dev/@octokit/webhooks-methods";
+</script>
+```
+
+ +Node + + + +Install with `npm install @octokit/core @octokit/webhooks-methods` + +```js +const { + sign, + verify, + verifyWithFallback, +} = require("@octokit/webhooks-methods"); +``` + +
+ +```js +await sign("mysecret", eventPayloadString); +// resolves with a string like "sha256=4864d2759938a15468b5df9ade20bf161da9b4f737ea61794142f3484236bda3" + +await sign({ secret: "mysecret", algorithm: "sha1" }, eventPayloadString); +// resolves with a string like "sha1=d03207e4b030cf234e3447bac4d93add4c6643d8" + +await verify("mysecret", eventPayloadString, "sha256=486d27..."); +// resolves with true or false + +await verifyWithFallback("mysecret", eventPayloadString, "sha256=486d27...", ["oldsecret", ...]); +// resolves with true or false +``` + +## Methods + +### `sign()` + +```js +await sign(secret, eventPayloadString); +await sign({ secret, algorithm }, eventPayloadString); +``` + + + + + + + + + + + + + + +
+| Parameter | Type | Description |
+| --- | --- | --- |
+| `secret` | `String` | **Required.** Secret as configured in GitHub Settings. |
+| `algorithm` | `String` | Algorithm to calculate signature. Can be set to `sha1` or `sha256`. `sha1` is supported for legacy reasons. GitHub Enterprise Server 2.22 and older do not send the `X-Hub-Signature-256` header. Defaults to `sha256`. Learn more at [Validating payloads from GitHub](https://docs.github.com/en/developers/webhooks-and-events/securing-your-webhooks#validating-payloads-from-github). |
+| `eventPayloadString` | `String` | **Required.** Webhook request payload as received from GitHub. If you have only access to an already parsed object, stringify it with `JSON.stringify(payload)`. |
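For illustration, a minimal sketch of signing an already parsed payload, per the note above; the payload object and secret here are made-up placeholders, not part of the original README:

```js
const { sign } = require("@octokit/webhooks-methods");

// Made-up payload and secret, for illustration only.
const payload = { action: "opened", issue: { number: 1 } };
const secret = "mysecret";

// sign() expects a string, so stringify the parsed payload first.
sign(secret, JSON.stringify(payload)).then((signature) => {
  console.log(signature); // e.g. "sha256=..."
});
```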
+### `verify()`
+
+```js
+await verify(secret, eventPayloadString, signature);
+```
+
+| Parameter | Type | Description |
+| --- | --- | --- |
+| `secret` | `String` | **Required.** Secret as configured in GitHub Settings. |
+| `eventPayloadString` | `String` | **Required.** Webhook request payload as received from GitHub. If you only have access to an already parsed object, stringify it with `JSON.stringify(payload)`. |
+| `signature` | `String` | **Required.** Signature string as calculated by `sign()`. |
+
+Resolves with `true` or `false`. Throws an error if an argument is missing.
+
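+For example, a minimal sketch of verifying incoming deliveries in a plain Node `http` server; the port, the environment variable holding the secret, and the response bodies are placeholder assumptions, while `X-Hub-Signature-256` is the header GitHub sends for `sha256` signatures:
+
+```js
+const { createServer } = require("http");
+const { verify } = require("@octokit/webhooks-methods");
+
+// Assumption: the webhook secret is provided via an environment variable.
+const WEBHOOK_SECRET = process.env.WEBHOOK_SECRET;
+
+createServer((request, response) => {
+  let body = "";
+  request.setEncoding("utf8");
+  request.on("data", (chunk) => (body += chunk));
+  request.on("end", async () => {
+    const signature = request.headers["x-hub-signature-256"];
+    // Reject deliveries that carry no signature or whose signature does not match.
+    if (!signature || !(await verify(WEBHOOK_SECRET, body, signature))) {
+      response.writeHead(401).end("signature verification failed");
+      return;
+    }
+    response.writeHead(200).end("ok");
+  });
+}).listen(3000);
+```
+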
+### `verifyWithFallback()`
+
+```js
+await verifyWithFallback(
+  secret,
+  eventPayloadString,
+  signature,
+  additionalSecrets
+);
+```
+
+| Parameter | Type | Description |
+| --- | --- | --- |
+| `secret` | `String` | **Required.** Secret as configured in GitHub Settings. |
+| `eventPayloadString` | `String` | **Required.** Webhook request payload as received from GitHub. If you only have access to an already parsed object, stringify it with `JSON.stringify(payload)`. |
+| `signature` | `String` | **Required.** Signature string as calculated by `sign()`. |
+| `additionalSecrets` | `Array of String` | If given, each additional secret will be tried in turn. |
+
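+For instance, while rotating a webhook secret you might accept deliveries signed with either the new or the previous secret. The sketch below is illustrative only, and the environment variable names are placeholders:
+
+```js
+// Try the current secret first, then fall back to the previous one so
+// deliveries signed shortly before the rotation are still accepted.
+const ok = await verifyWithFallback(
+  process.env.WEBHOOK_SECRET, // current secret
+  eventPayloadString,
+  signature,
+  [process.env.WEBHOOK_SECRET_OLD] // previous secret(s)
+);
+```
+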
+ +This is a thin wrapper around [`verify()`](#verify) that is intended to ease callers' support for key rotation. +Resolves with `true` or `false`. Throws error if a required argument is missing. + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-node/index.js b/dist/node_modules/@octokit/webhooks-methods/dist-node/index.js new file mode 100644 index 0000000..2574189 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-node/index.js @@ -0,0 +1,111 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + sign: () => sign, + verify: () => verify, + verifyWithFallback: () => verifyWithFallback +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/node/sign.js +var import_crypto = require("crypto"); + +// pkg/dist-src/types.js +var Algorithm = /* @__PURE__ */ ((Algorithm2) => { + Algorithm2["SHA1"] = "sha1"; + Algorithm2["SHA256"] = "sha256"; + return Algorithm2; +})(Algorithm || {}); + +// pkg/dist-src/version.js +var VERSION = "3.0.3"; + +// pkg/dist-src/node/sign.js +async function sign(options, payload) { + const { secret, algorithm } = typeof options === "object" ? { + secret: options.secret, + algorithm: options.algorithm || Algorithm.SHA256 + } : { secret: options, algorithm: Algorithm.SHA256 }; + if (!secret || !payload) { + throw new TypeError( + "[@octokit/webhooks-methods] secret & payload required for sign()" + ); + } + if (!Object.values(Algorithm).includes(algorithm)) { + throw new TypeError( + `[@octokit/webhooks] Algorithm ${algorithm} is not supported. Must be 'sha1' or 'sha256'` + ); + } + return `${algorithm}=${(0, import_crypto.createHmac)(algorithm, secret).update(payload).digest("hex")}`; +} +sign.VERSION = VERSION; + +// pkg/dist-src/node/verify.js +var import_crypto2 = require("crypto"); +var import_buffer = require("buffer"); + +// pkg/dist-src/utils.js +var getAlgorithm = (signature) => { + return signature.startsWith("sha256=") ? 
"sha256" : "sha1"; +}; + +// pkg/dist-src/node/verify.js +async function verify(secret, eventPayload, signature) { + if (!secret || !eventPayload || !signature) { + throw new TypeError( + "[@octokit/webhooks-methods] secret, eventPayload & signature required" + ); + } + const signatureBuffer = import_buffer.Buffer.from(signature); + const algorithm = getAlgorithm(signature); + const verificationBuffer = import_buffer.Buffer.from( + await sign({ secret, algorithm }, eventPayload) + ); + if (signatureBuffer.length !== verificationBuffer.length) { + return false; + } + return (0, import_crypto2.timingSafeEqual)(signatureBuffer, verificationBuffer); +} +verify.VERSION = VERSION; + +// pkg/dist-src/index.js +async function verifyWithFallback(secret, payload, signature, additionalSecrets) { + const firstPass = await verify(secret, payload, signature); + if (firstPass) { + return true; + } + if (additionalSecrets !== void 0) { + for (const s of additionalSecrets) { + const v = await verify(s, payload, signature); + if (v) { + return v; + } + } + } + return false; +} +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + sign, + verify, + verifyWithFallback +}); diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-node/index.js.map b/dist/node_modules/@octokit/webhooks-methods/dist-node/index.js.map new file mode 100644 index 0000000..7ea8a90 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/node/sign.js", "../dist-src/types.js", "../dist-src/version.js", "../dist-src/node/verify.js", "../dist-src/utils.js"], + "sourcesContent": ["import { sign } from \"./node/sign\";\nimport { verify } from \"./node/verify\";\nasync function verifyWithFallback(secret, payload, signature, additionalSecrets) {\n const firstPass = await verify(secret, payload, signature);\n if (firstPass) {\n return true;\n }\n if (additionalSecrets !== void 0) {\n for (const s of additionalSecrets) {\n const v = await verify(s, payload, signature);\n if (v) {\n return v;\n }\n }\n }\n return false;\n}\nexport {\n sign,\n verify,\n verifyWithFallback\n};\n", "import { createHmac } from \"crypto\";\nimport { Algorithm } from \"../types\";\nimport { VERSION } from \"../version\";\nasync function sign(options, payload) {\n const { secret, algorithm } = typeof options === \"object\" ? {\n secret: options.secret,\n algorithm: options.algorithm || Algorithm.SHA256\n } : { secret: options, algorithm: Algorithm.SHA256 };\n if (!secret || !payload) {\n throw new TypeError(\n \"[@octokit/webhooks-methods] secret & payload required for sign()\"\n );\n }\n if (!Object.values(Algorithm).includes(algorithm)) {\n throw new TypeError(\n `[@octokit/webhooks] Algorithm ${algorithm} is not supported. 
Must be 'sha1' or 'sha256'`\n );\n }\n return `${algorithm}=${createHmac(algorithm, secret).update(payload).digest(\"hex\")}`;\n}\nsign.VERSION = VERSION;\nexport {\n sign\n};\n", "var Algorithm = /* @__PURE__ */ ((Algorithm2) => {\n Algorithm2[\"SHA1\"] = \"sha1\";\n Algorithm2[\"SHA256\"] = \"sha256\";\n return Algorithm2;\n})(Algorithm || {});\nexport {\n Algorithm\n};\n", "const VERSION = \"3.0.3\";\nexport {\n VERSION\n};\n", "import { timingSafeEqual } from \"crypto\";\nimport { Buffer } from \"buffer\";\nimport { sign } from \"./sign\";\nimport { VERSION } from \"../version\";\nimport { getAlgorithm } from \"../utils\";\nasync function verify(secret, eventPayload, signature) {\n if (!secret || !eventPayload || !signature) {\n throw new TypeError(\n \"[@octokit/webhooks-methods] secret, eventPayload & signature required\"\n );\n }\n const signatureBuffer = Buffer.from(signature);\n const algorithm = getAlgorithm(signature);\n const verificationBuffer = Buffer.from(\n await sign({ secret, algorithm }, eventPayload)\n );\n if (signatureBuffer.length !== verificationBuffer.length) {\n return false;\n }\n return timingSafeEqual(signatureBuffer, verificationBuffer);\n}\nverify.VERSION = VERSION;\nexport {\n verify\n};\n", "const getAlgorithm = (signature) => {\n return signature.startsWith(\"sha256=\") ? \"sha256\" : \"sha1\";\n};\nexport {\n getAlgorithm\n};\n"], + "mappings": ";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,oBAA2B;;;ACA3B,IAAI,YAA6B,kBAAC,eAAe;AAC/C,aAAW,MAAM,IAAI;AACrB,aAAW,QAAQ,IAAI;AACvB,SAAO;AACT,GAAG,aAAa,CAAC,CAAC;;;ACJlB,IAAM,UAAU;;;AFGhB,eAAe,KAAK,SAAS,SAAS;AACpC,QAAM,EAAE,QAAQ,UAAU,IAAI,OAAO,YAAY,WAAW;AAAA,IAC1D,QAAQ,QAAQ;AAAA,IAChB,WAAW,QAAQ,aAAa,UAAU;AAAA,EAC5C,IAAI,EAAE,QAAQ,SAAS,WAAW,UAAU,OAAO;AACnD,MAAI,CAAC,UAAU,CAAC,SAAS;AACvB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,OAAO,OAAO,SAAS,EAAE,SAAS,SAAS,GAAG;AACjD,UAAM,IAAI;AAAA,MACR,iCAAiC;AAAA,IACnC;AAAA,EACF;AACA,SAAO,GAAG,iBAAa,0BAAW,WAAW,MAAM,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AACnF;AACA,KAAK,UAAU;;;AGpBf,IAAAA,iBAAgC;AAChC,oBAAuB;;;ACDvB,IAAM,eAAe,CAAC,cAAc;AAClC,SAAO,UAAU,WAAW,SAAS,IAAI,WAAW;AACtD;;;ADGA,eAAe,OAAO,QAAQ,cAAc,WAAW;AACrD,MAAI,CAAC,UAAU,CAAC,gBAAgB,CAAC,WAAW;AAC1C,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,QAAM,kBAAkB,qBAAO,KAAK,SAAS;AAC7C,QAAM,YAAY,aAAa,SAAS;AACxC,QAAM,qBAAqB,qBAAO;AAAA,IAChC,MAAM,KAAK,EAAE,QAAQ,UAAU,GAAG,YAAY;AAAA,EAChD;AACA,MAAI,gBAAgB,WAAW,mBAAmB,QAAQ;AACxD,WAAO;AAAA,EACT;AACA,aAAO,gCAAgB,iBAAiB,kBAAkB;AAC5D;AACA,OAAO,UAAU;;;AJnBjB,eAAe,mBAAmB,QAAQ,SAAS,WAAW,mBAAmB;AAC/E,QAAM,YAAY,MAAM,OAAO,QAAQ,SAAS,SAAS;AACzD,MAAI,WAAW;AACb,WAAO;AAAA,EACT;AACA,MAAI,sBAAsB,QAAQ;AAChC,eAAW,KAAK,mBAAmB;AACjC,YAAM,IAAI,MAAM,OAAO,GAAG,SAAS,SAAS;AAC5C,UAAI,GAAG;AACL,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;", + "names": ["import_crypto"] +} diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-src/index.js b/dist/node_modules/@octokit/webhooks-methods/dist-src/index.js new file mode 100644 index 0000000..fcd0bc6 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-src/index.js @@ -0,0 +1,22 @@ +import { sign } from "./node/sign"; +import { verify } from "./node/verify"; +async function verifyWithFallback(secret, payload, signature, additionalSecrets) { + const firstPass = await verify(secret, payload, signature); + if (firstPass) { + return true; + } + if (additionalSecrets !== void 0) { + for (const s of additionalSecrets) { + const v = await verify(s, payload, signature); + if (v) { + return v; + } + } + } + return false; +} +export 
{ + sign, + verify, + verifyWithFallback +}; diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-src/node/sign.js b/dist/node_modules/@octokit/webhooks-methods/dist-src/node/sign.js new file mode 100644 index 0000000..9c99285 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-src/node/sign.js @@ -0,0 +1,24 @@ +import { createHmac } from "crypto"; +import { Algorithm } from "../types"; +import { VERSION } from "../version"; +async function sign(options, payload) { + const { secret, algorithm } = typeof options === "object" ? { + secret: options.secret, + algorithm: options.algorithm || Algorithm.SHA256 + } : { secret: options, algorithm: Algorithm.SHA256 }; + if (!secret || !payload) { + throw new TypeError( + "[@octokit/webhooks-methods] secret & payload required for sign()" + ); + } + if (!Object.values(Algorithm).includes(algorithm)) { + throw new TypeError( + `[@octokit/webhooks] Algorithm ${algorithm} is not supported. Must be 'sha1' or 'sha256'` + ); + } + return `${algorithm}=${createHmac(algorithm, secret).update(payload).digest("hex")}`; +} +sign.VERSION = VERSION; +export { + sign +}; diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-src/node/verify.js b/dist/node_modules/@octokit/webhooks-methods/dist-src/node/verify.js new file mode 100644 index 0000000..962c121 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-src/node/verify.js @@ -0,0 +1,25 @@ +import { timingSafeEqual } from "crypto"; +import { Buffer } from "buffer"; +import { sign } from "./sign"; +import { VERSION } from "../version"; +import { getAlgorithm } from "../utils"; +async function verify(secret, eventPayload, signature) { + if (!secret || !eventPayload || !signature) { + throw new TypeError( + "[@octokit/webhooks-methods] secret, eventPayload & signature required" + ); + } + const signatureBuffer = Buffer.from(signature); + const algorithm = getAlgorithm(signature); + const verificationBuffer = Buffer.from( + await sign({ secret, algorithm }, eventPayload) + ); + if (signatureBuffer.length !== verificationBuffer.length) { + return false; + } + return timingSafeEqual(signatureBuffer, verificationBuffer); +} +verify.VERSION = VERSION; +export { + verify +}; diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-src/types.js b/dist/node_modules/@octokit/webhooks-methods/dist-src/types.js new file mode 100644 index 0000000..e7d7669 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-src/types.js @@ -0,0 +1,8 @@ +var Algorithm = /* @__PURE__ */ ((Algorithm2) => { + Algorithm2["SHA1"] = "sha1"; + Algorithm2["SHA256"] = "sha256"; + return Algorithm2; +})(Algorithm || {}); +export { + Algorithm +}; diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-src/utils.js b/dist/node_modules/@octokit/webhooks-methods/dist-src/utils.js new file mode 100644 index 0000000..ed164f9 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-src/utils.js @@ -0,0 +1,6 @@ +const getAlgorithm = (signature) => { + return signature.startsWith("sha256=") ? 
"sha256" : "sha1"; +}; +export { + getAlgorithm +}; diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-src/version.js b/dist/node_modules/@octokit/webhooks-methods/dist-src/version.js new file mode 100644 index 0000000..42387d8 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "3.0.3"; +export { + VERSION +}; diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-src/web.js b/dist/node_modules/@octokit/webhooks-methods/dist-src/web.js new file mode 100644 index 0000000..4365a29 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-src/web.js @@ -0,0 +1,75 @@ +import { Algorithm } from "./types"; +import { getAlgorithm } from "./utils"; +const enc = new TextEncoder(); +function hexToUInt8Array(string) { + const pairs = string.match(/[\dA-F]{2}/gi); + const integers = pairs.map(function(s) { + return parseInt(s, 16); + }); + return new Uint8Array(integers); +} +function UInt8ArrayToHex(signature) { + return Array.prototype.map.call(new Uint8Array(signature), (x) => x.toString(16).padStart(2, "0")).join(""); +} +function getHMACHashName(algorithm) { + return { + [Algorithm.SHA1]: "SHA-1", + [Algorithm.SHA256]: "SHA-256" + }[algorithm]; +} +async function importKey(secret, algorithm) { + return crypto.subtle.importKey( + "raw", + // raw format of the key - should be Uint8Array + enc.encode(secret), + { + // algorithm details + name: "HMAC", + hash: { name: getHMACHashName(algorithm) } + }, + false, + // export = false + ["sign", "verify"] + // what this key can do + ); +} +async function sign(options, payload) { + const { secret, algorithm } = typeof options === "object" ? { + secret: options.secret, + algorithm: options.algorithm || Algorithm.SHA256 + } : { secret: options, algorithm: Algorithm.SHA256 }; + if (!secret || !payload) { + throw new TypeError( + "[@octokit/webhooks-methods] secret & payload required for sign()" + ); + } + if (!Object.values(Algorithm).includes(algorithm)) { + throw new TypeError( + `[@octokit/webhooks] Algorithm ${algorithm} is not supported. 
Must be 'sha1' or 'sha256'` + ); + } + const signature = await crypto.subtle.sign( + "HMAC", + await importKey(secret, algorithm), + enc.encode(payload) + ); + return `${algorithm}=${UInt8ArrayToHex(signature)}`; +} +async function verify(secret, eventPayload, signature) { + if (!secret || !eventPayload || !signature) { + throw new TypeError( + "[@octokit/webhooks-methods] secret, eventPayload & signature required" + ); + } + const algorithm = getAlgorithm(signature); + return await crypto.subtle.verify( + "HMAC", + await importKey(secret, algorithm), + hexToUInt8Array(signature.replace(`${algorithm}=`, "")), + enc.encode(eventPayload) + ); +} +export { + sign, + verify +}; diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-types/index.d.ts b/dist/node_modules/@octokit/webhooks-methods/dist-types/index.d.ts new file mode 100644 index 0000000..df9095b --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-types/index.d.ts @@ -0,0 +1,4 @@ +export { sign } from "./node/sign"; +import { verify } from "./node/verify"; +export { verify }; +export declare function verifyWithFallback(secret: string, payload: string, signature: string, additionalSecrets: undefined | string[]): Promise; diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-types/node/sign.d.ts b/dist/node_modules/@octokit/webhooks-methods/dist-types/node/sign.d.ts new file mode 100644 index 0000000..b1dacfa --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-types/node/sign.d.ts @@ -0,0 +1,5 @@ +import { type SignOptions } from "../types"; +export declare function sign(options: SignOptions | string, payload: string): Promise; +export declare namespace sign { + var VERSION: string; +} diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-types/node/verify.d.ts b/dist/node_modules/@octokit/webhooks-methods/dist-types/node/verify.d.ts new file mode 100644 index 0000000..1d475a4 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-types/node/verify.d.ts @@ -0,0 +1,4 @@ +export declare function verify(secret: string, eventPayload: string, signature: string): Promise; +export declare namespace verify { + var VERSION: string; +} diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-types/types.d.ts b/dist/node_modules/@octokit/webhooks-methods/dist-types/types.d.ts new file mode 100644 index 0000000..52c29af --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-types/types.d.ts @@ -0,0 +1,9 @@ +export declare enum Algorithm { + SHA1 = "sha1", + SHA256 = "sha256" +} +export type AlgorithmLike = Algorithm | "sha1" | "sha256"; +export type SignOptions = { + secret: string; + algorithm?: AlgorithmLike; +}; diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-types/utils.d.ts b/dist/node_modules/@octokit/webhooks-methods/dist-types/utils.d.ts new file mode 100644 index 0000000..5bab855 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-types/utils.d.ts @@ -0,0 +1 @@ +export declare const getAlgorithm: (signature: string) => "sha1" | "sha256"; diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-types/version.d.ts b/dist/node_modules/@octokit/webhooks-methods/dist-types/version.d.ts new file mode 100644 index 0000000..09c2448 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "3.0.3"; diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-types/web.d.ts b/dist/node_modules/@octokit/webhooks-methods/dist-types/web.d.ts new file 
mode 100644 index 0000000..2798c22 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-types/web.d.ts @@ -0,0 +1,3 @@ +import { type SignOptions } from "./types"; +export declare function sign(options: SignOptions | string, payload: string): Promise; +export declare function verify(secret: string, eventPayload: string, signature: string): Promise; diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-web/index.js b/dist/node_modules/@octokit/webhooks-methods/dist-web/index.js new file mode 100644 index 0000000..de69920 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-web/index.js @@ -0,0 +1,86 @@ +// pkg/dist-src/types.js +var Algorithm = /* @__PURE__ */ ((Algorithm2) => { + Algorithm2["SHA1"] = "sha1"; + Algorithm2["SHA256"] = "sha256"; + return Algorithm2; +})(Algorithm || {}); + +// pkg/dist-src/utils.js +var getAlgorithm = (signature) => { + return signature.startsWith("sha256=") ? "sha256" : "sha1"; +}; + +// pkg/dist-src/web.js +var enc = new TextEncoder(); +function hexToUInt8Array(string) { + const pairs = string.match(/[\dA-F]{2}/gi); + const integers = pairs.map(function(s) { + return parseInt(s, 16); + }); + return new Uint8Array(integers); +} +function UInt8ArrayToHex(signature) { + return Array.prototype.map.call(new Uint8Array(signature), (x) => x.toString(16).padStart(2, "0")).join(""); +} +function getHMACHashName(algorithm) { + return { + [Algorithm.SHA1]: "SHA-1", + [Algorithm.SHA256]: "SHA-256" + }[algorithm]; +} +async function importKey(secret, algorithm) { + return crypto.subtle.importKey( + "raw", + // raw format of the key - should be Uint8Array + enc.encode(secret), + { + // algorithm details + name: "HMAC", + hash: { name: getHMACHashName(algorithm) } + }, + false, + // export = false + ["sign", "verify"] + // what this key can do + ); +} +async function sign(options, payload) { + const { secret, algorithm } = typeof options === "object" ? { + secret: options.secret, + algorithm: options.algorithm || Algorithm.SHA256 + } : { secret: options, algorithm: Algorithm.SHA256 }; + if (!secret || !payload) { + throw new TypeError( + "[@octokit/webhooks-methods] secret & payload required for sign()" + ); + } + if (!Object.values(Algorithm).includes(algorithm)) { + throw new TypeError( + `[@octokit/webhooks] Algorithm ${algorithm} is not supported. 
Must be 'sha1' or 'sha256'` + ); + } + const signature = await crypto.subtle.sign( + "HMAC", + await importKey(secret, algorithm), + enc.encode(payload) + ); + return `${algorithm}=${UInt8ArrayToHex(signature)}`; +} +async function verify(secret, eventPayload, signature) { + if (!secret || !eventPayload || !signature) { + throw new TypeError( + "[@octokit/webhooks-methods] secret, eventPayload & signature required" + ); + } + const algorithm = getAlgorithm(signature); + return await crypto.subtle.verify( + "HMAC", + await importKey(secret, algorithm), + hexToUInt8Array(signature.replace(`${algorithm}=`, "")), + enc.encode(eventPayload) + ); +} +export { + sign, + verify +}; diff --git a/dist/node_modules/@octokit/webhooks-methods/dist-web/index.js.map b/dist/node_modules/@octokit/webhooks-methods/dist-web/index.js.map new file mode 100644 index 0000000..4e7fb49 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/types.js", "../dist-src/utils.js", "../dist-src/web.js"], + "sourcesContent": ["var Algorithm = /* @__PURE__ */ ((Algorithm2) => {\n Algorithm2[\"SHA1\"] = \"sha1\";\n Algorithm2[\"SHA256\"] = \"sha256\";\n return Algorithm2;\n})(Algorithm || {});\nexport {\n Algorithm\n};\n", "const getAlgorithm = (signature) => {\n return signature.startsWith(\"sha256=\") ? \"sha256\" : \"sha1\";\n};\nexport {\n getAlgorithm\n};\n", "import { Algorithm } from \"./types\";\nimport { getAlgorithm } from \"./utils\";\nconst enc = new TextEncoder();\nfunction hexToUInt8Array(string) {\n const pairs = string.match(/[\\dA-F]{2}/gi);\n const integers = pairs.map(function(s) {\n return parseInt(s, 16);\n });\n return new Uint8Array(integers);\n}\nfunction UInt8ArrayToHex(signature) {\n return Array.prototype.map.call(new Uint8Array(signature), (x) => x.toString(16).padStart(2, \"0\")).join(\"\");\n}\nfunction getHMACHashName(algorithm) {\n return {\n [Algorithm.SHA1]: \"SHA-1\",\n [Algorithm.SHA256]: \"SHA-256\"\n }[algorithm];\n}\nasync function importKey(secret, algorithm) {\n return crypto.subtle.importKey(\n \"raw\",\n // raw format of the key - should be Uint8Array\n enc.encode(secret),\n {\n // algorithm details\n name: \"HMAC\",\n hash: { name: getHMACHashName(algorithm) }\n },\n false,\n // export = false\n [\"sign\", \"verify\"]\n // what this key can do\n );\n}\nasync function sign(options, payload) {\n const { secret, algorithm } = typeof options === \"object\" ? {\n secret: options.secret,\n algorithm: options.algorithm || Algorithm.SHA256\n } : { secret: options, algorithm: Algorithm.SHA256 };\n if (!secret || !payload) {\n throw new TypeError(\n \"[@octokit/webhooks-methods] secret & payload required for sign()\"\n );\n }\n if (!Object.values(Algorithm).includes(algorithm)) {\n throw new TypeError(\n `[@octokit/webhooks] Algorithm ${algorithm} is not supported. 
Must be 'sha1' or 'sha256'`\n );\n }\n const signature = await crypto.subtle.sign(\n \"HMAC\",\n await importKey(secret, algorithm),\n enc.encode(payload)\n );\n return `${algorithm}=${UInt8ArrayToHex(signature)}`;\n}\nasync function verify(secret, eventPayload, signature) {\n if (!secret || !eventPayload || !signature) {\n throw new TypeError(\n \"[@octokit/webhooks-methods] secret, eventPayload & signature required\"\n );\n }\n const algorithm = getAlgorithm(signature);\n return await crypto.subtle.verify(\n \"HMAC\",\n await importKey(secret, algorithm),\n hexToUInt8Array(signature.replace(`${algorithm}=`, \"\")),\n enc.encode(eventPayload)\n );\n}\nexport {\n sign,\n verify\n};\n"], + "mappings": ";AAAA,IAAI,YAA6B,kBAAC,eAAe;AAC/C,aAAW,MAAM,IAAI;AACrB,aAAW,QAAQ,IAAI;AACvB,SAAO;AACT,GAAG,aAAa,CAAC,CAAC;;;ACJlB,IAAM,eAAe,CAAC,cAAc;AAClC,SAAO,UAAU,WAAW,SAAS,IAAI,WAAW;AACtD;;;ACAA,IAAM,MAAM,IAAI,YAAY;AAC5B,SAAS,gBAAgB,QAAQ;AAC/B,QAAM,QAAQ,OAAO,MAAM,cAAc;AACzC,QAAM,WAAW,MAAM,IAAI,SAAS,GAAG;AACrC,WAAO,SAAS,GAAG,EAAE;AAAA,EACvB,CAAC;AACD,SAAO,IAAI,WAAW,QAAQ;AAChC;AACA,SAAS,gBAAgB,WAAW;AAClC,SAAO,MAAM,UAAU,IAAI,KAAK,IAAI,WAAW,SAAS,GAAG,CAAC,MAAM,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG,CAAC,EAAE,KAAK,EAAE;AAC5G;AACA,SAAS,gBAAgB,WAAW;AAClC,SAAO;AAAA,IACL,CAAC,UAAU,IAAI,GAAG;AAAA,IAClB,CAAC,UAAU,MAAM,GAAG;AAAA,EACtB,EAAE,SAAS;AACb;AACA,eAAe,UAAU,QAAQ,WAAW;AAC1C,SAAO,OAAO,OAAO;AAAA,IACnB;AAAA;AAAA,IAEA,IAAI,OAAO,MAAM;AAAA,IACjB;AAAA;AAAA,MAEE,MAAM;AAAA,MACN,MAAM,EAAE,MAAM,gBAAgB,SAAS,EAAE;AAAA,IAC3C;AAAA,IACA;AAAA;AAAA,IAEA,CAAC,QAAQ,QAAQ;AAAA;AAAA,EAEnB;AACF;AACA,eAAe,KAAK,SAAS,SAAS;AACpC,QAAM,EAAE,QAAQ,UAAU,IAAI,OAAO,YAAY,WAAW;AAAA,IAC1D,QAAQ,QAAQ;AAAA,IAChB,WAAW,QAAQ,aAAa,UAAU;AAAA,EAC5C,IAAI,EAAE,QAAQ,SAAS,WAAW,UAAU,OAAO;AACnD,MAAI,CAAC,UAAU,CAAC,SAAS;AACvB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,OAAO,OAAO,SAAS,EAAE,SAAS,SAAS,GAAG;AACjD,UAAM,IAAI;AAAA,MACR,iCAAiC;AAAA,IACnC;AAAA,EACF;AACA,QAAM,YAAY,MAAM,OAAO,OAAO;AAAA,IACpC;AAAA,IACA,MAAM,UAAU,QAAQ,SAAS;AAAA,IACjC,IAAI,OAAO,OAAO;AAAA,EACpB;AACA,SAAO,GAAG,aAAa,gBAAgB,SAAS;AAClD;AACA,eAAe,OAAO,QAAQ,cAAc,WAAW;AACrD,MAAI,CAAC,UAAU,CAAC,gBAAgB,CAAC,WAAW;AAC1C,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,QAAM,YAAY,aAAa,SAAS;AACxC,SAAO,MAAM,OAAO,OAAO;AAAA,IACzB;AAAA,IACA,MAAM,UAAU,QAAQ,SAAS;AAAA,IACjC,gBAAgB,UAAU,QAAQ,GAAG,cAAc,EAAE,CAAC;AAAA,IACtD,IAAI,OAAO,YAAY;AAAA,EACzB;AACF;", + "names": [] +} diff --git a/dist/node_modules/@octokit/webhooks-methods/package.json b/dist/node_modules/@octokit/webhooks-methods/package.json new file mode 100644 index 0000000..3b2b08d --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-methods/package.json @@ -0,0 +1,58 @@ +{ + "name": "@octokit/webhooks-methods", + "publishConfig": { + "access": "public" + }, + "version": "3.0.3", + "description": "Methods to handle GitHub Webhook requests", + "repository": "github:octokit/webhooks-methods.js", + "keywords": [ + "github", + "api", + "sdk", + "toolkit" + ], + "author": "Gregor Martynus (https://dev.to/gr2m)", + "license": "MIT", + "devDependencies": { + "@octokit/tsconfig": "^1.0.2", + "@pika/plugin-ts-standard-pkg": "^0.9.2", + "@types/jest": "^29.0.0", + "@types/node": "^18.0.0", + "esbuild": "^0.17.19", + "glob": "^10.2.6", + "jest": "^29.0.0", + "prettier": "2.8.8", + "puppeteer": "^20.0.0", + "semantic-release": "^21.0.0", + "semantic-release-plugin-update-version-in-files": "^1.1.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "jest": { + "preset": "ts-jest", + "coverageThreshold": { + "global": { + "statements": 100, + "branches": 
100, + "functions": 100, + "lines": 100 + } + }, + "testPathIgnorePatterns": [ + "/node_modules/", + "/test/deno/" + ] + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**" + ], + "main": "dist-node/index.js", + "browser": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "module": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@octokit/webhooks-types/README.md b/dist/node_modules/@octokit/webhooks-types/README.md new file mode 100644 index 0000000..fd19f32 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-types/README.md @@ -0,0 +1,46 @@ +# Octokit Webhooks + +> machine-readable, always up-to-date GitHub Webhooks specifications + +![Update status](https://github.com/octokit/webhooks/workflows/Update/badge.svg) + +## Download + +Download the latest specification at +[octokit.github.io/webhooks/payload-examples/api.github.com/index.json](https://octokit.github.io/webhooks/payload-examples/api.github.com/index.json) + +## Usage + +This package ships with types for the webhook events generated from the +respective json schemas, which you can use like so: + +```typescript +import { WebhookEvent, IssuesOpenedEvent } from "@octokit/webhooks-types"; + +const handleWebhookEvent = (event: WebhookEvent) => { + if ("action" in event && event.action === "completed") { + console.log(`${event.sender.login} completed something!`); + } +}; + +const handleIssuesOpenedEvent = (event: IssuesOpenedEvent) => { + console.log( + `${event.sender.login} opened "${event.issue.title}" on ${event.repository.full_name}` + ); +}; +``` + +**⚠️ Caution ⚠️**: Webhooks Types are expected to be used with the [`strictNullChecks` option](https://www.typescriptlang.org/tsconfig#strictNullChecks) enabled in your `tsconfig`. If you don't have this option enabled, there's the possibility that you get `never` as the inferred type in some use cases. See [#395](https://github.com/octokit/webhooks/issues/395) for details. + +## See also + +- [octokit/graphql-schema](https://github.com/octokit/graphql-schema) – GitHub’s + GraphQL Schema with validation +- [octokit/openapi](https://github.com/octokit/openapi) – GitHub REST API route + specifications +- [octokit/app-permissions](https://github.com/octokit/app-permissions) – GitHub + App permission specifications + +## LICENSE + +[MIT](LICENSE.md) diff --git a/dist/node_modules/@octokit/webhooks-types/package.json b/dist/node_modules/@octokit/webhooks-types/package.json new file mode 100644 index 0000000..6c0107c --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-types/package.json @@ -0,0 +1,20 @@ +{ + "name": "@octokit/webhooks-types", + "version": "6.11.0", + "description": "Generated TypeScript definitions based on community contributed JSON Schemas", + "keywords": [], + "repository": "github:octokit/webhooks", + "license": "MIT", + "author": "Gregor Martynus (https://github.com/gr2m)", + "main": "", + "types": "schema.d.ts", + "files": [ + "schema.d.ts" + ], + "scripts": {}, + "dependencies": {}, + "devDependencies": {}, + "publishConfig": { + "access": "public" + } +} diff --git a/dist/node_modules/@octokit/webhooks-types/schema.d.ts b/dist/node_modules/@octokit/webhooks-types/schema.d.ts new file mode 100644 index 0000000..5983944 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks-types/schema.d.ts @@ -0,0 +1,8099 @@ +/* eslint-disable */ +/** + * This file was automatically generated by json-schema-to-typescript. + * DO NOT MODIFY IT BY HAND. 
Instead, modify the source JSONSchema file, + * and run json-schema-to-typescript to regenerate this file. + */ + +export type Schema = + | BranchProtectionRuleEvent + | CheckRunEvent + | CheckSuiteEvent + | CodeScanningAlertEvent + | CommitCommentEvent + | CreateEvent + | DeleteEvent + | DependabotAlertEvent + | DeployKeyEvent + | DeploymentEvent + | DeploymentStatusEvent + | DiscussionEvent + | DiscussionCommentEvent + | ForkEvent + | GithubAppAuthorizationEvent + | GollumEvent + | InstallationEvent + | InstallationRepositoriesEvent + | InstallationTargetEvent + | IssueCommentEvent + | IssuesEvent + | LabelEvent + | MarketplacePurchaseEvent + | MemberEvent + | MembershipEvent + | MergeGroupEvent + | MetaEvent + | MilestoneEvent + | OrgBlockEvent + | OrganizationEvent + | PackageEvent + | PageBuildEvent + | PingEvent + | ProjectEvent + | ProjectCardEvent + | ProjectColumnEvent + | ProjectsV2ItemEvent + | PublicEvent + | PullRequestEvent + | PullRequestReviewEvent + | PullRequestReviewCommentEvent + | PullRequestReviewThreadEvent + | PushEvent + | RegistryPackageEvent + | ReleaseEvent + | RepositoryEvent + | RepositoryDispatchEvent + | RepositoryImportEvent + | RepositoryVulnerabilityAlertEvent + | SecretScanningAlertEvent + | SecurityAdvisoryEvent + | SponsorshipEvent + | StarEvent + | StatusEvent + | TeamEvent + | TeamAddEvent + | WatchEvent + | WorkflowDispatchEvent + | WorkflowJobEvent + | WorkflowRunEvent; +export type BranchProtectionRuleEvent = + | BranchProtectionRuleCreatedEvent + | BranchProtectionRuleDeletedEvent + | BranchProtectionRuleEditedEvent; +export type BranchProtectionRuleEnforcementLevel = + | "off" + | "non_admins" + | "everyone"; +export type BranchProtectionRuleNumber = number; +export type BranchProtectionRuleBoolean = boolean; +export type BranchProtectionRuleArray = string[]; +export type CheckRunEvent = + | CheckRunCompletedEvent + | CheckRunCreatedEvent + | CheckRunRequestedActionEvent + | CheckRunRerequestedEvent; +export type CheckSuiteEvent = + | CheckSuiteCompletedEvent + | CheckSuiteRequestedEvent + | CheckSuiteRerequestedEvent; +export type CodeScanningAlertEvent = + | CodeScanningAlertAppearedInBranchEvent + | CodeScanningAlertClosedByUserEvent + | CodeScanningAlertCreatedEvent + | CodeScanningAlertFixedEvent + | CodeScanningAlertReopenedEvent + | CodeScanningAlertReopenedByUserEvent; +export type CommitCommentEvent = CommitCommentCreatedEvent; +/** + * How the author is associated with the repository. 
+ */ +export type AuthorAssociation = + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; +export type DependabotAlertEvent = + | DependabotAlertCreatedEvent + | DependabotAlertDismissedEvent + | DependabotAlertFixedEvent + | DependabotAlertReintroducedEvent + | DependabotAlertReopenedEvent; +export type DeployKeyEvent = DeployKeyCreatedEvent | DeployKeyDeletedEvent; +export type DeploymentEvent = DeploymentCreatedEvent; +export type DeploymentStatusEvent = DeploymentStatusCreatedEvent; +export type DiscussionEvent = + | DiscussionAnsweredEvent + | DiscussionCategoryChangedEvent + | DiscussionCreatedEvent + | DiscussionDeletedEvent + | DiscussionEditedEvent + | DiscussionLabeledEvent + | DiscussionLockedEvent + | DiscussionPinnedEvent + | DiscussionTransferredEvent + | DiscussionUnansweredEvent + | DiscussionUnlabeledEvent + | DiscussionUnlockedEvent + | DiscussionUnpinnedEvent; +export type DiscussionCommentEvent = + | DiscussionCommentCreatedEvent + | DiscussionCommentDeletedEvent + | DiscussionCommentEditedEvent; +export type GithubAppAuthorizationEvent = GithubAppAuthorizationRevokedEvent; +export type InstallationEvent = + | InstallationCreatedEvent + | InstallationDeletedEvent + | InstallationNewPermissionsAcceptedEvent + | InstallationSuspendEvent + | InstallationUnsuspendEvent; +export type InstallationRepositoriesEvent = + | InstallationRepositoriesAddedEvent + | InstallationRepositoriesRemovedEvent; +export type InstallationTargetEvent = InstallationTargetRenamedEvent; +export type IssueCommentEvent = + | IssueCommentCreatedEvent + | IssueCommentDeletedEvent + | IssueCommentEditedEvent; +export type IssuesEvent = + | IssuesAssignedEvent + | IssuesClosedEvent + | IssuesDeletedEvent + | IssuesDemilestonedEvent + | IssuesEditedEvent + | IssuesLabeledEvent + | IssuesLockedEvent + | IssuesMilestonedEvent + | IssuesOpenedEvent + | IssuesPinnedEvent + | IssuesReopenedEvent + | IssuesTransferredEvent + | IssuesUnassignedEvent + | IssuesUnlabeledEvent + | IssuesUnlockedEvent + | IssuesUnpinnedEvent; +export type LabelEvent = + | LabelCreatedEvent + | LabelDeletedEvent + | LabelEditedEvent; +export type MarketplacePurchaseEvent = + | MarketplacePurchaseCancelledEvent + | MarketplacePurchaseChangedEvent + | MarketplacePurchasePendingChangeEvent + | MarketplacePurchasePendingChangeCancelledEvent + | MarketplacePurchasePurchasedEvent; +export type MemberEvent = + | MemberAddedEvent + | MemberEditedEvent + | MemberRemovedEvent; +export type MembershipEvent = MembershipAddedEvent | MembershipRemovedEvent; +export type MergeGroupEvent = MergeGroupChecksRequestedEvent; +export type MetaEvent = MetaDeletedEvent; +export type WebhookEvents = + | ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "create" + | "delete" + | "deployment" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "meta" + | "milestone" + | "organization" + | "org_block" + | "package" + | "page_build" + | "project" + | "projects_v2_item" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "pull_request_review_thread" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_import" + | "repository_vulnerability_alert" + | 
"secret_scanning_alert" + | "secret_scanning_alert_location" + | "security_and_analysis" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_job" + | "workflow_run" + )[] + | ["*"]; +export type MilestoneEvent = + | MilestoneClosedEvent + | MilestoneCreatedEvent + | MilestoneDeletedEvent + | MilestoneEditedEvent + | MilestoneOpenedEvent; +export type OrgBlockEvent = OrgBlockBlockedEvent | OrgBlockUnblockedEvent; +export type OrganizationEvent = + | OrganizationDeletedEvent + | OrganizationMemberAddedEvent + | OrganizationMemberInvitedEvent + | OrganizationMemberRemovedEvent + | OrganizationRenamedEvent; +export type PackageEvent = PackagePublishedEvent | PackageUpdatedEvent; +export type ProjectEvent = + | ProjectClosedEvent + | ProjectCreatedEvent + | ProjectDeletedEvent + | ProjectEditedEvent + | ProjectReopenedEvent; +export type ProjectCardEvent = + | ProjectCardConvertedEvent + | ProjectCardCreatedEvent + | ProjectCardDeletedEvent + | ProjectCardEditedEvent + | ProjectCardMovedEvent; +export type ProjectColumnEvent = + | ProjectColumnCreatedEvent + | ProjectColumnDeletedEvent + | ProjectColumnEditedEvent + | ProjectColumnMovedEvent; +export type ProjectsV2ItemEvent = + | ProjectsV2ItemArchivedEvent + | ProjectsV2ItemConvertedEvent + | ProjectsV2ItemCreatedEvent + | ProjectsV2ItemDeletedEvent + | ProjectsV2ItemEditedEvent + | ProjectsV2ItemReorderedEvent + | ProjectsV2ItemRestoredEvent; +export type PullRequestEvent = + | PullRequestAssignedEvent + | PullRequestAutoMergeDisabledEvent + | PullRequestAutoMergeEnabledEvent + | PullRequestClosedEvent + | PullRequestConvertedToDraftEvent + | PullRequestDemilestonedEvent + | PullRequestDequeuedEvent + | PullRequestEditedEvent + | PullRequestLabeledEvent + | PullRequestLockedEvent + | PullRequestMilestonedEvent + | PullRequestOpenedEvent + | PullRequestQueuedEvent + | PullRequestReadyForReviewEvent + | PullRequestReopenedEvent + | PullRequestReviewRequestRemovedEvent + | PullRequestReviewRequestedEvent + | PullRequestSynchronizeEvent + | PullRequestUnassignedEvent + | PullRequestUnlabeledEvent + | PullRequestUnlockedEvent; +export type PullRequestReviewRequestRemovedEvent = + | { + action: "review_request_removed"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + requested_reviewer: User; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; + } + | { + action: "review_request_removed"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + requested_team: Team; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; + }; +export type PullRequestReviewRequestedEvent = + | { + action: "review_requested"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + requested_reviewer: User; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; + } + | { + action: "review_requested"; + /** + * The pull request number. 
+ */ + number: number; + pull_request: PullRequest; + requested_team: Team; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; + }; +export type PullRequestReviewEvent = + | PullRequestReviewDismissedEvent + | PullRequestReviewEditedEvent + | PullRequestReviewSubmittedEvent; +export type PullRequestReviewCommentEvent = + | PullRequestReviewCommentCreatedEvent + | PullRequestReviewCommentDeletedEvent + | PullRequestReviewCommentEditedEvent; +export type PullRequestReviewThreadEvent = + | PullRequestReviewThreadResolvedEvent + | PullRequestReviewThreadUnresolvedEvent; +export type RegistryPackageEvent = + | RegistryPackagePublishedEvent + | RegistryPackageUpdatedEvent; +export type ReleaseEvent = + | ReleaseCreatedEvent + | ReleaseDeletedEvent + | ReleaseEditedEvent + | ReleasePrereleasedEvent + | ReleasePublishedEvent + | ReleaseReleasedEvent + | ReleaseUnpublishedEvent; +export type RepositoryEvent = + | RepositoryArchivedEvent + | RepositoryCreatedEvent + | RepositoryDeletedEvent + | RepositoryEditedEvent + | RepositoryPrivatizedEvent + | RepositoryPublicizedEvent + | RepositoryRenamedEvent + | RepositoryTransferredEvent + | RepositoryUnarchivedEvent; +export type RepositoryVulnerabilityAlertEvent = + | RepositoryVulnerabilityAlertCreateEvent + | RepositoryVulnerabilityAlertDismissEvent + | RepositoryVulnerabilityAlertReopenEvent + | RepositoryVulnerabilityAlertResolveEvent; +export type SecretScanningAlertEvent = + | SecretScanningAlertCreatedEvent + | SecretScanningAlertReopenedEvent + | SecretScanningAlertResolvedEvent; +export type SecurityAdvisoryEvent = + | SecurityAdvisoryPerformedEvent + | SecurityAdvisoryPublishedEvent + | SecurityAdvisoryUpdatedEvent + | SecurityAdvisoryWithdrawnEvent; +export type SponsorshipEvent = + | SponsorshipCancelledEvent + | SponsorshipCreatedEvent + | SponsorshipEditedEvent + | SponsorshipPendingCancellationEvent + | SponsorshipPendingTierChangeEvent + | SponsorshipTierChangedEvent; +export type StarEvent = StarCreatedEvent | StarDeletedEvent; +export type TeamEvent = + | TeamAddedToRepositoryEvent + | TeamCreatedEvent + | TeamDeletedEvent + | TeamEditedEvent + | TeamRemovedFromRepositoryEvent; +export type WatchEvent = WatchStartedEvent; +export type WorkflowJobEvent = + | WorkflowJobCompletedEvent + | WorkflowJobInProgressEvent + | WorkflowJobQueuedEvent; +export type WorkflowStep = WorkflowStepInProgress | WorkflowStepCompleted; +export type WorkflowRunEvent = + | WorkflowRunCompletedEvent + | WorkflowRunInProgressEvent + | WorkflowRunRequestedEvent; + +/** + * Activity related to a branch protection rule. For more information, see "[About branch protection rules](https://docs.github.com/en/github/administering-a-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#about-branch-protection-rules)." + */ +export interface BranchProtectionRuleCreatedEvent { + action: "created"; + rule: BranchProtectionRule; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * The branch protection rule. Includes a `name` and all the [branch protection settings](https://docs.github.com/en/github/administering-a-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#about-branch-protection-settings) applied to branches that match the name. Binary settings are boolean. Multi-level configurations are one of `off`, `non_admins`, or `everyone`. Actor and build lists are arrays of strings. 
+ */ +export interface BranchProtectionRule { + id: number; + repository_id: number; + name: string; + created_at: string; + updated_at: string; + pull_request_reviews_enforcement_level: BranchProtectionRuleEnforcementLevel; + required_approving_review_count: BranchProtectionRuleNumber; + dismiss_stale_reviews_on_push: BranchProtectionRuleBoolean; + require_code_owner_review: BranchProtectionRuleBoolean; + authorized_dismissal_actors_only: BranchProtectionRuleBoolean; + ignore_approvals_from_contributors: BranchProtectionRuleBoolean; + required_status_checks: BranchProtectionRuleArray; + required_status_checks_enforcement_level: BranchProtectionRuleEnforcementLevel; + strict_required_status_checks_policy: BranchProtectionRuleBoolean; + signature_requirement_enforcement_level: BranchProtectionRuleEnforcementLevel; + linear_history_requirement_enforcement_level: BranchProtectionRuleEnforcementLevel; + admin_enforced: BranchProtectionRuleBoolean; + create_protected?: BranchProtectionRuleBoolean; + allow_force_pushes_enforcement_level: BranchProtectionRuleEnforcementLevel; + allow_deletions_enforcement_level: BranchProtectionRuleEnforcementLevel; + merge_queue_enforcement_level: BranchProtectionRuleEnforcementLevel; + required_deployments_enforcement_level: BranchProtectionRuleEnforcementLevel; + required_conversation_resolution_level: BranchProtectionRuleEnforcementLevel; + authorized_actors_only: BranchProtectionRuleBoolean; + authorized_actor_names: BranchProtectionRuleArray; +} +/** + * A git repository + */ +export interface Repository { + /** + * Unique identifier of the repository + */ + id: number; + /** + * The GraphQL identifier of the repository. + */ + node_id: string; + /** + * The name of the repository. + */ + name: string; + /** + * The full, globally unique, name of the repository. + */ + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + owner: User; + /** + * The URL to view the repository on GitHub.com. + */ + html_url: string; + /** + * The repository description. + */ + description: string | null; + /** + * Whether the repository is a fork. + */ + fork: boolean; + /** + * The URL to get more information about the repository from the GitHub API. + */ + url: string; + /** + * The API URL to list the forks of the repository. + */ + forks_url: string; + /** + * A template for the API URL to get information about deploy keys on the repository. + */ + keys_url: string; + /** + * A template for the API URL to get information about collaborators of the repository. + */ + collaborators_url: string; + /** + * The API URL to list the teams on the repository. + */ + teams_url: string; + /** + * The API URL to list the hooks on the repository. + */ + hooks_url: string; + /** + * A template for the API URL to get information about issue events on the repository. + */ + issue_events_url: string; + /** + * The API URL to list the events of the repository. + */ + events_url: string; + /** + * A template for the API URL to list the available assignees for issues in the repository. + */ + assignees_url: string; + /** + * A template for the API URL to get information about branches in the repository. + */ + branches_url: string; + /** + * The API URL to get information about tags on the repository. + */ + tags_url: string; + /** + * A template for the API URL to create or retrieve a raw Git blob in the repository. + */ + blobs_url: string; + /** + * A template for the API URL to get information about Git tags of the repository. 
+ */ + git_tags_url: string; + /** + * A template for the API URL to get information about Git refs of the repository. + */ + git_refs_url: string; + /** + * A template for the API URL to create or retrieve a raw Git tree of the repository. + */ + trees_url: string; + /** + * A template for the API URL to get information about statuses of a commit. + */ + statuses_url: string; + /** + * The API URL to get information about the languages of the repository. + */ + languages_url: string; + /** + * The API URL to list the stargazers on the repository. + */ + stargazers_url: string; + /** + * A template for the API URL to list the contributors to the repository. + */ + contributors_url: string; + /** + * The API URL to list the subscribers on the repository. + */ + subscribers_url: string; + /** + * The API URL to subscribe to notifications for this repository. + */ + subscription_url: string; + /** + * A template for the API URL to get information about commits on the repository. + */ + commits_url: string; + /** + * A template for the API URL to get information about Git commits of the repository. + */ + git_commits_url: string; + /** + * A template for the API URL to get information about comments on the repository. + */ + comments_url: string; + /** + * A template for the API URL to get information about issue comments on the repository. + */ + issue_comment_url: string; + /** + * A template for the API URL to get the contents of the repository. + */ + contents_url: string; + /** + * A template for the API URL to compare two commits or refs. + */ + compare_url: string; + /** + * The API URL to merge branches in the repository. + */ + merges_url: string; + /** + * A template for the API URL to download the repository as an archive. + */ + archive_url: string; + /** + * The API URL to list the downloads on the repository. + */ + downloads_url: string; + /** + * A template for the API URL to get information about issues on the repository. + */ + issues_url: string; + /** + * A template for the API URL to get information about pull requests on the repository. + */ + pulls_url: string; + /** + * A template for the API URL to get information about milestones of the repository. + */ + milestones_url: string; + /** + * A template for the API URL to get information about notifications on the repository. + */ + notifications_url: string; + /** + * A template for the API URL to get information about labels of the repository. + */ + labels_url: string; + /** + * A template for the API URL to get information about releases on the repository. + */ + releases_url: string; + /** + * The API URL to list the deployments of the repository. + */ + deployments_url: string; + created_at: number | string; + updated_at: string; + pushed_at: number | string | null; + git_url: string; + ssh_url: string; + clone_url: string; + svn_url: string; + homepage: string | null; + size: number; + stargazers_count: number; + watchers_count: number; + language: string | null; + /** + * Whether issues are enabled. + */ + has_issues: boolean; + /** + * Whether projects are enabled. + */ + has_projects: boolean; + /** + * Whether downloads are enabled. + */ + has_downloads: boolean; + /** + * Whether the wiki is enabled. + */ + has_wiki: boolean; + has_pages: boolean; + /** + * Whether discussions are enabled. + */ + has_discussions?: boolean; + forks_count: number; + mirror_url: string | null; + /** + * Whether the repository is archived. + */ + archived: boolean; + /** + * Returns whether or not this repository is disabled. 
+ */ + disabled?: boolean; + open_issues_count: number; + license: License | null; + forks: number; + open_issues: number; + watchers: number; + stargazers?: number; + /** + * The default branch of the repository. + */ + default_branch: string; + /** + * Whether to allow squash merges for pull requests. + */ + allow_squash_merge?: boolean; + /** + * Whether to allow merge commits for pull requests. + */ + allow_merge_commit?: boolean; + /** + * Whether to allow rebase merges for pull requests. + */ + allow_rebase_merge?: boolean; + /** + * Whether to allow auto-merge for pull requests. + */ + allow_auto_merge?: boolean; + /** + * Whether to allow private forks + */ + allow_forking?: boolean; + allow_update_branch?: boolean; + use_squash_pr_title_as_default?: boolean; + squash_merge_commit_message?: string; + squash_merge_commit_title?: string; + merge_commit_message?: string; + merge_commit_title?: string; + is_template: boolean; + web_commit_signoff_required: boolean; + topics: string[]; + visibility: "public" | "private" | "internal"; + /** + * Whether to delete head branches when pull requests are merged + */ + delete_branch_on_merge?: boolean; + master_branch?: string; + permissions?: { + pull: boolean; + push: boolean; + admin: boolean; + maintain?: boolean; + triage?: boolean; + }; + public?: boolean; + organization?: string; +} +export interface User { + login: string; + id: number; + node_id: string; + name?: string; + email?: string | null; + avatar_url: string; + gravatar_id: string; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: "Bot" | "User" | "Organization"; + site_admin: boolean; +} +export interface License { + key: string; + name: string; + spdx_id: string; + url: string | null; + node_id: string; +} +/** + * Installation + */ +export interface InstallationLite { + /** + * The ID of the installation. + */ + id: number; + node_id: string; +} +export interface Organization { + login: string; + id: number; + node_id: string; + url: string; + html_url?: string; + repos_url: string; + events_url: string; + hooks_url: string; + issues_url: string; + members_url: string; + public_members_url: string; + avatar_url: string; + description: string | null; +} +/** + * Activity related to a branch protection rule. For more information, see "[About branch protection rules](https://docs.github.com/en/github/administering-a-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#about-branch-protection-rules)." + */ +export interface BranchProtectionRuleDeletedEvent { + action: "deleted"; + rule: BranchProtectionRule; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * Activity related to a branch protection rule. For more information, see "[About branch protection rules](https://docs.github.com/en/github/administering-a-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#about-branch-protection-rules)." + */ +export interface BranchProtectionRuleEditedEvent { + action: "edited"; + rule: BranchProtectionRule; + /** + * If the action was `edited`, the changes to the rule. 
+ */ + changes?: { + admin_enforced?: { + from: BranchProtectionRuleBoolean; + }; + allow_deletions_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel | null; + }; + allow_force_pushes_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + authorized_actors_only?: { + from: BranchProtectionRuleBoolean; + }; + authorized_actor_names?: { + from: BranchProtectionRuleArray; + }; + authorized_dismissal_actors_only?: { + from: BranchProtectionRuleBoolean | null; + }; + dismiss_stale_reviews_on_push?: { + from: BranchProtectionRuleBoolean; + }; + pull_request_reviews_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + require_code_owner_review?: { + from: BranchProtectionRuleBoolean; + }; + required_approving_review_count?: { + from: BranchProtectionRuleNumber; + }; + required_conversation_resolution_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + required_deployments_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + required_status_checks?: { + from: BranchProtectionRuleArray; + }; + required_status_checks_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + signature_requirement_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + linear_history_requirement_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CheckRunCompletedEvent { + action: "completed"; + /** + * The [check_run](https://docs.github.com/en/rest/reference/checks#get-a-check-run). + */ + check_run: { + /** + * The id of the check. + */ + id: number; + node_id?: string; + /** + * The SHA of the commit that is being checked. + */ + head_sha: string; + external_id: string; + url: string; + html_url: string; + details_url?: string; + /** + * The current status of the check run. Can be `queued`, `in_progress`, or `completed`. + */ + status: "completed"; + /** + * The result of the completed check run. Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | null; + /** + * The time that the check run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at: string; + /** + * The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + completed_at: string; + output: { + title?: string | null; + summary: string | null; + text: string | null; + annotations_count: number; + annotations_url: string; + }; + /** + * The name of the check run. + */ + name: string; + check_suite: { + /** + * The id of the check suite that this check run is part of. + */ + id: number; + node_id?: string; + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. + */ + head_sha: string; + status: "in_progress" | "completed" | "queued"; + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. 
A pull request matches a check suite if they have the same `head_branch`. + * + * **Note:** + * + * * The `head_sha` of the check suite can differ from the `sha` of the pull request if subsequent pushes are made into the PR. + * * When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. + */ + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + app: App; + created_at: string; + updated_at: string; + }; + app: App; + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + }; + /** + * The action requested by the user. + */ + requested_action?: { + /** + * The integrator reference of the action requested by the user. + */ + identifier?: string; + } | null; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CheckRunPullRequest { + url: string; + id: number; + number: number; + head: { + ref: string; + sha: string; + repo: RepoRef; + }; + base: { + ref: string; + sha: string; + repo: RepoRef; + }; +} +export interface RepoRef { + id: number; + url: string; + name: string; +} +/** + * A deployment to a repository environment. This will only be populated if the check run was created by a GitHub Actions workflow job that references an environment. + */ +export interface CheckRunDeployment { + url: string; + id: number; + node_id: string; + task: string; + original_environment: string; + environment: string; + description: string | null; + created_at: string; + updated_at: string; + statuses_url: string; + repository_url: string; +} +/** + * GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
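+ *
+ * An illustrative sketch of inspecting an app's granted permissions and
+ * subscribed events; the `app` constant is assumed:
+ *
+ * @example
+ * declare const app: App;
+ * const canWriteChecks = app.permissions?.checks === "write";
+ * const listensToPush = app.events?.includes("push") ?? false;
+ * console.log(app.name, { canWriteChecks, listensToPush });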
+ */ +export interface App { + /** + * Unique identifier of the GitHub app + */ + id: number; + /** + * The slug name of the GitHub app + */ + slug?: string; + node_id: string; + owner: User; + /** + * The name of the GitHub app + */ + name: string; + description: string | null; + external_url: string; + html_url: string; + created_at: string; + updated_at: string; + /** + * The set of permissions for the GitHub app + */ + permissions?: { + actions?: "read" | "write"; + administration?: "read" | "write"; + blocking?: "read" | "write"; + checks?: "read" | "write"; + content_references?: "read" | "write"; + contents?: "read" | "write"; + deployments?: "read" | "write"; + discussions?: "read" | "write"; + emails?: "read" | "write"; + environments?: "read" | "write"; + followers?: "read" | "write"; + gpg_keys?: "read" | "write"; + interaction_limits?: "read" | "write"; + issues?: "read" | "write"; + keys?: "read" | "write"; + members?: "read" | "write"; + merge_queues?: "read" | "write"; + metadata?: "read" | "write"; + organization_administration?: "read" | "write"; + organization_hooks?: "read" | "write"; + organization_packages?: "read" | "write"; + organization_plan?: "read" | "write"; + organization_projects?: "read" | "write"; + organization_secrets?: "read" | "write"; + organization_self_hosted_runners?: "read" | "write"; + organization_user_blocking?: "read" | "write"; + packages?: "read" | "write"; + pages?: "read" | "write"; + plan?: "read" | "write"; + pull_requests?: "read" | "write"; + repository_hooks?: "read" | "write"; + repository_projects?: "read" | "write"; + secret_scanning_alerts?: "read" | "write"; + secrets?: "read" | "write"; + security_events?: "read" | "write"; + security_scanning_alert?: "read" | "write"; + single_file?: "read" | "write"; + starring?: "read" | "write"; + statuses?: "read" | "write"; + team_discussions?: "read" | "write"; + vulnerability_alerts?: "read" | "write"; + watching?: "read" | "write"; + workflows?: "read" | "write"; + }; + /** + * The list of events for the GitHub app + */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "merge_group" + | "merge_queue_entry" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "projects_v2_item" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "pull_request_review_thread" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "secret_scanning_alert_location" + | "security_and_analysis" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "workflow_job" + )[]; +} +export interface CheckRunCreatedEvent { + action: "created"; + /** + * The [check_run](https://docs.github.com/en/rest/reference/checks#get-a-check-run). + */ + check_run: { + /** + * The id of the check. + */ + id: number; + node_id?: string; + /** + * The SHA of the commit that is being checked. + */ + head_sha: string; + external_id: string; + url: string; + html_url: string; + details_url?: string; + /** + * The current status of the check run. 
Can be `queued`, `in_progress`, or `completed`. + */ + status: "queued" | "in_progress" | "completed" | "waiting"; + /** + * The result of the completed check run. Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | null; + /** + * The time that the check run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at: string; + /** + * The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + completed_at: string | null; + output: { + title?: string | null; + summary: string | null; + text: string | null; + annotations_count: number; + annotations_url: string; + }; + /** + * The name of the check run. + */ + name: string; + check_suite: { + /** + * The id of the check suite that this check run is part of. + */ + id: number; + node_id?: string; + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. + */ + head_sha: string; + status: "queued" | "in_progress" | "completed"; + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_branch`. + * + * **Note:** + * + * * The `head_sha` of the check suite can differ from the `sha` of the pull request if subsequent pushes are made into the PR. + * * When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. + */ + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + app: App; + created_at: string; + updated_at: string; + }; + app: App; + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + }; + /** + * The action requested by the user. + */ + requested_action?: { + /** + * The integrator reference of the action requested by the user. + */ + identifier?: string; + } | null; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CheckRunRequestedActionEvent { + action: "requested_action"; + /** + * The [check_run](https://docs.github.com/en/rest/reference/checks#get-a-check-run). + */ + check_run: { + /** + * The id of the check. + */ + id: number; + node_id?: string; + /** + * The SHA of the commit that is being checked. + */ + head_sha: string; + external_id: string; + url: string; + html_url: string; + details_url?: string; + /** + * The current status of the check run. Can be `queued`, `in_progress`, or `completed`. + */ + status: "queued" | "in_progress" | "completed"; + /** + * The result of the completed check run. Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | null; + /** + * The time that the check run began. 
This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at: string; + /** + * The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + completed_at: string | null; + output: { + title?: string | null; + summary: string | null; + text: string | null; + annotations_count: number; + annotations_url: string; + }; + /** + * The name of the check run. + */ + name: string; + check_suite: { + /** + * The id of the check suite that this check run is part of. + */ + id: number; + node_id?: string; + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. + */ + head_sha: string; + status: "queued" | "in_progress" | "completed" | "waiting"; + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_branch`. + * + * **Note:** + * + * * The `head_sha` of the check suite can differ from the `sha` of the pull request if subsequent pushes are made into the PR. + * * When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. + */ + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + app: App; + created_at: string; + updated_at: string; + }; + app: App; + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + }; + /** + * The action requested by the user. + */ + requested_action: { + /** + * The integrator reference of the action requested by the user. + */ + identifier?: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CheckRunRerequestedEvent { + action: "rerequested"; + /** + * The [check_run](https://docs.github.com/en/rest/reference/checks#get-a-check-run). + */ + check_run: { + /** + * The id of the check. + */ + id: number; + node_id?: string; + /** + * The SHA of the commit that is being checked. + */ + head_sha: string; + external_id: string; + url: string; + html_url: string; + details_url?: string; + /** + * The phase of the lifecycle that the check is currently in. + */ + status: "completed"; + /** + * The result of the completed check run. Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has `completed`. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | null; + /** + * The time that the check run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at: string; + /** + * The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + completed_at: string; + output: { + title?: string | null; + summary: string | null; + text: string | null; + annotations_count: number; + annotations_url: string; + }; + /** + * The name of the check. + */ + name: string; + check_suite: { + /** + * The id of the check suite that this check run is part of. 
+ */ + id: number; + node_id?: string; + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. + */ + head_sha: string; + status: "completed"; + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale"; + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_branch`. + * + * **Note:** + * + * * The `head_sha` of the check suite can differ from the `sha` of the pull request if subsequent pushes are made into the PR. + * * When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. + */ + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + app: App; + created_at: string; + updated_at: string; + }; + app: App; + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + }; + /** + * The action requested by the user. + */ + requested_action?: { + /** + * The integrator reference of the action requested by the user. + */ + identifier?: string; + } | null; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CheckSuiteCompletedEvent { + action: "completed"; + /** + * The [check_suite](https://docs.github.com/en/rest/reference/checks#suites). + */ + check_suite: { + id: number; + node_id: string; + /** + * The head branch name the changes are on. + */ + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. + */ + head_sha: string; + /** + * The summary status for all check runs that are part of the check suite. Can be `queued`, `requested`, `in_progress`, or `completed`. + */ + status: "requested" | "in_progress" | "completed" | "queued" | null; + /** + * The summary conclusion for all check runs that are part of the check suite. Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has `completed`. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + /** + * URL that points to the check suite API resource. + */ + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_sha` and `head_branch`. When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. + */ + pull_requests: CheckRunPullRequest[]; + app: App; + created_at: string; + updated_at: string; + runs_rerequestable?: boolean; + rerequestable?: boolean; + latest_check_runs_count: number; + check_runs_url: string; + head_commit: SimpleCommit; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface SimpleCommit { + id: string; + tree_id: string; + message: string; + timestamp: string; + author: Committer; + committer: Committer; +} +/** + * Metaproperties for Git author/committer information. + */ +export interface Committer { + /** + * The git author's name. + */ + name: string; + /** + * The git author's email address. 
+ */ + email: string | null; + date?: string; + username?: string; +} +export interface CheckSuiteRequestedEvent { + action: "requested"; + /** + * The [check_suite](https://docs.github.com/en/rest/reference/checks#suites). + */ + check_suite: { + id: number; + node_id: string; + /** + * The head branch name the changes are on. + */ + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. + */ + head_sha: string; + /** + * The summary status for all check runs that are part of the check suite. Can be `queued`, `requested`, `in_progress`, or `completed`. + */ + status: "requested" | "in_progress" | "completed" | "queued" | null; + /** + * The summary conclusion for all check runs that are part of the check suite. Can be one of `success`, `failure`,` neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + /** + * URL that points to the check suite API resource. + */ + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_sha` and `head_branch`. When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. + */ + pull_requests: CheckRunPullRequest[]; + app: App; + created_at: string; + updated_at: string; + runs_rerequestable?: boolean; + rerequestable?: boolean; + latest_check_runs_count: number; + check_runs_url: string; + head_commit: SimpleCommit; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CheckSuiteRerequestedEvent { + action: "rerequested"; + /** + * The [check_suite](https://docs.github.com/en/rest/reference/checks#suites). + */ + check_suite: { + id: number; + node_id: string; + /** + * The head branch name the changes are on. + */ + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. + */ + head_sha: string; + /** + * The summary status for all check runs that are part of the check suite. Can be `queued`, `requested`, `in_progress`, or `completed`. + */ + status: "requested" | "in_progress" | "completed" | "queued" | null; + /** + * The summary conclusion for all check runs that are part of the check suite. Can be one of `success`, `failure`,` neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + /** + * URL that points to the check suite API resource. + */ + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_sha` and `head_branch`. When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. 
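+ *
+ * A minimal sketch of walking the matched pull requests when a suite is
+ * re-requested; the `event` constant is assumed:
+ *
+ * @example
+ * declare const event: CheckSuiteRerequestedEvent;
+ * for (const pr of event.check_suite.pull_requests) {
+ *   console.log(`#${pr.number}: ${pr.head.ref} -> ${pr.base.ref}`);
+ * }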
+ */ + pull_requests: CheckRunPullRequest[]; + app: App; + created_at: string; + updated_at: string; + latest_check_runs_count: number; + check_runs_url: string; + head_commit: SimpleCommit; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CodeScanningAlertAppearedInBranchEvent { + action: "appeared_in_branch"; + /** + * The code scanning alert involved in the event. + */ + alert: { + /** + * The code scanning alert number. + */ + number: number; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + instances: AlertInstance[]; + most_recent_instance?: AlertInstance; + /** + * State of a code scanning alert. + */ + state: "open" | "dismissed" | "fixed"; + dismissed_by: User | null; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: string | null; + /** + * The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + */ + dismissed_reason: "false positive" | "won't fix" | "used in tests" | null; + rule: { + /** + * A unique identifier for the rule used to detect the alert. + */ + id: string; + /** + * The severity of the alert. + */ + severity: "none" | "note" | "warning" | "error" | null; + /** + * A short description of the rule used to detect the alert. + */ + description: string; + }; + tool: { + /** + * The name of the tool used to generate the code scanning analysis alert. + */ + name: string; + /** + * The version of the tool used to detect the alert. + */ + version: string | null; + }; + }; + /** + * The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + ref: string; + /** + * The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + commit_oid: string; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +export interface AlertInstance { + /** + * The full Git reference, formatted as `refs/heads/`. + */ + ref: string; + /** + * Identifies the configuration under which the analysis was executed. For example, in GitHub Actions this includes the workflow filename and job name. + */ + analysis_key: string; + /** + * Identifies the variable values associated with the environment in which the analysis that generated this alert instance was performed, such as the language that was analyzed. + */ + environment: string; + /** + * State of a code scanning alert. 
+ */ + state: "open" | "dismissed" | "fixed"; + commit_sha?: string; + message?: { + text?: string; + }; + location?: { + path?: string; + start_line?: number; + end_line?: number; + start_column?: number; + end_column?: number; + }; + classifications?: string[]; +} +export interface GitHubOrg { + login: "github"; + id: 9919; + node_id: "MDEyOk9yZ2FuaXphdGlvbjk5MTk="; + name?: "GitHub"; + email?: null; + avatar_url: "https://avatars.githubusercontent.com/u/9919?v=4"; + gravatar_id: ""; + url: "https://api.github.com/users/github"; + html_url: "https://github.com/github"; + followers_url: "https://api.github.com/users/github/followers"; + following_url: "https://api.github.com/users/github/following{/other_user}"; + gists_url: "https://api.github.com/users/github/gists{/gist_id}"; + starred_url: "https://api.github.com/users/github/starred{/owner}{/repo}"; + subscriptions_url: "https://api.github.com/users/github/subscriptions"; + organizations_url: "https://api.github.com/users/github/orgs"; + repos_url: "https://api.github.com/users/github/repos"; + events_url: "https://api.github.com/users/github/events{/privacy}"; + received_events_url: "https://api.github.com/users/github/received_events"; + type: "Organization"; + site_admin: false; +} +export interface CodeScanningAlertClosedByUserEvent { + action: "closed_by_user"; + /** + * The code scanning alert involved in the event. + */ + alert: { + /** + * The code scanning alert number. + */ + number: number; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + instances: (AlertInstance & { + state: "dismissed"; + })[]; + most_recent_instance?: AlertInstance; + /** + * State of a code scanning alert. + */ + state: "dismissed"; + dismissed_by: User; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: string; + /** + * The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + */ + dismissed_reason: "false positive" | "won't fix" | "used in tests" | null; + rule: { + /** + * A unique identifier for the rule used to detect the alert. + */ + id: string; + /** + * The severity of the alert. + */ + severity: "none" | "note" | "warning" | "error" | null; + /** + * A short description of the rule used to detect the alert. + */ + description: string; + name?: string; + full_description?: string; + tags?: null; + help?: null; + }; + tool: { + /** + * The name of the tool used to generate the code scanning analysis alert. + */ + name: string; + /** + * The version of the tool used to detect the alert. + */ + version: string | null; + guid?: string | null; + }; + }; + /** + * The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + ref: string; + /** + * The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + commit_oid: string; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CodeScanningAlertCreatedEvent { + action: "created"; + /** + * The code scanning alert involved in the event. 
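+ *
+ * An illustrative sketch of reading the rule and tool metadata from a newly
+ * created alert; the `event` constant is assumed:
+ *
+ * @example
+ * declare const event: CodeScanningAlertCreatedEvent;
+ * const { rule, tool } = event.alert;
+ * console.log(`${tool.name}: ${rule.description} (severity: ${rule.severity})`);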
+ */ + alert: { + /** + * The code scanning alert number. + */ + number: number; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + instances: (AlertInstance & { + state: "open" | "dismissed"; + })[]; + most_recent_instance?: AlertInstance; + /** + * State of a code scanning alert. + */ + state: "open" | "dismissed"; + dismissed_by: null; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: null; + /** + * The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + */ + dismissed_reason: null; + rule: { + /** + * A unique identifier for the rule used to detect the alert. + */ + id: string; + /** + * The severity of the alert. + */ + severity: "none" | "note" | "warning" | "error" | null; + /** + * A short description of the rule used to detect the alert. + */ + description: string; + name?: string; + full_description?: string; + tags?: null; + help?: null; + }; + tool: { + /** + * The name of the tool used to generate the code scanning analysis alert. + */ + name: string; + /** + * The version of the tool used to detect the alert. + */ + version: string | null; + guid?: string | null; + }; + }; + /** + * The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + ref: string; + /** + * The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + commit_oid: string; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +export interface CodeScanningAlertFixedEvent { + action: "fixed"; + /** + * The code scanning alert involved in the event. + */ + alert: { + /** + * The code scanning alert number. + */ + number: number; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + instances: (AlertInstance & { + state: "fixed"; + })[]; + /** + * State of a code scanning alert. + */ + state: "fixed"; + dismissed_by: User | null; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: string | null; + /** + * The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + */ + dismissed_reason: "false positive" | "won't fix" | "used in tests" | null; + rule: { + /** + * A unique identifier for the rule used to detect the alert. + */ + id: string; + /** + * The severity of the alert. + */ + severity: "none" | "note" | "warning" | "error" | null; + /** + * A short description of the rule used to detect the alert. + */ + description: string; + name?: string; + full_description?: string; + tags?: null; + help?: null; + }; + tool: { + /** + * The name of the tool used to generate the code scanning analysis alert. + */ + name: string; + /** + * The version of the tool used to detect the alert. 
+ */ + version: string | null; + guid?: string | null; + }; + most_recent_instance?: AlertInstance; + instances_url?: string; + }; + /** + * The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + ref: string; + /** + * The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + commit_oid: string; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +export interface CodeScanningAlertReopenedEvent { + action: "reopened"; + /** + * The code scanning alert involved in the event. + */ + alert: { + /** + * The code scanning alert number. + */ + number: number; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + instances: (AlertInstance & { + state: "open"; + })[]; + most_recent_instance?: AlertInstance; + /** + * State of a code scanning alert. + */ + state: "open" | "dismissed" | "fixed"; + dismissed_by: null; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: null; + /** + * The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + */ + dismissed_reason: null; + rule: { + /** + * A unique identifier for the rule used to detect the alert. + */ + id: string; + /** + * The severity of the alert. + */ + severity: "none" | "note" | "warning" | "error" | null; + /** + * A short description of the rule used to detect the alert. + */ + description: string; + name?: string; + full_description?: string; + tags?: null; + help?: null; + }; + tool: { + /** + * The name of the tool used to generate the code scanning analysis alert. + */ + name: string; + /** + * The version of the tool used to detect the alert. + */ + version: string | null; + guid?: string | null; + }; + }; + /** + * The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + ref: string; + /** + * The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + commit_oid: string; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +export interface CodeScanningAlertReopenedByUserEvent { + action: "reopened_by_user"; + /** + * The code scanning alert involved in the event. + */ + alert: { + /** + * The code scanning alert number. + */ + number: number; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + instances: (AlertInstance & { + state: "open"; + })[]; + most_recent_instance?: AlertInstance; + /** + * State of a code scanning alert. + */ + state: "open"; + dismissed_by: null; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. 
+ */ + dismissed_at: null; + /** + * The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + */ + dismissed_reason: null; + rule: { + /** + * A unique identifier for the rule used to detect the alert. + */ + id: string; + /** + * The severity of the alert. + */ + severity: "none" | "note" | "warning" | "error" | null; + /** + * A short description of the rule used to detect the alert. + */ + description: string; + }; + tool: { + /** + * The name of the tool used to generate the code scanning analysis alert. + */ + name: string; + /** + * The version of the tool used to detect the alert. + */ + version: string | null; + }; + }; + /** + * The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + ref: string; + /** + * The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + commit_oid: string; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * A commit comment is created. The type of activity is specified in the `action` property. + */ +export interface CommitCommentCreatedEvent { + /** + * The action performed. Can be `created`. + */ + action: "created"; + /** + * The [commit comment](https://docs.github.com/en/rest/reference/repos#get-a-commit-comment) resource. + */ + comment: { + url: string; + html_url: string; + /** + * The ID of the commit comment. + */ + id: number; + /** + * The node ID of the commit comment. + */ + node_id: string; + user: User; + /** + * The line index in the diff to which the comment applies. + */ + position: number | null; + /** + * The line of the blob to which the comment applies. The last line of the range for a multi-line comment + */ + line: number | null; + /** + * The relative path of the file to which the comment applies. + */ + path: string | null; + /** + * The SHA of the commit to which the comment applies. + */ + commit_id: string; + created_at: string; + updated_at: string; + author_association: AuthorAssociation; + /** + * The text of the comment. + */ + body: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * A Git branch or tag is created. + */ +export interface CreateEvent { + /** + * The [`git ref`](https://docs.github.com/en/rest/reference/git#get-a-reference) resource. + */ + ref: string; + /** + * The type of Git ref object created in the repository. Can be either `branch` or `tag`. + */ + ref_type: "tag" | "branch"; + /** + * The name of the repository's default branch (usually `main`). + */ + master_branch: string; + /** + * The repository's current description. + */ + description: string | null; + /** + * The pusher type for the event. Can be either `user` or a deploy key. + */ + pusher_type: string; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * A Git branch or tag is deleted. + */ +export interface DeleteEvent { + /** + * The [`git ref`](https://docs.github.com/en/rest/reference/git#get-a-reference) resource. + */ + ref: string; + /** + * The type of Git ref object deleted in the repository. Can be either `branch` or `tag`. + */ + ref_type: "tag" | "branch"; + /** + * The pusher type for the event. 
Can be either `user` or a deploy key. + */ + pusher_type: string; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DependabotAlertCreatedEvent { + action: "created"; + alert: DependabotAlert & { + state: "open"; + fixed_at: null; + dismissed_at: null; + dismissed_by: null; + dismissed_reason: null; + dismissed_comment: null; + }; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +/** + * A Dependabot alert. + */ +export interface DependabotAlert { + /** + * The security alert number. + */ + number: number; + /** + * The state of the Dependabot alert. + */ + state: "dismissed" | "fixed" | "open"; + /** + * Details for the vulnerable dependency. + */ + dependency: { + package: DependabotAlertPackage; + /** + * The full path to the dependency manifest file, relative to the root of the repository. + */ + manifest_path: string; + /** + * The execution scope of the vulnerable dependency. + */ + scope: "development" | "runtime" | null; + }; + /** + * Details for the GitHub Security Advisory. + */ + security_advisory: { + /** + * Details for the GitHub Security Advisory. + */ + ghsa_id: string; + /** + * The unique CVE ID assigned to the advisory. + */ + cve_id: string | null; + /** + * A short, plain text summary of the advisory. + */ + summary: string; + /** + * A long-form Markdown-supported description of the advisory. + */ + description: string; + /** + * Vulnerable version range information for the advisory. + */ + vulnerabilities: { + package: DependabotAlertPackage; + /** + * The severity of the vulnerability. + */ + severity: "low" | "medium" | "high" | "critical"; + /** + * Conditions that identify vulnerable versions of this vulnerability's package. + */ + vulnerable_version_range: string; + /** + * Details pertaining to the package version that patches this vulnerability. + */ + first_patched_version: { + /** + * The package version that patches this vulnerability. + */ + identifier: string; + }; + }[]; + /** + * The severity of the advisory. + */ + severity: "low" | "medium" | "high" | "critical"; + cvss: SecurityAdvisoryCvss; + /** + * Details for the advisory pertaining to Common Weakness Enumeration. + */ + cwes: SecurityAdvisoryCwes[]; + /** + * Values that identify this advisory among security information sources. + */ + identifiers: { + /** + * The type of advisory identifier. + */ + type: "CVE" | "GHSA"; + /** + * The value of the advisory identifer. + */ + value: string; + }[]; + /** + * Links to additional advisory information. + */ + references: { + /** + * The URL of the reference. + */ + url: string; + }[]; + /** + * The time that the advisory was published in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + published_at: string; + /** + * The time that the advisory was last modified in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + updated_at: string; + /** + * The time that the advisory was withdrawn in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + withdrawn_at: string | null; + }; + /** + * Details pertaining to one vulnerable version range for the advisory. + */ + security_vulnerability: { + package: DependabotAlertPackage; + /** + * The severity of the vulnerability. + */ + severity: "low" | "medium" | "high" | "critical"; + /** + * Conditions that identify vulnerable versions of this vulnerability's package. 
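+ *
+ * A minimal sketch of summarizing the vulnerable range and its patched
+ * version; the `alert` constant is assumed:
+ *
+ * @example
+ * declare const alert: DependabotAlert;
+ * const vuln = alert.security_vulnerability;
+ * console.log(
+ *   `${vuln.package.name} ${vuln.vulnerable_version_range} is fixed in ` +
+ *     vuln.first_patched_version.identifier
+ * );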
+ */ + vulnerable_version_range: string; + /** + * Details pertaining to the package version that patches this vulnerability. + */ + first_patched_version: { + /** + * The package version that patches this vulnerability. + */ + identifier: string; + }; + }; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + created_at: string; + /** + * The time that the alert was last updated in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + updated_at: string; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: string | null; + dismissed_by: User | null; + /** + * The reason that the alert was dismissed. + */ + dismissed_reason: + | "fix_started" + | "inaccurate" + | "no_bandwidth" + | "not_used" + | "tolerable_risk" + | null; + /** + * An optional comment associated with the alert's dismissal. + */ + dismissed_comment: string | null; + /** + * The time that the alert was no longer detected and was considered fixed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + fixed_at: string | null; +} +/** + * Details for the vulnerable package. + */ +export interface DependabotAlertPackage { + /** + * The unique package name within its ecosystem. + */ + name: string; + /** + * The package's language or package management ecosystem. + */ + ecosystem: string; +} +/** + * Details for the advisory pertaining to the Common Vulnerability Scoring System. + */ +export interface SecurityAdvisoryCvss { + /** + * The overall CVSS score of the advisory. + */ + score: number; + /** + * The full CVSS vector string for the advisory. + */ + vector_string: string | null; +} +/** + * A CWE weakness assigned to the advisory. + */ +export interface SecurityAdvisoryCwes { + /** + * The unique CWE ID. + */ + cwe_id: string; + /** + * The short, plain text name of the CWE. + */ + name: string; +} +export interface DependabotAlertDismissedEvent { + action: "dismissed"; + alert: DependabotAlert & { + state: "dismissed"; + dismissed_at: string; + dismissed_by: User; + dismissed_reason: + | "fix_started" + | "inaccurate" + | "no_bandwidth" + | "not_used" + | "tolerable_risk"; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DependabotAlertFixedEvent { + action: "fixed"; + alert: DependabotAlert & { + state: "fixed"; + fixed_at: string; + }; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +export interface DependabotAlertReintroducedEvent { + action: "reintroduced"; + alert: DependabotAlert; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +export interface DependabotAlertReopenedEvent { + action: "reopened"; + alert: DependabotAlert; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DeployKeyCreatedEvent { + action: "created"; + /** + * The [`deploy key`](https://docs.github.com/en/rest/reference/deployments#get-a-deploy-key) resource. 
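+ *
+ * A minimal sketch of logging the newly added key; the `event` constant is
+ * assumed:
+ *
+ * @example
+ * declare const event: DeployKeyCreatedEvent;
+ * const { title, read_only, verified } = event.key;
+ * console.log(`deploy key "${title}" (read-only: ${read_only}, verified: ${verified})`);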
+ */ + key: { + id: number; + key: string; + url: string; + title: string; + verified: boolean; + created_at: string; + read_only: boolean; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DeployKeyDeletedEvent { + action: "deleted"; + /** + * The [`deploy key`](https://docs.github.com/en/rest/reference/deployments#get-a-deploy-key) resource. + */ + key: { + id: number; + key: string; + url: string; + title: string; + verified: boolean; + created_at: string; + read_only: boolean; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DeploymentCreatedEvent { + action: "created"; + deployment: Deployment; + workflow: Workflow | null; + workflow_run: DeploymentWorkflowRun | null; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * The [deployment](https://docs.github.com/en/rest/reference/deployments#list-deployments). + */ +export interface Deployment { + url: string; + /** + * Unique identifier of the deployment + */ + id: number; + node_id: string; + sha: string; + /** + * The ref to deploy. This can be a branch, tag, or sha. + */ + ref: string; + /** + * Parameter to specify a task to execute + */ + task: string; + payload: { + [k: string]: unknown; + }; + original_environment: string; + /** + * Name of the target deployment environment. + */ + environment: string; + /** + * Specifies if the given environment will no longer exist at some point in the future. Default: false. + */ + transient_environment?: boolean; + /** + * Specifies if the given environment is one that end-users directly interact with. Default: false. + */ + production_environment?: boolean; + description: string | null; + creator: User; + created_at: string; + updated_at: string; + statuses_url: string; + repository_url: string; + performed_via_github_app?: App | null; +} +export interface Workflow { + badge_url: string; + created_at: string; + html_url: string; + id: number; + name: string; + node_id: string; + path: string; + state: string; + updated_at: string; + url: string; +} +export interface DeploymentWorkflowRun { + id: number; + name: string; + path?: string; + display_title?: string; + node_id: string; + head_branch: string; + head_sha: string; + run_number: number; + event: string; + status: "requested" | "in_progress" | "completed" | "queued"; + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + workflow_id: number; + check_suite_id: number; + check_suite_node_id: string; + url: string; + html_url: string; + pull_requests: CheckRunPullRequest[]; + created_at: string; + updated_at: string; + actor: User; + triggering_actor: User; + run_attempt: number; + run_started_at: string; + referenced_workflows?: ReferencedWorkflow[]; +} +/** + * A workflow referenced/reused by the initial caller workflow + */ +export interface ReferencedWorkflow { + path: string; + sha: string; + ref?: string; +} +export interface DeploymentStatusCreatedEvent { + action: "created"; + /** + * The [deployment status](https://docs.github.com/en/rest/reference/deployments#list-deployment-statuses). + */ + deployment_status: { + url: string; + id: number; + node_id: string; + /** + * The new state. Can be `pending`, `success`, `failure`, or `error`. 
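+ *
+ * An illustrative sketch; note that `state` is typed as a plain string here,
+ * so the comparison below is not narrowed by the compiler. The `event`
+ * constant is assumed:
+ *
+ * @example
+ * declare const event: DeploymentStatusCreatedEvent;
+ * if (event.deployment_status.state === "failure") {
+ *   console.log(`deployment to ${event.deployment.environment} failed`);
+ * }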
+ */ + state: string; + creator: User; + /** + * The optional human-readable description added to the status. + */ + description: string; + environment: string; + environment_url?: string | ""; + log_url?: string; + /** + * The optional link added to the status. + */ + target_url: string; + created_at: string; + updated_at: string; + deployment_url: string; + repository_url: string; + performed_via_github_app?: App | null; + }; + deployment: Deployment; + check_run?: { + /** + * The id of the check. + */ + id: number; + /** + * The name of the check run. + */ + name: string; + node_id: string; + /** + * The SHA of the commit that is being checked. + */ + head_sha: string; + external_id: string; + url: string; + html_url: string; + details_url: string; + /** + * The current status of the check run. Can be `queued`, `in_progress`, or `completed`. + */ + status: "queued" | "in_progress" | "completed" | "waiting"; + /** + * The result of the completed check run. Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | null; + started_at: string; + completed_at: string | null; + }; + workflow_run?: DeploymentWorkflowRun; + workflow?: Workflow; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionAnsweredEvent { + action: "answered"; + discussion: Discussion & { + category: { + is_answerable: true; + }; + answer_html_url: string; + answer_chosen_at: string; + answer_chosen_by: User; + }; + answer: { + id: number; + node_id: string; + html_url: string; + parent_id: null; + child_comment_count: number; + repository_url: string; + discussion_id: number; + author_association: AuthorAssociation; + user: User; + created_at: string; + updated_at: string; + body: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface Discussion { + repository_url: string; + category: { + id: number; + repository_id: number; + emoji: string; + name: string; + description: string; + created_at: string; + updated_at: string; + slug: string; + is_answerable: boolean; + }; + answer_html_url: string | null; + answer_chosen_at: string | null; + answer_chosen_by: User | null; + html_url: string; + id: number; + node_id: string; + number: number; + /** + * The discussion post's title. + */ + title: string; + user: User; + state: "open" | "locked" | "converting"; + locked: boolean; + comments: number; + created_at: string; + updated_at: string; + author_association: AuthorAssociation; + active_lock_reason: string | null; + /** + * The discussion post's body text. 
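+ *
+ * A minimal sketch of building a one-line summary from a discussion payload;
+ * the `discussion` constant is assumed:
+ *
+ * @example
+ * declare const discussion: Discussion;
+ * console.log(`#${discussion.number} [${discussion.category.name}] ${discussion.title}`);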
+ */ + body: string; + reactions?: Reactions; +} +export interface Reactions { + url: string; + total_count: number; + "+1": number; + "-1": number; + laugh: number; + hooray: number; + confused: number; + heart: number; + rocket: number; + eyes: number; +} +export interface DiscussionCategoryChangedEvent { + changes: { + category: { + from: { + id: number; + repository_id: number; + emoji: string; + name: string; + description: string; + created_at: string; + updated_at: string; + slug: string; + is_answerable: boolean; + }; + }; + }; + action: "category_changed"; + discussion: Discussion; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionCreatedEvent { + action: "created"; + discussion: Discussion & { + state: "open" | "converting"; + locked: false; + answer_html_url: null; + answer_chosen_at: null; + answer_chosen_by: null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionDeletedEvent { + action: "deleted"; + discussion: Discussion; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionEditedEvent { + changes?: { + title?: { + from: string; + }; + body?: { + from: string; + }; + }; + action: "edited"; + discussion: Discussion; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionLabeledEvent { + action: "labeled"; + discussion: Discussion; + label: Label; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface Label { + id: number; + node_id: string; + /** + * URL for the label + */ + url: string; + /** + * The name of the label. 
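+ *
+ * An illustrative sketch of reacting to a label being added to a discussion;
+ * the `event` constant is assumed:
+ *
+ * @example
+ * declare const event: DiscussionLabeledEvent;
+ * console.log(`"${event.label.name}" (#${event.label.color}) added to "${event.discussion.title}"`);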
+ */ + name: string; + description: string | null; + /** + * 6-character hex code, without the leading #, identifying the color + */ + color: string; + default: boolean; +} +export interface DiscussionLockedEvent { + action: "locked"; + discussion: Discussion & { + state: "locked"; + locked: true; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionPinnedEvent { + action: "pinned"; + discussion: Discussion; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionTransferredEvent { + changes: { + new_discussion: Discussion; + new_repository: Repository; + }; + action: "transferred"; + discussion: Discussion; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionUnansweredEvent { + action: "unanswered"; + discussion: Discussion & { + category: { + is_answerable: true; + }; + answer_html_url: null; + answer_chosen_at: null; + answer_chosen_by: null; + }; + old_answer: { + id: number; + node_id: string; + html_url: string; + parent_id: null; + child_comment_count: number; + repository_url: string; + discussion_id: number; + author_association: AuthorAssociation; + user: User; + created_at: string; + updated_at: string; + body: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionUnlabeledEvent { + action: "unlabeled"; + discussion: Discussion; + label: Label; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionUnlockedEvent { + action: "unlocked"; + discussion: Discussion & { + state: "open"; + locked: false; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionUnpinnedEvent { + action: "unpinned"; + discussion: Discussion; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionCommentCreatedEvent { + action: "created"; + comment: { + id: number; + node_id: string; + html_url: string; + parent_id: number | null; + child_comment_count: number; + repository_url: string; + discussion_id: number; + author_association: AuthorAssociation; + user: User; + created_at: string; + updated_at: string; + /** + * The main text of the comment. + */ + body: string; + reactions: Reactions; + }; + discussion: Discussion; + repository: Repository; + sender: User; + installation: InstallationLite; + organization?: Organization; +} +export interface DiscussionCommentDeletedEvent { + action: "deleted"; + comment: { + id: number; + node_id: string; + html_url: string; + parent_id: number | null; + child_comment_count: number; + repository_url: string; + discussion_id: number; + author_association: AuthorAssociation; + user: User; + created_at: string; + updated_at: string; + /** + * The main text of the comment. 
+ */ + body: string; + reactions: Reactions; + }; + discussion: Discussion; + repository: Repository; + sender: User; + installation: InstallationLite; + organization?: Organization; +} +export interface DiscussionCommentEditedEvent { + changes: { + body: { + from: string; + }; + }; + action: "edited"; + comment: { + id: number; + node_id: string; + html_url: string; + parent_id: number | null; + child_comment_count: number; + repository_url: string; + discussion_id: number; + author_association: AuthorAssociation; + user: User; + created_at: string; + updated_at: string; + /** + * The main text of the comment. + */ + body: string; + reactions: Reactions; + }; + discussion: Discussion; + repository: Repository; + sender: User; + installation: InstallationLite; + organization?: Organization; +} +/** + * A user forks a repository. + */ +export interface ForkEvent { + /** + * The created [`repository`](https://docs.github.com/en/rest/reference/repos#get-a-repository) resource. + */ + forkee: Repository & { + fork?: true; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface GithubAppAuthorizationRevokedEvent { + action: "revoked"; + sender: User; +} +/** + * A wiki page is created or updated. + */ +export interface GollumEvent { + /** + * The pages that were updated. + */ + pages: { + /** + * The name of the page. + */ + page_name: string; + /** + * The current page title. + */ + title: string; + summary: null; + /** + * The action that was performed on the page. Can be `created` or `edited`. + */ + action: "created" | "edited"; + /** + * The latest commit SHA of the page. + */ + sha: string; + /** + * Points to the HTML wiki page. + */ + html_url: string; + }[]; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface InstallationCreatedEvent { + action: "created"; + installation: Installation; + /** + * An array of repository objects that the installation can access. + */ + repositories?: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. + */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + requester?: User; + sender: User; +} +/** + * The GitHub App installation. + */ +export interface Installation { + /** + * The ID of the installation. + */ + id: number; + account: User; + /** + * Describe whether all repositories have been selected or there's a selection involved + */ + repository_selection: "all" | "selected"; + access_tokens_url: string; + repositories_url: string; + html_url: string; + app_id: number; + app_slug?: string; + /** + * The ID of the user or organization this token is being scoped to. + */ + target_id: number; + target_type: "User" | "Organization"; + permissions: { + /** + * The level of permission granted to the access token for GitHub Actions workflows, workflow runs, and artifacts. + */ + actions?: "read" | "write"; + /** + * The level of permission granted to the access token for repository creation, deletion, settings, teams, and collaborators creation. + */ + administration?: "read" | "write"; + blocking?: "read" | "write"; + /** + * The level of permission granted to the access token for checks on code. 
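+ *
+ * A minimal sketch of checking a granted permission before attempting a
+ * write; the `installation` constant is assumed:
+ *
+ * @example
+ * declare const installation: Installation;
+ * const canWriteChecks = installation.permissions.checks === "write";
+ * console.log({ canWriteChecks });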
+ */ + checks?: "read" | "write"; + content_references?: "read" | "write"; + /** + * The level of permission granted to the access token for repository contents, commits, branches, downloads, releases, and merges. + */ + contents?: "read" | "write"; + /** + * The level of permission granted to the access token for deployments and deployment statuses. + */ + deployments?: "read" | "write"; + discussions?: "read" | "write"; + emails?: "read" | "write"; + /** + * The level of permission granted to the access token for managing repository environments. + */ + environments?: "read" | "write"; + /** + * The level of permission granted to the access token for issues and related comments, assignees, labels, and milestones. + */ + issues?: "read" | "write"; + /** + * The level of permission granted to the access token for organization teams and members. + */ + members?: "read" | "write"; + merge_queues?: "read" | "write"; + /** + * The level of permission granted to the access token to search repositories, list collaborators, and access repository metadata. + */ + metadata?: "read" | "write"; + /** + * The level of permission granted to the access token to manage access to an organization. + */ + organization_administration?: "read" | "write"; + organization_events?: "read" | "write"; + /** + * The level of permission granted to the access token to manage the post-receive hooks for an organization. + */ + organization_hooks?: "read" | "write"; + /** + * The level of permission granted to the access token for organization packages published to GitHub Packages. + */ + organization_packages?: "read" | "write"; + /** + * The level of permission granted to the access token for viewing an organization's plan. + */ + organization_plan?: "read" | "write"; + /** + * The level of permission granted to the access token to manage organization projects and projects beta (where available). + */ + organization_projects?: "read" | "write"; + /** + * The level of permission granted to the access token to manage organization secrets. + */ + organization_secrets?: "read" | "write"; + /** + * The level of permission granted to the access token to view and manage GitHub Actions self-hosted runners available to an organization. + */ + organization_self_hosted_runners?: "read" | "write"; + /** + * The level of permission granted to the access token to view and manage users blocked by the organization. + */ + organization_user_blocking?: "read" | "write"; + /** + * The level of permission granted to the access token for packages published to GitHub Packages. + */ + packages?: "read" | "write"; + /** + * The level of permission granted to the access token to retrieve Pages statuses, configuration, and builds, as well as create new builds. + */ + pages?: "read" | "write"; + /** + * The level of permission granted to the access token for pull requests and related comments, assignees, labels, milestones, and merges. + */ + pull_requests?: "read" | "write"; + /** + * The level of permission granted to the access token to manage the post-receive hooks for a repository. + */ + repository_hooks?: "read" | "write"; + /** + * The level of permission granted to the access token to manage repository projects, columns, and cards. + */ + repository_projects?: "read" | "write"; + /** + * The level of permission granted to the access token to view and manage secret scanning alerts. + */ + secret_scanning_alerts?: "read" | "write"; + /** + * The level of permission granted to the access token to manage repository secrets. 
+ */ + secrets?: "read" | "write"; + /** + * The level of permission granted to the access token to view and manage security events like code scanning alerts. + */ + security_events?: "read" | "write"; + security_scanning_alert?: "read" | "write"; + /** + * The level of permission granted to the access token to manage just a single file. + */ + single_file?: "read" | "write"; + /** + * The level of permission granted to the access token for commit statuses. + */ + statuses?: "read" | "write"; + /** + * The level of permission granted to the access token to manage team discussions and related comments. + */ + team_discussions?: "read" | "write"; + /** + * The level of permission granted to the access token to manage Dependabot alerts. + */ + vulnerability_alerts?: "read" | "write"; + /** + * The level of permission granted to the access token to update GitHub Actions workflow files. + */ + workflows?: "read" | "write"; + }; + events: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "merge_queue_entry" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "projects_v2_item" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "pull_request_review_thread" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "secret_scanning_alert_location" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_job" + | "workflow_run" + )[]; + created_at: string | number; + updated_at: string | number; + single_file_name: string | null; + has_multiple_single_files?: boolean; + single_file_paths?: string[]; + suspended_by: User | null; + suspended_at: string | null; +} +export interface InstallationDeletedEvent { + action: "deleted"; + installation: Installation; + /** + * An array of repository objects that the installation can access. + */ + repositories?: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. + */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + requester?: null; + sender: User; +} +export interface InstallationNewPermissionsAcceptedEvent { + action: "new_permissions_accepted"; + installation: Installation; + /** + * An array of repository objects that the installation can access. + */ + repositories?: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. + */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + requester?: null; + sender: User; +} +export interface InstallationSuspendEvent { + action: "suspend"; + installation: Installation & { + suspended_by: User; + suspended_at: string; + }; + /** + * An array of repository objects that the installation can access. + */ + repositories?: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. 
+ */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + requester?: null; + sender: User; +} +export interface InstallationUnsuspendEvent { + action: "unsuspend"; + installation: Installation & { + suspended_by: null; + suspended_at: null; + }; + /** + * An array of repository objects that the installation can access. + */ + repositories?: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. + */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + requester?: null; + sender: User; +} +export interface InstallationRepositoriesAddedEvent { + action: "added"; + installation: Installation; + /** + * Describe whether all repositories have been selected or there's a selection involved + */ + repository_selection: "all" | "selected"; + /** + * An array of repository objects, which were added to the installation. + */ + repositories_added: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. + */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + /** + * An array of repository objects, which were removed from the installation. + * + * @maxItems 0 + */ + repositories_removed: []; + requester: User | null; + sender: User; +} +export interface InstallationRepositoriesRemovedEvent { + action: "removed"; + installation: Installation; + /** + * Describe whether all repositories have been selected or there's a selection involved + */ + repository_selection: "all" | "selected"; + /** + * An array of repository objects, which were added to the installation. + * + * @maxItems 0 + */ + repositories_added: []; + /** + * An array of repository objects, which were removed from the installation. + */ + repositories_removed: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. + */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + requester: User | null; + sender: User; +} +/** + * Somebody renamed the user or organization account that a GitHub App is installed on. 
+ */ +export interface InstallationTargetRenamedEvent { + changes: { + login?: { + from: string; + }; + slug?: { + from: string; + }; + }; + action: "renamed"; + account: { + avatar_url: string; + created_at?: string; + description?: null; + events_url?: string; + followers?: number; + followers_url?: string; + following?: number; + following_url?: string; + gists_url?: string; + gravatar_id?: string; + has_organization_projects?: boolean; + has_repository_projects?: boolean; + hooks_url?: string; + html_url: string; + id: number; + is_verified?: boolean; + issues_url?: string; + login?: string; + members_url?: string; + name?: string; + node_id: string; + organizations_url?: string; + public_gists?: number; + public_members_url?: string; + public_repos?: number; + received_events_url?: string; + repos_url?: string; + site_admin?: boolean; + slug?: string; + starred_url?: string; + subscriptions_url?: string; + type?: "Bot" | "User" | "Organization"; + updated_at?: string; + url?: string; + website_url?: null; + }; + repository?: Repository; + sender?: User; + installation: InstallationLite; + organization?: Organization; + target_type: string; +} +export interface IssueCommentCreatedEvent { + action: "created"; + /** + * The [issue](https://docs.github.com/en/rest/reference/issues) the comment belongs to. + */ + issue: Issue & { + assignee: User | null; + /** + * State of the issue; either 'open' or 'closed' + */ + state: "open" | "closed"; + locked: boolean; + labels: Label[]; + }; + comment: IssueComment; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * The [issue](https://docs.github.com/en/rest/reference/issues) itself. + */ +export interface Issue { + /** + * URL for the issue + */ + url: string; + repository_url: string; + labels_url: string; + comments_url: string; + events_url: string; + html_url: string; + id: number; + node_id: string; + /** + * Number uniquely identifying the issue within its repository + */ + number: number; + /** + * Title of the issue + */ + title: string; + user: User; + labels?: Label[]; + /** + * State of the issue; either 'open' or 'closed' + */ + state?: "open" | "closed"; + locked?: boolean; + assignee?: User | null; + assignees: User[]; + milestone: Milestone | null; + comments: number; + created_at: string; + updated_at: string; + closed_at: string | null; + author_association: AuthorAssociation; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; + draft?: boolean; + performed_via_github_app?: App | null; + pull_request?: { + url?: string; + html_url?: string; + diff_url?: string; + patch_url?: string; + merged_at?: string | null; + }; + /** + * Contents of the issue + */ + body: string | null; + reactions: Reactions; + timeline_url?: string; + /** + * The reason for the current state + */ + state_reason?: string | null; +} +/** + * A collection of related issues and pull requests. + */ +export interface Milestone { + url: string; + html_url: string; + labels_url: string; + id: number; + node_id: string; + /** + * The number of the milestone. + */ + number: number; + /** + * The title of the milestone. + */ + title: string; + description: string | null; + creator: User; + open_issues: number; + closed_issues: number; + /** + * The state of the milestone. 
+ */ + state: "open" | "closed"; + created_at: string; + updated_at: string; + due_on: string | null; + closed_at: string | null; +} +/** + * The [comment](https://docs.github.com/en/rest/reference/issues#comments) itself. + */ +export interface IssueComment { + /** + * URL for the issue comment + */ + url: string; + html_url: string; + issue_url: string; + /** + * Unique identifier of the issue comment + */ + id: number; + node_id: string; + user: User; + created_at: string; + updated_at: string; + author_association: AuthorAssociation; + /** + * Contents of the issue comment + */ + body: string; + reactions: Reactions; + performed_via_github_app: App | null; +} +export interface IssueCommentDeletedEvent { + action: "deleted"; + /** + * The [issue](https://docs.github.com/en/rest/reference/issues) the comment belongs to. + */ + issue: Issue & { + assignee: User | null; + /** + * State of the issue; either 'open' or 'closed' + */ + state: "open" | "closed"; + locked: boolean; + labels: Label[]; + }; + comment: IssueComment; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssueCommentEditedEvent { + action: "edited"; + /** + * The changes to the comment. + */ + changes: { + body?: { + /** + * The previous version of the body. + */ + from: string; + }; + }; + /** + * The [issue](https://docs.github.com/en/rest/reference/issues) the comment belongs to. + */ + issue: Issue & { + assignee: User | null; + /** + * State of the issue; either 'open' or 'closed' + */ + state: "open" | "closed"; + locked: boolean; + labels: Label[]; + }; + comment: IssueComment; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * Activity related to an issue. The type of activity is specified in the action property. + */ +export interface IssuesAssignedEvent { + /** + * The action that was performed. + */ + action: "assigned"; + issue: Issue; + /** + * The optional user who was assigned or unassigned from the issue. + */ + assignee?: User | null; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesClosedEvent { + /** + * The action that was performed. + */ + action: "closed"; + /** + * The [issue](https://docs.github.com/en/rest/reference/issues) itself. + */ + issue: Issue & { + state: "closed"; + closed_at: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesDeletedEvent { + action: "deleted"; + issue: Issue; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesDemilestonedEvent { + action: "demilestoned"; + issue: Issue & { + milestone: null; + }; + milestone: Milestone; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesEditedEvent { + action: "edited"; + issue: Issue; + label?: Label; + /** + * The changes to the issue. + */ + changes: { + body?: { + /** + * The previous version of the body. + */ + from: string; + }; + title?: { + /** + * The previous version of the title. 
+ */ + from: string; + }; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesLabeledEvent { + action: "labeled"; + issue: Issue; + label?: Label; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesLockedEvent { + action: "locked"; + issue: Issue & { + locked: true; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesMilestonedEvent { + action: "milestoned"; + issue: Issue & { + milestone: Milestone; + }; + milestone: Milestone; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesOpenedEvent { + action: "opened"; + changes?: { + old_issue: Issue; + old_repository: Repository; + }; + issue: Issue & { + state: "open"; + closed_at: null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesPinnedEvent { + action: "pinned"; + issue: Issue; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesReopenedEvent { + action: "reopened"; + issue: Issue & { + state: "open"; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesTransferredEvent { + action: "transferred"; + changes: { + new_issue: Issue; + new_repository: Repository; + }; + issue: Issue; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesUnassignedEvent { + /** + * The action that was performed. + */ + action: "unassigned"; + issue: Issue; + /** + * The optional user who was assigned or unassigned from the issue. + */ + assignee?: User | null; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesUnlabeledEvent { + action: "unlabeled"; + issue: Issue; + label?: Label; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesUnlockedEvent { + action: "unlocked"; + issue: Issue & { + locked: false; + active_lock_reason: null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesUnpinnedEvent { + action: "unpinned"; + issue: Issue; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface LabelCreatedEvent { + action: "created"; + label: Label; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface LabelDeletedEvent { + action: "deleted"; + label: Label; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface LabelEditedEvent { + action: "edited"; + label: Label; + /** + * The changes to the label if the action was `edited`. + */ + changes?: { + color?: { + /** + * The previous version of the color if the action was `edited`. + */ + from: string; + }; + name?: { + /** + * The previous version of the name if the action was `edited`. 
+ */ + from: string; + }; + description?: { + /** + * The previous version of the description if the action was `edited`. + */ + from: string; + }; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface MarketplacePurchaseCancelledEvent { + action: "cancelled"; + effective_date: string; + sender: { + login: string; + id: number; + avatar_url: string; + gravatar_id: string; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + email: string; + }; + marketplace_purchase: MarketplacePurchase & { + next_billing_date: string; + }; + previous_marketplace_purchase?: MarketplacePurchase; +} +export interface MarketplacePurchase { + account: { + type: string; + id: number; + node_id: string; + login: string; + organization_billing_email: string; + }; + billing_cycle: string; + unit_count: number; + on_free_trial: boolean; + free_trial_ends_on: string | null; + next_billing_date?: string; + plan: { + id: number; + name: string; + description: string; + monthly_price_in_cents: number; + yearly_price_in_cents: number; + price_model: string; + has_free_trial: boolean; + unit_name: string | null; + bullets: string[]; + }; +} +export interface MarketplacePurchaseChangedEvent { + action: "changed"; + effective_date: string; + sender: { + login: string; + id: number; + avatar_url: string; + gravatar_id: string; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + email: string; + }; + marketplace_purchase: MarketplacePurchase & { + next_billing_date: string; + }; + previous_marketplace_purchase?: MarketplacePurchase; +} +export interface MarketplacePurchasePendingChangeEvent { + action: "pending_change"; + effective_date: string; + sender: { + login: string; + id: number; + avatar_url: string; + gravatar_id: string; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + email: string; + }; + marketplace_purchase: MarketplacePurchase & { + next_billing_date: string; + }; + previous_marketplace_purchase?: MarketplacePurchase; +} +export interface MarketplacePurchasePendingChangeCancelledEvent { + action: "pending_change_cancelled"; + effective_date: string; + sender: { + login: string; + id: number; + avatar_url: string; + gravatar_id: string; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + email: string; + }; + marketplace_purchase: MarketplacePurchase & { + next_billing_date: string; + }; + previous_marketplace_purchase?: MarketplacePurchase; +} +export interface MarketplacePurchasePurchasedEvent { + action: "purchased"; + effective_date: string; + 
sender: { + login: string; + id: number; + avatar_url: string; + gravatar_id: string; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + email: string; + }; + marketplace_purchase: MarketplacePurchase & { + next_billing_date: string; + }; + previous_marketplace_purchase?: MarketplacePurchase; +} +/** + * Activity related to repository collaborators. The type of activity is specified in the action property. + */ +export interface MemberAddedEvent { + action: "added"; + changes?: { + permission?: { + to: "write" | "admin"; + }; + }; + member: User; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface MemberEditedEvent { + action: "edited"; + member: User; + /** + * The changes to the collaborator permissions + */ + changes: { + old_permission: { + /** + * The previous permissions of the collaborator if the action was edited. + */ + from: string; + }; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface MemberRemovedEvent { + action: "removed"; + member: User; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface MembershipAddedEvent { + action: "added"; + /** + * The scope of the membership. Currently, can only be `team`. + */ + scope: "team"; + member: User; + sender: User; + team: Team; + organization: Organization; + installation?: InstallationLite; +} +/** + * Groups of organization members that gives permissions on specified repositories. + */ +export interface Team { + /** + * Name of the team + */ + name: string; + /** + * Unique identifier of the team + */ + id: number; + node_id: string; + slug: string; + /** + * Description of the team + */ + description: string | null; + privacy: "open" | "closed" | "secret"; + /** + * URL for the team + */ + url: string; + html_url: string; + members_url: string; + repositories_url: string; + /** + * Permission that the team will have for its repositories + */ + permission: string; + parent?: { + /** + * Name of the team + */ + name: string; + /** + * Unique identifier of the team + */ + id: number; + node_id: string; + slug: string; + /** + * Description of the team + */ + description: string | null; + privacy: "open" | "closed" | "secret"; + /** + * URL for the team + */ + url: string; + html_url: string; + members_url: string; + repositories_url: string; + /** + * Permission that the team will have for its repositories + */ + permission: string; + } | null; +} +export interface MembershipRemovedEvent { + action: "removed"; + /** + * The scope of the membership. Currently, can only be `team`. + */ + scope: "team" | "organization"; + member: User; + sender: User; + /** + * The [team](https://docs.github.com/en/rest/reference/teams) for the membership. + */ + team: + | Team + | { + id: number; + name: string; + deleted?: boolean; + }; + organization: Organization; + installation?: InstallationLite; +} +export interface MergeGroupChecksRequestedEvent { + action: "checks_requested"; + /** + * The merge group. + */ + merge_group: { + /** + * The SHA of the merge group. + */ + head_sha: string; + /** + * The full ref of the merge group. 
+ */ + head_ref: string; + /** + * The full ref of the branch the merge group will be merged into. + */ + base_ref: string; + /** + * The SHA of the merge group's parent commit. + */ + base_sha: string; + /** + * An expanded representation of the `head_sha` commit. + */ + head_commit: { + id: string; + tree_id: string; + message: string; + timestamp: string; + author: { + name: string; + email: string; + }; + committer: { + name: string; + email: string; + }; + }; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface MetaDeletedEvent { + action: "deleted"; + /** + * The id of the modified webhook. + */ + hook_id: number; + /** + * The modified webhook. This will contain different keys based on the type of webhook it is: repository, organization, business, app, or GitHub Marketplace. + */ + hook: { + type: string; + id: number; + name: string; + active: boolean; + events: WebhookEvents; + /** + * Configuration object of the webhook + */ + config: { + /** + * The media type used to serialize the payloads. Supported values include `json` and `form`. The default is `form`. + */ + content_type: "json" | "form"; + /** + * If provided, the `secret` will be used as the `key` to generate the HMAC hex digest value for [delivery signature headers](https://docs.github.com/webhooks/event-payloads/#delivery-headers). + */ + secret?: string; + /** + * The URL to which the payloads will be delivered. + */ + url: string; + /** + * Determines whether the SSL certificate of the host for `url` will be verified when delivering payloads. Supported values include `0` (verification is performed) and `1` (verification is not performed). The default is `0`. + */ + insecure_ssl: "0" | "1"; + }; + updated_at: string; + created_at: string; + }; + repository: Repository; + sender: User; +} +export interface MilestoneClosedEvent { + action: "closed"; + milestone: Milestone & { + state: "closed"; + closed_at: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface MilestoneCreatedEvent { + action: "created"; + milestone: Milestone & { + state: "open"; + closed_at: null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface MilestoneDeletedEvent { + action: "deleted"; + milestone: Milestone; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface MilestoneEditedEvent { + action: "edited"; + /** + * The changes to the milestone if the action was `edited`. + */ + changes: { + description?: { + /** + * The previous version of the description if the action was `edited`. + */ + from: string; + }; + due_on?: { + /** + * The previous version of the due date if the action was `edited`. + */ + from: string; + }; + title?: { + /** + * The previous version of the title if the action was `edited`. 
+ */ + from: string; + }; + }; + milestone: Milestone; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface MilestoneOpenedEvent { + action: "opened"; + milestone: Milestone & { + state: "open"; + closed_at: null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface OrgBlockBlockedEvent { + action: "blocked"; + blocked_user: User; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface OrgBlockUnblockedEvent { + action: "unblocked"; + blocked_user: User; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface OrganizationDeletedEvent { + action: "deleted"; + membership?: Membership; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +/** + * The membership between the user and the organization. Not present when the action is `member_invited`. + */ +export interface Membership { + url: string; + /** + * The state of the user's membership in the team. + */ + state: string; + /** + * The role of the user in the team. + */ + role: string; + organization_url: string; + user: User; +} +export interface OrganizationMemberAddedEvent { + action: "member_added"; + membership: Membership; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface OrganizationMemberInvitedEvent { + action: "member_invited"; + /** + * The invitation for the user or email if the action is `member_invited`. + */ + invitation: { + id: number; + node_id: string; + login: string; + email: string | null; + role: string; + created_at: string; + failed_at: string | null; + failed_reason: string | null; + inviter: User; + team_count: number; + invitation_teams_url: string; + }; + user: User; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface OrganizationMemberRemovedEvent { + action: "member_removed"; + membership: Membership; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface OrganizationRenamedEvent { + changes: { + login: { + from: string; + }; + }; + action: "renamed"; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface PackagePublishedEvent { + action: "published"; + /** + * Information about the package. + */ + package: { + /** + * Unique identifier of the package. + */ + id: number; + /** + * The name of the package. + */ + name: string; + namespace: string; + description: string | null; + ecosystem: string; + /** + * The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. + */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "CONTAINER"; + html_url: string; + created_at: string; + updated_at: string; + owner: User; + /** + * A version of a software package + */ + package_version: { + /** + * Unique identifier of the package version. + */ + id: number; + version: string; + summary: string; + /** + * The name of the package version. 
+ */ + name: string; + description: string; + body?: + | string + | { + repository: { + repository: Repository; + }; + info: { + type: string; + oid: string; + mode: number; + name: string; + path: string; + size: number | null; + collection: boolean; + }; + attributes: {}; + _formatted: boolean; + }; + body_html?: string; + release?: { + url: string; + html_url: string; + id: number; + tag_name: string; + target_commitish: string; + name: string; + draft: boolean; + author: User; + prerelease: boolean; + created_at: string; + published_at: string; + }; + manifest?: string; + html_url: string; + tag_name?: string; + target_commitish?: string; + target_oid?: string; + draft?: boolean; + prerelease?: boolean; + created_at?: string; + updated_at?: string; + /** + * Package Version Metadata + */ + metadata: unknown[]; + container_metadata?: { + labels?: { + [k: string]: unknown; + } | null; + manifest?: { + [k: string]: unknown; + } | null; + tag?: { + digest?: string; + name?: string; + }; + } | null; + docker_metadata?: unknown[]; + npm_metadata?: PackageNPMMetadata | null; + nuget_metadata?: PackageNugetMetadata[] | null; + rubygems_metadata?: unknown[]; + package_files: { + download_url: string; + id: number; + name: string; + sha256: string; + sha1: string; + md5: string; + content_type: string; + state: string; + size: number; + created_at: string; + updated_at: string; + }[]; + package_url?: string; + author?: User; + source_url?: string; + installation_command: string; + } | null; + registry: { + about_url: string; + name: string; + type: string; + url: string; + vendor: string; + }; + }; + repository: Repository; + sender: User; + organization?: Organization; +} +export interface PackageNPMMetadata { + name?: string; + version?: string; + npm_user?: string; + author?: { + [k: string]: string; + } | null; + bugs?: { + [k: string]: string; + } | null; + dependencies?: { + [k: string]: string; + }; + dev_dependencies?: { + [k: string]: string; + }; + peer_dependencies?: { + [k: string]: string; + }; + optional_dependencies?: { + [k: string]: string; + }; + description?: string; + dist?: { + [k: string]: string; + } | null; + git_head?: string; + homepage?: string; + license?: string; + main?: string; + repository?: { + [k: string]: string; + } | null; + scripts?: { + [k: string]: unknown; + }; + id?: string; + node_version?: string; + npm_version?: string; + has_shrinkwrap?: boolean; + maintainers?: { + [k: string]: unknown; + }[]; + contributors?: { + [k: string]: unknown; + }[]; + engines?: { + [k: string]: string; + }; + keywords?: string[]; + files?: string[]; + bin?: { + [k: string]: unknown; + }; + man?: { + [k: string]: unknown; + }; + directories?: { + [k: string]: string; + } | null; + os?: string[]; + cpu?: string[]; + readme?: string; + installation_command?: string; + release_id?: number; + commit_oid?: string; + published_via_actions?: boolean; + deleted_by_id?: number; +} +export interface PackageNugetMetadata { + id?: + | string + | { + [k: string]: unknown; + } + | number; + name?: string; + value?: + | boolean + | string + | number + | { + url?: string; + branch?: string; + commit?: string; + type?: string; + }; +} +export interface PackageUpdatedEvent { + action: "updated"; + /** + * Information about the package. + */ + package: { + /** + * Unique identifier of the package. + */ + id: number; + /** + * The name of the package. 
+ */ + name: string; + namespace: string; + description: string | null; + ecosystem: string; + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "CONTAINER"; + html_url: string; + created_at: string; + updated_at: string; + owner: User; + /** + * A version of a software package + */ + package_version: { + /** + * Unique identifier of the package version. + */ + id: number; + version: string; + summary: string; + /** + * The name of the package version. + */ + name: string; + description: string; + body?: + | string + | { + repository: { + repository: Repository; + }; + info: { + type: string; + oid: string; + mode: number; + name: string; + path: string; + size: number | null; + collection: boolean; + }; + attributes: {}; + _formatted: boolean; + }; + body_html?: string; + release?: { + url: string; + html_url: string; + id: number; + tag_name: string; + target_commitish: string; + name: string; + draft: boolean; + author: User; + prerelease: boolean; + created_at: string; + published_at: string; + }; + manifest?: string; + html_url: string; + tag_name?: string; + target_commitish?: string; + target_oid?: string; + draft?: boolean; + prerelease?: boolean; + created_at?: string; + updated_at?: string; + /** + * Package Version Metadata + */ + metadata: unknown[]; + container_metadata?: { + labels?: { + [k: string]: unknown; + } | null; + manifest?: { + [k: string]: unknown; + } | null; + tag?: { + digest?: string; + name?: string; + }; + } | null; + docker_metadata?: unknown[]; + npm_metadata?: PackageNPMMetadata | null; + nuget_metadata?: PackageNugetMetadata[] | null; + rubygems_metadata?: unknown[]; + package_files: { + download_url: string; + id: number; + name: string; + sha256: string; + sha1: string; + md5: string; + content_type: string; + state: string; + size: number; + created_at: string; + updated_at: string; + }[]; + package_url?: string; + author?: User; + source_url?: string; + installation_command: string; + } | null; + registry: { + about_url: string; + name: string; + type: string; + url: string; + vendor: string; + }; + }; + repository: Repository; + sender: User; + organization?: Organization; +} +/** + * Page Build + */ +export interface PageBuildEvent { + id: number; + /** + * The [List GitHub Pages builds](https://docs.github.com/en/rest/reference/repos#list-github-pages-builds) itself. + */ + build: { + url: string; + status: string; + error: { + message: string | null; + }; + pusher: User; + commit: string; + duration: number; + created_at: string; + updated_at: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface PingEvent { + zen: string; + /** + * The ID of the webhook that triggered the ping. + */ + hook_id: number; + /** + * The [webhook configuration](https://docs.github.com/en/rest/reference/repos#get-a-repository-webhook). + */ + hook: { + type: "Repository" | "Organization" | "App"; + id: number; + name: string; + active: boolean; + /** + * When you register a new GitHub App, GitHub sends a ping event to the **webhook URL** you specified during registration. The event contains the `app_id`, which is required for [authenticating](https://docs.github.com/en/apps/building-integrations/setting-up-and-registering-github-apps/about-authentication-options-for-github-apps) an app. + */ + app_id?: number; + events: WebhookEvents; + /** + * Configuration object of the webhook + */ + config: { + /** + * The media type used to serialize the payloads. 
Supported values include `json` and `form`. The default is `form`. + */ + content_type: "json" | "form"; + /** + * If provided, the `secret` will be used as the `key` to generate the HMAC hex digest value for [delivery signature headers](https://docs.github.com/webhooks/event-payloads/#delivery-headers). + */ + secret?: string; + /** + * The URL to which the payloads will be delivered. + */ + url: string; + /** + * Determines whether the SSL certificate of the host for `url` will be verified when delivering payloads. Supported values include `0` (verification is performed) and `1` (verification is not performed). The default is `0`. + */ + insecure_ssl: "0" | "1"; + }; + updated_at: string; + created_at: string; + url: string; + test_url?: string; + ping_url: string; + deliveries_url: string; + last_response?: { + code: null; + status: string; + message: null; + }; + }; + repository?: Repository; + sender?: User; + organization?: Organization; +} +export interface ProjectClosedEvent { + action: "closed"; + project: Project; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface Project { + owner_url: string; + url: string; + html_url: string; + columns_url: string; + id: number; + node_id: string; + /** + * Name of the project + */ + name: string; + /** + * Body of the project + */ + body: string | null; + number: number; + /** + * State of the project; either 'open' or 'closed' + */ + state: "open" | "closed"; + creator: User; + created_at: string; + updated_at: string; +} +export interface ProjectCreatedEvent { + action: "created"; + project: Project; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectDeletedEvent { + action: "deleted"; + project: Project; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectEditedEvent { + action: "edited"; + /** + * The changes to the project if the action was `edited`. + */ + changes?: { + name?: { + /** + * The changes to the project if the action was `edited`. + */ + from: string; + }; + body?: { + /** + * The previous version of the body if the action was `edited`. 
+ */ + from: string; + }; + }; + project: Project; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectReopenedEvent { + action: "reopened"; + project: Project; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectCardConvertedEvent { + action: "converted"; + changes: { + note: { + from: string; + }; + }; + project_card: ProjectCard; + repository?: Repository; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectCard { + url: string; + project_url: string; + column_url: string; + column_id: number; + /** + * The project card's ID + */ + id: number; + node_id: string; + note: string | null; + /** + * Whether or not the card is archived + */ + archived: boolean; + creator: User; + created_at: string; + updated_at: string; + content_url?: string; + after_id?: string | number | null; +} +export interface ProjectCardCreatedEvent { + action: "created"; + project_card: ProjectCard; + repository?: Repository; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectCardDeletedEvent { + action: "deleted"; + project_card: ProjectCard; + repository?: Repository; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectCardEditedEvent { + action: "edited"; + changes: { + note: { + from: string; + }; + }; + project_card: ProjectCard; + repository?: Repository; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectCardMovedEvent { + action: "moved"; + changes?: { + column_id: { + from: number; + }; + }; + project_card: ProjectCard & { + after_id: number | null; + }; + repository?: Repository; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectColumnCreatedEvent { + action: "created"; + project_column: ProjectColumn; + repository?: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectColumn { + url: string; + project_url: string; + cards_url: string; + /** + * The unique identifier of the project column + */ + id: number; + node_id: string; + /** + * Name of the project column + */ + name: string; + created_at: string; + updated_at: string; +} +export interface ProjectColumnDeletedEvent { + action: "deleted"; + project_column: ProjectColumn; + repository?: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectColumnEditedEvent { + action: "edited"; + changes: { + name?: { + from: string; + }; + }; + project_column: ProjectColumn; + repository?: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectColumnMovedEvent { + action: "moved"; + project_column: ProjectColumn; + repository?: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectsV2ItemArchivedEvent { + changes: { + archived_at: { + from: null; + to: string; + }; + }; + action: "archived"; + projects_v2_item: ProjectsV2Item & { + archived_at: string; + }; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +/** + * The project item itself. 
To find more information about the project item, you can use `node_id` (the node ID of the project item) and `project_node_id` (the node ID of the project) to query information in the GraphQL API. For more information, see "[Using the API to manage projects](https://docs.github.com/en/issues/trying-out-the-new-projects-experience/using-the-api-to-manage-projects)." + */ +export interface ProjectsV2Item { + id: number; + node_id: string; + project_node_id: string; + content_node_id: string; + content_type: "DraftIssue" | "Issue" | "PullRequest"; + creator: User; + created_at: string; + updated_at: string; + archived_at: string | null; +} +export interface ProjectsV2ItemConvertedEvent { + changes: { + content_type: { + from: "DraftIssue"; + to: "Issue"; + }; + }; + action: "converted"; + projects_v2_item: ProjectsV2Item & { + content_type: "Issue"; + }; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectsV2ItemCreatedEvent { + action: "created"; + projects_v2_item: ProjectsV2Item & { + archived_at: null; + }; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectsV2ItemDeletedEvent { + action: "deleted"; + projects_v2_item: ProjectsV2Item; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectsV2ItemEditedEvent { + changes: { + field_value: { + field_type: "single_select" | "date" | "number" | "text" | "iteration"; + field_node_id: string; + }; + }; + action: "edited"; + projects_v2_item: ProjectsV2Item; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectsV2ItemReorderedEvent { + changes: { + previous_projects_v2_item_node_id: { + from: string; + to: string | null; + }; + }; + action: "reordered"; + projects_v2_item: ProjectsV2Item; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectsV2ItemRestoredEvent { + changes: { + archived_at: { + from: string; + to: null; + }; + }; + action: "restored"; + projects_v2_item: ProjectsV2Item & { + archived_at: null; + }; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +/** + * When a private repository is made public. + */ +export interface PublicEvent { + repository: Repository & { + private: false; + }; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface PullRequestAssignedEvent { + action: "assigned"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + assignee: User; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequest { + url: string; + id: number; + node_id: string; + html_url: string; + diff_url: string; + patch_url: string; + issue_url: string; + /** + * Number uniquely identifying the pull request within its repository. + */ + number: number; + /** + * State of this Pull Request. Either `open` or `closed`. + */ + state: "open" | "closed"; + locked: boolean; + /** + * The title of the pull request. 
+ */ + title: string; + user: User; + body: string | null; + created_at: string; + updated_at: string; + closed_at: string | null; + merged_at: string | null; + merge_commit_sha: string | null; + assignee: User | null; + assignees: User[]; + requested_reviewers: (User | Team)[]; + requested_teams: Team[]; + labels: Label[]; + milestone: Milestone | null; + commits_url: string; + review_comments_url: string; + review_comment_url: string; + comments_url: string; + statuses_url: string; + head: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository | null; + }; + base: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + _links: { + self: Link; + html: Link; + issue: Link; + comments: Link; + review_comments: Link; + review_comment: Link; + commits: Link; + statuses: Link; + }; + author_association: AuthorAssociation; + auto_merge: PullRequestAutoMerge | null; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; + /** + * Indicates whether or not the pull request is a draft. + */ + draft: boolean; + merged: boolean | null; + mergeable: boolean | null; + rebaseable: boolean | null; + mergeable_state: string; + merged_by: User | null; + comments: number; + review_comments: number; + /** + * Indicates whether maintainers can modify the pull request. + */ + maintainer_can_modify: boolean; + commits: number; + additions: number; + deletions: number; + changed_files: number; +} +export interface Link { + href: string; +} +/** + * The status of auto merging a pull request. + */ +export interface PullRequestAutoMerge { + enabled_by: User; + /** + * The merge method to use. + */ + merge_method: "merge" | "squash" | "rebase"; + /** + * Title for the merge commit message. + */ + commit_title: string; + /** + * Commit message for the merge commit. + */ + commit_message: string; +} +export interface PullRequestAutoMergeDisabledEvent { + action: "auto_merge_disabled"; + number: number; + pull_request: PullRequest; + reason: string; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestAutoMergeEnabledEvent { + action: "auto_merge_enabled"; + number: number; + pull_request: PullRequest; + reason: string; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestClosedEvent { + action: "closed"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest & { + /** + * State of this Pull Request. Either `open` or `closed`. + */ + state: "closed"; + closed_at: string; + merged: boolean; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestConvertedToDraftEvent { + action: "converted_to_draft"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest & { + closed_at: null; + merged_at: null; + /** + * Indicates whether or not the pull request is a draft. + */ + draft: true; + merged: false; + merged_by: null; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestDemilestonedEvent { + action: "demilestoned"; + /** + * The pull request number. 
+ */ + number: number; + pull_request: PullRequest & { + milestone: Milestone; + }; + milestone: Milestone; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface PullRequestDequeuedEvent { + action: "dequeued"; + /** + * The pull request number. + */ + number: number; + /** + * The reason the pull request was removed from a merge queue. + */ + reason: string; + pull_request: PullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestEditedEvent { + action: "edited"; + /** + * The pull request number. + */ + number: number; + /** + * The changes to the comment if the action was `edited`. + */ + changes: { + body?: { + /** + * The previous version of the body if the action was `edited`. + */ + from: string; + }; + title?: { + /** + * The previous version of the title if the action was `edited`. + */ + from: string; + }; + base?: { + ref: { + from: string; + }; + sha: { + from: string; + }; + }; + }; + pull_request: PullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestLabeledEvent { + action: "labeled"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + label: Label; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestLockedEvent { + action: "locked"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestMilestonedEvent { + action: "milestoned"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest & { + milestone: Milestone; + }; + milestone: Milestone; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface PullRequestOpenedEvent { + action: "opened"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest & { + state: "open"; + closed_at: null; + merged_at: null; + active_lock_reason: null; + merged_by: null; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestQueuedEvent { + action: "queued"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReadyForReviewEvent { + action: "ready_for_review"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest & { + state: "open"; + closed_at: null; + merged_at: null; + /** + * Indicates whether or not the pull request is a draft. + */ + draft: false; + merged: boolean; + merged_by: null; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReopenedEvent { + action: "reopened"; + /** + * The pull request number. 
+ */ + number: number; + pull_request: PullRequest & { + state: "open"; + closed_at: null; + merged_at: null; + merged: boolean; + merged_by: null; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestSynchronizeEvent { + action: "synchronize"; + /** + * The pull request number. + */ + number: number; + before: string; + after: string; + pull_request: PullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestUnassignedEvent { + action: "unassigned"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + assignee: User; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestUnlabeledEvent { + action: "unlabeled"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + label: Label; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestUnlockedEvent { + action: "unlocked"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReviewDismissedEvent { + action: "dismissed"; + review: PullRequestReview & { + state: "dismissed"; + }; + pull_request: SimplePullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +/** + * The review that was affected. + */ +export interface PullRequestReview { + /** + * Unique identifier of the review + */ + id: number; + node_id: string; + user: User; + /** + * The text of the review. + */ + body: string | null; + /** + * A commit SHA for the review. 
+ */ + commit_id: string; + submitted_at: string | null; + state: "commented" | "changes_requested" | "approved" | "dismissed"; + html_url: string; + pull_request_url: string; + author_association: AuthorAssociation; + _links: { + html: Link; + pull_request: Link; + }; +} +export interface SimplePullRequest { + url: string; + id: number; + node_id: string; + html_url: string; + diff_url: string; + patch_url: string; + issue_url: string; + number: number; + state: "open" | "closed"; + locked: boolean; + title: string; + user: User; + body: string | null; + created_at: string; + updated_at: string; + closed_at: string | null; + merged_at: string | null; + merge_commit_sha: string | null; + assignee: User | null; + assignees: User[]; + requested_reviewers: (User | Team)[]; + requested_teams: Team[]; + labels: Label[]; + milestone: Milestone | null; + draft: boolean; + commits_url: string; + review_comments_url: string; + review_comment_url: string; + comments_url: string; + statuses_url: string; + head: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + base: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + _links: { + self: Link; + html: Link; + issue: Link; + comments: Link; + review_comments: Link; + review_comment: Link; + commits: Link; + statuses: Link; + }; + author_association: AuthorAssociation; + auto_merge: PullRequestAutoMerge | null; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; +} +export interface PullRequestReviewEditedEvent { + action: "edited"; + changes: { + body?: { + /** + * The previous version of the body if the action was `edited`. + */ + from: string; + }; + }; + review: PullRequestReview; + pull_request: SimplePullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReviewSubmittedEvent { + action: "submitted"; + review: PullRequestReview; + pull_request: SimplePullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReviewCommentCreatedEvent { + action: "created"; + comment: PullRequestReviewComment; + pull_request: { + url: string; + id: number; + node_id: string; + html_url: string; + diff_url: string; + patch_url: string; + issue_url: string; + number: number; + state: "open" | "closed"; + locked: boolean; + title: string; + user: User; + body: string | null; + created_at: string; + updated_at: string; + closed_at: string | null; + merged_at: string | null; + merge_commit_sha: string | null; + assignee: User | null; + assignees: User[]; + requested_reviewers: (User | Team)[]; + requested_teams: Team[]; + labels: Label[]; + milestone: Milestone | null; + draft?: boolean; + commits_url: string; + review_comments_url: string; + review_comment_url: string; + comments_url: string; + statuses_url: string; + head: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + base: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + _links: { + self: Link; + html: Link; + issue: Link; + comments: Link; + review_comments: Link; + review_comment: Link; + commits: Link; + statuses: Link; + }; + auto_merge?: PullRequestAutoMerge | null; + author_association: AuthorAssociation; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; + }; + repository: Repository; + installation?: 
InstallationLite; + organization?: Organization; + sender: User; +} +/** + * The [comment](https://docs.github.com/en/rest/reference/pulls#comments) itself. + */ +export interface PullRequestReviewComment { + /** + * URL for the pull request review comment + */ + url: string; + /** + * The ID of the pull request review to which the comment belongs. + */ + pull_request_review_id: number; + /** + * The ID of the pull request review comment. + */ + id: number; + /** + * The node ID of the pull request review comment. + */ + node_id: string; + /** + * The diff of the line that the comment refers to. + */ + diff_hunk: string; + /** + * The relative path of the file to which the comment applies. + */ + path: string; + /** + * The line index in the diff to which the comment applies. + */ + position: number | null; + /** + * The index of the original line in the diff to which the comment applies. + */ + original_position: number; + /** + * The SHA of the commit to which the comment applies. + */ + commit_id: string; + /** + * The SHA of the original commit to which the comment applies. + */ + original_commit_id: string; + user: User; + /** + * The text of the comment. + */ + body: string; + created_at: string; + updated_at: string; + /** + * HTML URL for the pull request review comment. + */ + html_url: string; + /** + * URL for the pull request that the review comment belongs to. + */ + pull_request_url: string; + author_association: AuthorAssociation; + _links: { + self: Link; + html: Link; + pull_request: Link; + }; + reactions: Reactions; + /** + * The first line of the range for a multi-line comment. + */ + start_line: number | null; + /** + * The first line of the range for a multi-line comment. + */ + original_start_line: number | null; + /** + * The side of the first line of the range for a multi-line comment. + */ + start_side: "LEFT" | "RIGHT" | null; + /** + * The line of the blob to which the comment applies. The last line of the range for a multi-line comment + */ + line: number | null; + /** + * The line of the blob to which the comment applies. The last line of the range for a multi-line comment + */ + original_line: number; + /** + * The side of the first line of the range for a multi-line comment. + */ + side: "LEFT" | "RIGHT"; + /** + * The comment ID to reply to. + */ + in_reply_to_id?: number; + /** + * The level at which the comment is targeted, can be a diff line or a file. 
+ */ + subject_type?: "line" | "file"; +} +export interface PullRequestReviewCommentDeletedEvent { + action: "deleted"; + comment: PullRequestReviewComment; + pull_request: { + url: string; + id: number; + node_id: string; + html_url: string; + diff_url: string; + patch_url: string; + issue_url: string; + number: number; + state: "open" | "closed"; + locked: boolean; + title: string; + user: User; + body: string | null; + created_at: string; + updated_at: string; + closed_at: string | null; + merged_at: string | null; + merge_commit_sha: string | null; + assignee: User | null; + assignees: User[]; + requested_reviewers: (User | Team)[]; + requested_teams: Team[]; + labels: Label[]; + milestone: Milestone | null; + draft?: boolean; + commits_url: string; + review_comments_url: string; + review_comment_url: string; + comments_url: string; + statuses_url: string; + head: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + base: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + _links: { + self: Link; + html: Link; + issue: Link; + comments: Link; + review_comments: Link; + review_comment: Link; + commits: Link; + statuses: Link; + }; + auto_merge?: PullRequestAutoMerge | null; + author_association: AuthorAssociation; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReviewCommentEditedEvent { + action: "edited"; + /** + * The changes to the comment. + */ + changes: { + body?: { + /** + * The previous version of the body. + */ + from: string; + }; + }; + comment: PullRequestReviewComment; + pull_request: { + url: string; + id: number; + node_id: string; + html_url: string; + diff_url: string; + patch_url: string; + issue_url: string; + number: number; + state: "open" | "closed"; + locked: boolean; + title: string; + user: User; + body: string | null; + created_at: string; + updated_at: string; + closed_at: string | null; + merged_at: string | null; + merge_commit_sha: string | null; + assignee: User | null; + assignees: User[]; + requested_reviewers: (User | Team)[]; + requested_teams: Team[]; + labels: Label[]; + milestone: Milestone | null; + draft?: boolean; + commits_url: string; + review_comments_url: string; + review_comment_url: string; + comments_url: string; + statuses_url: string; + head: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + base: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + _links: { + self: Link; + html: Link; + issue: Link; + comments: Link; + review_comments: Link; + review_comment: Link; + commits: Link; + statuses: Link; + }; + auto_merge?: PullRequestAutoMerge | null; + author_association: AuthorAssociation; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReviewThreadResolvedEvent { + action: "resolved"; + thread: { + node_id: string; + comments: PullRequestReviewComment[]; + }; + pull_request: SimplePullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReviewThreadUnresolvedEvent { + action: "unresolved"; + thread: { + node_id: string; + comments: 
PullRequestReviewComment[]; + }; + pull_request: SimplePullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PushEvent { + /** + * The full git ref that was pushed. Example: `refs/heads/main` or `refs/tags/v3.14.1`. + */ + ref: string; + /** + * The SHA of the most recent commit on `ref` before the push. + */ + before: string; + /** + * The SHA of the most recent commit on `ref` after the push. + */ + after: string; + /** + * Whether this push created the `ref`. + */ + created: boolean; + /** + * Whether this push deleted the `ref`. + */ + deleted: boolean; + /** + * Whether this push was a force push of the `ref`. + */ + forced: boolean; + base_ref: string | null; + /** + * URL that shows the changes in this `ref` update, from the `before` commit to the `after` commit. For a newly created `ref` that is directly based on the default branch, this is the comparison between the head of the default branch and the `after` commit. Otherwise, this shows all commits until the `after` commit. + */ + compare: string; + /** + * An array of commit objects describing the pushed commits. (Pushed commits are all commits that are included in the `compare` between the `before` commit and the `after` commit.) The array includes a maximum of 20 commits. If necessary, you can use the [Commits API](https://docs.github.com/en/rest/reference/repos#commits) to fetch additional commits. This limit is applied to timeline events only and isn't applied to webhook deliveries. + */ + commits: Commit[]; + /** + * For pushes where `after` is or points to a commit object, an expanded representation of that commit. For pushes where `after` refers to an annotated tag object, an expanded representation of the commit pointed to by the annotated tag. + */ + head_commit: Commit | null; + repository: Repository; + pusher: Committer; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface Commit { + id: string; + tree_id: string; + /** + * Whether this commit is distinct from any that have been pushed before. + */ + distinct: boolean; + /** + * The commit message. + */ + message: string; + /** + * The ISO 8601 timestamp of the commit. + */ + timestamp: string; + /** + * URL that points to the commit API resource. + */ + url: string; + author: Committer; + committer: Committer; + /** + * An array of files added in the commit. For extremely large commits where GitHub is unable to calculate this list in a timely manner, this may be empty even if files were added. + */ + added: string[]; + /** + * An array of files modified by the commit. For extremely large commits where GitHub is unable to calculate this list in a timely manner, this may be empty even if files were modified. + */ + modified: string[]; + /** + * An array of files removed in the commit. For extremely large commits where GitHub is unable to calculate this list in a timely manner, this may be empty even if files were removed. + */ + removed: string[]; +} +export interface RegistryPackagePublishedEvent { + action: "published"; + /** + * Information about the package. + */ + registry_package: { + /** + * Unique identifier of the package. + */ + id: number; + /** + * The name of the package. + */ + name: string; + namespace: string; + description: string | null; + ecosystem: string; + /** + * The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. 
Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. + */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "CONTAINER"; + html_url: string; + created_at: string; + updated_at: string | null; + owner: User; + /** + * A version of a software package + */ + package_version: { + /** + * Unique identifier of the package version. + */ + id: number; + version: string; + summary: string; + /** + * The name of the package version. + */ + name: string; + description: string; + body?: + | string + | { + repository: { + repository: Repository; + }; + info: { + type: string; + oid: string; + mode: number; + name: string; + path: string; + size: number | null; + collection: boolean | null; + }; + attributes?: { + [k: string]: unknown; + }; + _formatted?: boolean; + }; + body_html?: string; + release?: { + url: string; + html_url: string; + id: number; + tag_name: string; + target_commitish: string; + name: string; + draft: boolean; + author: User; + prerelease: boolean; + created_at: string; + published_at: string; + }; + manifest?: string; + html_url: string; + tag_name?: string; + target_commitish?: string; + target_oid?: string; + draft?: boolean; + prerelease?: boolean; + created_at?: string; + updated_at?: string; + /** + * Package Version Metadata + */ + metadata: unknown[]; + docker_metadata?: unknown[]; + container_metadata?: { + labels?: { + description?: string; + source?: string; + revision?: string; + image_url?: string; + licenses?: string; + all_labels?: { + [k: string]: string; + }; + } | null; + manifest?: { + digest?: string; + media_type?: string; + uri?: string; + size?: number; + config?: { + digest?: string; + media_type?: string; + size?: number; + }; + layers?: { + digest?: string; + media_type?: string; + size?: number; + }[]; + } | null; + tag?: { + digest?: string; + name?: string; + }; + }; + npm_metadata?: PackageNPMMetadata | null; + nuget_metadata?: PackageNugetMetadata[] | null; + rubygems_metadata?: unknown[]; + package_files: { + download_url: string; + id: number; + name: string; + sha256: string; + sha1: string; + md5: string; + content_type: string; + state: string; + size: number; + created_at: string; + updated_at: string; + }[]; + package_url?: string; + author?: { + avatar_url: string; + events_url: string; + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string; + html_url: string; + id: number; + login: string; + node_id: string; + organizations_url: string; + received_events_url: string; + repos_url: string; + site_admin: boolean; + starred_url: string; + subscriptions_url: string; + type: string; + url: string; + }; + source_url?: string; + installation_command: string; + } | null; + registry: { + about_url: string; + name: string; + type: string; + url: string; + vendor: string; + }; + }; + repository: Repository; + sender: User; + organization?: Organization; +} +export interface RegistryPackageUpdatedEvent { + action: "updated"; + /** + * Information about the package. + */ + registry_package: { + /** + * Unique identifier of the package. + */ + id: number; + /** + * The name of the package. + */ + name: string; + namespace: string; + description: string | null; + ecosystem: string; + /** + * The type of supported package. 
Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. + */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "CONTAINER"; + html_url: string; + created_at: string; + updated_at: string | null; + owner: User; + /** + * A version of a software package + */ + package_version: { + /** + * Unique identifier of the package version. + */ + id: number; + version: string; + summary: string; + /** + * The name of the package version. + */ + name: string; + description: string; + body?: + | string + | { + repository: { + repository: Repository; + }; + info: { + type: string; + oid: string; + mode: number; + name: string; + path: string; + size: number | null; + collection: boolean | null; + }; + attributes?: { + [k: string]: unknown; + }; + _formatted?: boolean; + }; + body_html?: string; + release?: { + url: string; + html_url: string; + id: number; + tag_name: string; + target_commitish: string; + name: string; + draft: boolean; + author: User; + prerelease: boolean; + created_at: string; + published_at: string; + }; + manifest?: string; + html_url: string; + tag_name?: string; + target_commitish?: string; + target_oid?: string; + draft?: boolean; + prerelease?: boolean; + created_at?: string; + updated_at?: string; + /** + * Package Version Metadata + */ + metadata: unknown[]; + docker_metadata?: unknown[]; + container_metadata?: { + labels?: { + description?: string; + source?: string; + revision?: string; + image_url?: string; + licenses?: string; + all_labels?: { + [k: string]: string; + }; + } | null; + manifest?: { + digest?: string; + media_type?: string; + uri?: string; + size?: number; + config?: { + digest?: string; + media_type?: string; + size?: number; + }; + layers?: { + digest?: string; + media_type?: string; + size?: number; + }[]; + } | null; + tag?: { + digest?: string; + name?: string; + }; + }; + npm_metadata?: PackageNPMMetadata | null; + nuget_metadata?: PackageNugetMetadata[] | null; + rubygems_metadata?: unknown[]; + package_files: { + download_url: string; + id: number; + name: string; + sha256: string; + sha1: string; + md5: string; + content_type: string; + state: string; + size: number; + created_at: string; + updated_at: string; + }[]; + package_url?: string; + author?: { + avatar_url: string; + events_url: string; + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string; + html_url: string; + id: number; + login: string; + node_id: string; + organizations_url: string; + received_events_url: string; + repos_url: string; + site_admin: boolean; + starred_url: string; + subscriptions_url: string; + type: string; + url: string; + }; + source_url?: string; + installation_command: string; + } | null; + registry: { + about_url: string; + name: string; + type: string; + url: string; + vendor: string; + }; + }; + repository: Repository; + sender: User; + organization?: Organization; +} +export interface ReleaseCreatedEvent { + action: "created"; + release: Release; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * The [release](https://docs.github.com/en/rest/reference/repos/#get-a-release) object. 
+ */ +export interface Release { + url: string; + assets_url: string; + upload_url: string; + html_url: string; + id: number; + node_id: string; + /** + * The name of the tag. + */ + tag_name: string; + /** + * Specifies the commitish value that determines where the Git tag is created from. + */ + target_commitish: string; + name: string; + /** + * Whether the release is a draft or published + */ + draft: boolean; + author: User; + /** + * Whether the release is identified as a prerelease or a full release. + */ + prerelease: boolean; + created_at: string | null; + published_at: string | null; + assets: ReleaseAsset[]; + tarball_url: string | null; + zipball_url: string | null; + body: string; + mentions_count?: number; + reactions?: Reactions; + discussion_url?: string; +} +/** + * Data related to a release. + */ +export interface ReleaseAsset { + url: string; + browser_download_url: string; + id: number; + node_id: string; + /** + * The file name of the asset. + */ + name: string; + label: string | null; + /** + * State of the release asset. + */ + state: "uploaded"; + content_type: string; + size: number; + download_count: number; + created_at: string; + updated_at: string; + uploader?: User; +} +export interface ReleaseDeletedEvent { + action: "deleted"; + release: Release; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ReleaseEditedEvent { + action: "edited"; + changes: { + body?: { + /** + * The previous version of the body if the action was `edited`. + */ + from: string; + }; + name?: { + /** + * The previous version of the name if the action was `edited`. + */ + from: string; + }; + }; + release: Release; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ReleasePrereleasedEvent { + action: "prereleased"; + release: Release & { + /** + * Whether the release is identified as a prerelease or a full release. + */ + prerelease: true; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ReleasePublishedEvent { + action: "published"; + release: Release & { + published_at: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ReleaseReleasedEvent { + action: "released"; + release: Release; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ReleaseUnpublishedEvent { + action: "unpublished"; + release: Release & { + published_at: null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryArchivedEvent { + action: "archived"; + repository: Repository & { + /** + * Whether the repository is archived.
+ */ + archived: true; + }; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryCreatedEvent { + action: "created"; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryDeletedEvent { + action: "deleted"; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryEditedEvent { + action: "edited"; + changes: { + description?: { + from: string | null; + }; + default_branch?: { + from: string; + }; + homepage?: { + from: string | null; + }; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryPrivatizedEvent { + action: "privatized"; + repository: Repository & { + /** + * Whether the repository is private or public. + */ + private: true; + }; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryPublicizedEvent { + action: "publicized"; + repository: Repository & { + /** + * Whether the repository is private or public. + */ + private: false; + }; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryRenamedEvent { + action: "renamed"; + changes: { + repository: { + name: { + from: string; + }; + }; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryTransferredEvent { + action: "transferred"; + changes: { + owner: { + from: { + user?: User; + }; + }; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryUnarchivedEvent { + action: "unarchived"; + repository: Repository & { + /** + * Whether the repository is archived. + */ + archived: false; + }; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryDispatchEvent { + action: string; + branch: string; + client_payload: { + [k: string]: unknown; + }; + repository: Repository; + sender: User; + installation: InstallationLite; + organization?: Organization; +} +export interface RepositoryImportEvent { + status: "success" | "cancelled" | "failure"; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryVulnerabilityAlertCreateEvent { + action: "create"; + alert: RepositoryVulnerabilityAlertAlert & { + state: "open"; + }; + repository: Repository; + sender: GitHubOrg; + organization?: Organization; +} +/** + * The security alert of the vulnerable dependency. 
+ */ +export interface RepositoryVulnerabilityAlertAlert { + id: number; + number: number; + node_id: string; + state: "open" | "dismissed" | "fixed"; + affected_range: string; + affected_package_name: string; + dismisser?: User; + dismiss_reason?: string; + dismissed_at?: string; + severity: string; + ghsa_id: string; + external_reference: string; + external_identifier: string; + fixed_in: string; + fixed_at?: string; + fix_reason?: string; + created_at: string; +} +export interface RepositoryVulnerabilityAlertDismissEvent { + action: "dismiss"; + alert: RepositoryVulnerabilityAlertAlert & { + dismisser: User; + dismiss_reason: string; + dismissed_at: string; + state: "dismissed"; + }; + repository: Repository; + sender: GitHubOrg; + organization?: Organization; +} +export interface RepositoryVulnerabilityAlertReopenEvent { + action: "reopen"; + alert: RepositoryVulnerabilityAlertAlert & { + state: "open"; + }; + repository: Repository; + sender: GitHubOrg; + organization?: Organization; +} +export interface RepositoryVulnerabilityAlertResolveEvent { + action: "resolve"; + alert: RepositoryVulnerabilityAlertAlert & { + state: "fixed"; + fixed_at: string; + fix_reason: string; + }; + repository: Repository; + sender: GitHubOrg; + organization?: Organization; +} +export interface SecretScanningAlertCreatedEvent { + action: "created"; + /** + * The secret scanning alert involved in the event. + */ + alert: { + number: number; + secret_type: string; + resolution: null; + resolved_by: null; + resolved_at: null; + }; + repository: Repository; + organization?: Organization; + installation?: InstallationLite; +} +export interface SecretScanningAlertReopenedEvent { + action: "reopened"; + /** + * The secret scanning alert involved in the event. + */ + alert: { + number: number; + secret_type: string; + resolution: null; + resolved_by: null; + resolved_at: null; + }; + repository: Repository; + organization?: Organization; + installation?: InstallationLite; + sender: User; +} +export interface SecretScanningAlertResolvedEvent { + action: "resolved"; + /** + * The secret scanning alert involved in the event. + */ + alert: { + number: number; + secret_type: string; + resolution: "false_positive" | "wontfix" | "revoked" | "used_in_tests"; + resolved_by: User; + resolved_at: string; + }; + repository: Repository; + organization?: Organization; + installation?: InstallationLite; + sender: User; +} +export interface SecurityAdvisoryPerformedEvent { + action: "performed"; + /** + * The details of the security advisory, including summary, description, and severity. + */ + security_advisory: { + cvss: { + vector_string: string | null; + score: number; + }; + cwes: { + cwe_id: string; + name: string; + }[]; + ghsa_id: string; + cve_id: string | null; + summary: string; + description: string; + severity: string; + identifiers: { + value: string; + type: string; + }[]; + references: { + url: string; + }[]; + published_at: string; + updated_at: string; + withdrawn_at: string | null; + vulnerabilities: { + package: { + ecosystem: string; + name: string; + }; + severity: string; + vulnerable_version_range: string; + first_patched_version: { + identifier: string; + } | null; + }[]; + }; +} +export interface SecurityAdvisoryPublishedEvent { + action: "published"; + /** + * The details of the security advisory, including summary, description, and severity. 
+ */ + security_advisory: { + cvss: { + vector_string: string | null; + score: number; + }; + cwes: { + cwe_id: string; + name: string; + }[]; + ghsa_id: string; + cve_id: string | null; + summary: string; + description: string; + severity: string; + identifiers: { + value: string; + type: string; + }[]; + references: { + url: string; + }[]; + published_at: string; + updated_at: string; + withdrawn_at: string | null; + vulnerabilities: { + package: { + ecosystem: string; + name: string; + }; + severity: string; + vulnerable_version_range: string; + first_patched_version: { + identifier: string; + } | null; + }[]; + }; +} +export interface SecurityAdvisoryUpdatedEvent { + action: "updated"; + /** + * The details of the security advisory, including summary, description, and severity. + */ + security_advisory: { + cvss: { + vector_string: string | null; + score: number; + }; + cwes: { + cwe_id: string; + name: string; + }[]; + ghsa_id: string; + cve_id: string | null; + summary: string; + description: string; + severity: string; + identifiers: { + value: string; + type: string; + }[]; + references: { + url: string; + }[]; + published_at: string; + updated_at: string; + withdrawn_at: string | null; + vulnerabilities: { + package: { + ecosystem: string; + name: string; + }; + severity: string; + vulnerable_version_range: string; + first_patched_version: { + identifier: string; + } | null; + }[]; + }; +} +export interface SecurityAdvisoryWithdrawnEvent { + action: "withdrawn"; + /** + * The details of the security advisory, including summary, description, and severity. + */ + security_advisory: { + cvss: { + vector_string: string | null; + score: number; + }; + cwes: { + cwe_id: string; + name: string; + }[]; + ghsa_id: string; + cve_id: string | null; + summary: string; + description: string; + severity: string; + identifiers: { + value: string; + type: string; + }[]; + references: { + url: string; + }[]; + published_at: string; + updated_at: string; + withdrawn_at: string; + vulnerabilities: { + package: { + ecosystem: string; + name: string; + }; + severity: string; + vulnerable_version_range: string; + first_patched_version: { + identifier: string; + } | null; + }[]; + }; +} +export interface SponsorshipCancelledEvent { + action: "cancelled"; + sponsorship: { + node_id: string; + created_at: string; + sponsorable: User; + sponsor: User; + privacy_level: string; + tier: SponsorshipTier; + }; + sender: User; +} +/** + * The `tier_changed` and `pending_tier_change` will include the original tier before the change or pending change. For more information, see the pending tier change payload. + */ +export interface SponsorshipTier { + node_id: string; + created_at: string; + description: string; + monthly_price_in_cents: number; + monthly_price_in_dollars: number; + name: string; + is_one_time: boolean; + is_custom_ammount: boolean; +} +export interface SponsorshipCreatedEvent { + action: "created"; + sponsorship: { + node_id: string; + created_at: string; + sponsorable: User; + sponsor: User; + privacy_level: string; + tier: SponsorshipTier; + }; + sender: User; +} +export interface SponsorshipEditedEvent { + action: "edited"; + sponsorship: { + node_id: string; + created_at: string; + sponsorable: User; + sponsor: User; + privacy_level: string; + tier: SponsorshipTier; + }; + changes: { + privacy_level?: { + /** + * The `edited` event types include the details about the change when someone edits a sponsorship to change the privacy. 
+ */ + from: string; + }; + }; + sender: User; +} +export interface SponsorshipPendingCancellationEvent { + action: "pending_cancellation"; + sponsorship: { + node_id: string; + created_at: string; + sponsorable: User; + sponsor: User; + privacy_level: string; + tier: SponsorshipTier; + }; + /** + * The `pending_cancellation` and `pending_tier_change` event types will include the date the cancellation or tier change will take effect. + */ + effective_date?: string; + sender: User; +} +export interface SponsorshipPendingTierChangeEvent { + action: "pending_tier_change"; + sponsorship: { + node_id: string; + created_at: string; + sponsorable: User; + sponsor: User; + privacy_level: string; + tier: SponsorshipTier; + }; + /** + * The `pending_cancellation` and `pending_tier_change` event types will include the date the cancellation or tier change will take effect. + */ + effective_date?: string; + changes: { + tier: { + from: SponsorshipTier; + }; + }; + sender: User; +} +export interface SponsorshipTierChangedEvent { + action: "tier_changed"; + sponsorship: { + node_id: string; + created_at: string; + sponsorable: User; + sponsor: User; + privacy_level: string; + tier: SponsorshipTier; + }; + changes: { + tier: { + from: SponsorshipTier; + }; + }; + sender: User; +} +export interface StarCreatedEvent { + action: "created"; + /** + * The time the star was created. This is a timestamp in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. Will be `null` for the `deleted` action. + */ + starred_at: string; + repository: Repository; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface StarDeletedEvent { + action: "deleted"; + /** + * The time the star was created. This is a timestamp in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. Will be `null` for the `deleted` action. + */ + starred_at: null; + repository: Repository; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface StatusEvent { + /** + * The unique identifier of the status. + */ + id: number; + /** + * The Commit SHA. + */ + sha: string; + name: string; + avatar_url?: string | null; + /** + * The optional link added to the status. + */ + target_url: string | null; + context: string; + /** + * The optional human-readable description added to the status. + */ + description: string | null; + /** + * The new state. Can be `pending`, `success`, `failure`, or `error`. + */ + state: "pending" | "success" | "failure" | "error"; + commit: { + sha: string; + node_id: string; + commit: { + author: Committer & { + date: string; + }; + committer: Committer & { + date: string; + }; + message: string; + tree: { + sha: string; + url: string; + }; + url: string; + comment_count: number; + verification: { + verified: boolean; + reason: + | "expired_key" + | "not_signing_key" + | "gpgverify_error" + | "gpgverify_unavailable" + | "unsigned" + | "unknown_signature_type" + | "no_user" + | "unverified_email" + | "bad_email" + | "unknown_key" + | "malformed_signature" + | "invalid" + | "valid"; + signature: string | null; + payload: string | null; + }; + }; + url: string; + html_url: string; + comments_url: string; + author: User | null; + committer: User | null; + parents: { + sha: string; + url: string; + html_url: string; + }[]; + }; + /** + * An array of branch objects containing the status' SHA. Each branch contains the given SHA, but the SHA may or may not be the head of the branch. The array includes a maximum of 10 branches. 
+ */ + branches: { + name: string; + commit: { + sha: string; + url: string; + }; + protected: boolean; + }[]; + created_at: string; + updated_at: string; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface TeamAddedToRepositoryEvent { + action: "added_to_repository"; + team: Team; + repository?: Repository; + sender: User; + organization: Organization; + installation?: InstallationLite; +} +export interface TeamCreatedEvent { + action: "created"; + team: Team; + repository?: Repository; + sender: User; + organization: Organization; + installation?: InstallationLite; +} +export interface TeamDeletedEvent { + action: "deleted"; + team: Team; + repository?: Repository; + sender: User; + organization: Organization; + installation?: InstallationLite; +} +export interface TeamEditedEvent { + action: "edited"; + /** + * The changes to the team if the action was `edited`. + */ + changes: { + description?: { + /** + * The previous version of the description if the action was `edited`. + */ + from: string; + }; + name?: { + /** + * The previous version of the name if the action was `edited`. + */ + from: string; + }; + privacy?: { + /** + * The previous version of the team's privacy if the action was `edited`. + */ + from: string; + }; + repository?: { + permissions: { + from: { + /** + * The previous version of the team member's `admin` permission on a repository, if the action was `edited`. + */ + admin?: boolean; + /** + * The previous version of the team member's `pull` permission on a repository, if the action was `edited`. + */ + pull?: boolean; + /** + * The previous version of the team member's `push` permission on a repository, if the action was `edited`. + */ + push?: boolean; + }; + }; + }; + }; + team: Team; + repository?: Repository; + sender: User; + organization: Organization; + installation?: InstallationLite; +} +export interface TeamRemovedFromRepositoryEvent { + action: "removed_from_repository"; + team: Team; + repository?: Repository; + sender: User; + organization: Organization; + installation?: InstallationLite; +} +export interface TeamAddEvent { + team: Team; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface WatchStartedEvent { + action: "started"; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface WorkflowDispatchEvent { + /** + * Inputs to the workflow. Each key represents the name of the input while its value represents the value of that input. + */ + inputs: { + [k: string]: unknown; + } | null; + /** + * The branch ref from which the workflow was run. + */ + ref: string; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; + /** + * Relative path to the workflow file which contains the workflow. + */ + workflow: string; +} +export interface WorkflowJobCompletedEvent { + action: "completed"; + organization?: Organization; + installation?: InstallationLite; + repository: Repository; + sender: User; + workflow_job: WorkflowJob & { + conclusion: "success" | "failure" | "cancelled" | "skipped"; + }; +} +/** + * The workflow job. Many `workflow_job` keys, such as `head_sha`, `conclusion`, and `started_at`, are the same as those in a [`check_run`](#check_run) object.
+ */ +export interface WorkflowJob { + id: number; + run_id: number; + run_attempt: number; + run_url: string; + head_sha: string; + node_id: string; + name: string; + check_run_url: string; + html_url: string; + url: string; + /** + * The current status of the job. Can be `queued`, `in_progress`, or `completed`. + */ + status: "queued" | "in_progress" | "completed"; + steps: WorkflowStep[]; + conclusion: "success" | "failure" | "cancelled" | "skipped" | null; + /** + * Custom labels for the job. Specified by the [`"runs-on"` attribute](https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idruns-on) in the workflow YAML. + */ + labels: string[]; + /** + * The ID of the runner that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. + */ + runner_id: number | null; + /** + * The name of the runner that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. + */ + runner_name: string | null; + /** + * The ID of the runner group that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. + */ + runner_group_id: number | null; + /** + * The name of the runner group that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. + */ + runner_group_name: string | null; + started_at: string; + completed_at: string | null; + /** + * The name of the workflow. + */ + workflow_name: string | null; + /** + * The name of the current branch. + */ + head_branch: string | null; + created_at: string; +} +export interface WorkflowStepInProgress { + name: string; + status: "in_progress"; + conclusion: null; + number: number; + started_at: string; + completed_at: null; +} +export interface WorkflowStepCompleted { + name: string; + status: "completed"; + conclusion: "failure" | "skipped" | "success"; + number: number; + started_at: string; + completed_at: string; +} +export interface WorkflowJobInProgressEvent { + action: "in_progress"; + organization?: Organization; + installation?: InstallationLite; + repository: Repository; + sender: User; + workflow_job: WorkflowJob & { + status: "in_progress"; + }; +} +export interface WorkflowJobQueuedEvent { + action: "queued"; + organization?: Organization; + installation?: InstallationLite; + repository: Repository; + sender: User; + workflow_job: WorkflowJob & { + status: "queued"; + }; +} +export interface WorkflowRunCompletedEvent { + action: "completed"; + organization?: Organization; + repository: Repository; + sender: User; + workflow: Workflow; + workflow_run: WorkflowRun & { + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped"; + }; + installation?: InstallationLite; +} +export interface WorkflowRun { + /** + * The URL to the artifacts for the workflow run. + */ + artifacts_url: string; + /** + * The URL to cancel the workflow run. + */ + cancel_url: string; + /** + * The URL to the associated check suite. + */ + check_suite_url: string; + /** + * The ID of the associated check suite. + */ + check_suite_id: number; + /** + * The node ID of the associated check suite. 
+ */ + check_suite_node_id: string; + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | null; + created_at: string; + event: string; + head_branch: string; + head_commit: SimpleCommit; + head_repository: RepositoryLite; + /** + * The SHA of the head commit that points to the version of the workflow being run. + */ + head_sha: string; + /** + * The full path of the workflow + */ + path: string; + display_title: string; + html_url: string; + /** + * The ID of the workflow run. + */ + id: number; + /** + * The URL to the jobs for the workflow run. + */ + jobs_url: string; + /** + * The URL to download the logs for the workflow run. + */ + logs_url: string; + node_id: string; + /** + * The name of the workflow run. + */ + name: string; + pull_requests: { + url: string; + id: number; + number: number; + head: { + ref: string; + sha: string; + repo: RepoRef; + }; + base: { + ref: string; + sha: string; + repo: RepoRef; + }; + }[]; + repository: RepositoryLite; + /** + * The URL to rerun the workflow run. + */ + rerun_url: string; + /** + * The auto incrementing run number for the workflow run. + */ + run_number: number; + status: "requested" | "in_progress" | "completed" | "queued" | "waiting"; + updated_at: string; + /** + * The URL to the workflow run. + */ + url: string; + /** + * The ID of the parent workflow. + */ + workflow_id: number; + /** + * The URL to the workflow. + */ + workflow_url: string; + /** + * Attempt number of the run, 1 for first attempt and higher if the workflow was re-run. + */ + run_attempt: number; + referenced_workflows?: ReferencedWorkflow[]; + /** + * The start time of the latest run. Resets on re-run. + */ + run_started_at: string; + /** + * The URL to the previous attempted run of this workflow, if one exists. + */ + previous_attempt_url: string | null; + actor: User; + triggering_actor: User; +} +export interface RepositoryLite { + /** + * A template for the API URL to download the repository as an archive. + */ + archive_url: string; + /** + * A template for the API URL to list the available assignees for issues in the repository. + */ + assignees_url: string; + /** + * A template for the API URL to create or retrieve a raw Git blob in the repository. + */ + blobs_url: string; + /** + * A template for the API URL to get information about branches in the repository. + */ + branches_url: string; + /** + * A template for the API URL to get information about collaborators of the repository. + */ + collaborators_url: string; + /** + * A template for the API URL to get information about comments on the repository. + */ + comments_url: string; + /** + * A template for the API URL to get information about commits on the repository. + */ + commits_url: string; + /** + * A template for the API URL to compare two commits or refs. + */ + compare_url: string; + /** + * A template for the API URL to get the contents of the repository. + */ + contents_url: string; + /** + * A template for the API URL to list the contributors to the repository. + */ + contributors_url: string; + /** + * The API URL to list the deployments of the repository. + */ + deployments_url: string; + /** + * The repository description. + */ + description: string | null; + /** + * The API URL to list the downloads on the repository. + */ + downloads_url: string; + /** + * The API URL to list the events of the repository. + */ + events_url: string; + /** + * Whether the repository is a fork. 
+ */ + fork: boolean; + /** + * The API URL to list the forks of the repository. + */ + forks_url: string; + /** + * The full, globally unique, name of the repository. + */ + full_name: string; + /** + * A template for the API URL to get information about Git commits of the repository. + */ + git_commits_url: string; + /** + * A template for the API URL to get information about Git refs of the repository. + */ + git_refs_url: string; + /** + * A template for the API URL to get information about Git tags of the repository. + */ + git_tags_url: string; + /** + * The API URL to list the hooks on the repository. + */ + hooks_url: string; + /** + * The URL to view the repository on GitHub.com. + */ + html_url: string; + /** + * Unique identifier of the repository + */ + id: number; + /** + * A template for the API URL to get information about issue comments on the repository. + */ + issue_comment_url: string; + /** + * A template for the API URL to get information about issue events on the repository. + */ + issue_events_url: string; + /** + * A template for the API URL to get information about issues on the repository. + */ + issues_url: string; + /** + * A template for the API URL to get information about deploy keys on the repository. + */ + keys_url: string; + /** + * A template for the API URL to get information about labels of the repository. + */ + labels_url: string; + /** + * The API URL to get information about the languages of the repository. + */ + languages_url: string; + /** + * The API URL to merge branches in the repository. + */ + merges_url: string; + /** + * A template for the API URL to get information about milestones of the repository. + */ + milestones_url: string; + /** + * The name of the repository. + */ + name: string; + /** + * The GraphQL identifier of the repository. + */ + node_id: string; + /** + * A template for the API URL to get information about notifications on the repository. + */ + notifications_url: string; + owner: User; + /** + * Whether the repository is private or public. + */ + private: boolean; + /** + * A template for the API URL to get information about pull requests on the repository. + */ + pulls_url: string; + /** + * A template for the API URL to get information about releases on the repository. + */ + releases_url: string; + /** + * The API URL to list the stargazers on the repository. + */ + stargazers_url: string; + /** + * A template for the API URL to get information about statuses of a commit. + */ + statuses_url: string; + /** + * The API URL to list the subscribers on the repository. + */ + subscribers_url: string; + /** + * The API URL to subscribe to notifications for this repository. + */ + subscription_url: string; + /** + * The API URL to get information about tags on the repository. + */ + tags_url: string; + /** + * The API URL to list the teams on the repository. + */ + teams_url: string; + /** + * A template for the API URL to create or retrieve a raw Git tree of the repository. + */ + trees_url: string; + /** + * The URL to get more information about the repository from the GitHub API. 
+ */ + url: string; +} +export interface WorkflowRunInProgressEvent { + action: "in_progress"; + organization?: Organization; + repository: Repository; + sender: User; + workflow: Workflow; + workflow_run: WorkflowRun; + installation?: InstallationLite; +} +export interface WorkflowRunRequestedEvent { + action: "requested"; + organization?: Organization; + repository: Repository; + sender: User; + workflow: Workflow; + workflow_run: WorkflowRun; + installation?: InstallationLite; +} + +export interface EventPayloadMap { + branch_protection_rule: BranchProtectionRuleEvent; + check_run: CheckRunEvent; + check_suite: CheckSuiteEvent; + code_scanning_alert: CodeScanningAlertEvent; + commit_comment: CommitCommentEvent; + create: CreateEvent; + delete: DeleteEvent; + dependabot_alert: DependabotAlertEvent; + deploy_key: DeployKeyEvent; + deployment: DeploymentEvent; + deployment_status: DeploymentStatusEvent; + discussion: DiscussionEvent; + discussion_comment: DiscussionCommentEvent; + fork: ForkEvent; + github_app_authorization: GithubAppAuthorizationEvent; + gollum: GollumEvent; + installation: InstallationEvent; + installation_repositories: InstallationRepositoriesEvent; + installation_target: InstallationTargetEvent; + issue_comment: IssueCommentEvent; + issues: IssuesEvent; + label: LabelEvent; + marketplace_purchase: MarketplacePurchaseEvent; + member: MemberEvent; + membership: MembershipEvent; + merge_group: MergeGroupEvent; + meta: MetaEvent; + milestone: MilestoneEvent; + org_block: OrgBlockEvent; + organization: OrganizationEvent; + package: PackageEvent; + page_build: PageBuildEvent; + ping: PingEvent; + project: ProjectEvent; + project_card: ProjectCardEvent; + project_column: ProjectColumnEvent; + projects_v2_item: ProjectsV2ItemEvent; + public: PublicEvent; + pull_request: PullRequestEvent; + pull_request_review: PullRequestReviewEvent; + pull_request_review_comment: PullRequestReviewCommentEvent; + pull_request_review_thread: PullRequestReviewThreadEvent; + push: PushEvent; + registry_package: RegistryPackageEvent; + release: ReleaseEvent; + repository: RepositoryEvent; + repository_dispatch: RepositoryDispatchEvent; + repository_import: RepositoryImportEvent; + repository_vulnerability_alert: RepositoryVulnerabilityAlertEvent; + secret_scanning_alert: SecretScanningAlertEvent; + security_advisory: SecurityAdvisoryEvent; + sponsorship: SponsorshipEvent; + star: StarEvent; + status: StatusEvent; + team: TeamEvent; + team_add: TeamAddEvent; + watch: WatchEvent; + workflow_dispatch: WorkflowDispatchEvent; + workflow_job: WorkflowJobEvent; + workflow_run: WorkflowRunEvent; +} + +export type Asset = ReleaseAsset; +export type WebhookEvent = Schema; +export type WebhookEventMap = EventPayloadMap; +export type WebhookEventName = keyof EventPayloadMap; diff --git a/dist/node_modules/@octokit/webhooks/LICENSE.md b/dist/node_modules/@octokit/webhooks/LICENSE.md new file mode 100644 index 0000000..4b464e5 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/LICENSE.md @@ -0,0 +1,20 @@ +Copyright (c) 2017 Gregor Martynus + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and 
this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/dist/node_modules/@octokit/webhooks/README.md b/dist/node_modules/@octokit/webhooks/README.md new file mode 100644 index 0000000..d121977 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/README.md @@ -0,0 +1,735 @@ +# @octokit/webhooks + +> GitHub webhook events toolset for Node.js + +[![@latest](https://img.shields.io/npm/v/@octokit/webhooks.svg)](https://www.npmjs.com/package/@octokit/webhooks) +[![Test](https://github.com/octokit/webhooks.js/workflows/Test/badge.svg)](https://github.com/octokit/webhooks.js/actions?query=workflow) + + + +- [Usage](#usage) +- [Local development](#local-development) +- [API](#api) + - [Constructor](#constructor) + - [webhooks.sign()](#webhookssign) + - [webhooks.verify()](#webhooksverify) + - [webhooks.verifyAndReceive()](#webhooksverifyandreceive) + - [webhooks.receive()](#webhooksreceive) + - [webhooks.on()](#webhookson) + - [webhooks.onAny()](#webhooksonany) + - [webhooks.onError()](#webhooksonerror) + - [webhooks.removeListener()](#webhooksremovelistener) + - [createNodeMiddleware()](#createnodemiddleware) + - [Webhook events](#webhook-events) + - [emitterEventNames](#emittereventnames) +- [TypeScript](#typescript) + - [`EmitterWebhookEventName`](#emitterwebhookeventname) + - [`EmitterWebhookEvent`](#emitterwebhookevent) +- [License](#license) + + + +`@octokit/webhooks` helps to handle webhook events received from GitHub. + +[GitHub webhooks](https://docs.github.com/webhooks/) can be registered in multiple ways + +1. In repository or organization settings on [github.com](https://github.com/). +2. Using the REST API for [repositories](https://docs.github.com/rest/reference/repos#webhooks) or [organizations](https://docs.github.com/rest/reference/orgs#webhooks/) +3. By [creating a GitHub App](https://docs.github.com/developers/apps/creating-a-github-app). + +Note that while setting a secret is optional on GitHub, it is required to be set in order to use `@octokit/webhooks`. Content Type must be set to `application/json`, `application/x-www-form-urlencoded` is not supported. + +## Usage + +```js +// install with: npm install @octokit/webhooks +const { Webhooks, createNodeMiddleware } = require("@octokit/webhooks"); +const webhooks = new Webhooks({ + secret: "mysecret", +}); + +webhooks.onAny(({ id, name, payload }) => { + console.log(name, "event received"); +}); + +require("http").createServer(createNodeMiddleware(webhooks)).listen(3000); +// can now receive webhook events at /api/github/webhooks +``` + +## Local development + +You can receive webhooks on your local machine or even browser using [EventSource](https://developer.mozilla.org/en-US/docs/Web/API/EventSource) and [smee.io](https://smee.io/). + +Go to [smee.io](https://smee.io/) and Start a new channel. Then copy the "Webhook Proxy URL" and + +1. enter it in the GitHub App’s "Webhook URL" input +2. 
pass it to the [EventSource](https://github.com/EventSource/eventsource) constructor, see below + +```js +const webhookProxyUrl = "https://smee.io/IrqK0nopGAOc847"; // replace with your own Webhook Proxy URL +const source = new EventSource(webhookProxyUrl); +source.onmessage = (event) => { + const webhookEvent = JSON.parse(event.data); + webhooks + .verifyAndReceive({ + id: webhookEvent["x-request-id"], + name: webhookEvent["x-github-event"], + signature: webhookEvent["x-hub-signature"], + payload: webhookEvent.body, + }) + .catch(console.error); +}; +``` + +`EventSource` is a native browser API and can be polyfilled for browsers that don’t support it. In node, you can use the [`eventsource`](https://github.com/EventSource/eventsource) package: install with `npm install eventsource`, then `const EventSource = require('eventsource')` + +## API + +1. [Constructor](#constructor) +2. [webhooks.sign()](#webhookssign) +3. [webhooks.verify()](#webhooksverify) +4. [webhooks.verifyAndReceive()](#webhooksverifyandreceive) +5. [webhooks.receive()](#webhooksreceive) +6. [webhooks.on()](#webhookson) +7. [webhooks.onAny()](#webhooksonany) +8. [webhooks.onError()](#webhooksonerror) +9. [webhooks.removeListener()](#webhooksremovelistener) +10. [createNodeMiddleware()](#createnodemiddleware) +11. [Webhook events](#webhook-events) +12. [emitterEventNames](#emittereventnames) + +### Constructor + +```js +new Webhooks({ secret /*, transform */ }); +``` + + + + + + + + + + + + + + + + +
+ + secret + + (String) + + Required. + Secret as configured in GitHub Settings. +
+ + transform + + (Function) + + Only relevant for webhooks.on. + Transform emitted event before calling handlers. Can be asynchronous. +
+ log + + object + + + +Used for internal logging. Defaults to [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console) with `debug` and `info` doing nothing. + +
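+
+For example, a minimal sketch that passes the optional `transform` and `log` settings (the `receivedAt` field added by the transform below is purely illustrative):
+
+```js
+const { Webhooks } = require("@octokit/webhooks");
+
+const webhooks = new Webhooks({
+  secret: "mysecret",
+  // runs before handlers registered with webhooks.on(); may be async
+  transform: (event) => ({ ...event, receivedAt: new Date().toISOString() }),
+  // only the methods you pass are overridden; debug and info default to no-ops
+  log: { warn: console.warn, error: console.error },
+});
+```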
+ +Returns the `webhooks` API. + +### webhooks.sign() + +```js +webhooks.sign(eventPayload); +``` + + + + + + + + +
+ + eventPayload + + + (Object) + + + Required. + Webhook request payload as received from GitHub +
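+
+For example, assuming the `webhooks` instance from the [Usage](#usage) section and the raw request body of a delivery as a string:
+
+```js
+const payload = '{"action":"opened"}'; // raw body string received from GitHub
+
+webhooks.sign(payload).then((signature) => {
+  // resolves to the signature string that can be compared
+  // with the delivery's x-hub-signature-256 header
+  console.log(signature);
+});
+```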
+ +Returns a `signature` string. Throws error if `eventPayload` is not passed. + +The `sign` method can be imported as static method from [`@octokit/webhooks-methods`](https://github.com/octokit/webhooks-methods.js/#readme). + +### webhooks.verify() + +```js +webhooks.verify(eventPayload, signature); +``` + + + + + + + + + + + + +
+ + eventPayload + + + (Object [deprecated] or String) + + + Required. + Webhook event request payload as received from GitHub. +
+ + signature + + + (String) + + + Required. + Signature string as calculated by webhooks.sign(). +
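+
+For example, assuming `payload` is the raw body string of a delivery and `signature` is the value of its `x-hub-signature-256` header:
+
+```js
+webhooks.verify(payload, signature).then((matches) => {
+  // resolves to true or false
+  if (!matches) {
+    // signature mismatch: reject the delivery, e.g. with an HTTP 400 response
+  }
+});
+```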
+ +Returns `true` or `false`. Throws error if `eventPayload` or `signature` not passed. + +The `verify` method can be imported as static method from [`@octokit/webhooks-methods`](https://github.com/octokit/webhooks-methods.js/#readme). + +### webhooks.verifyAndReceive() + +```js +webhooks.verifyAndReceive({ id, name, payload, signature }); +``` + + + + + + + + + + + + + + + + + + + + +
+ + id + + + String + + + Unique webhook event request id +
+ + name + + + String + + + Required. + Name of the event. (Event names are set as X-GitHub-Event header + in the webhook event request.) +
+ + payload + + + Object (deprecated) or String + + + Required. + Webhook event request payload as received from GitHub. +
+ + signature + + + (String) + + + Required. + Signature string as calculated by webhooks.sign(). +
+ +Returns a promise. + +Verifies event using [webhooks.verify()](#webhooksverify), then handles the event using [webhooks.receive()](#webhooksreceive). + +Additionally, if verification fails, rejects the returned promise and emits an `error` event. + +Example + +```js +const { Webhooks } = require("@octokit/webhooks"); +const webhooks = new Webhooks({ + secret: "mysecret", +}); +eventHandler.on("error", handleSignatureVerificationError); + +// put this inside your webhooks route handler +eventHandler + .verifyAndReceive({ + id: request.headers["x-github-delivery"], + name: request.headers["x-github-event"], + payload: request.body, + signature: request.headers["x-hub-signature-256"], + }) + .catch(handleErrorsFromHooks); +``` + +### webhooks.receive() + +```js +webhooks.receive({ id, name, payload }); +``` + + + + + + + + + + + + + + + + +
+ + id + + + String + + + Unique webhook event request id +
+ + name + + + String + + + Required. + Name of the event. (Event names are set as X-GitHub-Event header + in the webhook event request.) +
+ + payload + + + Object + + + Required. + Webhook event request payload as received from GitHub. +
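+
+For example, assuming the delivery was already verified, `request` is an incoming HTTP request and `rawBody` is its body string:
+
+```js
+webhooks
+  .receive({
+    id: request.headers["x-github-delivery"],
+    name: request.headers["x-github-event"],
+    payload: JSON.parse(rawBody),
+  })
+  .catch((error) => {
+    // error.errors collects every error thrown by individual handlers
+    console.error(error.errors);
+  });
+```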
+ +Returns a promise. Runs all handlers set with [`webhooks.on()`](#webhookson) in parallel and waits for them to finish. If one of the handlers rejects or throws an error, then `webhooks.receive()` rejects. The returned error has an `.errors` property which holds an array of all errors caught from the handlers. If no errors occur, `webhooks.receive()` resolves without passing any value. + +The `.receive()` method belongs to the `event-handler` module which can be used [standalone](src/event-handler/). + +### webhooks.on() + +```js +webhooks.on(eventName, handler); +webhooks.on(eventNames, handler); +``` + + + + + + + + + + + + + + + + +
+ + eventName + + + String + + + Required. + Name of the event. One of GitHub's supported event names, or (if the event has an action property) the name of an event followed by its action in the form of <event>.<action>. +
+ + eventNames + + + Array + + + Required. + Array of event names. +
+ + handler + + + Function + + + Required. + Method to be run each time the event with the passed name is received. + the handler function can be an async function, throw an error or + return a Promise. The handler is called with an event object: {id, name, payload}. +
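+
+For example (the event names come from the [Webhook events](#webhook-events) table below; the payload fields follow GitHub's webhook payload documentation):
+
+```js
+webhooks.on("issues.opened", ({ id, name, payload }) => {
+  console.log(`${name} (${id}): issue #${payload.issue.number} opened`);
+});
+
+// one handler for several event/action combinations
+webhooks.on(
+  ["pull_request.opened", "pull_request.reopened"],
+  async ({ payload }) => {
+    console.log(payload.pull_request.title);
+  }
+);
+```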
+ +The `.on()` method belongs to the `event-handler` module which can be used [standalone](src/event-handler/). + +### webhooks.onAny() + +```js +webhooks.onAny(handler); +``` + + + + + + + + +
+ + handler + + + Function + + + Required. + Method to be run each time any event is received. + the handler function can be an async function, throw an error or + return a Promise. The handler is called with an event object: {id, name, payload}. +
+ +The `.onAny()` method belongs to the `event-handler` module which can be used [standalone](src/event-handler/). + +### webhooks.onError() + +```js +webhooks.onError(handler); +``` + +If a webhook event handler throws an error or returns a promise that rejects, an error event is triggered. You can use this handler for logging or reporting events. The passed error object has a .event property which has all information on the event. + +Asynchronous `error` event handler are not blocking the `.receive()` method from completing. + + + + + + + + +
+ + handler + + + Function + + + Required. + Method to be run each time a webhook event handler throws an error or returns a promise that rejects. + The handler function can be an async function, + return a Promise. The handler is called with an error object that has a .event property which has all the information on the event: {id, name, payload}. +
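+
+For example:
+
+```js
+webhooks.onError((error) => {
+  // error.event carries the { id, name, payload } that was being handled
+  console.error(`handler failed for "${error.event.name}"`, error);
+});
+```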
+ +The `.onError()` method belongs to the `event-handler` module which can be used [standalone](src/event-handler/). + +### webhooks.removeListener() + +```js +webhooks.removeListener(eventName, handler); +webhooks.removeListener(eventNames, handler); +``` + + + + + + + + + + + + + + + + +
+ + eventName + + + String + + + Required. + Name of the event. One of GitHub's supported event names, or (if the event has an action property) the name of an event followed by its action in the form of <event>.<action>, or '*' for the onAny() method or 'error' for the onError() method. +
+ + eventNames + + + Array + + + Required. + Array of event names. +
+ + handler + + + Function + + + Required. + Method which was previously passed to webhooks.on(). If the same handler was registered multiple times for the same event, only the most recent handler gets removed. +
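+
+For example, the handler has to be registered by reference so that the same function can later be passed to `removeListener()`:
+
+```js
+function onPush({ id }) {
+  console.log("push received", id);
+}
+
+webhooks.on("push", onPush);
+
+// later, e.g. while shutting down or reconfiguring:
+webhooks.removeListener("push", onPush);
+```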
+ +The `.removeListener()` method belongs to the `event-handler` module which can be used [standalone](src/event-handler/). + +### createNodeMiddleware() + +```js +const { createServer } = require("http"); +const { Webhooks, createNodeMiddleware } = require("@octokit/webhooks"); + +const webhooks = new Webhooks({ + secret: "mysecret", +}); + +const middleware = createNodeMiddleware(webhooks, { path: "/" }); + +createServer(middleware).listen(3000); +// can now receive user authorization callbacks at POST / +``` + + + + + + + + + + + + + + + + + + + + +
+ webhooks + + Webhooks instance + + + Required. +
+ path + + string + + + Custom path to match requests against. Defaults to /api/github/webhooks. +
+ log + + object + + + +Used for internal logging. Defaults to [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console) with `debug` and `info` doing nothing. + +
+ onUnhandledRequest (deprecated) + + function + + + +Defaults to + +```js +function onUnhandledRequest(request, response) { + response.writeHead(400, { + "content-type": "application/json", + }); + response.end( + JSON.stringify({ + error: error.message, + }) + ); +} +``` + +
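+
+As a sketch, the same middleware can also be mounted in an [Express](https://expressjs.com/) app (Express is only used here as an example host; the middleware itself relies on Node's request/response objects and an optional `next` callback):
+
+```js
+const express = require("express");
+const { Webhooks, createNodeMiddleware } = require("@octokit/webhooks");
+
+const webhooks = new Webhooks({ secret: "mysecret" });
+
+const app = express();
+// handles POST /api/github/webhooks and calls next() for all other requests
+app.use(createNodeMiddleware(webhooks));
+app.listen(3000);
+```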
+ +### Webhook events + +See the full list of [event types with example payloads](https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads/). + +If there are actions for a webhook, events are emitted for both, the webhook name as well as a combination of the webhook name and the action, e.g. `installation` and `installation.created`. + + + +| Event | Actions | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| [`branch_protection_rule`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#branch_protection_rule) | `created`
`deleted`
`edited` | +| [`check_run`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#check_run) | `completed`
`created`
`requested_action`
`rerequested` | +| [`check_suite`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#check_suite) | `completed`
`requested`
`rerequested` | +| [`code_scanning_alert`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#code_scanning_alert) | `appeared_in_branch`
`closed_by_user`
`created`
`fixed`
`reopened`
`reopened_by_user` | +| [`commit_comment`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#commit_comment) | `created` | +| [`create`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#create) | | +| [`delete`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#delete) | | +| [`dependabot_alert`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#dependabot_alert) | `created`
`dismissed`
`fixed`
`reintroduced`
`reopened` | +| [`deploy_key`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#deploy_key) | `created`
`deleted` | +| [`deployment`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#deployment) | `created` | +| [`deployment_status`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#deployment_status) | `created` | +| [`discussion`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#discussion) | `answered`
`category_changed`
`created`
`deleted`
`edited`
`labeled`
`locked`
`pinned`
`transferred`
`unanswered`
`unlabeled`
`unlocked`
`unpinned` | +| [`discussion_comment`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#discussion_comment) | `created`
`deleted`
`edited` | +| [`fork`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#fork) | | +| [`github_app_authorization`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#github_app_authorization) | `revoked` | +| [`gollum`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#gollum) | | +| [`installation`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#installation) | `created`
`deleted`
`new_permissions_accepted`
`suspend`
`unsuspend` | +| [`installation_repositories`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#installation_repositories) | `added`
`removed` | +| [`installation_target`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#installation_target) | `renamed` | +| [`issue_comment`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#issue_comment) | `created`
`deleted`
`edited` | +| [`issues`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#issues) | `assigned`
`closed`
`deleted`
`demilestoned`
`edited`
`labeled`
`locked`
`milestoned`
`opened`
`pinned`
`reopened`
`transferred`
`unassigned`
`unlabeled`
`unlocked`
`unpinned` | +| [`label`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#label) | `created`
`deleted`
`edited` | +| [`marketplace_purchase`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#marketplace_purchase) | `cancelled`
`changed`
`pending_change`
`pending_change_cancelled`
`purchased` | +| [`member`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#member) | `added`
`edited`
`removed` | +| [`membership`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#membership) | `added`
`removed` | +| [`merge_group`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#merge_group) | `checks_requested` | +| [`meta`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#meta) | `deleted` | +| [`milestone`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#milestone) | `closed`
`created`
`deleted`
`edited`
`opened` | +| [`org_block`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#org_block) | `blocked`
`unblocked` | +| [`organization`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#organization) | `deleted`
`member_added`
`member_invited`
`member_removed`
`renamed` | +| [`package`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#package) | `published`
`updated` | +| [`page_build`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#page_build) | | +| [`ping`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#ping) | | +| [`project`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#project) | `closed`
`created`
`deleted`
`edited`
`reopened` | +| [`project_card`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#project_card) | `converted`
`created`
`deleted`
`edited`
`moved` | +| [`project_column`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#project_column) | `created`
`deleted`
`edited`
`moved` | +| [`projects_v2_item`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#projects_v2_item) | `archived`
`converted`
`created`
`deleted`
`edited`
`reordered`
`restored` | +| [`public`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#public) | | +| [`pull_request`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request) | `assigned`
`auto_merge_disabled`
`auto_merge_enabled`
`closed`
`converted_to_draft`
`demilestoned`
`dequeued`
`edited`
`labeled`
`locked`
`milestoned`
`opened`
`queued`
`ready_for_review`
`reopened`
`review_request_removed`
`review_requested`
`synchronize`
`unassigned`
`unlabeled`
`unlocked` | +| [`pull_request_review`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request_review) | `dismissed`
`edited`
`submitted` | +| [`pull_request_review_comment`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request_review_comment) | `created`
`deleted`
`edited` | +| [`pull_request_review_thread`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request_review_thread) | `resolved`
`unresolved` | +| [`push`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#push) | | +| [`registry_package`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#registry_package) | `published`
`updated` | +| [`release`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#release) | `created`
`deleted`
`edited`
`prereleased`
`published`
`released`
`unpublished` | +| [`repository`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#repository) | `archived`
`created`
`deleted`
`edited`
`privatized`
`publicized`
`renamed`
`transferred`
`unarchived` | +| [`repository_dispatch`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#repository_dispatch) | | +| [`repository_import`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#repository_import) | | +| [`repository_vulnerability_alert`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#repository_vulnerability_alert) | `create`
`dismiss`
`reopen`
`resolve` | +| [`secret_scanning_alert`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#secret_scanning_alert) | `created`
`reopened`
`resolved` | +| [`security_advisory`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#security_advisory) | `performed`
`published`
`updated`
`withdrawn` | +| [`sponsorship`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#sponsorship) | `cancelled`
`created`
`edited`
`pending_cancellation`
`pending_tier_change`
`tier_changed` | +| [`star`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#star) | `created`
`deleted` | +| [`status`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#status) | | +| [`team`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#team) | `added_to_repository`
`created`
`deleted`
`edited`
`removed_from_repository` | +| [`team_add`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#team_add) | | +| [`watch`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#watch) | `started` | +| [`workflow_dispatch`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_dispatch) | | +| [`workflow_job`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_job) | `completed`
`in_progress`
`queued` | +| [`workflow_run`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_run) | `completed`
`in_progress`
`requested` | + + + +### emitterEventNames + +A read only tuple containing all the possible combinations of the webhook events + actions listed above. This might be useful in GUI and input validation. + +```js +import { emitterEventNames } from "@octokit/webhooks"; +emitterEventNames; // ["check_run", "check_run.completed", ...] +``` + +## TypeScript + +The types for the webhook payloads are sourced from [`@octokit/webhooks-types`](https://github.com/octokit/webhooks/tree/main/payload-types), +which can be used by themselves. + +In addition to these types, `@octokit/webhooks` exports 2 types specific to itself: + +Note that changes to the exported types are not considered breaking changes, as the changes will not impact production code, but only fail locally or during CI at build time. + +**⚠️ Caution ⚠️**: Webhooks Types are expected to be used with the [`strictNullChecks` option](https://www.typescriptlang.org/tsconfig#strictNullChecks) enabled in your `tsconfig`. If you don't have this option enabled, there's the possibility that you get `never` as the inferred type in some use cases. See [octokit/webhooks#395](https://github.com/octokit/webhooks/issues/395) for details. + +### `EmitterWebhookEventName` + +A union of all possible events and event/action combinations supported by the event emitter, e.g. `"check_run" | "check_run.completed" | ... many more ... | "workflow_run.requested"`. + +### `EmitterWebhookEvent` + +The object that is emitted by `@octokit/webhooks` as an event; made up of an `id`, `name`, and `payload` properties. +An optional generic parameter can be passed to narrow the type of the `name` and `payload` properties based on event names or event/action combinations, e.g. `EmitterWebhookEvent<"check_run" | "code_scanning_alert.fixed">`. + +## License + +[MIT](LICENSE.md) diff --git a/dist/node_modules/@octokit/webhooks/dist-node/index.js b/dist/node_modules/@octokit/webhooks/dist-node/index.js new file mode 100644 index 0000000..0223d69 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-node/index.js @@ -0,0 +1,710 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + Webhooks: () => Webhooks, + createEventHandler: () => createEventHandler, + createNodeMiddleware: () => createNodeMiddleware, + emitterEventNames: () => emitterEventNames +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/createLogger.js +var createLogger = (logger) => ({ + debug: () => { + }, + info: () => { + }, + warn: console.warn.bind(console), + error: console.error.bind(console), + ...logger +}); + +// pkg/dist-src/generated/webhook-names.js +var emitterEventNames = [ + "branch_protection_rule", + "branch_protection_rule.created", + "branch_protection_rule.deleted", + "branch_protection_rule.edited", + "check_run", + "check_run.completed", + "check_run.created", + "check_run.requested_action", + "check_run.rerequested", + "check_suite", + "check_suite.completed", + "check_suite.requested", + "check_suite.rerequested", + "code_scanning_alert", + "code_scanning_alert.appeared_in_branch", + "code_scanning_alert.closed_by_user", + "code_scanning_alert.created", + "code_scanning_alert.fixed", + "code_scanning_alert.reopened", + "code_scanning_alert.reopened_by_user", + "commit_comment", + "commit_comment.created", + "create", + "delete", + "dependabot_alert", + "dependabot_alert.created", + "dependabot_alert.dismissed", + "dependabot_alert.fixed", + "dependabot_alert.reintroduced", + "dependabot_alert.reopened", + "deploy_key", + "deploy_key.created", + "deploy_key.deleted", + "deployment", + "deployment.created", + "deployment_status", + "deployment_status.created", + "discussion", + "discussion.answered", + "discussion.category_changed", + "discussion.created", + "discussion.deleted", + "discussion.edited", + "discussion.labeled", + "discussion.locked", + "discussion.pinned", + "discussion.transferred", + "discussion.unanswered", + "discussion.unlabeled", + "discussion.unlocked", + "discussion.unpinned", + "discussion_comment", + "discussion_comment.created", + "discussion_comment.deleted", + "discussion_comment.edited", + "fork", + "github_app_authorization", + "github_app_authorization.revoked", + "gollum", + "installation", + "installation.created", + "installation.deleted", + "installation.new_permissions_accepted", + "installation.suspend", + "installation.unsuspend", + "installation_repositories", + "installation_repositories.added", + "installation_repositories.removed", + "installation_target", + "installation_target.renamed", + "issue_comment", + "issue_comment.created", + "issue_comment.deleted", + "issue_comment.edited", + "issues", + "issues.assigned", + "issues.closed", + "issues.deleted", + "issues.demilestoned", + "issues.edited", + "issues.labeled", + "issues.locked", + "issues.milestoned", + "issues.opened", + "issues.pinned", + "issues.reopened", + "issues.transferred", + "issues.unassigned", + "issues.unlabeled", + "issues.unlocked", + "issues.unpinned", + "label", + "label.created", + "label.deleted", + "label.edited", + "marketplace_purchase", + "marketplace_purchase.cancelled", + "marketplace_purchase.changed", + "marketplace_purchase.pending_change", + "marketplace_purchase.pending_change_cancelled", + "marketplace_purchase.purchased", + "member", + "member.added", + "member.edited", + "member.removed", + "membership", + "membership.added", + "membership.removed", + "merge_group", 
+ "merge_group.checks_requested", + "meta", + "meta.deleted", + "milestone", + "milestone.closed", + "milestone.created", + "milestone.deleted", + "milestone.edited", + "milestone.opened", + "org_block", + "org_block.blocked", + "org_block.unblocked", + "organization", + "organization.deleted", + "organization.member_added", + "organization.member_invited", + "organization.member_removed", + "organization.renamed", + "package", + "package.published", + "package.updated", + "page_build", + "ping", + "project", + "project.closed", + "project.created", + "project.deleted", + "project.edited", + "project.reopened", + "project_card", + "project_card.converted", + "project_card.created", + "project_card.deleted", + "project_card.edited", + "project_card.moved", + "project_column", + "project_column.created", + "project_column.deleted", + "project_column.edited", + "project_column.moved", + "projects_v2_item", + "projects_v2_item.archived", + "projects_v2_item.converted", + "projects_v2_item.created", + "projects_v2_item.deleted", + "projects_v2_item.edited", + "projects_v2_item.reordered", + "projects_v2_item.restored", + "public", + "pull_request", + "pull_request.assigned", + "pull_request.auto_merge_disabled", + "pull_request.auto_merge_enabled", + "pull_request.closed", + "pull_request.converted_to_draft", + "pull_request.demilestoned", + "pull_request.dequeued", + "pull_request.edited", + "pull_request.labeled", + "pull_request.locked", + "pull_request.milestoned", + "pull_request.opened", + "pull_request.queued", + "pull_request.ready_for_review", + "pull_request.reopened", + "pull_request.review_request_removed", + "pull_request.review_requested", + "pull_request.synchronize", + "pull_request.unassigned", + "pull_request.unlabeled", + "pull_request.unlocked", + "pull_request_review", + "pull_request_review.dismissed", + "pull_request_review.edited", + "pull_request_review.submitted", + "pull_request_review_comment", + "pull_request_review_comment.created", + "pull_request_review_comment.deleted", + "pull_request_review_comment.edited", + "pull_request_review_thread", + "pull_request_review_thread.resolved", + "pull_request_review_thread.unresolved", + "push", + "registry_package", + "registry_package.published", + "registry_package.updated", + "release", + "release.created", + "release.deleted", + "release.edited", + "release.prereleased", + "release.published", + "release.released", + "release.unpublished", + "repository", + "repository.archived", + "repository.created", + "repository.deleted", + "repository.edited", + "repository.privatized", + "repository.publicized", + "repository.renamed", + "repository.transferred", + "repository.unarchived", + "repository_dispatch", + "repository_import", + "repository_vulnerability_alert", + "repository_vulnerability_alert.create", + "repository_vulnerability_alert.dismiss", + "repository_vulnerability_alert.reopen", + "repository_vulnerability_alert.resolve", + "secret_scanning_alert", + "secret_scanning_alert.created", + "secret_scanning_alert.reopened", + "secret_scanning_alert.resolved", + "security_advisory", + "security_advisory.performed", + "security_advisory.published", + "security_advisory.updated", + "security_advisory.withdrawn", + "sponsorship", + "sponsorship.cancelled", + "sponsorship.created", + "sponsorship.edited", + "sponsorship.pending_cancellation", + "sponsorship.pending_tier_change", + "sponsorship.tier_changed", + "star", + "star.created", + "star.deleted", + "status", + "team", + "team.added_to_repository", + 
"team.created", + "team.deleted", + "team.edited", + "team.removed_from_repository", + "team_add", + "watch", + "watch.started", + "workflow_dispatch", + "workflow_job", + "workflow_job.completed", + "workflow_job.in_progress", + "workflow_job.queued", + "workflow_run", + "workflow_run.completed", + "workflow_run.in_progress", + "workflow_run.requested" +]; + +// pkg/dist-src/event-handler/on.js +function handleEventHandlers(state, webhookName, handler) { + if (!state.hooks[webhookName]) { + state.hooks[webhookName] = []; + } + state.hooks[webhookName].push(handler); +} +function receiverOn(state, webhookNameOrNames, handler) { + if (Array.isArray(webhookNameOrNames)) { + webhookNameOrNames.forEach( + (webhookName) => receiverOn(state, webhookName, handler) + ); + return; + } + if (["*", "error"].includes(webhookNameOrNames)) { + const webhookName = webhookNameOrNames === "*" ? "any" : webhookNameOrNames; + const message = `Using the "${webhookNameOrNames}" event with the regular Webhooks.on() function is not supported. Please use the Webhooks.on${webhookName.charAt(0).toUpperCase() + webhookName.slice(1)}() method instead`; + throw new Error(message); + } + if (!emitterEventNames.includes(webhookNameOrNames)) { + state.log.warn( + `"${webhookNameOrNames}" is not a known webhook name (https://developer.github.com/v3/activity/events/types/)` + ); + } + handleEventHandlers(state, webhookNameOrNames, handler); +} +function receiverOnAny(state, handler) { + handleEventHandlers(state, "*", handler); +} +function receiverOnError(state, handler) { + handleEventHandlers(state, "error", handler); +} + +// pkg/dist-src/event-handler/receive.js +var import_aggregate_error = __toESM(require("aggregate-error")); + +// pkg/dist-src/event-handler/wrap-error-handler.js +function wrapErrorHandler(handler, error) { + let returnValue; + try { + returnValue = handler(error); + } catch (error2) { + console.log('FATAL: Error occurred in "error" event handler'); + console.log(error2); + } + if (returnValue && returnValue.catch) { + returnValue.catch((error2) => { + console.log('FATAL: Error occurred in "error" event handler'); + console.log(error2); + }); + } +} + +// pkg/dist-src/event-handler/receive.js +function getHooks(state, eventPayloadAction, eventName) { + const hooks = [state.hooks[eventName], state.hooks["*"]]; + if (eventPayloadAction) { + hooks.unshift(state.hooks[`${eventName}.${eventPayloadAction}`]); + } + return [].concat(...hooks.filter(Boolean)); +} +function receiverHandle(state, event) { + const errorHandlers = state.hooks.error || []; + if (event instanceof Error) { + const error = Object.assign(new import_aggregate_error.default([event]), { + event, + errors: [event] + }); + errorHandlers.forEach((handler) => wrapErrorHandler(handler, error)); + return Promise.reject(error); + } + if (!event || !event.name) { + throw new import_aggregate_error.default(["Event name not passed"]); + } + if (!event.payload) { + throw new import_aggregate_error.default(["Event payload not passed"]); + } + const hooks = getHooks( + state, + "action" in event.payload ? 
event.payload.action : null, + event.name + ); + if (hooks.length === 0) { + return Promise.resolve(); + } + const errors = []; + const promises = hooks.map((handler) => { + let promise = Promise.resolve(event); + if (state.transform) { + promise = promise.then(state.transform); + } + return promise.then((event2) => { + return handler(event2); + }).catch((error) => errors.push(Object.assign(error, { event }))); + }); + return Promise.all(promises).then(() => { + if (errors.length === 0) { + return; + } + const error = new import_aggregate_error.default(errors); + Object.assign(error, { + event, + errors + }); + errorHandlers.forEach((handler) => wrapErrorHandler(handler, error)); + throw error; + }); +} + +// pkg/dist-src/event-handler/remove-listener.js +function removeListener(state, webhookNameOrNames, handler) { + if (Array.isArray(webhookNameOrNames)) { + webhookNameOrNames.forEach( + (webhookName) => removeListener(state, webhookName, handler) + ); + return; + } + if (!state.hooks[webhookNameOrNames]) { + return; + } + for (let i = state.hooks[webhookNameOrNames].length - 1; i >= 0; i--) { + if (state.hooks[webhookNameOrNames][i] === handler) { + state.hooks[webhookNameOrNames].splice(i, 1); + return; + } + } +} + +// pkg/dist-src/event-handler/index.js +function createEventHandler(options) { + const state = { + hooks: {}, + log: createLogger(options && options.log) + }; + if (options && options.transform) { + state.transform = options.transform; + } + return { + on: receiverOn.bind(null, state), + onAny: receiverOnAny.bind(null, state), + onError: receiverOnError.bind(null, state), + removeListener: removeListener.bind(null, state), + receive: receiverHandle.bind(null, state) + }; +} + +// pkg/dist-src/sign.js +var import_webhooks_methods = require("@octokit/webhooks-methods"); + +// pkg/dist-src/to-normalized-json-string.js +function toNormalizedJsonString(payload) { + const payloadString = JSON.stringify(payload); + return payloadString.replace(/[^\\]\\u[\da-f]{4}/g, (s) => { + return s.substr(0, 3) + s.substr(3).toUpperCase(); + }); +} + +// pkg/dist-src/sign.js +async function sign(secret, payload) { + return (0, import_webhooks_methods.sign)( + secret, + typeof payload === "string" ? payload : toNormalizedJsonString(payload) + ); +} + +// pkg/dist-src/verify.js +var import_webhooks_methods2 = require("@octokit/webhooks-methods"); +async function verify(secret, payload, signature) { + return (0, import_webhooks_methods2.verify)( + secret, + typeof payload === "string" ? payload : toNormalizedJsonString(payload), + signature + ); +} + +// pkg/dist-src/verify-and-receive.js +var import_webhooks_methods3 = require("@octokit/webhooks-methods"); +async function verifyAndReceive(state, event) { + const matchesSignature = await (0, import_webhooks_methods3.verify)( + state.secret, + typeof event.payload === "object" ? toNormalizedJsonString(event.payload) : event.payload, + event.signature + ).catch(() => false); + if (!matchesSignature) { + const error = new Error( + "[@octokit/webhooks] signature does not match event payload and secret" + ); + return state.eventHandler.receive( + Object.assign(error, { event, status: 400 }) + ); + } + return state.eventHandler.receive({ + id: event.id, + name: event.name, + payload: typeof event.payload === "string" ? 
JSON.parse(event.payload) : event.payload + }); +} + +// pkg/dist-src/middleware/node/get-missing-headers.js +var WEBHOOK_HEADERS = [ + "x-github-event", + "x-hub-signature-256", + "x-github-delivery" +]; +function getMissingHeaders(request) { + return WEBHOOK_HEADERS.filter((header) => !(header in request.headers)); +} + +// pkg/dist-src/middleware/node/get-payload.js +var import_aggregate_error2 = __toESM(require("aggregate-error")); +function getPayload(request) { + if (request.body) { + if (typeof request.body !== "string") { + console.warn( + "[@octokit/webhooks] Passing the payload as a JSON object in `request.body` is deprecated and will be removed in a future release of `@octokit/webhooks`, please pass it as a a `string` instead." + ); + } + return Promise.resolve(request.body); + } + return new Promise((resolve, reject) => { + let data = ""; + request.setEncoding("utf8"); + request.on("error", (error) => reject(new import_aggregate_error2.default([error]))); + request.on("data", (chunk) => data += chunk); + request.on("end", () => { + try { + JSON.parse(data); + resolve(data); + } catch (error) { + error.message = "Invalid JSON"; + error.status = 400; + reject(new import_aggregate_error2.default([error])); + } + }); + }); +} + +// pkg/dist-src/middleware/node/middleware.js +async function middleware(webhooks, options, request, response, next) { + let pathname; + try { + pathname = new URL(request.url, "http://localhost").pathname; + } catch (error) { + response.writeHead(422, { + "content-type": "application/json" + }); + response.end( + JSON.stringify({ + error: `Request URL could not be parsed: ${request.url}` + }) + ); + return; + } + const isUnknownRoute = request.method !== "POST" || pathname !== options.path; + const isExpressMiddleware = typeof next === "function"; + if (isUnknownRoute) { + if (isExpressMiddleware) { + return next(); + } else { + return options.onUnhandledRequest(request, response); + } + } + if (!request.headers["content-type"] || !request.headers["content-type"].startsWith("application/json")) { + response.writeHead(415, { + "content-type": "application/json", + accept: "application/json" + }); + response.end( + JSON.stringify({ + error: `Unsupported "Content-Type" header value. Must be "application/json"` + }) + ); + return; + } + const missingHeaders = getMissingHeaders(request).join(", "); + if (missingHeaders) { + response.writeHead(400, { + "content-type": "application/json" + }); + response.end( + JSON.stringify({ + error: `Required headers missing: ${missingHeaders}` + }) + ); + return; + } + const eventName = request.headers["x-github-event"]; + const signatureSHA256 = request.headers["x-hub-signature-256"]; + const id = request.headers["x-github-delivery"]; + options.log.debug(`${eventName} event received (id: ${id})`); + let didTimeout = false; + const timeout = setTimeout(() => { + didTimeout = true; + response.statusCode = 202; + response.end("still processing\n"); + }, 9e3).unref(); + try { + const payload = await getPayload(request); + await webhooks.verifyAndReceive({ + id, + name: eventName, + payload, + signature: signatureSHA256 + }); + clearTimeout(timeout); + if (didTimeout) + return; + response.end("ok\n"); + } catch (error) { + clearTimeout(timeout); + if (didTimeout) + return; + const err = Array.from(error)[0]; + const errorMessage = err.message ? `${err.name}: ${err.message}` : "Error: An Unspecified error occurred"; + response.statusCode = typeof err.status !== "undefined" ? 
err.status : 500; + options.log.error(error); + response.end( + JSON.stringify({ + error: errorMessage + }) + ); + } +} + +// pkg/dist-src/middleware/node/on-unhandled-request-default.js +function onUnhandledRequestDefault(request, response) { + response.writeHead(404, { + "content-type": "application/json" + }); + response.end( + JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}` + }) + ); +} + +// pkg/dist-src/middleware/node/index.js +function createNodeMiddleware(webhooks, { + path = "/api/github/webhooks", + onUnhandledRequest = onUnhandledRequestDefault, + log = createLogger() +} = {}) { + const deprecateOnUnhandledRequest = (request, response) => { + console.warn( + "[@octokit/webhooks] `onUnhandledRequest()` is deprecated and will be removed in a future release of `@octokit/webhooks`" + ); + return onUnhandledRequest(request, response); + }; + return middleware.bind(null, webhooks, { + path, + onUnhandledRequest: deprecateOnUnhandledRequest, + log + }); +} + +// pkg/dist-src/index.js +var Webhooks = class { + constructor(options) { + if (!options || !options.secret) { + throw new Error("[@octokit/webhooks] options.secret required"); + } + const state = { + eventHandler: createEventHandler(options), + secret: options.secret, + hooks: {}, + log: createLogger(options.log) + }; + this.sign = sign.bind(null, options.secret); + this.verify = (eventPayload, signature) => { + if (typeof eventPayload === "object") { + console.warn( + "[@octokit/webhooks] Passing a JSON payload object to `verify()` is deprecated and the functionality will be removed in a future release of `@octokit/webhooks`" + ); + } + return verify(options.secret, eventPayload, signature); + }; + this.on = state.eventHandler.on; + this.onAny = state.eventHandler.onAny; + this.onError = state.eventHandler.onError; + this.removeListener = state.eventHandler.removeListener; + this.receive = state.eventHandler.receive; + this.verifyAndReceive = (options2) => { + if (typeof options2.payload === "object") { + console.warn( + "[@octokit/webhooks] Passing a JSON payload object to `verifyAndReceive()` is deprecated and the functionality will be removed in a future release of `@octokit/webhooks`" + ); + } + return verifyAndReceive(state, options2); + }; + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + Webhooks, + createEventHandler, + createNodeMiddleware, + emitterEventNames +}); diff --git a/dist/node_modules/@octokit/webhooks/dist-node/index.js.map b/dist/node_modules/@octokit/webhooks/dist-node/index.js.map new file mode 100644 index 0000000..d5cdb54 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/createLogger.js", "../dist-src/generated/webhook-names.js", "../dist-src/event-handler/on.js", "../dist-src/event-handler/receive.js", "../dist-src/event-handler/wrap-error-handler.js", "../dist-src/event-handler/remove-listener.js", "../dist-src/event-handler/index.js", "../dist-src/sign.js", "../dist-src/to-normalized-json-string.js", "../dist-src/verify.js", "../dist-src/verify-and-receive.js", "../dist-src/middleware/node/get-missing-headers.js", "../dist-src/middleware/node/get-payload.js", "../dist-src/middleware/node/middleware.js", "../dist-src/middleware/node/on-unhandled-request-default.js", "../dist-src/middleware/node/index.js"], + "sourcesContent": ["import { createLogger } from \"./createLogger\";\nimport { createEventHandler } from 
\"./event-handler/index\";\nimport { sign } from \"./sign\";\nimport { verify } from \"./verify\";\nimport { verifyAndReceive } from \"./verify-and-receive\";\nimport { createNodeMiddleware } from \"./middleware/node/index\";\nimport { emitterEventNames } from \"./generated/webhook-names\";\nclass Webhooks {\n constructor(options) {\n if (!options || !options.secret) {\n throw new Error(\"[@octokit/webhooks] options.secret required\");\n }\n const state = {\n eventHandler: createEventHandler(options),\n secret: options.secret,\n hooks: {},\n log: createLogger(options.log)\n };\n this.sign = sign.bind(null, options.secret);\n this.verify = (eventPayload, signature) => {\n if (typeof eventPayload === \"object\") {\n console.warn(\n \"[@octokit/webhooks] Passing a JSON payload object to `verify()` is deprecated and the functionality will be removed in a future release of `@octokit/webhooks`\"\n );\n }\n return verify(options.secret, eventPayload, signature);\n };\n this.on = state.eventHandler.on;\n this.onAny = state.eventHandler.onAny;\n this.onError = state.eventHandler.onError;\n this.removeListener = state.eventHandler.removeListener;\n this.receive = state.eventHandler.receive;\n this.verifyAndReceive = (options2) => {\n if (typeof options2.payload === \"object\") {\n console.warn(\n \"[@octokit/webhooks] Passing a JSON payload object to `verifyAndReceive()` is deprecated and the functionality will be removed in a future release of `@octokit/webhooks`\"\n );\n }\n return verifyAndReceive(state, options2);\n };\n }\n}\nexport {\n Webhooks,\n createEventHandler,\n createNodeMiddleware,\n emitterEventNames\n};\n", "const createLogger = (logger) => ({\n debug: () => {\n },\n info: () => {\n },\n warn: console.warn.bind(console),\n error: console.error.bind(console),\n ...logger\n});\nexport {\n createLogger\n};\n", "const emitterEventNames = [\n \"branch_protection_rule\",\n \"branch_protection_rule.created\",\n \"branch_protection_rule.deleted\",\n \"branch_protection_rule.edited\",\n \"check_run\",\n \"check_run.completed\",\n \"check_run.created\",\n \"check_run.requested_action\",\n \"check_run.rerequested\",\n \"check_suite\",\n \"check_suite.completed\",\n \"check_suite.requested\",\n \"check_suite.rerequested\",\n \"code_scanning_alert\",\n \"code_scanning_alert.appeared_in_branch\",\n \"code_scanning_alert.closed_by_user\",\n \"code_scanning_alert.created\",\n \"code_scanning_alert.fixed\",\n \"code_scanning_alert.reopened\",\n \"code_scanning_alert.reopened_by_user\",\n \"commit_comment\",\n \"commit_comment.created\",\n \"create\",\n \"delete\",\n \"dependabot_alert\",\n \"dependabot_alert.created\",\n \"dependabot_alert.dismissed\",\n \"dependabot_alert.fixed\",\n \"dependabot_alert.reintroduced\",\n \"dependabot_alert.reopened\",\n \"deploy_key\",\n \"deploy_key.created\",\n \"deploy_key.deleted\",\n \"deployment\",\n \"deployment.created\",\n \"deployment_status\",\n \"deployment_status.created\",\n \"discussion\",\n \"discussion.answered\",\n \"discussion.category_changed\",\n \"discussion.created\",\n \"discussion.deleted\",\n \"discussion.edited\",\n \"discussion.labeled\",\n \"discussion.locked\",\n \"discussion.pinned\",\n \"discussion.transferred\",\n \"discussion.unanswered\",\n \"discussion.unlabeled\",\n \"discussion.unlocked\",\n \"discussion.unpinned\",\n \"discussion_comment\",\n \"discussion_comment.created\",\n \"discussion_comment.deleted\",\n \"discussion_comment.edited\",\n \"fork\",\n \"github_app_authorization\",\n \"github_app_authorization.revoked\",\n 
\"gollum\",\n \"installation\",\n \"installation.created\",\n \"installation.deleted\",\n \"installation.new_permissions_accepted\",\n \"installation.suspend\",\n \"installation.unsuspend\",\n \"installation_repositories\",\n \"installation_repositories.added\",\n \"installation_repositories.removed\",\n \"installation_target\",\n \"installation_target.renamed\",\n \"issue_comment\",\n \"issue_comment.created\",\n \"issue_comment.deleted\",\n \"issue_comment.edited\",\n \"issues\",\n \"issues.assigned\",\n \"issues.closed\",\n \"issues.deleted\",\n \"issues.demilestoned\",\n \"issues.edited\",\n \"issues.labeled\",\n \"issues.locked\",\n \"issues.milestoned\",\n \"issues.opened\",\n \"issues.pinned\",\n \"issues.reopened\",\n \"issues.transferred\",\n \"issues.unassigned\",\n \"issues.unlabeled\",\n \"issues.unlocked\",\n \"issues.unpinned\",\n \"label\",\n \"label.created\",\n \"label.deleted\",\n \"label.edited\",\n \"marketplace_purchase\",\n \"marketplace_purchase.cancelled\",\n \"marketplace_purchase.changed\",\n \"marketplace_purchase.pending_change\",\n \"marketplace_purchase.pending_change_cancelled\",\n \"marketplace_purchase.purchased\",\n \"member\",\n \"member.added\",\n \"member.edited\",\n \"member.removed\",\n \"membership\",\n \"membership.added\",\n \"membership.removed\",\n \"merge_group\",\n \"merge_group.checks_requested\",\n \"meta\",\n \"meta.deleted\",\n \"milestone\",\n \"milestone.closed\",\n \"milestone.created\",\n \"milestone.deleted\",\n \"milestone.edited\",\n \"milestone.opened\",\n \"org_block\",\n \"org_block.blocked\",\n \"org_block.unblocked\",\n \"organization\",\n \"organization.deleted\",\n \"organization.member_added\",\n \"organization.member_invited\",\n \"organization.member_removed\",\n \"organization.renamed\",\n \"package\",\n \"package.published\",\n \"package.updated\",\n \"page_build\",\n \"ping\",\n \"project\",\n \"project.closed\",\n \"project.created\",\n \"project.deleted\",\n \"project.edited\",\n \"project.reopened\",\n \"project_card\",\n \"project_card.converted\",\n \"project_card.created\",\n \"project_card.deleted\",\n \"project_card.edited\",\n \"project_card.moved\",\n \"project_column\",\n \"project_column.created\",\n \"project_column.deleted\",\n \"project_column.edited\",\n \"project_column.moved\",\n \"projects_v2_item\",\n \"projects_v2_item.archived\",\n \"projects_v2_item.converted\",\n \"projects_v2_item.created\",\n \"projects_v2_item.deleted\",\n \"projects_v2_item.edited\",\n \"projects_v2_item.reordered\",\n \"projects_v2_item.restored\",\n \"public\",\n \"pull_request\",\n \"pull_request.assigned\",\n \"pull_request.auto_merge_disabled\",\n \"pull_request.auto_merge_enabled\",\n \"pull_request.closed\",\n \"pull_request.converted_to_draft\",\n \"pull_request.demilestoned\",\n \"pull_request.dequeued\",\n \"pull_request.edited\",\n \"pull_request.labeled\",\n \"pull_request.locked\",\n \"pull_request.milestoned\",\n \"pull_request.opened\",\n \"pull_request.queued\",\n \"pull_request.ready_for_review\",\n \"pull_request.reopened\",\n \"pull_request.review_request_removed\",\n \"pull_request.review_requested\",\n \"pull_request.synchronize\",\n \"pull_request.unassigned\",\n \"pull_request.unlabeled\",\n \"pull_request.unlocked\",\n \"pull_request_review\",\n \"pull_request_review.dismissed\",\n \"pull_request_review.edited\",\n \"pull_request_review.submitted\",\n \"pull_request_review_comment\",\n \"pull_request_review_comment.created\",\n \"pull_request_review_comment.deleted\",\n 
\"pull_request_review_comment.edited\",\n \"pull_request_review_thread\",\n \"pull_request_review_thread.resolved\",\n \"pull_request_review_thread.unresolved\",\n \"push\",\n \"registry_package\",\n \"registry_package.published\",\n \"registry_package.updated\",\n \"release\",\n \"release.created\",\n \"release.deleted\",\n \"release.edited\",\n \"release.prereleased\",\n \"release.published\",\n \"release.released\",\n \"release.unpublished\",\n \"repository\",\n \"repository.archived\",\n \"repository.created\",\n \"repository.deleted\",\n \"repository.edited\",\n \"repository.privatized\",\n \"repository.publicized\",\n \"repository.renamed\",\n \"repository.transferred\",\n \"repository.unarchived\",\n \"repository_dispatch\",\n \"repository_import\",\n \"repository_vulnerability_alert\",\n \"repository_vulnerability_alert.create\",\n \"repository_vulnerability_alert.dismiss\",\n \"repository_vulnerability_alert.reopen\",\n \"repository_vulnerability_alert.resolve\",\n \"secret_scanning_alert\",\n \"secret_scanning_alert.created\",\n \"secret_scanning_alert.reopened\",\n \"secret_scanning_alert.resolved\",\n \"security_advisory\",\n \"security_advisory.performed\",\n \"security_advisory.published\",\n \"security_advisory.updated\",\n \"security_advisory.withdrawn\",\n \"sponsorship\",\n \"sponsorship.cancelled\",\n \"sponsorship.created\",\n \"sponsorship.edited\",\n \"sponsorship.pending_cancellation\",\n \"sponsorship.pending_tier_change\",\n \"sponsorship.tier_changed\",\n \"star\",\n \"star.created\",\n \"star.deleted\",\n \"status\",\n \"team\",\n \"team.added_to_repository\",\n \"team.created\",\n \"team.deleted\",\n \"team.edited\",\n \"team.removed_from_repository\",\n \"team_add\",\n \"watch\",\n \"watch.started\",\n \"workflow_dispatch\",\n \"workflow_job\",\n \"workflow_job.completed\",\n \"workflow_job.in_progress\",\n \"workflow_job.queued\",\n \"workflow_run\",\n \"workflow_run.completed\",\n \"workflow_run.in_progress\",\n \"workflow_run.requested\"\n];\nexport {\n emitterEventNames\n};\n", "import { emitterEventNames } from \"../generated/webhook-names\";\nfunction handleEventHandlers(state, webhookName, handler) {\n if (!state.hooks[webhookName]) {\n state.hooks[webhookName] = [];\n }\n state.hooks[webhookName].push(handler);\n}\nfunction receiverOn(state, webhookNameOrNames, handler) {\n if (Array.isArray(webhookNameOrNames)) {\n webhookNameOrNames.forEach(\n (webhookName) => receiverOn(state, webhookName, handler)\n );\n return;\n }\n if ([\"*\", \"error\"].includes(webhookNameOrNames)) {\n const webhookName = webhookNameOrNames === \"*\" ? \"any\" : webhookNameOrNames;\n const message = `Using the \"${webhookNameOrNames}\" event with the regular Webhooks.on() function is not supported. 
Please use the Webhooks.on${webhookName.charAt(0).toUpperCase() + webhookName.slice(1)}() method instead`;\n throw new Error(message);\n }\n if (!emitterEventNames.includes(webhookNameOrNames)) {\n state.log.warn(\n `\"${webhookNameOrNames}\" is not a known webhook name (https://developer.github.com/v3/activity/events/types/)`\n );\n }\n handleEventHandlers(state, webhookNameOrNames, handler);\n}\nfunction receiverOnAny(state, handler) {\n handleEventHandlers(state, \"*\", handler);\n}\nfunction receiverOnError(state, handler) {\n handleEventHandlers(state, \"error\", handler);\n}\nexport {\n receiverOn,\n receiverOnAny,\n receiverOnError\n};\n", "import AggregateError from \"aggregate-error\";\nimport { wrapErrorHandler } from \"./wrap-error-handler\";\nfunction getHooks(state, eventPayloadAction, eventName) {\n const hooks = [state.hooks[eventName], state.hooks[\"*\"]];\n if (eventPayloadAction) {\n hooks.unshift(state.hooks[`${eventName}.${eventPayloadAction}`]);\n }\n return [].concat(...hooks.filter(Boolean));\n}\nfunction receiverHandle(state, event) {\n const errorHandlers = state.hooks.error || [];\n if (event instanceof Error) {\n const error = Object.assign(new AggregateError([event]), {\n event,\n errors: [event]\n });\n errorHandlers.forEach((handler) => wrapErrorHandler(handler, error));\n return Promise.reject(error);\n }\n if (!event || !event.name) {\n throw new AggregateError([\"Event name not passed\"]);\n }\n if (!event.payload) {\n throw new AggregateError([\"Event payload not passed\"]);\n }\n const hooks = getHooks(\n state,\n \"action\" in event.payload ? event.payload.action : null,\n event.name\n );\n if (hooks.length === 0) {\n return Promise.resolve();\n }\n const errors = [];\n const promises = hooks.map((handler) => {\n let promise = Promise.resolve(event);\n if (state.transform) {\n promise = promise.then(state.transform);\n }\n return promise.then((event2) => {\n return handler(event2);\n }).catch((error) => errors.push(Object.assign(error, { event })));\n });\n return Promise.all(promises).then(() => {\n if (errors.length === 0) {\n return;\n }\n const error = new AggregateError(errors);\n Object.assign(error, {\n event,\n errors\n });\n errorHandlers.forEach((handler) => wrapErrorHandler(handler, error));\n throw error;\n });\n}\nexport {\n receiverHandle\n};\n", "function wrapErrorHandler(handler, error) {\n let returnValue;\n try {\n returnValue = handler(error);\n } catch (error2) {\n console.log('FATAL: Error occurred in \"error\" event handler');\n console.log(error2);\n }\n if (returnValue && returnValue.catch) {\n returnValue.catch((error2) => {\n console.log('FATAL: Error occurred in \"error\" event handler');\n console.log(error2);\n });\n }\n}\nexport {\n wrapErrorHandler\n};\n", "function removeListener(state, webhookNameOrNames, handler) {\n if (Array.isArray(webhookNameOrNames)) {\n webhookNameOrNames.forEach(\n (webhookName) => removeListener(state, webhookName, handler)\n );\n return;\n }\n if (!state.hooks[webhookNameOrNames]) {\n return;\n }\n for (let i = state.hooks[webhookNameOrNames].length - 1; i >= 0; i--) {\n if (state.hooks[webhookNameOrNames][i] === handler) {\n state.hooks[webhookNameOrNames].splice(i, 1);\n return;\n }\n }\n}\nexport {\n removeListener\n};\n", "import { createLogger } from \"../createLogger\";\nimport {\n receiverOn as on,\n receiverOnAny as onAny,\n receiverOnError as onError\n} from \"./on\";\nimport { receiverHandle as receive } from \"./receive\";\nimport { removeListener } from 
\"./remove-listener\";\nfunction createEventHandler(options) {\n const state = {\n hooks: {},\n log: createLogger(options && options.log)\n };\n if (options && options.transform) {\n state.transform = options.transform;\n }\n return {\n on: on.bind(null, state),\n onAny: onAny.bind(null, state),\n onError: onError.bind(null, state),\n removeListener: removeListener.bind(null, state),\n receive: receive.bind(null, state)\n };\n}\nexport {\n createEventHandler\n};\n", "import { sign as signMethod } from \"@octokit/webhooks-methods\";\nimport { toNormalizedJsonString } from \"./to-normalized-json-string\";\nasync function sign(secret, payload) {\n return signMethod(\n secret,\n typeof payload === \"string\" ? payload : toNormalizedJsonString(payload)\n );\n}\nexport {\n sign\n};\n", "function toNormalizedJsonString(payload) {\n const payloadString = JSON.stringify(payload);\n return payloadString.replace(/[^\\\\]\\\\u[\\da-f]{4}/g, (s) => {\n return s.substr(0, 3) + s.substr(3).toUpperCase();\n });\n}\nexport {\n toNormalizedJsonString\n};\n", "import { verify as verifyMethod } from \"@octokit/webhooks-methods\";\nimport { toNormalizedJsonString } from \"./to-normalized-json-string\";\nasync function verify(secret, payload, signature) {\n return verifyMethod(\n secret,\n typeof payload === \"string\" ? payload : toNormalizedJsonString(payload),\n signature\n );\n}\nexport {\n verify\n};\n", "import { verify } from \"@octokit/webhooks-methods\";\nimport { toNormalizedJsonString } from \"./to-normalized-json-string\";\nasync function verifyAndReceive(state, event) {\n const matchesSignature = await verify(\n state.secret,\n typeof event.payload === \"object\" ? toNormalizedJsonString(event.payload) : event.payload,\n event.signature\n ).catch(() => false);\n if (!matchesSignature) {\n const error = new Error(\n \"[@octokit/webhooks] signature does not match event payload and secret\"\n );\n return state.eventHandler.receive(\n Object.assign(error, { event, status: 400 })\n );\n }\n return state.eventHandler.receive({\n id: event.id,\n name: event.name,\n payload: typeof event.payload === \"string\" ? 
JSON.parse(event.payload) : event.payload\n });\n}\nexport {\n verifyAndReceive\n};\n", "const WEBHOOK_HEADERS = [\n \"x-github-event\",\n \"x-hub-signature-256\",\n \"x-github-delivery\"\n];\nfunction getMissingHeaders(request) {\n return WEBHOOK_HEADERS.filter((header) => !(header in request.headers));\n}\nexport {\n getMissingHeaders\n};\n", "import AggregateError from \"aggregate-error\";\nfunction getPayload(request) {\n if (request.body) {\n if (typeof request.body !== \"string\") {\n console.warn(\n \"[@octokit/webhooks] Passing the payload as a JSON object in `request.body` is deprecated and will be removed in a future release of `@octokit/webhooks`, please pass it as a a `string` instead.\"\n );\n }\n return Promise.resolve(request.body);\n }\n return new Promise((resolve, reject) => {\n let data = \"\";\n request.setEncoding(\"utf8\");\n request.on(\"error\", (error) => reject(new AggregateError([error])));\n request.on(\"data\", (chunk) => data += chunk);\n request.on(\"end\", () => {\n try {\n JSON.parse(data);\n resolve(data);\n } catch (error) {\n error.message = \"Invalid JSON\";\n error.status = 400;\n reject(new AggregateError([error]));\n }\n });\n });\n}\nexport {\n getPayload\n};\n", "import { getMissingHeaders } from \"./get-missing-headers\";\nimport { getPayload } from \"./get-payload\";\nasync function middleware(webhooks, options, request, response, next) {\n let pathname;\n try {\n pathname = new URL(request.url, \"http://localhost\").pathname;\n } catch (error) {\n response.writeHead(422, {\n \"content-type\": \"application/json\"\n });\n response.end(\n JSON.stringify({\n error: `Request URL could not be parsed: ${request.url}`\n })\n );\n return;\n }\n const isUnknownRoute = request.method !== \"POST\" || pathname !== options.path;\n const isExpressMiddleware = typeof next === \"function\";\n if (isUnknownRoute) {\n if (isExpressMiddleware) {\n return next();\n } else {\n return options.onUnhandledRequest(request, response);\n }\n }\n if (!request.headers[\"content-type\"] || !request.headers[\"content-type\"].startsWith(\"application/json\")) {\n response.writeHead(415, {\n \"content-type\": \"application/json\",\n accept: \"application/json\"\n });\n response.end(\n JSON.stringify({\n error: `Unsupported \"Content-Type\" header value. Must be \"application/json\"`\n })\n );\n return;\n }\n const missingHeaders = getMissingHeaders(request).join(\", \");\n if (missingHeaders) {\n response.writeHead(400, {\n \"content-type\": \"application/json\"\n });\n response.end(\n JSON.stringify({\n error: `Required headers missing: ${missingHeaders}`\n })\n );\n return;\n }\n const eventName = request.headers[\"x-github-event\"];\n const signatureSHA256 = request.headers[\"x-hub-signature-256\"];\n const id = request.headers[\"x-github-delivery\"];\n options.log.debug(`${eventName} event received (id: ${id})`);\n let didTimeout = false;\n const timeout = setTimeout(() => {\n didTimeout = true;\n response.statusCode = 202;\n response.end(\"still processing\\n\");\n }, 9e3).unref();\n try {\n const payload = await getPayload(request);\n await webhooks.verifyAndReceive({\n id,\n name: eventName,\n payload,\n signature: signatureSHA256\n });\n clearTimeout(timeout);\n if (didTimeout)\n return;\n response.end(\"ok\\n\");\n } catch (error) {\n clearTimeout(timeout);\n if (didTimeout)\n return;\n const err = Array.from(error)[0];\n const errorMessage = err.message ? 
`${err.name}: ${err.message}` : \"Error: An Unspecified error occurred\";\n response.statusCode = typeof err.status !== \"undefined\" ? err.status : 500;\n options.log.error(error);\n response.end(\n JSON.stringify({\n error: errorMessage\n })\n );\n }\n}\nexport {\n middleware\n};\n", "function onUnhandledRequestDefault(request, response) {\n response.writeHead(404, {\n \"content-type\": \"application/json\"\n });\n response.end(\n JSON.stringify({\n error: `Unknown route: ${request.method} ${request.url}`\n })\n );\n}\nexport {\n onUnhandledRequestDefault\n};\n", "import { createLogger } from \"../../createLogger\";\nimport { middleware } from \"./middleware\";\nimport { onUnhandledRequestDefault } from \"./on-unhandled-request-default\";\nfunction createNodeMiddleware(webhooks, {\n path = \"/api/github/webhooks\",\n onUnhandledRequest = onUnhandledRequestDefault,\n log = createLogger()\n} = {}) {\n const deprecateOnUnhandledRequest = (request, response) => {\n console.warn(\n \"[@octokit/webhooks] `onUnhandledRequest()` is deprecated and will be removed in a future release of `@octokit/webhooks`\"\n );\n return onUnhandledRequest(request, response);\n };\n return middleware.bind(null, webhooks, {\n path,\n onUnhandledRequest: deprecateOnUnhandledRequest,\n log\n });\n}\nexport {\n createNodeMiddleware\n};\n"], + "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAM,eAAe,CAAC,YAAY;AAAA,EAChC,OAAO,MAAM;AAAA,EACb;AAAA,EACA,MAAM,MAAM;AAAA,EACZ;AAAA,EACA,MAAM,QAAQ,KAAK,KAAK,OAAO;AAAA,EAC/B,OAAO,QAAQ,MAAM,KAAK,OAAO;AAAA,EACjC,GAAG;AACL;;;ACRA,IAAM,oBAAoB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,
EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;AClQA,SAAS,oBAAoB,OAAO,aAAa,SAAS;AACxD,MAAI,CAAC,MAAM,MAAM,WAAW,GAAG;AAC7B,UAAM,MAAM,WAAW,IAAI,CAAC;AAAA,EAC9B;AACA,QAAM,MAAM,WAAW,EAAE,KAAK,OAAO;AACvC;AACA,SAAS,WAAW,OAAO,oBAAoB,SAAS;AACtD,MAAI,MAAM,QAAQ,kBAAkB,GAAG;AACrC,uBAAmB;AAAA,MACjB,CAAC,gBAAgB,WAAW,OAAO,aAAa,OAAO;AAAA,IACzD;AACA;AAAA,EACF;AACA,MAAI,CAAC,KAAK,OAAO,EAAE,SAAS,kBAAkB,GAAG;AAC/C,UAAM,cAAc,uBAAuB,MAAM,QAAQ;AACzD,UAAM,UAAU,cAAc,iHAAiH,YAAY,OAAO,CAAC,EAAE,YAAY,IAAI,YAAY,MAAM,CAAC;AACxM,UAAM,IAAI,MAAM,OAAO;AAAA,EACzB;AACA,MAAI,CAAC,kBAAkB,SAAS,kBAAkB,GAAG;AACnD,UAAM,IAAI;AAAA,MACR,IAAI;AAAA,IACN;AAAA,EACF;AACA,sBAAoB,OAAO,oBAAoB,OAAO;AACxD;AACA,SAAS,cAAc,OAAO,SAAS;AACrC,sBAAoB,OAAO,KAAK,OAAO;AACzC;AACA,SAAS,gBAAgB,OAAO,SAAS;AACvC,sBAAoB,OAAO,SAAS,OAAO;AAC7C;;;AC/BA,6BAA2B;;;ACA3B,SAAS,iBAAiB,SAAS,OAAO;AACxC,MAAI;AACJ,MAAI;AACF,kBAAc,QAAQ,KAAK;AAAA,EAC7B,SAAS,QAAP;AACA,YAAQ,IAAI,gDAAgD;AAC5D,YAAQ,IAAI,MAAM;AAAA,EACpB;AACA,MAAI,eAAe,YAAY,OAAO;AACpC,gBAAY,MAAM,CAAC,WAAW;AAC5B,cAAQ,IAAI,gDAAgD;AAC5D,cAAQ,IAAI,MAAM;AAAA,IACpB,CAAC;AAAA,EACH;AACF;;;ADZA,SAAS,SAAS,OAAO,oBAAoB,WAAW;AACtD,QAAM,QAAQ,CAAC,MAAM,MAAM,SAAS,GAAG,MAAM,MAAM,GAAG,CAAC;AACvD,MAAI,oBAAoB;AACtB,UAAM,QAAQ,MAAM,MAAM,GAAG,aAAa,oBAAoB,CAAC;AAAA,EACjE;AACA,SAAO,CAAC,EAAE,OAAO,GAAG,MAAM,OAAO,OAAO,CAAC;AAC3C;AACA,SAAS,eAAe,OAAO,OAAO;AACpC,QAAM,gBAAgB,MAAM,MAAM,SAAS,CAAC;AAC5C,MAAI,iBAAiB,OAAO;AAC1B,UAAM,QAAQ,OAAO,OAAO,IAAI,uBAAAA,QAAe,CAAC,KAAK,CAAC,GAAG;AAAA,MACvD;AAAA,MACA,QAAQ,CAAC,KAAK;AAAA,IAChB,CAAC;AACD,kBAAc,QAAQ,CAAC,YAAY,iBAAiB,SAAS,KAAK,CAAC;AACnE,WAAO,QAAQ,OAAO,KAAK;AAAA,EAC7B;AACA,MAAI,CAAC,SAAS,CAAC,MAAM,MAAM;AACzB,UAAM,IAAI,uBAAAA,QAAe,CAAC,uBAAuB,CAAC;AAAA,EACpD;AACA,MAAI,CAAC,MAAM,SAAS;AAClB,UAAM,IAAI,uBAAAA,QAAe,CAAC,0BAA0B,CAAC;AAAA,EACvD;AACA,QAAM,QAAQ;AAAA,IACZ;AAAA,IACA,YAAY,MAAM,UAAU,MAAM,QAAQ,SAAS;AAAA,IACnD,MAAM;AAAA,EACR;AACA,MAAI,MAAM,WAAW,GAAG;AACtB,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACA,QAAM,SAAS,CAAC;AAChB,QAAM,WAAW,MAAM,IAAI,CAAC,YAAY;AACtC,QAAI,UAAU,QAAQ,QAAQ,KAAK;AACnC,QAAI,MAAM,WAAW;AACnB,gBAAU,QAAQ,KAAK,MAAM,SAAS;AAAA,IACxC;AACA,WAAO,QAAQ,KAAK,CAAC,WAAW;AAC9B,aAAO,QAAQ,MAAM;AAAA,IACvB,CAAC,EAAE,MAAM,CAAC,UAAU,OAAO,KAAK,OAAO,OAAO,OAAO,EAAE,MAAM,CAAC,CAAC,CAAC;AAAA,EAClE,CAAC;AACD,SAAO,QAAQ,IAAI,QAAQ,EAAE,KAAK,MAAM;AACtC,QAAI,OAAO,WAAW,GAAG;AACvB;AAAA,IACF;AACA,UAAM,QAAQ,IAAI,uBAAAA,QAAe,MAAM;AACvC,WAAO,OAAO,OAAO;AAAA,MACnB;AAAA,MACA;AAAA,IACF,CAAC;AACD,kBAAc,QAAQ,CAAC,YAAY,iBAAiB,SAAS,KAAK,CAAC;AACnE,UAAM;AAAA,EACR,CAAC;AACH;;;AEvDA,SAAS,eAAe,OAAO,oBAAoB,SAAS;AAC1D,MAAI,MAAM,QAAQ,kBAAkB,GAAG;AACrC,uBAAmB;AAAA,MACjB,CAAC,gBAAgB,eAAe,OAAO,aAAa,OAAO;AAAA,IAC7D;AACA;AAAA,EACF;AACA,MAAI,CAAC,MAAM,MAAM,kBAAkB,GAAG;AACpC;AAAA,EACF;AACA,WAAS,IAAI,MAAM,MAAM,kBAAkB,EAAE,SAAS,GAAG,KAAK,GAAG,KAAK;AACpE,QAAI,MAAM,MAAM,kBAAkB,EAAE,CAAC,MAAM,SAAS;AAClD,YAAM,MAAM,kBAAkB,EAAE,OAAO,GAAG,CAAC;AAC3C;AAAA,IACF;AAAA,EACF;AACF;;;ACRA,SAAS,mBAAmB,SAAS;AACnC,QAAM,QAAQ;AAAA,IACZ,OAAO,CAAC;AAAA,IACR,KAAK,aAAa,WAAW,QAAQ,GAAG;AAAA,
EAC1C;AACA,MAAI,WAAW,QAAQ,WAAW;AAChC,UAAM,YAAY,QAAQ;AAAA,EAC5B;AACA,SAAO;AAAA,IACL,IAAI,WAAG,KAAK,MAAM,KAAK;AAAA,IACvB,OAAO,cAAM,KAAK,MAAM,KAAK;AAAA,IAC7B,SAAS,gBAAQ,KAAK,MAAM,KAAK;AAAA,IACjC,gBAAgB,eAAe,KAAK,MAAM,KAAK;AAAA,IAC/C,SAAS,eAAQ,KAAK,MAAM,KAAK;AAAA,EACnC;AACF;;;ACvBA,8BAAmC;;;ACAnC,SAAS,uBAAuB,SAAS;AACvC,QAAM,gBAAgB,KAAK,UAAU,OAAO;AAC5C,SAAO,cAAc,QAAQ,uBAAuB,CAAC,MAAM;AACzD,WAAO,EAAE,OAAO,GAAG,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE,YAAY;AAAA,EAClD,CAAC;AACH;;;ADHA,eAAe,KAAK,QAAQ,SAAS;AACnC,aAAO,wBAAAC;AAAA,IACL;AAAA,IACA,OAAO,YAAY,WAAW,UAAU,uBAAuB,OAAO;AAAA,EACxE;AACF;;;AEPA,IAAAC,2BAAuC;AAEvC,eAAe,OAAO,QAAQ,SAAS,WAAW;AAChD,aAAO,yBAAAC;AAAA,IACL;AAAA,IACA,OAAO,YAAY,WAAW,UAAU,uBAAuB,OAAO;AAAA,IACtE;AAAA,EACF;AACF;;;ACRA,IAAAC,2BAAuB;AAEvB,eAAe,iBAAiB,OAAO,OAAO;AAC5C,QAAM,mBAAmB,UAAM;AAAA,IAC7B,MAAM;AAAA,IACN,OAAO,MAAM,YAAY,WAAW,uBAAuB,MAAM,OAAO,IAAI,MAAM;AAAA,IAClF,MAAM;AAAA,EACR,EAAE,MAAM,MAAM,KAAK;AACnB,MAAI,CAAC,kBAAkB;AACrB,UAAM,QAAQ,IAAI;AAAA,MAChB;AAAA,IACF;AACA,WAAO,MAAM,aAAa;AAAA,MACxB,OAAO,OAAO,OAAO,EAAE,OAAO,QAAQ,IAAI,CAAC;AAAA,IAC7C;AAAA,EACF;AACA,SAAO,MAAM,aAAa,QAAQ;AAAA,IAChC,IAAI,MAAM;AAAA,IACV,MAAM,MAAM;AAAA,IACZ,SAAS,OAAO,MAAM,YAAY,WAAW,KAAK,MAAM,MAAM,OAAO,IAAI,MAAM;AAAA,EACjF,CAAC;AACH;;;ACrBA,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AACF;AACA,SAAS,kBAAkB,SAAS;AAClC,SAAO,gBAAgB,OAAO,CAAC,WAAW,EAAE,UAAU,QAAQ,QAAQ;AACxE;;;ACPA,IAAAC,0BAA2B;AAC3B,SAAS,WAAW,SAAS;AAC3B,MAAI,QAAQ,MAAM;AAChB,QAAI,OAAO,QAAQ,SAAS,UAAU;AACpC,cAAQ;AAAA,QACN;AAAA,MACF;AAAA,IACF;AACA,WAAO,QAAQ,QAAQ,QAAQ,IAAI;AAAA,EACrC;AACA,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,QAAI,OAAO;AACX,YAAQ,YAAY,MAAM;AAC1B,YAAQ,GAAG,SAAS,CAAC,UAAU,OAAO,IAAI,wBAAAC,QAAe,CAAC,KAAK,CAAC,CAAC,CAAC;AAClE,YAAQ,GAAG,QAAQ,CAAC,UAAU,QAAQ,KAAK;AAC3C,YAAQ,GAAG,OAAO,MAAM;AACtB,UAAI;AACF,aAAK,MAAM,IAAI;AACf,gBAAQ,IAAI;AAAA,MACd,SAAS,OAAP;AACA,cAAM,UAAU;AAChB,cAAM,SAAS;AACf,eAAO,IAAI,wBAAAA,QAAe,CAAC,KAAK,CAAC,CAAC;AAAA,MACpC;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACH;;;ACxBA,eAAe,WAAW,UAAU,SAAS,SAAS,UAAU,MAAM;AACpE,MAAI;AACJ,MAAI;AACF,eAAW,IAAI,IAAI,QAAQ,KAAK,kBAAkB,EAAE;AAAA,EACtD,SAAS,OAAP;AACA,aAAS,UAAU,KAAK;AAAA,MACtB,gBAAgB;AAAA,IAClB,CAAC;AACD,aAAS;AAAA,MACP,KAAK,UAAU;AAAA,QACb,OAAO,oCAAoC,QAAQ;AAAA,MACrD,CAAC;AAAA,IACH;AACA;AAAA,EACF;AACA,QAAM,iBAAiB,QAAQ,WAAW,UAAU,aAAa,QAAQ;AACzE,QAAM,sBAAsB,OAAO,SAAS;AAC5C,MAAI,gBAAgB;AAClB,QAAI,qBAAqB;AACvB,aAAO,KAAK;AAAA,IACd,OAAO;AACL,aAAO,QAAQ,mBAAmB,SAAS,QAAQ;AAAA,IACrD;AAAA,EACF;AACA,MAAI,CAAC,QAAQ,QAAQ,cAAc,KAAK,CAAC,QAAQ,QAAQ,cAAc,EAAE,WAAW,kBAAkB,GAAG;AACvG,aAAS,UAAU,KAAK;AAAA,MACtB,gBAAgB;AAAA,MAChB,QAAQ;AAAA,IACV,CAAC;AACD,aAAS;AAAA,MACP,KAAK,UAAU;AAAA,QACb,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AACA;AAAA,EACF;AACA,QAAM,iBAAiB,kBAAkB,OAAO,EAAE,KAAK,IAAI;AAC3D,MAAI,gBAAgB;AAClB,aAAS,UAAU,KAAK;AAAA,MACtB,gBAAgB;AAAA,IAClB,CAAC;AACD,aAAS;AAAA,MACP,KAAK,UAAU;AAAA,QACb,OAAO,6BAA6B;AAAA,MACtC,CAAC;AAAA,IACH;AACA;AAAA,EACF;AACA,QAAM,YAAY,QAAQ,QAAQ,gBAAgB;AAClD,QAAM,kBAAkB,QAAQ,QAAQ,qBAAqB;AAC7D,QAAM,KAAK,QAAQ,QAAQ,mBAAmB;AAC9C,UAAQ,IAAI,MAAM,GAAG,iCAAiC,KAAK;AAC3D,MAAI,aAAa;AACjB,QAAM,UAAU,WAAW,MAAM;AAC/B,iBAAa;AACb,aAAS,aAAa;AACtB,aAAS,IAAI,oBAAoB;AAAA,EACnC,GAAG,GAAG,EAAE,MAAM;AACd,MAAI;AACF,UAAM,UAAU,MAAM,WAAW,OAAO;AACxC,UAAM,SAAS,iBAAiB;AAAA,MAC9B;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA,WAAW;AAAA,IACb,CAAC;AACD,iBAAa,OAAO;AACpB,QAAI;AACF;AACF,aAAS,IAAI,MAAM;AAAA,EACrB,SAAS,OAAP;AACA,iBAAa,OAAO;AACpB,QAAI;AACF;AACF,UAAM,MAAM,MAAM,KAAK,KAAK,EAAE,CAAC;AAC/B,UAAM,eAAe,IAAI,UAAU,GAAG,IAAI,SAAS,IAAI,YAAY;AACnE,aAAS,aAAa,OAAO,IAAI,WAAW,cAAc,IAAI,SAAS;AACvE,YAAQ,IAAI,MAAM,KAAK;AACvB,aAAS;AAAA,MACP,KAAK,UAAU;AAAA,QACb,OAA
O;AAAA,MACT,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ACtFA,SAAS,0BAA0B,SAAS,UAAU;AACpD,WAAS,UAAU,KAAK;AAAA,IACtB,gBAAgB;AAAA,EAClB,CAAC;AACD,WAAS;AAAA,IACP,KAAK,UAAU;AAAA,MACb,OAAO,kBAAkB,QAAQ,UAAU,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AACF;;;ACNA,SAAS,qBAAqB,UAAU;AAAA,EACtC,OAAO;AAAA,EACP,qBAAqB;AAAA,EACrB,MAAM,aAAa;AACrB,IAAI,CAAC,GAAG;AACN,QAAM,8BAA8B,CAAC,SAAS,aAAa;AACzD,YAAQ;AAAA,MACN;AAAA,IACF;AACA,WAAO,mBAAmB,SAAS,QAAQ;AAAA,EAC7C;AACA,SAAO,WAAW,KAAK,MAAM,UAAU;AAAA,IACrC;AAAA,IACA,oBAAoB;AAAA,IACpB;AAAA,EACF,CAAC;AACH;;;AhBZA,IAAM,WAAN,MAAe;AAAA,EACb,YAAY,SAAS;AACnB,QAAI,CAAC,WAAW,CAAC,QAAQ,QAAQ;AAC/B,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AACA,UAAM,QAAQ;AAAA,MACZ,cAAc,mBAAmB,OAAO;AAAA,MACxC,QAAQ,QAAQ;AAAA,MAChB,OAAO,CAAC;AAAA,MACR,KAAK,aAAa,QAAQ,GAAG;AAAA,IAC/B;AACA,SAAK,OAAO,KAAK,KAAK,MAAM,QAAQ,MAAM;AAC1C,SAAK,SAAS,CAAC,cAAc,cAAc;AACzC,UAAI,OAAO,iBAAiB,UAAU;AACpC,gBAAQ;AAAA,UACN;AAAA,QACF;AAAA,MACF;AACA,aAAO,OAAO,QAAQ,QAAQ,cAAc,SAAS;AAAA,IACvD;AACA,SAAK,KAAK,MAAM,aAAa;AAC7B,SAAK,QAAQ,MAAM,aAAa;AAChC,SAAK,UAAU,MAAM,aAAa;AAClC,SAAK,iBAAiB,MAAM,aAAa;AACzC,SAAK,UAAU,MAAM,aAAa;AAClC,SAAK,mBAAmB,CAAC,aAAa;AACpC,UAAI,OAAO,SAAS,YAAY,UAAU;AACxC,gBAAQ;AAAA,UACN;AAAA,QACF;AAAA,MACF;AACA,aAAO,iBAAiB,OAAO,QAAQ;AAAA,IACzC;AAAA,EACF;AACF;", + "names": ["AggregateError", "signMethod", "import_webhooks_methods", "verifyMethod", "import_webhooks_methods", "import_aggregate_error", "AggregateError"] +} diff --git a/dist/node_modules/@octokit/webhooks/dist-src/createLogger.js b/dist/node_modules/@octokit/webhooks/dist-src/createLogger.js new file mode 100644 index 0000000..819dbd9 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/createLogger.js @@ -0,0 +1,12 @@ +const createLogger = (logger) => ({ + debug: () => { + }, + info: () => { + }, + warn: console.warn.bind(console), + error: console.error.bind(console), + ...logger +}); +export { + createLogger +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/event-handler/index.js b/dist/node_modules/@octokit/webhooks/dist-src/event-handler/index.js new file mode 100644 index 0000000..e43461f --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/event-handler/index.js @@ -0,0 +1,27 @@ +import { createLogger } from "../createLogger"; +import { + receiverOn as on, + receiverOnAny as onAny, + receiverOnError as onError +} from "./on"; +import { receiverHandle as receive } from "./receive"; +import { removeListener } from "./remove-listener"; +function createEventHandler(options) { + const state = { + hooks: {}, + log: createLogger(options && options.log) + }; + if (options && options.transform) { + state.transform = options.transform; + } + return { + on: on.bind(null, state), + onAny: onAny.bind(null, state), + onError: onError.bind(null, state), + removeListener: removeListener.bind(null, state), + receive: receive.bind(null, state) + }; +} +export { + createEventHandler +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/event-handler/on.js b/dist/node_modules/@octokit/webhooks/dist-src/event-handler/on.js new file mode 100644 index 0000000..0cc7cdd --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/event-handler/on.js @@ -0,0 +1,37 @@ +import { emitterEventNames } from "../generated/webhook-names"; +function handleEventHandlers(state, webhookName, handler) { + if (!state.hooks[webhookName]) { + state.hooks[webhookName] = []; + } + state.hooks[webhookName].push(handler); +} +function receiverOn(state, webhookNameOrNames, handler) { + if (Array.isArray(webhookNameOrNames)) { + webhookNameOrNames.forEach( + 
(webhookName) => receiverOn(state, webhookName, handler) + ); + return; + } + if (["*", "error"].includes(webhookNameOrNames)) { + const webhookName = webhookNameOrNames === "*" ? "any" : webhookNameOrNames; + const message = `Using the "${webhookNameOrNames}" event with the regular Webhooks.on() function is not supported. Please use the Webhooks.on${webhookName.charAt(0).toUpperCase() + webhookName.slice(1)}() method instead`; + throw new Error(message); + } + if (!emitterEventNames.includes(webhookNameOrNames)) { + state.log.warn( + `"${webhookNameOrNames}" is not a known webhook name (https://developer.github.com/v3/activity/events/types/)` + ); + } + handleEventHandlers(state, webhookNameOrNames, handler); +} +function receiverOnAny(state, handler) { + handleEventHandlers(state, "*", handler); +} +function receiverOnError(state, handler) { + handleEventHandlers(state, "error", handler); +} +export { + receiverOn, + receiverOnAny, + receiverOnError +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/event-handler/receive.js b/dist/node_modules/@octokit/webhooks/dist-src/event-handler/receive.js new file mode 100644 index 0000000..d4fbe96 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/event-handler/receive.js @@ -0,0 +1,59 @@ +import AggregateError from "aggregate-error"; +import { wrapErrorHandler } from "./wrap-error-handler"; +function getHooks(state, eventPayloadAction, eventName) { + const hooks = [state.hooks[eventName], state.hooks["*"]]; + if (eventPayloadAction) { + hooks.unshift(state.hooks[`${eventName}.${eventPayloadAction}`]); + } + return [].concat(...hooks.filter(Boolean)); +} +function receiverHandle(state, event) { + const errorHandlers = state.hooks.error || []; + if (event instanceof Error) { + const error = Object.assign(new AggregateError([event]), { + event, + errors: [event] + }); + errorHandlers.forEach((handler) => wrapErrorHandler(handler, error)); + return Promise.reject(error); + } + if (!event || !event.name) { + throw new AggregateError(["Event name not passed"]); + } + if (!event.payload) { + throw new AggregateError(["Event payload not passed"]); + } + const hooks = getHooks( + state, + "action" in event.payload ? 
event.payload.action : null, + event.name + ); + if (hooks.length === 0) { + return Promise.resolve(); + } + const errors = []; + const promises = hooks.map((handler) => { + let promise = Promise.resolve(event); + if (state.transform) { + promise = promise.then(state.transform); + } + return promise.then((event2) => { + return handler(event2); + }).catch((error) => errors.push(Object.assign(error, { event }))); + }); + return Promise.all(promises).then(() => { + if (errors.length === 0) { + return; + } + const error = new AggregateError(errors); + Object.assign(error, { + event, + errors + }); + errorHandlers.forEach((handler) => wrapErrorHandler(handler, error)); + throw error; + }); +} +export { + receiverHandle +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/event-handler/remove-listener.js b/dist/node_modules/@octokit/webhooks/dist-src/event-handler/remove-listener.js new file mode 100644 index 0000000..adfcf51 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/event-handler/remove-listener.js @@ -0,0 +1,20 @@ +function removeListener(state, webhookNameOrNames, handler) { + if (Array.isArray(webhookNameOrNames)) { + webhookNameOrNames.forEach( + (webhookName) => removeListener(state, webhookName, handler) + ); + return; + } + if (!state.hooks[webhookNameOrNames]) { + return; + } + for (let i = state.hooks[webhookNameOrNames].length - 1; i >= 0; i--) { + if (state.hooks[webhookNameOrNames][i] === handler) { + state.hooks[webhookNameOrNames].splice(i, 1); + return; + } + } +} +export { + removeListener +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/event-handler/wrap-error-handler.js b/dist/node_modules/@octokit/webhooks/dist-src/event-handler/wrap-error-handler.js new file mode 100644 index 0000000..bcaff53 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/event-handler/wrap-error-handler.js @@ -0,0 +1,18 @@ +function wrapErrorHandler(handler, error) { + let returnValue; + try { + returnValue = handler(error); + } catch (error2) { + console.log('FATAL: Error occurred in "error" event handler'); + console.log(error2); + } + if (returnValue && returnValue.catch) { + returnValue.catch((error2) => { + console.log('FATAL: Error occurred in "error" event handler'); + console.log(error2); + }); + } +} +export { + wrapErrorHandler +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/generated/webhook-names.js b/dist/node_modules/@octokit/webhooks/dist-src/generated/webhook-names.js new file mode 100644 index 0000000..872bb59 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/generated/webhook-names.js @@ -0,0 +1,263 @@ +const emitterEventNames = [ + "branch_protection_rule", + "branch_protection_rule.created", + "branch_protection_rule.deleted", + "branch_protection_rule.edited", + "check_run", + "check_run.completed", + "check_run.created", + "check_run.requested_action", + "check_run.rerequested", + "check_suite", + "check_suite.completed", + "check_suite.requested", + "check_suite.rerequested", + "code_scanning_alert", + "code_scanning_alert.appeared_in_branch", + "code_scanning_alert.closed_by_user", + "code_scanning_alert.created", + "code_scanning_alert.fixed", + "code_scanning_alert.reopened", + "code_scanning_alert.reopened_by_user", + "commit_comment", + "commit_comment.created", + "create", + "delete", + "dependabot_alert", + "dependabot_alert.created", + "dependabot_alert.dismissed", + "dependabot_alert.fixed", + "dependabot_alert.reintroduced", + "dependabot_alert.reopened", + "deploy_key", + 
"deploy_key.created", + "deploy_key.deleted", + "deployment", + "deployment.created", + "deployment_status", + "deployment_status.created", + "discussion", + "discussion.answered", + "discussion.category_changed", + "discussion.created", + "discussion.deleted", + "discussion.edited", + "discussion.labeled", + "discussion.locked", + "discussion.pinned", + "discussion.transferred", + "discussion.unanswered", + "discussion.unlabeled", + "discussion.unlocked", + "discussion.unpinned", + "discussion_comment", + "discussion_comment.created", + "discussion_comment.deleted", + "discussion_comment.edited", + "fork", + "github_app_authorization", + "github_app_authorization.revoked", + "gollum", + "installation", + "installation.created", + "installation.deleted", + "installation.new_permissions_accepted", + "installation.suspend", + "installation.unsuspend", + "installation_repositories", + "installation_repositories.added", + "installation_repositories.removed", + "installation_target", + "installation_target.renamed", + "issue_comment", + "issue_comment.created", + "issue_comment.deleted", + "issue_comment.edited", + "issues", + "issues.assigned", + "issues.closed", + "issues.deleted", + "issues.demilestoned", + "issues.edited", + "issues.labeled", + "issues.locked", + "issues.milestoned", + "issues.opened", + "issues.pinned", + "issues.reopened", + "issues.transferred", + "issues.unassigned", + "issues.unlabeled", + "issues.unlocked", + "issues.unpinned", + "label", + "label.created", + "label.deleted", + "label.edited", + "marketplace_purchase", + "marketplace_purchase.cancelled", + "marketplace_purchase.changed", + "marketplace_purchase.pending_change", + "marketplace_purchase.pending_change_cancelled", + "marketplace_purchase.purchased", + "member", + "member.added", + "member.edited", + "member.removed", + "membership", + "membership.added", + "membership.removed", + "merge_group", + "merge_group.checks_requested", + "meta", + "meta.deleted", + "milestone", + "milestone.closed", + "milestone.created", + "milestone.deleted", + "milestone.edited", + "milestone.opened", + "org_block", + "org_block.blocked", + "org_block.unblocked", + "organization", + "organization.deleted", + "organization.member_added", + "organization.member_invited", + "organization.member_removed", + "organization.renamed", + "package", + "package.published", + "package.updated", + "page_build", + "ping", + "project", + "project.closed", + "project.created", + "project.deleted", + "project.edited", + "project.reopened", + "project_card", + "project_card.converted", + "project_card.created", + "project_card.deleted", + "project_card.edited", + "project_card.moved", + "project_column", + "project_column.created", + "project_column.deleted", + "project_column.edited", + "project_column.moved", + "projects_v2_item", + "projects_v2_item.archived", + "projects_v2_item.converted", + "projects_v2_item.created", + "projects_v2_item.deleted", + "projects_v2_item.edited", + "projects_v2_item.reordered", + "projects_v2_item.restored", + "public", + "pull_request", + "pull_request.assigned", + "pull_request.auto_merge_disabled", + "pull_request.auto_merge_enabled", + "pull_request.closed", + "pull_request.converted_to_draft", + "pull_request.demilestoned", + "pull_request.dequeued", + "pull_request.edited", + "pull_request.labeled", + "pull_request.locked", + "pull_request.milestoned", + "pull_request.opened", + "pull_request.queued", + "pull_request.ready_for_review", + "pull_request.reopened", + "pull_request.review_request_removed", 
+ "pull_request.review_requested", + "pull_request.synchronize", + "pull_request.unassigned", + "pull_request.unlabeled", + "pull_request.unlocked", + "pull_request_review", + "pull_request_review.dismissed", + "pull_request_review.edited", + "pull_request_review.submitted", + "pull_request_review_comment", + "pull_request_review_comment.created", + "pull_request_review_comment.deleted", + "pull_request_review_comment.edited", + "pull_request_review_thread", + "pull_request_review_thread.resolved", + "pull_request_review_thread.unresolved", + "push", + "registry_package", + "registry_package.published", + "registry_package.updated", + "release", + "release.created", + "release.deleted", + "release.edited", + "release.prereleased", + "release.published", + "release.released", + "release.unpublished", + "repository", + "repository.archived", + "repository.created", + "repository.deleted", + "repository.edited", + "repository.privatized", + "repository.publicized", + "repository.renamed", + "repository.transferred", + "repository.unarchived", + "repository_dispatch", + "repository_import", + "repository_vulnerability_alert", + "repository_vulnerability_alert.create", + "repository_vulnerability_alert.dismiss", + "repository_vulnerability_alert.reopen", + "repository_vulnerability_alert.resolve", + "secret_scanning_alert", + "secret_scanning_alert.created", + "secret_scanning_alert.reopened", + "secret_scanning_alert.resolved", + "security_advisory", + "security_advisory.performed", + "security_advisory.published", + "security_advisory.updated", + "security_advisory.withdrawn", + "sponsorship", + "sponsorship.cancelled", + "sponsorship.created", + "sponsorship.edited", + "sponsorship.pending_cancellation", + "sponsorship.pending_tier_change", + "sponsorship.tier_changed", + "star", + "star.created", + "star.deleted", + "status", + "team", + "team.added_to_repository", + "team.created", + "team.deleted", + "team.edited", + "team.removed_from_repository", + "team_add", + "watch", + "watch.started", + "workflow_dispatch", + "workflow_job", + "workflow_job.completed", + "workflow_job.in_progress", + "workflow_job.queued", + "workflow_run", + "workflow_run.completed", + "workflow_run.in_progress", + "workflow_run.requested" +]; +export { + emitterEventNames +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/index.js b/dist/node_modules/@octokit/webhooks/dist-src/index.js new file mode 100644 index 0000000..9c9fc76 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/index.js @@ -0,0 +1,48 @@ +import { createLogger } from "./createLogger"; +import { createEventHandler } from "./event-handler/index"; +import { sign } from "./sign"; +import { verify } from "./verify"; +import { verifyAndReceive } from "./verify-and-receive"; +import { createNodeMiddleware } from "./middleware/node/index"; +import { emitterEventNames } from "./generated/webhook-names"; +class Webhooks { + constructor(options) { + if (!options || !options.secret) { + throw new Error("[@octokit/webhooks] options.secret required"); + } + const state = { + eventHandler: createEventHandler(options), + secret: options.secret, + hooks: {}, + log: createLogger(options.log) + }; + this.sign = sign.bind(null, options.secret); + this.verify = (eventPayload, signature) => { + if (typeof eventPayload === "object") { + console.warn( + "[@octokit/webhooks] Passing a JSON payload object to `verify()` is deprecated and the functionality will be removed in a future release of `@octokit/webhooks`" + ); + } + return 
verify(options.secret, eventPayload, signature); + }; + this.on = state.eventHandler.on; + this.onAny = state.eventHandler.onAny; + this.onError = state.eventHandler.onError; + this.removeListener = state.eventHandler.removeListener; + this.receive = state.eventHandler.receive; + this.verifyAndReceive = (options2) => { + if (typeof options2.payload === "object") { + console.warn( + "[@octokit/webhooks] Passing a JSON payload object to `verifyAndReceive()` is deprecated and the functionality will be removed in a future release of `@octokit/webhooks`" + ); + } + return verifyAndReceive(state, options2); + }; + } +} +export { + Webhooks, + createEventHandler, + createNodeMiddleware, + emitterEventNames +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/get-missing-headers.js b/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/get-missing-headers.js new file mode 100644 index 0000000..83aa6c8 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/get-missing-headers.js @@ -0,0 +1,11 @@ +const WEBHOOK_HEADERS = [ + "x-github-event", + "x-hub-signature-256", + "x-github-delivery" +]; +function getMissingHeaders(request) { + return WEBHOOK_HEADERS.filter((header) => !(header in request.headers)); +} +export { + getMissingHeaders +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/get-payload.js b/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/get-payload.js new file mode 100644 index 0000000..0b2cfd3 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/get-payload.js @@ -0,0 +1,30 @@ +import AggregateError from "aggregate-error"; +function getPayload(request) { + if (request.body) { + if (typeof request.body !== "string") { + console.warn( + "[@octokit/webhooks] Passing the payload as a JSON object in `request.body` is deprecated and will be removed in a future release of `@octokit/webhooks`, please pass it as a a `string` instead." 
+ ); + } + return Promise.resolve(request.body); + } + return new Promise((resolve, reject) => { + let data = ""; + request.setEncoding("utf8"); + request.on("error", (error) => reject(new AggregateError([error]))); + request.on("data", (chunk) => data += chunk); + request.on("end", () => { + try { + JSON.parse(data); + resolve(data); + } catch (error) { + error.message = "Invalid JSON"; + error.status = 400; + reject(new AggregateError([error])); + } + }); + }); +} +export { + getPayload +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/index.js b/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/index.js new file mode 100644 index 0000000..3c29744 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/index.js @@ -0,0 +1,23 @@ +import { createLogger } from "../../createLogger"; +import { middleware } from "./middleware"; +import { onUnhandledRequestDefault } from "./on-unhandled-request-default"; +function createNodeMiddleware(webhooks, { + path = "/api/github/webhooks", + onUnhandledRequest = onUnhandledRequestDefault, + log = createLogger() +} = {}) { + const deprecateOnUnhandledRequest = (request, response) => { + console.warn( + "[@octokit/webhooks] `onUnhandledRequest()` is deprecated and will be removed in a future release of `@octokit/webhooks`" + ); + return onUnhandledRequest(request, response); + }; + return middleware.bind(null, webhooks, { + path, + onUnhandledRequest: deprecateOnUnhandledRequest, + log + }); +} +export { + createNodeMiddleware +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/middleware.js b/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/middleware.js new file mode 100644 index 0000000..03325a3 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/middleware.js @@ -0,0 +1,90 @@ +import { getMissingHeaders } from "./get-missing-headers"; +import { getPayload } from "./get-payload"; +async function middleware(webhooks, options, request, response, next) { + let pathname; + try { + pathname = new URL(request.url, "http://localhost").pathname; + } catch (error) { + response.writeHead(422, { + "content-type": "application/json" + }); + response.end( + JSON.stringify({ + error: `Request URL could not be parsed: ${request.url}` + }) + ); + return; + } + const isUnknownRoute = request.method !== "POST" || pathname !== options.path; + const isExpressMiddleware = typeof next === "function"; + if (isUnknownRoute) { + if (isExpressMiddleware) { + return next(); + } else { + return options.onUnhandledRequest(request, response); + } + } + if (!request.headers["content-type"] || !request.headers["content-type"].startsWith("application/json")) { + response.writeHead(415, { + "content-type": "application/json", + accept: "application/json" + }); + response.end( + JSON.stringify({ + error: `Unsupported "Content-Type" header value. 
Must be "application/json"` + }) + ); + return; + } + const missingHeaders = getMissingHeaders(request).join(", "); + if (missingHeaders) { + response.writeHead(400, { + "content-type": "application/json" + }); + response.end( + JSON.stringify({ + error: `Required headers missing: ${missingHeaders}` + }) + ); + return; + } + const eventName = request.headers["x-github-event"]; + const signatureSHA256 = request.headers["x-hub-signature-256"]; + const id = request.headers["x-github-delivery"]; + options.log.debug(`${eventName} event received (id: ${id})`); + let didTimeout = false; + const timeout = setTimeout(() => { + didTimeout = true; + response.statusCode = 202; + response.end("still processing\n"); + }, 9e3).unref(); + try { + const payload = await getPayload(request); + await webhooks.verifyAndReceive({ + id, + name: eventName, + payload, + signature: signatureSHA256 + }); + clearTimeout(timeout); + if (didTimeout) + return; + response.end("ok\n"); + } catch (error) { + clearTimeout(timeout); + if (didTimeout) + return; + const err = Array.from(error)[0]; + const errorMessage = err.message ? `${err.name}: ${err.message}` : "Error: An Unspecified error occurred"; + response.statusCode = typeof err.status !== "undefined" ? err.status : 500; + options.log.error(error); + response.end( + JSON.stringify({ + error: errorMessage + }) + ); + } +} +export { + middleware +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/on-unhandled-request-default.js b/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/on-unhandled-request-default.js new file mode 100644 index 0000000..73e4c84 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/middleware/node/on-unhandled-request-default.js @@ -0,0 +1,13 @@ +function onUnhandledRequestDefault(request, response) { + response.writeHead(404, { + "content-type": "application/json" + }); + response.end( + JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}` + }) + ); +} +export { + onUnhandledRequestDefault +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/sign.js b/dist/node_modules/@octokit/webhooks/dist-src/sign.js new file mode 100644 index 0000000..036baf5 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/sign.js @@ -0,0 +1,11 @@ +import { sign as signMethod } from "@octokit/webhooks-methods"; +import { toNormalizedJsonString } from "./to-normalized-json-string"; +async function sign(secret, payload) { + return signMethod( + secret, + typeof payload === "string" ? 
payload : toNormalizedJsonString(payload) + ); +} +export { + sign +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/to-normalized-json-string.js b/dist/node_modules/@octokit/webhooks/dist-src/to-normalized-json-string.js new file mode 100644 index 0000000..ebed8e1 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/to-normalized-json-string.js @@ -0,0 +1,9 @@ +function toNormalizedJsonString(payload) { + const payloadString = JSON.stringify(payload); + return payloadString.replace(/[^\\]\\u[\da-f]{4}/g, (s) => { + return s.substr(0, 3) + s.substr(3).toUpperCase(); + }); +} +export { + toNormalizedJsonString +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/verify-and-receive.js b/dist/node_modules/@octokit/webhooks/dist-src/verify-and-receive.js new file mode 100644 index 0000000..64ec6f5 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/verify-and-receive.js @@ -0,0 +1,25 @@ +import { verify } from "@octokit/webhooks-methods"; +import { toNormalizedJsonString } from "./to-normalized-json-string"; +async function verifyAndReceive(state, event) { + const matchesSignature = await verify( + state.secret, + typeof event.payload === "object" ? toNormalizedJsonString(event.payload) : event.payload, + event.signature + ).catch(() => false); + if (!matchesSignature) { + const error = new Error( + "[@octokit/webhooks] signature does not match event payload and secret" + ); + return state.eventHandler.receive( + Object.assign(error, { event, status: 400 }) + ); + } + return state.eventHandler.receive({ + id: event.id, + name: event.name, + payload: typeof event.payload === "string" ? JSON.parse(event.payload) : event.payload + }); +} +export { + verifyAndReceive +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-src/verify.js b/dist/node_modules/@octokit/webhooks/dist-src/verify.js new file mode 100644 index 0000000..b2e0c04 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-src/verify.js @@ -0,0 +1,12 @@ +import { verify as verifyMethod } from "@octokit/webhooks-methods"; +import { toNormalizedJsonString } from "./to-normalized-json-string"; +async function verify(secret, payload, signature) { + return verifyMethod( + secret, + typeof payload === "string" ? 
payload : toNormalizedJsonString(payload), + signature + ); +} +export { + verify +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/createLogger.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/createLogger.d.ts new file mode 100644 index 0000000..c0c97dc --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/createLogger.d.ts @@ -0,0 +1,7 @@ +export interface Logger { + debug: (...data: any[]) => void; + info: (...data: any[]) => void; + warn: (...data: any[]) => void; + error: (...data: any[]) => void; +} +export declare const createLogger: (logger?: Partial) => Logger; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/event-handler/index.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/event-handler/index.d.ts new file mode 100644 index 0000000..82ebcfa --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/event-handler/index.d.ts @@ -0,0 +1,10 @@ +import type { EmitterWebhookEvent, EmitterWebhookEventName, HandlerFunction, Options, WebhookEventHandlerError } from "../types"; +interface EventHandler { + on(event: E | E[], callback: HandlerFunction): void; + onAny(handler: (event: EmitterWebhookEvent) => any): void; + onError(handler: (event: WebhookEventHandlerError) => any): void; + removeListener(event: E | E[], callback: HandlerFunction): void; + receive(event: EmitterWebhookEvent): Promise; +} +export declare function createEventHandler(options: Options): EventHandler; +export {}; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/event-handler/on.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/event-handler/on.d.ts new file mode 100644 index 0000000..8890836 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/event-handler/on.d.ts @@ -0,0 +1,4 @@ +import type { EmitterWebhookEvent, EmitterWebhookEventName, State, WebhookEventHandlerError } from "../types"; +export declare function receiverOn(state: State, webhookNameOrNames: EmitterWebhookEventName | EmitterWebhookEventName[], handler: Function): void; +export declare function receiverOnAny(state: State, handler: (event: EmitterWebhookEvent) => any): void; +export declare function receiverOnError(state: State, handler: (event: WebhookEventHandlerError) => any): void; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/event-handler/receive.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/event-handler/receive.d.ts new file mode 100644 index 0000000..1894090 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/event-handler/receive.d.ts @@ -0,0 +1,2 @@ +import type { EmitterWebhookEvent, State } from "../types"; +export declare function receiverHandle(state: State, event: EmitterWebhookEvent): Promise; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/event-handler/remove-listener.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/event-handler/remove-listener.d.ts new file mode 100644 index 0000000..052feff --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/event-handler/remove-listener.d.ts @@ -0,0 +1,2 @@ +import type { EmitterWebhookEventName, State } from "../types"; +export declare function removeListener(state: State, webhookNameOrNames: "*" | EmitterWebhookEventName | EmitterWebhookEventName[], handler: Function): void; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/event-handler/wrap-error-handler.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/event-handler/wrap-error-handler.d.ts new file mode 100644 index 0000000..1abd902 --- /dev/null +++ 
b/dist/node_modules/@octokit/webhooks/dist-types/event-handler/wrap-error-handler.d.ts @@ -0,0 +1 @@ +export declare function wrapErrorHandler(handler: Function, error: Error): void; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/generated/webhook-names.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/generated/webhook-names.d.ts new file mode 100644 index 0000000..1abfae9 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/generated/webhook-names.d.ts @@ -0,0 +1 @@ +export declare const emitterEventNames: readonly ["branch_protection_rule", "branch_protection_rule.created", "branch_protection_rule.deleted", "branch_protection_rule.edited", "check_run", "check_run.completed", "check_run.created", "check_run.requested_action", "check_run.rerequested", "check_suite", "check_suite.completed", "check_suite.requested", "check_suite.rerequested", "code_scanning_alert", "code_scanning_alert.appeared_in_branch", "code_scanning_alert.closed_by_user", "code_scanning_alert.created", "code_scanning_alert.fixed", "code_scanning_alert.reopened", "code_scanning_alert.reopened_by_user", "commit_comment", "commit_comment.created", "create", "delete", "dependabot_alert", "dependabot_alert.created", "dependabot_alert.dismissed", "dependabot_alert.fixed", "dependabot_alert.reintroduced", "dependabot_alert.reopened", "deploy_key", "deploy_key.created", "deploy_key.deleted", "deployment", "deployment.created", "deployment_status", "deployment_status.created", "discussion", "discussion.answered", "discussion.category_changed", "discussion.created", "discussion.deleted", "discussion.edited", "discussion.labeled", "discussion.locked", "discussion.pinned", "discussion.transferred", "discussion.unanswered", "discussion.unlabeled", "discussion.unlocked", "discussion.unpinned", "discussion_comment", "discussion_comment.created", "discussion_comment.deleted", "discussion_comment.edited", "fork", "github_app_authorization", "github_app_authorization.revoked", "gollum", "installation", "installation.created", "installation.deleted", "installation.new_permissions_accepted", "installation.suspend", "installation.unsuspend", "installation_repositories", "installation_repositories.added", "installation_repositories.removed", "installation_target", "installation_target.renamed", "issue_comment", "issue_comment.created", "issue_comment.deleted", "issue_comment.edited", "issues", "issues.assigned", "issues.closed", "issues.deleted", "issues.demilestoned", "issues.edited", "issues.labeled", "issues.locked", "issues.milestoned", "issues.opened", "issues.pinned", "issues.reopened", "issues.transferred", "issues.unassigned", "issues.unlabeled", "issues.unlocked", "issues.unpinned", "label", "label.created", "label.deleted", "label.edited", "marketplace_purchase", "marketplace_purchase.cancelled", "marketplace_purchase.changed", "marketplace_purchase.pending_change", "marketplace_purchase.pending_change_cancelled", "marketplace_purchase.purchased", "member", "member.added", "member.edited", "member.removed", "membership", "membership.added", "membership.removed", "merge_group", "merge_group.checks_requested", "meta", "meta.deleted", "milestone", "milestone.closed", "milestone.created", "milestone.deleted", "milestone.edited", "milestone.opened", "org_block", "org_block.blocked", "org_block.unblocked", "organization", "organization.deleted", "organization.member_added", "organization.member_invited", "organization.member_removed", "organization.renamed", "package", "package.published", 
"package.updated", "page_build", "ping", "project", "project.closed", "project.created", "project.deleted", "project.edited", "project.reopened", "project_card", "project_card.converted", "project_card.created", "project_card.deleted", "project_card.edited", "project_card.moved", "project_column", "project_column.created", "project_column.deleted", "project_column.edited", "project_column.moved", "projects_v2_item", "projects_v2_item.archived", "projects_v2_item.converted", "projects_v2_item.created", "projects_v2_item.deleted", "projects_v2_item.edited", "projects_v2_item.reordered", "projects_v2_item.restored", "public", "pull_request", "pull_request.assigned", "pull_request.auto_merge_disabled", "pull_request.auto_merge_enabled", "pull_request.closed", "pull_request.converted_to_draft", "pull_request.demilestoned", "pull_request.dequeued", "pull_request.edited", "pull_request.labeled", "pull_request.locked", "pull_request.milestoned", "pull_request.opened", "pull_request.queued", "pull_request.ready_for_review", "pull_request.reopened", "pull_request.review_request_removed", "pull_request.review_requested", "pull_request.synchronize", "pull_request.unassigned", "pull_request.unlabeled", "pull_request.unlocked", "pull_request_review", "pull_request_review.dismissed", "pull_request_review.edited", "pull_request_review.submitted", "pull_request_review_comment", "pull_request_review_comment.created", "pull_request_review_comment.deleted", "pull_request_review_comment.edited", "pull_request_review_thread", "pull_request_review_thread.resolved", "pull_request_review_thread.unresolved", "push", "registry_package", "registry_package.published", "registry_package.updated", "release", "release.created", "release.deleted", "release.edited", "release.prereleased", "release.published", "release.released", "release.unpublished", "repository", "repository.archived", "repository.created", "repository.deleted", "repository.edited", "repository.privatized", "repository.publicized", "repository.renamed", "repository.transferred", "repository.unarchived", "repository_dispatch", "repository_import", "repository_vulnerability_alert", "repository_vulnerability_alert.create", "repository_vulnerability_alert.dismiss", "repository_vulnerability_alert.reopen", "repository_vulnerability_alert.resolve", "secret_scanning_alert", "secret_scanning_alert.created", "secret_scanning_alert.reopened", "secret_scanning_alert.resolved", "security_advisory", "security_advisory.performed", "security_advisory.published", "security_advisory.updated", "security_advisory.withdrawn", "sponsorship", "sponsorship.cancelled", "sponsorship.created", "sponsorship.edited", "sponsorship.pending_cancellation", "sponsorship.pending_tier_change", "sponsorship.tier_changed", "star", "star.created", "star.deleted", "status", "team", "team.added_to_repository", "team.created", "team.deleted", "team.edited", "team.removed_from_repository", "team_add", "watch", "watch.started", "workflow_dispatch", "workflow_job", "workflow_job.completed", "workflow_job.in_progress", "workflow_job.queued", "workflow_run", "workflow_run.completed", "workflow_run.in_progress", "workflow_run.requested"]; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/index.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/index.d.ts new file mode 100644 index 0000000..17e9184 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/index.d.ts @@ -0,0 +1,28 @@ +import { createEventHandler } from "./event-handler/index"; +import type { EmitterWebhookEvent, 
EmitterWebhookEventName, HandlerFunction, RemoveHandlerFunction, Options, WebhookError, WebhookEventHandlerError, EmitterWebhookEventWithStringPayloadAndSignature, EmitterWebhookEventWithSignature } from "./types"; +export { createNodeMiddleware } from "./middleware/node/index"; +export { emitterEventNames } from "./generated/webhook-names"; +export type Iverify = { + /** @deprecated Passing a JSON payload object to `verify()` is deprecated and the functionality will be removed in a future release of `@octokit/webhooks`. */ + (eventPayload: object, signature: string): Promise; + (eventPayload: string, signature: string): Promise; +}; +export type IverifyAndReceive = { + /** @deprecated Passing a JSON payload object to `verifyAndReceive()` is deprecated and the functionality will be removed in a future release of `@octokit/webhooks`. */ + (options: EmitterWebhookEventWithSignature): Promise; + (options: EmitterWebhookEventWithStringPayloadAndSignature): Promise; +}; +declare class Webhooks { + sign: (payload: string | object) => Promise; + verify: Iverify; + on: (event: E | E[], callback: HandlerFunction) => void; + onAny: (callback: (event: EmitterWebhookEvent) => any) => void; + onError: (callback: (event: WebhookEventHandlerError) => any) => void; + removeListener: (event: E | E[], callback: RemoveHandlerFunction) => void; + receive: (event: EmitterWebhookEvent) => Promise; + verifyAndReceive: IverifyAndReceive; + constructor(options: Options & { + secret: string; + }); +} +export { createEventHandler, Webhooks, type EmitterWebhookEvent, type EmitterWebhookEventName, type WebhookError, }; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/get-missing-headers.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/get-missing-headers.d.ts new file mode 100644 index 0000000..b3042ae --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/get-missing-headers.d.ts @@ -0,0 +1,3 @@ +type IncomingMessage = any; +export declare function getMissingHeaders(request: IncomingMessage): string[]; +export {}; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/get-payload.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/get-payload.d.ts new file mode 100644 index 0000000..608beaa --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/get-payload.d.ts @@ -0,0 +1,4 @@ +import type { WebhookEvent } from "@octokit/webhooks-types"; +type IncomingMessage = any; +export declare function getPayload(request: IncomingMessage): Promise; +export {}; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/index.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/index.d.ts new file mode 100644 index 0000000..d11368c --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/index.d.ts @@ -0,0 +1,3 @@ +import type { Webhooks } from "../../index"; +import type { MiddlewareOptions } from "./types"; +export declare function createNodeMiddleware(webhooks: Webhooks, { path, onUnhandledRequest, log, }?: MiddlewareOptions): (request: any, response: any, next?: Function | undefined) => Promise; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/middleware.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/middleware.d.ts new file mode 100644 index 0000000..bd1355c --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/middleware.d.ts @@ -0,0 +1,6 @@ +type 
IncomingMessage = any; +type ServerResponse = any; +import type { Webhooks } from "../../index"; +import type { MiddlewareOptions } from "./types"; +export declare function middleware(webhooks: Webhooks, options: Required, request: IncomingMessage, response: ServerResponse, next?: Function): Promise; +export {}; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/on-unhandled-request-default.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/on-unhandled-request-default.d.ts new file mode 100644 index 0000000..3895815 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/on-unhandled-request-default.d.ts @@ -0,0 +1,4 @@ +type IncomingMessage = any; +type ServerResponse = any; +export declare function onUnhandledRequestDefault(request: IncomingMessage, response: ServerResponse): void; +export {}; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/types.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/types.d.ts new file mode 100644 index 0000000..e6edd53 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/middleware/node/types.d.ts @@ -0,0 +1,10 @@ +type IncomingMessage = any; +type ServerResponse = any; +import type { Logger } from "../../createLogger"; +export type MiddlewareOptions = { + path?: string; + log?: Logger; + /** @deprecated `onUnhandledRequest()` is deprecated and will be removed in a future release of `@octokit/webhooks` */ + onUnhandledRequest?: (request: IncomingMessage, response: ServerResponse) => void; +}; +export {}; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/sign.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/sign.d.ts new file mode 100644 index 0000000..14979a7 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/sign.d.ts @@ -0,0 +1 @@ +export declare function sign(secret: string, payload: string | object): Promise; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/to-normalized-json-string.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/to-normalized-json-string.d.ts new file mode 100644 index 0000000..fd758ee --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/to-normalized-json-string.d.ts @@ -0,0 +1,4 @@ +/** + * GitHub sends its JSON with no indentation and no line break at the end + */ +export declare function toNormalizedJsonString(payload: object): string; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/types.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/types.d.ts new file mode 100644 index 0000000..6701138 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/types.d.ts @@ -0,0 +1,63 @@ +import type { RequestError } from "@octokit/request-error"; +import type { WebhookEventMap, WebhookEventName } from "@octokit/webhooks-types"; +import type { Logger } from "./createLogger"; +import type { emitterEventNames } from "./generated/webhook-names"; +export type EmitterWebhookEventName = (typeof emitterEventNames)[number]; +export type EmitterWebhookEvent = TEmitterEvent extends `${infer TWebhookEvent}.${infer TAction}` ? 
BaseWebhookEvent> & { + payload: { + action: TAction; + }; +} : BaseWebhookEvent>; +export type EmitterWebhookEventWithStringPayloadAndSignature = { + id: string; + name: EmitterWebhookEventName; + payload: string; + signature: string; +}; +/** @deprecated */ +export type EmitterWebhookEventWithSignature = EmitterWebhookEvent & { + signature: string; +}; +interface BaseWebhookEvent { + id: string; + name: TName; + payload: WebhookEventMap[TName]; +} +export interface Options { + secret?: string; + transform?: TransformMethod; + log?: Partial; +} +type TransformMethod = (event: EmitterWebhookEvent) => T | PromiseLike; +export type HandlerFunction = (event: EmitterWebhookEvent & TTransformed) => any; +export type RemoveHandlerFunction = (event: EmitterWebhookEvent> & TTransformed) => any; +type Hooks = { + [key: string]: Function[]; +}; +export interface State extends Options { + eventHandler?: any; + hooks: Hooks; + log: Logger; +} +/** + * Error object with optional properties coming from `octokit.request` errors + */ +export type WebhookError = Error & Partial; +export interface WebhookEventHandlerError extends AggregateError { + event: EmitterWebhookEvent; +} +/** + * Workaround for TypeScript incompatibility with types exported by aggregate-error. + * Credit: https://git.io/JUEEr + * @copyright Sindre Sorhus + * @license MIT (https://git.io/JUEEK) + * @see https://github.com/octokit/webhooks.js/pull/270/files + */ +declare class AggregateError extends Error implements Iterable { + readonly name: "AggregateError"; + constructor(errors: ReadonlyArray); + [Symbol.iterator](): IterableIterator; +} +export {}; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/verify-and-receive.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/verify-and-receive.d.ts new file mode 100644 index 0000000..e91e23d --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/verify-and-receive.d.ts @@ -0,0 +1,4 @@ +import type { EmitterWebhookEventWithStringPayloadAndSignature, EmitterWebhookEventWithSignature, State } from "./types"; +export declare function verifyAndReceive(state: State & { + secret: string; +}, event: EmitterWebhookEventWithStringPayloadAndSignature | EmitterWebhookEventWithSignature): Promise; diff --git a/dist/node_modules/@octokit/webhooks/dist-types/verify.d.ts b/dist/node_modules/@octokit/webhooks/dist-types/verify.d.ts new file mode 100644 index 0000000..bd16e9c --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-types/verify.d.ts @@ -0,0 +1 @@ +export declare function verify(secret: string, payload: string | object, signature: string): Promise; diff --git a/dist/node_modules/@octokit/webhooks/dist-web/index.js b/dist/node_modules/@octokit/webhooks/dist-web/index.js new file mode 100644 index 0000000..a4ad2bb --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-web/index.js @@ -0,0 +1,670 @@ +// pkg/dist-src/createLogger.js +var createLogger = (logger) => ({ + debug: () => { + }, + info: () => { + }, + warn: console.warn.bind(console), + error: console.error.bind(console), + ...logger +}); + +// pkg/dist-src/generated/webhook-names.js +var emitterEventNames = [ + "branch_protection_rule", + "branch_protection_rule.created", + "branch_protection_rule.deleted", + "branch_protection_rule.edited", + "check_run", + "check_run.completed", + "check_run.created", + "check_run.requested_action", + "check_run.rerequested", + "check_suite", + "check_suite.completed", + "check_suite.requested", + "check_suite.rerequested", + "code_scanning_alert", + 
"code_scanning_alert.appeared_in_branch", + "code_scanning_alert.closed_by_user", + "code_scanning_alert.created", + "code_scanning_alert.fixed", + "code_scanning_alert.reopened", + "code_scanning_alert.reopened_by_user", + "commit_comment", + "commit_comment.created", + "create", + "delete", + "dependabot_alert", + "dependabot_alert.created", + "dependabot_alert.dismissed", + "dependabot_alert.fixed", + "dependabot_alert.reintroduced", + "dependabot_alert.reopened", + "deploy_key", + "deploy_key.created", + "deploy_key.deleted", + "deployment", + "deployment.created", + "deployment_status", + "deployment_status.created", + "discussion", + "discussion.answered", + "discussion.category_changed", + "discussion.created", + "discussion.deleted", + "discussion.edited", + "discussion.labeled", + "discussion.locked", + "discussion.pinned", + "discussion.transferred", + "discussion.unanswered", + "discussion.unlabeled", + "discussion.unlocked", + "discussion.unpinned", + "discussion_comment", + "discussion_comment.created", + "discussion_comment.deleted", + "discussion_comment.edited", + "fork", + "github_app_authorization", + "github_app_authorization.revoked", + "gollum", + "installation", + "installation.created", + "installation.deleted", + "installation.new_permissions_accepted", + "installation.suspend", + "installation.unsuspend", + "installation_repositories", + "installation_repositories.added", + "installation_repositories.removed", + "installation_target", + "installation_target.renamed", + "issue_comment", + "issue_comment.created", + "issue_comment.deleted", + "issue_comment.edited", + "issues", + "issues.assigned", + "issues.closed", + "issues.deleted", + "issues.demilestoned", + "issues.edited", + "issues.labeled", + "issues.locked", + "issues.milestoned", + "issues.opened", + "issues.pinned", + "issues.reopened", + "issues.transferred", + "issues.unassigned", + "issues.unlabeled", + "issues.unlocked", + "issues.unpinned", + "label", + "label.created", + "label.deleted", + "label.edited", + "marketplace_purchase", + "marketplace_purchase.cancelled", + "marketplace_purchase.changed", + "marketplace_purchase.pending_change", + "marketplace_purchase.pending_change_cancelled", + "marketplace_purchase.purchased", + "member", + "member.added", + "member.edited", + "member.removed", + "membership", + "membership.added", + "membership.removed", + "merge_group", + "merge_group.checks_requested", + "meta", + "meta.deleted", + "milestone", + "milestone.closed", + "milestone.created", + "milestone.deleted", + "milestone.edited", + "milestone.opened", + "org_block", + "org_block.blocked", + "org_block.unblocked", + "organization", + "organization.deleted", + "organization.member_added", + "organization.member_invited", + "organization.member_removed", + "organization.renamed", + "package", + "package.published", + "package.updated", + "page_build", + "ping", + "project", + "project.closed", + "project.created", + "project.deleted", + "project.edited", + "project.reopened", + "project_card", + "project_card.converted", + "project_card.created", + "project_card.deleted", + "project_card.edited", + "project_card.moved", + "project_column", + "project_column.created", + "project_column.deleted", + "project_column.edited", + "project_column.moved", + "projects_v2_item", + "projects_v2_item.archived", + "projects_v2_item.converted", + "projects_v2_item.created", + "projects_v2_item.deleted", + "projects_v2_item.edited", + "projects_v2_item.reordered", + "projects_v2_item.restored", + "public", + 
"pull_request", + "pull_request.assigned", + "pull_request.auto_merge_disabled", + "pull_request.auto_merge_enabled", + "pull_request.closed", + "pull_request.converted_to_draft", + "pull_request.demilestoned", + "pull_request.dequeued", + "pull_request.edited", + "pull_request.labeled", + "pull_request.locked", + "pull_request.milestoned", + "pull_request.opened", + "pull_request.queued", + "pull_request.ready_for_review", + "pull_request.reopened", + "pull_request.review_request_removed", + "pull_request.review_requested", + "pull_request.synchronize", + "pull_request.unassigned", + "pull_request.unlabeled", + "pull_request.unlocked", + "pull_request_review", + "pull_request_review.dismissed", + "pull_request_review.edited", + "pull_request_review.submitted", + "pull_request_review_comment", + "pull_request_review_comment.created", + "pull_request_review_comment.deleted", + "pull_request_review_comment.edited", + "pull_request_review_thread", + "pull_request_review_thread.resolved", + "pull_request_review_thread.unresolved", + "push", + "registry_package", + "registry_package.published", + "registry_package.updated", + "release", + "release.created", + "release.deleted", + "release.edited", + "release.prereleased", + "release.published", + "release.released", + "release.unpublished", + "repository", + "repository.archived", + "repository.created", + "repository.deleted", + "repository.edited", + "repository.privatized", + "repository.publicized", + "repository.renamed", + "repository.transferred", + "repository.unarchived", + "repository_dispatch", + "repository_import", + "repository_vulnerability_alert", + "repository_vulnerability_alert.create", + "repository_vulnerability_alert.dismiss", + "repository_vulnerability_alert.reopen", + "repository_vulnerability_alert.resolve", + "secret_scanning_alert", + "secret_scanning_alert.created", + "secret_scanning_alert.reopened", + "secret_scanning_alert.resolved", + "security_advisory", + "security_advisory.performed", + "security_advisory.published", + "security_advisory.updated", + "security_advisory.withdrawn", + "sponsorship", + "sponsorship.cancelled", + "sponsorship.created", + "sponsorship.edited", + "sponsorship.pending_cancellation", + "sponsorship.pending_tier_change", + "sponsorship.tier_changed", + "star", + "star.created", + "star.deleted", + "status", + "team", + "team.added_to_repository", + "team.created", + "team.deleted", + "team.edited", + "team.removed_from_repository", + "team_add", + "watch", + "watch.started", + "workflow_dispatch", + "workflow_job", + "workflow_job.completed", + "workflow_job.in_progress", + "workflow_job.queued", + "workflow_run", + "workflow_run.completed", + "workflow_run.in_progress", + "workflow_run.requested" +]; + +// pkg/dist-src/event-handler/on.js +function handleEventHandlers(state, webhookName, handler) { + if (!state.hooks[webhookName]) { + state.hooks[webhookName] = []; + } + state.hooks[webhookName].push(handler); +} +function receiverOn(state, webhookNameOrNames, handler) { + if (Array.isArray(webhookNameOrNames)) { + webhookNameOrNames.forEach( + (webhookName) => receiverOn(state, webhookName, handler) + ); + return; + } + if (["*", "error"].includes(webhookNameOrNames)) { + const webhookName = webhookNameOrNames === "*" ? "any" : webhookNameOrNames; + const message = `Using the "${webhookNameOrNames}" event with the regular Webhooks.on() function is not supported. 
Please use the Webhooks.on${webhookName.charAt(0).toUpperCase() + webhookName.slice(1)}() method instead`; + throw new Error(message); + } + if (!emitterEventNames.includes(webhookNameOrNames)) { + state.log.warn( + `"${webhookNameOrNames}" is not a known webhook name (https://developer.github.com/v3/activity/events/types/)` + ); + } + handleEventHandlers(state, webhookNameOrNames, handler); +} +function receiverOnAny(state, handler) { + handleEventHandlers(state, "*", handler); +} +function receiverOnError(state, handler) { + handleEventHandlers(state, "error", handler); +} + +// pkg/dist-src/event-handler/receive.js +import AggregateError from "aggregate-error"; + +// pkg/dist-src/event-handler/wrap-error-handler.js +function wrapErrorHandler(handler, error) { + let returnValue; + try { + returnValue = handler(error); + } catch (error2) { + console.log('FATAL: Error occurred in "error" event handler'); + console.log(error2); + } + if (returnValue && returnValue.catch) { + returnValue.catch((error2) => { + console.log('FATAL: Error occurred in "error" event handler'); + console.log(error2); + }); + } +} + +// pkg/dist-src/event-handler/receive.js +function getHooks(state, eventPayloadAction, eventName) { + const hooks = [state.hooks[eventName], state.hooks["*"]]; + if (eventPayloadAction) { + hooks.unshift(state.hooks[`${eventName}.${eventPayloadAction}`]); + } + return [].concat(...hooks.filter(Boolean)); +} +function receiverHandle(state, event) { + const errorHandlers = state.hooks.error || []; + if (event instanceof Error) { + const error = Object.assign(new AggregateError([event]), { + event, + errors: [event] + }); + errorHandlers.forEach((handler) => wrapErrorHandler(handler, error)); + return Promise.reject(error); + } + if (!event || !event.name) { + throw new AggregateError(["Event name not passed"]); + } + if (!event.payload) { + throw new AggregateError(["Event payload not passed"]); + } + const hooks = getHooks( + state, + "action" in event.payload ? 
event.payload.action : null, + event.name + ); + if (hooks.length === 0) { + return Promise.resolve(); + } + const errors = []; + const promises = hooks.map((handler) => { + let promise = Promise.resolve(event); + if (state.transform) { + promise = promise.then(state.transform); + } + return promise.then((event2) => { + return handler(event2); + }).catch((error) => errors.push(Object.assign(error, { event }))); + }); + return Promise.all(promises).then(() => { + if (errors.length === 0) { + return; + } + const error = new AggregateError(errors); + Object.assign(error, { + event, + errors + }); + errorHandlers.forEach((handler) => wrapErrorHandler(handler, error)); + throw error; + }); +} + +// pkg/dist-src/event-handler/remove-listener.js +function removeListener(state, webhookNameOrNames, handler) { + if (Array.isArray(webhookNameOrNames)) { + webhookNameOrNames.forEach( + (webhookName) => removeListener(state, webhookName, handler) + ); + return; + } + if (!state.hooks[webhookNameOrNames]) { + return; + } + for (let i = state.hooks[webhookNameOrNames].length - 1; i >= 0; i--) { + if (state.hooks[webhookNameOrNames][i] === handler) { + state.hooks[webhookNameOrNames].splice(i, 1); + return; + } + } +} + +// pkg/dist-src/event-handler/index.js +function createEventHandler(options) { + const state = { + hooks: {}, + log: createLogger(options && options.log) + }; + if (options && options.transform) { + state.transform = options.transform; + } + return { + on: receiverOn.bind(null, state), + onAny: receiverOnAny.bind(null, state), + onError: receiverOnError.bind(null, state), + removeListener: removeListener.bind(null, state), + receive: receiverHandle.bind(null, state) + }; +} + +// pkg/dist-src/sign.js +import { sign as signMethod } from "@octokit/webhooks-methods"; + +// pkg/dist-src/to-normalized-json-string.js +function toNormalizedJsonString(payload) { + const payloadString = JSON.stringify(payload); + return payloadString.replace(/[^\\]\\u[\da-f]{4}/g, (s) => { + return s.substr(0, 3) + s.substr(3).toUpperCase(); + }); +} + +// pkg/dist-src/sign.js +async function sign(secret, payload) { + return signMethod( + secret, + typeof payload === "string" ? payload : toNormalizedJsonString(payload) + ); +} + +// pkg/dist-src/verify.js +import { verify as verifyMethod } from "@octokit/webhooks-methods"; +async function verify(secret, payload, signature) { + return verifyMethod( + secret, + typeof payload === "string" ? payload : toNormalizedJsonString(payload), + signature + ); +} + +// pkg/dist-src/verify-and-receive.js +import { verify as verify2 } from "@octokit/webhooks-methods"; +async function verifyAndReceive(state, event) { + const matchesSignature = await verify2( + state.secret, + typeof event.payload === "object" ? toNormalizedJsonString(event.payload) : event.payload, + event.signature + ).catch(() => false); + if (!matchesSignature) { + const error = new Error( + "[@octokit/webhooks] signature does not match event payload and secret" + ); + return state.eventHandler.receive( + Object.assign(error, { event, status: 400 }) + ); + } + return state.eventHandler.receive({ + id: event.id, + name: event.name, + payload: typeof event.payload === "string" ? 
JSON.parse(event.payload) : event.payload + }); +} + +// pkg/dist-src/middleware/node/get-missing-headers.js +var WEBHOOK_HEADERS = [ + "x-github-event", + "x-hub-signature-256", + "x-github-delivery" +]; +function getMissingHeaders(request) { + return WEBHOOK_HEADERS.filter((header) => !(header in request.headers)); +} + +// pkg/dist-src/middleware/node/get-payload.js +import AggregateError2 from "aggregate-error"; +function getPayload(request) { + if (request.body) { + if (typeof request.body !== "string") { + console.warn( + "[@octokit/webhooks] Passing the payload as a JSON object in `request.body` is deprecated and will be removed in a future release of `@octokit/webhooks`, please pass it as a a `string` instead." + ); + } + return Promise.resolve(request.body); + } + return new Promise((resolve, reject) => { + let data = ""; + request.setEncoding("utf8"); + request.on("error", (error) => reject(new AggregateError2([error]))); + request.on("data", (chunk) => data += chunk); + request.on("end", () => { + try { + JSON.parse(data); + resolve(data); + } catch (error) { + error.message = "Invalid JSON"; + error.status = 400; + reject(new AggregateError2([error])); + } + }); + }); +} + +// pkg/dist-src/middleware/node/middleware.js +async function middleware(webhooks, options, request, response, next) { + let pathname; + try { + pathname = new URL(request.url, "http://localhost").pathname; + } catch (error) { + response.writeHead(422, { + "content-type": "application/json" + }); + response.end( + JSON.stringify({ + error: `Request URL could not be parsed: ${request.url}` + }) + ); + return; + } + const isUnknownRoute = request.method !== "POST" || pathname !== options.path; + const isExpressMiddleware = typeof next === "function"; + if (isUnknownRoute) { + if (isExpressMiddleware) { + return next(); + } else { + return options.onUnhandledRequest(request, response); + } + } + if (!request.headers["content-type"] || !request.headers["content-type"].startsWith("application/json")) { + response.writeHead(415, { + "content-type": "application/json", + accept: "application/json" + }); + response.end( + JSON.stringify({ + error: `Unsupported "Content-Type" header value. Must be "application/json"` + }) + ); + return; + } + const missingHeaders = getMissingHeaders(request).join(", "); + if (missingHeaders) { + response.writeHead(400, { + "content-type": "application/json" + }); + response.end( + JSON.stringify({ + error: `Required headers missing: ${missingHeaders}` + }) + ); + return; + } + const eventName = request.headers["x-github-event"]; + const signatureSHA256 = request.headers["x-hub-signature-256"]; + const id = request.headers["x-github-delivery"]; + options.log.debug(`${eventName} event received (id: ${id})`); + let didTimeout = false; + const timeout = setTimeout(() => { + didTimeout = true; + response.statusCode = 202; + response.end("still processing\n"); + }, 9e3).unref(); + try { + const payload = await getPayload(request); + await webhooks.verifyAndReceive({ + id, + name: eventName, + payload, + signature: signatureSHA256 + }); + clearTimeout(timeout); + if (didTimeout) + return; + response.end("ok\n"); + } catch (error) { + clearTimeout(timeout); + if (didTimeout) + return; + const err = Array.from(error)[0]; + const errorMessage = err.message ? `${err.name}: ${err.message}` : "Error: An Unspecified error occurred"; + response.statusCode = typeof err.status !== "undefined" ? 
err.status : 500; + options.log.error(error); + response.end( + JSON.stringify({ + error: errorMessage + }) + ); + } +} + +// pkg/dist-src/middleware/node/on-unhandled-request-default.js +function onUnhandledRequestDefault(request, response) { + response.writeHead(404, { + "content-type": "application/json" + }); + response.end( + JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}` + }) + ); +} + +// pkg/dist-src/middleware/node/index.js +function createNodeMiddleware(webhooks, { + path = "/api/github/webhooks", + onUnhandledRequest = onUnhandledRequestDefault, + log = createLogger() +} = {}) { + const deprecateOnUnhandledRequest = (request, response) => { + console.warn( + "[@octokit/webhooks] `onUnhandledRequest()` is deprecated and will be removed in a future release of `@octokit/webhooks`" + ); + return onUnhandledRequest(request, response); + }; + return middleware.bind(null, webhooks, { + path, + onUnhandledRequest: deprecateOnUnhandledRequest, + log + }); +} + +// pkg/dist-src/index.js +var Webhooks = class { + constructor(options) { + if (!options || !options.secret) { + throw new Error("[@octokit/webhooks] options.secret required"); + } + const state = { + eventHandler: createEventHandler(options), + secret: options.secret, + hooks: {}, + log: createLogger(options.log) + }; + this.sign = sign.bind(null, options.secret); + this.verify = (eventPayload, signature) => { + if (typeof eventPayload === "object") { + console.warn( + "[@octokit/webhooks] Passing a JSON payload object to `verify()` is deprecated and the functionality will be removed in a future release of `@octokit/webhooks`" + ); + } + return verify(options.secret, eventPayload, signature); + }; + this.on = state.eventHandler.on; + this.onAny = state.eventHandler.onAny; + this.onError = state.eventHandler.onError; + this.removeListener = state.eventHandler.removeListener; + this.receive = state.eventHandler.receive; + this.verifyAndReceive = (options2) => { + if (typeof options2.payload === "object") { + console.warn( + "[@octokit/webhooks] Passing a JSON payload object to `verifyAndReceive()` is deprecated and the functionality will be removed in a future release of `@octokit/webhooks`" + ); + } + return verifyAndReceive(state, options2); + }; + } +}; +export { + Webhooks, + createEventHandler, + createNodeMiddleware, + emitterEventNames +}; diff --git a/dist/node_modules/@octokit/webhooks/dist-web/index.js.map b/dist/node_modules/@octokit/webhooks/dist-web/index.js.map new file mode 100644 index 0000000..116ab01 --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/createLogger.js", "../dist-src/generated/webhook-names.js", "../dist-src/event-handler/on.js", "../dist-src/event-handler/receive.js", "../dist-src/event-handler/wrap-error-handler.js", "../dist-src/event-handler/remove-listener.js", "../dist-src/event-handler/index.js", "../dist-src/sign.js", "../dist-src/to-normalized-json-string.js", "../dist-src/verify.js", "../dist-src/verify-and-receive.js", "../dist-src/middleware/node/get-missing-headers.js", "../dist-src/middleware/node/get-payload.js", "../dist-src/middleware/node/middleware.js", "../dist-src/middleware/node/on-unhandled-request-default.js", "../dist-src/middleware/node/index.js", "../dist-src/index.js"], + "sourcesContent": ["const createLogger = (logger) => ({\n debug: () => {\n },\n info: () => {\n },\n warn: console.warn.bind(console),\n error: console.error.bind(console),\n 
...logger\n});\nexport {\n createLogger\n};\n", "const emitterEventNames = [\n \"branch_protection_rule\",\n \"branch_protection_rule.created\",\n \"branch_protection_rule.deleted\",\n \"branch_protection_rule.edited\",\n \"check_run\",\n \"check_run.completed\",\n \"check_run.created\",\n \"check_run.requested_action\",\n \"check_run.rerequested\",\n \"check_suite\",\n \"check_suite.completed\",\n \"check_suite.requested\",\n \"check_suite.rerequested\",\n \"code_scanning_alert\",\n \"code_scanning_alert.appeared_in_branch\",\n \"code_scanning_alert.closed_by_user\",\n \"code_scanning_alert.created\",\n \"code_scanning_alert.fixed\",\n \"code_scanning_alert.reopened\",\n \"code_scanning_alert.reopened_by_user\",\n \"commit_comment\",\n \"commit_comment.created\",\n \"create\",\n \"delete\",\n \"dependabot_alert\",\n \"dependabot_alert.created\",\n \"dependabot_alert.dismissed\",\n \"dependabot_alert.fixed\",\n \"dependabot_alert.reintroduced\",\n \"dependabot_alert.reopened\",\n \"deploy_key\",\n \"deploy_key.created\",\n \"deploy_key.deleted\",\n \"deployment\",\n \"deployment.created\",\n \"deployment_status\",\n \"deployment_status.created\",\n \"discussion\",\n \"discussion.answered\",\n \"discussion.category_changed\",\n \"discussion.created\",\n \"discussion.deleted\",\n \"discussion.edited\",\n \"discussion.labeled\",\n \"discussion.locked\",\n \"discussion.pinned\",\n \"discussion.transferred\",\n \"discussion.unanswered\",\n \"discussion.unlabeled\",\n \"discussion.unlocked\",\n \"discussion.unpinned\",\n \"discussion_comment\",\n \"discussion_comment.created\",\n \"discussion_comment.deleted\",\n \"discussion_comment.edited\",\n \"fork\",\n \"github_app_authorization\",\n \"github_app_authorization.revoked\",\n \"gollum\",\n \"installation\",\n \"installation.created\",\n \"installation.deleted\",\n \"installation.new_permissions_accepted\",\n \"installation.suspend\",\n \"installation.unsuspend\",\n \"installation_repositories\",\n \"installation_repositories.added\",\n \"installation_repositories.removed\",\n \"installation_target\",\n \"installation_target.renamed\",\n \"issue_comment\",\n \"issue_comment.created\",\n \"issue_comment.deleted\",\n \"issue_comment.edited\",\n \"issues\",\n \"issues.assigned\",\n \"issues.closed\",\n \"issues.deleted\",\n \"issues.demilestoned\",\n \"issues.edited\",\n \"issues.labeled\",\n \"issues.locked\",\n \"issues.milestoned\",\n \"issues.opened\",\n \"issues.pinned\",\n \"issues.reopened\",\n \"issues.transferred\",\n \"issues.unassigned\",\n \"issues.unlabeled\",\n \"issues.unlocked\",\n \"issues.unpinned\",\n \"label\",\n \"label.created\",\n \"label.deleted\",\n \"label.edited\",\n \"marketplace_purchase\",\n \"marketplace_purchase.cancelled\",\n \"marketplace_purchase.changed\",\n \"marketplace_purchase.pending_change\",\n \"marketplace_purchase.pending_change_cancelled\",\n \"marketplace_purchase.purchased\",\n \"member\",\n \"member.added\",\n \"member.edited\",\n \"member.removed\",\n \"membership\",\n \"membership.added\",\n \"membership.removed\",\n \"merge_group\",\n \"merge_group.checks_requested\",\n \"meta\",\n \"meta.deleted\",\n \"milestone\",\n \"milestone.closed\",\n \"milestone.created\",\n \"milestone.deleted\",\n \"milestone.edited\",\n \"milestone.opened\",\n \"org_block\",\n \"org_block.blocked\",\n \"org_block.unblocked\",\n \"organization\",\n \"organization.deleted\",\n \"organization.member_added\",\n \"organization.member_invited\",\n \"organization.member_removed\",\n \"organization.renamed\",\n \"package\",\n 
\"package.published\",\n \"package.updated\",\n \"page_build\",\n \"ping\",\n \"project\",\n \"project.closed\",\n \"project.created\",\n \"project.deleted\",\n \"project.edited\",\n \"project.reopened\",\n \"project_card\",\n \"project_card.converted\",\n \"project_card.created\",\n \"project_card.deleted\",\n \"project_card.edited\",\n \"project_card.moved\",\n \"project_column\",\n \"project_column.created\",\n \"project_column.deleted\",\n \"project_column.edited\",\n \"project_column.moved\",\n \"projects_v2_item\",\n \"projects_v2_item.archived\",\n \"projects_v2_item.converted\",\n \"projects_v2_item.created\",\n \"projects_v2_item.deleted\",\n \"projects_v2_item.edited\",\n \"projects_v2_item.reordered\",\n \"projects_v2_item.restored\",\n \"public\",\n \"pull_request\",\n \"pull_request.assigned\",\n \"pull_request.auto_merge_disabled\",\n \"pull_request.auto_merge_enabled\",\n \"pull_request.closed\",\n \"pull_request.converted_to_draft\",\n \"pull_request.demilestoned\",\n \"pull_request.dequeued\",\n \"pull_request.edited\",\n \"pull_request.labeled\",\n \"pull_request.locked\",\n \"pull_request.milestoned\",\n \"pull_request.opened\",\n \"pull_request.queued\",\n \"pull_request.ready_for_review\",\n \"pull_request.reopened\",\n \"pull_request.review_request_removed\",\n \"pull_request.review_requested\",\n \"pull_request.synchronize\",\n \"pull_request.unassigned\",\n \"pull_request.unlabeled\",\n \"pull_request.unlocked\",\n \"pull_request_review\",\n \"pull_request_review.dismissed\",\n \"pull_request_review.edited\",\n \"pull_request_review.submitted\",\n \"pull_request_review_comment\",\n \"pull_request_review_comment.created\",\n \"pull_request_review_comment.deleted\",\n \"pull_request_review_comment.edited\",\n \"pull_request_review_thread\",\n \"pull_request_review_thread.resolved\",\n \"pull_request_review_thread.unresolved\",\n \"push\",\n \"registry_package\",\n \"registry_package.published\",\n \"registry_package.updated\",\n \"release\",\n \"release.created\",\n \"release.deleted\",\n \"release.edited\",\n \"release.prereleased\",\n \"release.published\",\n \"release.released\",\n \"release.unpublished\",\n \"repository\",\n \"repository.archived\",\n \"repository.created\",\n \"repository.deleted\",\n \"repository.edited\",\n \"repository.privatized\",\n \"repository.publicized\",\n \"repository.renamed\",\n \"repository.transferred\",\n \"repository.unarchived\",\n \"repository_dispatch\",\n \"repository_import\",\n \"repository_vulnerability_alert\",\n \"repository_vulnerability_alert.create\",\n \"repository_vulnerability_alert.dismiss\",\n \"repository_vulnerability_alert.reopen\",\n \"repository_vulnerability_alert.resolve\",\n \"secret_scanning_alert\",\n \"secret_scanning_alert.created\",\n \"secret_scanning_alert.reopened\",\n \"secret_scanning_alert.resolved\",\n \"security_advisory\",\n \"security_advisory.performed\",\n \"security_advisory.published\",\n \"security_advisory.updated\",\n \"security_advisory.withdrawn\",\n \"sponsorship\",\n \"sponsorship.cancelled\",\n \"sponsorship.created\",\n \"sponsorship.edited\",\n \"sponsorship.pending_cancellation\",\n \"sponsorship.pending_tier_change\",\n \"sponsorship.tier_changed\",\n \"star\",\n \"star.created\",\n \"star.deleted\",\n \"status\",\n \"team\",\n \"team.added_to_repository\",\n \"team.created\",\n \"team.deleted\",\n \"team.edited\",\n \"team.removed_from_repository\",\n \"team_add\",\n \"watch\",\n \"watch.started\",\n \"workflow_dispatch\",\n \"workflow_job\",\n \"workflow_job.completed\",\n 
\"workflow_job.in_progress\",\n \"workflow_job.queued\",\n \"workflow_run\",\n \"workflow_run.completed\",\n \"workflow_run.in_progress\",\n \"workflow_run.requested\"\n];\nexport {\n emitterEventNames\n};\n", "import { emitterEventNames } from \"../generated/webhook-names\";\nfunction handleEventHandlers(state, webhookName, handler) {\n if (!state.hooks[webhookName]) {\n state.hooks[webhookName] = [];\n }\n state.hooks[webhookName].push(handler);\n}\nfunction receiverOn(state, webhookNameOrNames, handler) {\n if (Array.isArray(webhookNameOrNames)) {\n webhookNameOrNames.forEach(\n (webhookName) => receiverOn(state, webhookName, handler)\n );\n return;\n }\n if ([\"*\", \"error\"].includes(webhookNameOrNames)) {\n const webhookName = webhookNameOrNames === \"*\" ? \"any\" : webhookNameOrNames;\n const message = `Using the \"${webhookNameOrNames}\" event with the regular Webhooks.on() function is not supported. Please use the Webhooks.on${webhookName.charAt(0).toUpperCase() + webhookName.slice(1)}() method instead`;\n throw new Error(message);\n }\n if (!emitterEventNames.includes(webhookNameOrNames)) {\n state.log.warn(\n `\"${webhookNameOrNames}\" is not a known webhook name (https://developer.github.com/v3/activity/events/types/)`\n );\n }\n handleEventHandlers(state, webhookNameOrNames, handler);\n}\nfunction receiverOnAny(state, handler) {\n handleEventHandlers(state, \"*\", handler);\n}\nfunction receiverOnError(state, handler) {\n handleEventHandlers(state, \"error\", handler);\n}\nexport {\n receiverOn,\n receiverOnAny,\n receiverOnError\n};\n", "import AggregateError from \"aggregate-error\";\nimport { wrapErrorHandler } from \"./wrap-error-handler\";\nfunction getHooks(state, eventPayloadAction, eventName) {\n const hooks = [state.hooks[eventName], state.hooks[\"*\"]];\n if (eventPayloadAction) {\n hooks.unshift(state.hooks[`${eventName}.${eventPayloadAction}`]);\n }\n return [].concat(...hooks.filter(Boolean));\n}\nfunction receiverHandle(state, event) {\n const errorHandlers = state.hooks.error || [];\n if (event instanceof Error) {\n const error = Object.assign(new AggregateError([event]), {\n event,\n errors: [event]\n });\n errorHandlers.forEach((handler) => wrapErrorHandler(handler, error));\n return Promise.reject(error);\n }\n if (!event || !event.name) {\n throw new AggregateError([\"Event name not passed\"]);\n }\n if (!event.payload) {\n throw new AggregateError([\"Event payload not passed\"]);\n }\n const hooks = getHooks(\n state,\n \"action\" in event.payload ? 
event.payload.action : null,\n event.name\n );\n if (hooks.length === 0) {\n return Promise.resolve();\n }\n const errors = [];\n const promises = hooks.map((handler) => {\n let promise = Promise.resolve(event);\n if (state.transform) {\n promise = promise.then(state.transform);\n }\n return promise.then((event2) => {\n return handler(event2);\n }).catch((error) => errors.push(Object.assign(error, { event })));\n });\n return Promise.all(promises).then(() => {\n if (errors.length === 0) {\n return;\n }\n const error = new AggregateError(errors);\n Object.assign(error, {\n event,\n errors\n });\n errorHandlers.forEach((handler) => wrapErrorHandler(handler, error));\n throw error;\n });\n}\nexport {\n receiverHandle\n};\n", "function wrapErrorHandler(handler, error) {\n let returnValue;\n try {\n returnValue = handler(error);\n } catch (error2) {\n console.log('FATAL: Error occurred in \"error\" event handler');\n console.log(error2);\n }\n if (returnValue && returnValue.catch) {\n returnValue.catch((error2) => {\n console.log('FATAL: Error occurred in \"error\" event handler');\n console.log(error2);\n });\n }\n}\nexport {\n wrapErrorHandler\n};\n", "function removeListener(state, webhookNameOrNames, handler) {\n if (Array.isArray(webhookNameOrNames)) {\n webhookNameOrNames.forEach(\n (webhookName) => removeListener(state, webhookName, handler)\n );\n return;\n }\n if (!state.hooks[webhookNameOrNames]) {\n return;\n }\n for (let i = state.hooks[webhookNameOrNames].length - 1; i >= 0; i--) {\n if (state.hooks[webhookNameOrNames][i] === handler) {\n state.hooks[webhookNameOrNames].splice(i, 1);\n return;\n }\n }\n}\nexport {\n removeListener\n};\n", "import { createLogger } from \"../createLogger\";\nimport {\n receiverOn as on,\n receiverOnAny as onAny,\n receiverOnError as onError\n} from \"./on\";\nimport { receiverHandle as receive } from \"./receive\";\nimport { removeListener } from \"./remove-listener\";\nfunction createEventHandler(options) {\n const state = {\n hooks: {},\n log: createLogger(options && options.log)\n };\n if (options && options.transform) {\n state.transform = options.transform;\n }\n return {\n on: on.bind(null, state),\n onAny: onAny.bind(null, state),\n onError: onError.bind(null, state),\n removeListener: removeListener.bind(null, state),\n receive: receive.bind(null, state)\n };\n}\nexport {\n createEventHandler\n};\n", "import { sign as signMethod } from \"@octokit/webhooks-methods\";\nimport { toNormalizedJsonString } from \"./to-normalized-json-string\";\nasync function sign(secret, payload) {\n return signMethod(\n secret,\n typeof payload === \"string\" ? payload : toNormalizedJsonString(payload)\n );\n}\nexport {\n sign\n};\n", "function toNormalizedJsonString(payload) {\n const payloadString = JSON.stringify(payload);\n return payloadString.replace(/[^\\\\]\\\\u[\\da-f]{4}/g, (s) => {\n return s.substr(0, 3) + s.substr(3).toUpperCase();\n });\n}\nexport {\n toNormalizedJsonString\n};\n", "import { verify as verifyMethod } from \"@octokit/webhooks-methods\";\nimport { toNormalizedJsonString } from \"./to-normalized-json-string\";\nasync function verify(secret, payload, signature) {\n return verifyMethod(\n secret,\n typeof payload === \"string\" ? 
payload : toNormalizedJsonString(payload),\n signature\n );\n}\nexport {\n verify\n};\n", "import { verify } from \"@octokit/webhooks-methods\";\nimport { toNormalizedJsonString } from \"./to-normalized-json-string\";\nasync function verifyAndReceive(state, event) {\n const matchesSignature = await verify(\n state.secret,\n typeof event.payload === \"object\" ? toNormalizedJsonString(event.payload) : event.payload,\n event.signature\n ).catch(() => false);\n if (!matchesSignature) {\n const error = new Error(\n \"[@octokit/webhooks] signature does not match event payload and secret\"\n );\n return state.eventHandler.receive(\n Object.assign(error, { event, status: 400 })\n );\n }\n return state.eventHandler.receive({\n id: event.id,\n name: event.name,\n payload: typeof event.payload === \"string\" ? JSON.parse(event.payload) : event.payload\n });\n}\nexport {\n verifyAndReceive\n};\n", "const WEBHOOK_HEADERS = [\n \"x-github-event\",\n \"x-hub-signature-256\",\n \"x-github-delivery\"\n];\nfunction getMissingHeaders(request) {\n return WEBHOOK_HEADERS.filter((header) => !(header in request.headers));\n}\nexport {\n getMissingHeaders\n};\n", "import AggregateError from \"aggregate-error\";\nfunction getPayload(request) {\n if (request.body) {\n if (typeof request.body !== \"string\") {\n console.warn(\n \"[@octokit/webhooks] Passing the payload as a JSON object in `request.body` is deprecated and will be removed in a future release of `@octokit/webhooks`, please pass it as a a `string` instead.\"\n );\n }\n return Promise.resolve(request.body);\n }\n return new Promise((resolve, reject) => {\n let data = \"\";\n request.setEncoding(\"utf8\");\n request.on(\"error\", (error) => reject(new AggregateError([error])));\n request.on(\"data\", (chunk) => data += chunk);\n request.on(\"end\", () => {\n try {\n JSON.parse(data);\n resolve(data);\n } catch (error) {\n error.message = \"Invalid JSON\";\n error.status = 400;\n reject(new AggregateError([error]));\n }\n });\n });\n}\nexport {\n getPayload\n};\n", "import { getMissingHeaders } from \"./get-missing-headers\";\nimport { getPayload } from \"./get-payload\";\nasync function middleware(webhooks, options, request, response, next) {\n let pathname;\n try {\n pathname = new URL(request.url, \"http://localhost\").pathname;\n } catch (error) {\n response.writeHead(422, {\n \"content-type\": \"application/json\"\n });\n response.end(\n JSON.stringify({\n error: `Request URL could not be parsed: ${request.url}`\n })\n );\n return;\n }\n const isUnknownRoute = request.method !== \"POST\" || pathname !== options.path;\n const isExpressMiddleware = typeof next === \"function\";\n if (isUnknownRoute) {\n if (isExpressMiddleware) {\n return next();\n } else {\n return options.onUnhandledRequest(request, response);\n }\n }\n if (!request.headers[\"content-type\"] || !request.headers[\"content-type\"].startsWith(\"application/json\")) {\n response.writeHead(415, {\n \"content-type\": \"application/json\",\n accept: \"application/json\"\n });\n response.end(\n JSON.stringify({\n error: `Unsupported \"Content-Type\" header value. 
Must be \"application/json\"`\n })\n );\n return;\n }\n const missingHeaders = getMissingHeaders(request).join(\", \");\n if (missingHeaders) {\n response.writeHead(400, {\n \"content-type\": \"application/json\"\n });\n response.end(\n JSON.stringify({\n error: `Required headers missing: ${missingHeaders}`\n })\n );\n return;\n }\n const eventName = request.headers[\"x-github-event\"];\n const signatureSHA256 = request.headers[\"x-hub-signature-256\"];\n const id = request.headers[\"x-github-delivery\"];\n options.log.debug(`${eventName} event received (id: ${id})`);\n let didTimeout = false;\n const timeout = setTimeout(() => {\n didTimeout = true;\n response.statusCode = 202;\n response.end(\"still processing\\n\");\n }, 9e3).unref();\n try {\n const payload = await getPayload(request);\n await webhooks.verifyAndReceive({\n id,\n name: eventName,\n payload,\n signature: signatureSHA256\n });\n clearTimeout(timeout);\n if (didTimeout)\n return;\n response.end(\"ok\\n\");\n } catch (error) {\n clearTimeout(timeout);\n if (didTimeout)\n return;\n const err = Array.from(error)[0];\n const errorMessage = err.message ? `${err.name}: ${err.message}` : \"Error: An Unspecified error occurred\";\n response.statusCode = typeof err.status !== \"undefined\" ? err.status : 500;\n options.log.error(error);\n response.end(\n JSON.stringify({\n error: errorMessage\n })\n );\n }\n}\nexport {\n middleware\n};\n", "function onUnhandledRequestDefault(request, response) {\n response.writeHead(404, {\n \"content-type\": \"application/json\"\n });\n response.end(\n JSON.stringify({\n error: `Unknown route: ${request.method} ${request.url}`\n })\n );\n}\nexport {\n onUnhandledRequestDefault\n};\n", "import { createLogger } from \"../../createLogger\";\nimport { middleware } from \"./middleware\";\nimport { onUnhandledRequestDefault } from \"./on-unhandled-request-default\";\nfunction createNodeMiddleware(webhooks, {\n path = \"/api/github/webhooks\",\n onUnhandledRequest = onUnhandledRequestDefault,\n log = createLogger()\n} = {}) {\n const deprecateOnUnhandledRequest = (request, response) => {\n console.warn(\n \"[@octokit/webhooks] `onUnhandledRequest()` is deprecated and will be removed in a future release of `@octokit/webhooks`\"\n );\n return onUnhandledRequest(request, response);\n };\n return middleware.bind(null, webhooks, {\n path,\n onUnhandledRequest: deprecateOnUnhandledRequest,\n log\n });\n}\nexport {\n createNodeMiddleware\n};\n", "import { createLogger } from \"./createLogger\";\nimport { createEventHandler } from \"./event-handler/index\";\nimport { sign } from \"./sign\";\nimport { verify } from \"./verify\";\nimport { verifyAndReceive } from \"./verify-and-receive\";\nimport { createNodeMiddleware } from \"./middleware/node/index\";\nimport { emitterEventNames } from \"./generated/webhook-names\";\nclass Webhooks {\n constructor(options) {\n if (!options || !options.secret) {\n throw new Error(\"[@octokit/webhooks] options.secret required\");\n }\n const state = {\n eventHandler: createEventHandler(options),\n secret: options.secret,\n hooks: {},\n log: createLogger(options.log)\n };\n this.sign = sign.bind(null, options.secret);\n this.verify = (eventPayload, signature) => {\n if (typeof eventPayload === \"object\") {\n console.warn(\n \"[@octokit/webhooks] Passing a JSON payload object to `verify()` is deprecated and the functionality will be removed in a future release of `@octokit/webhooks`\"\n );\n }\n return verify(options.secret, eventPayload, signature);\n };\n this.on = 
state.eventHandler.on;\n this.onAny = state.eventHandler.onAny;\n this.onError = state.eventHandler.onError;\n this.removeListener = state.eventHandler.removeListener;\n this.receive = state.eventHandler.receive;\n this.verifyAndReceive = (options2) => {\n if (typeof options2.payload === \"object\") {\n console.warn(\n \"[@octokit/webhooks] Passing a JSON payload object to `verifyAndReceive()` is deprecated and the functionality will be removed in a future release of `@octokit/webhooks`\"\n );\n }\n return verifyAndReceive(state, options2);\n };\n }\n}\nexport {\n Webhooks,\n createEventHandler,\n createNodeMiddleware,\n emitterEventNames\n};\n"], + "mappings": ";AAAA,IAAM,eAAe,CAAC,YAAY;AAAA,EAChC,OAAO,MAAM;AAAA,EACb;AAAA,EACA,MAAM,MAAM;AAAA,EACZ;AAAA,EACA,MAAM,QAAQ,KAAK,KAAK,OAAO;AAAA,EAC/B,OAAO,QAAQ,MAAM,KAAK,OAAO;AAAA,EACjC,GAAG;AACL;;;ACRA,IAAM,oBAAoB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;;;AClQA,SAAS,oBAAoB,OAAO,aAAa,SAAS;AACxD,MAAI,CAAC,MAAM,MAAM,WAAW,GAAG;AAC7B,UAAM,MAAM,WAAW,IAAI,CAA
C;AAAA,EAC9B;AACA,QAAM,MAAM,WAAW,EAAE,KAAK,OAAO;AACvC;AACA,SAAS,WAAW,OAAO,oBAAoB,SAAS;AACtD,MAAI,MAAM,QAAQ,kBAAkB,GAAG;AACrC,uBAAmB;AAAA,MACjB,CAAC,gBAAgB,WAAW,OAAO,aAAa,OAAO;AAAA,IACzD;AACA;AAAA,EACF;AACA,MAAI,CAAC,KAAK,OAAO,EAAE,SAAS,kBAAkB,GAAG;AAC/C,UAAM,cAAc,uBAAuB,MAAM,QAAQ;AACzD,UAAM,UAAU,cAAc,iHAAiH,YAAY,OAAO,CAAC,EAAE,YAAY,IAAI,YAAY,MAAM,CAAC;AACxM,UAAM,IAAI,MAAM,OAAO;AAAA,EACzB;AACA,MAAI,CAAC,kBAAkB,SAAS,kBAAkB,GAAG;AACnD,UAAM,IAAI;AAAA,MACR,IAAI;AAAA,IACN;AAAA,EACF;AACA,sBAAoB,OAAO,oBAAoB,OAAO;AACxD;AACA,SAAS,cAAc,OAAO,SAAS;AACrC,sBAAoB,OAAO,KAAK,OAAO;AACzC;AACA,SAAS,gBAAgB,OAAO,SAAS;AACvC,sBAAoB,OAAO,SAAS,OAAO;AAC7C;;;AC/BA,OAAO,oBAAoB;;;ACA3B,SAAS,iBAAiB,SAAS,OAAO;AACxC,MAAI;AACJ,MAAI;AACF,kBAAc,QAAQ,KAAK;AAAA,EAC7B,SAAS,QAAP;AACA,YAAQ,IAAI,gDAAgD;AAC5D,YAAQ,IAAI,MAAM;AAAA,EACpB;AACA,MAAI,eAAe,YAAY,OAAO;AACpC,gBAAY,MAAM,CAAC,WAAW;AAC5B,cAAQ,IAAI,gDAAgD;AAC5D,cAAQ,IAAI,MAAM;AAAA,IACpB,CAAC;AAAA,EACH;AACF;;;ADZA,SAAS,SAAS,OAAO,oBAAoB,WAAW;AACtD,QAAM,QAAQ,CAAC,MAAM,MAAM,SAAS,GAAG,MAAM,MAAM,GAAG,CAAC;AACvD,MAAI,oBAAoB;AACtB,UAAM,QAAQ,MAAM,MAAM,GAAG,aAAa,oBAAoB,CAAC;AAAA,EACjE;AACA,SAAO,CAAC,EAAE,OAAO,GAAG,MAAM,OAAO,OAAO,CAAC;AAC3C;AACA,SAAS,eAAe,OAAO,OAAO;AACpC,QAAM,gBAAgB,MAAM,MAAM,SAAS,CAAC;AAC5C,MAAI,iBAAiB,OAAO;AAC1B,UAAM,QAAQ,OAAO,OAAO,IAAI,eAAe,CAAC,KAAK,CAAC,GAAG;AAAA,MACvD;AAAA,MACA,QAAQ,CAAC,KAAK;AAAA,IAChB,CAAC;AACD,kBAAc,QAAQ,CAAC,YAAY,iBAAiB,SAAS,KAAK,CAAC;AACnE,WAAO,QAAQ,OAAO,KAAK;AAAA,EAC7B;AACA,MAAI,CAAC,SAAS,CAAC,MAAM,MAAM;AACzB,UAAM,IAAI,eAAe,CAAC,uBAAuB,CAAC;AAAA,EACpD;AACA,MAAI,CAAC,MAAM,SAAS;AAClB,UAAM,IAAI,eAAe,CAAC,0BAA0B,CAAC;AAAA,EACvD;AACA,QAAM,QAAQ;AAAA,IACZ;AAAA,IACA,YAAY,MAAM,UAAU,MAAM,QAAQ,SAAS;AAAA,IACnD,MAAM;AAAA,EACR;AACA,MAAI,MAAM,WAAW,GAAG;AACtB,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACA,QAAM,SAAS,CAAC;AAChB,QAAM,WAAW,MAAM,IAAI,CAAC,YAAY;AACtC,QAAI,UAAU,QAAQ,QAAQ,KAAK;AACnC,QAAI,MAAM,WAAW;AACnB,gBAAU,QAAQ,KAAK,MAAM,SAAS;AAAA,IACxC;AACA,WAAO,QAAQ,KAAK,CAAC,WAAW;AAC9B,aAAO,QAAQ,MAAM;AAAA,IACvB,CAAC,EAAE,MAAM,CAAC,UAAU,OAAO,KAAK,OAAO,OAAO,OAAO,EAAE,MAAM,CAAC,CAAC,CAAC;AAAA,EAClE,CAAC;AACD,SAAO,QAAQ,IAAI,QAAQ,EAAE,KAAK,MAAM;AACtC,QAAI,OAAO,WAAW,GAAG;AACvB;AAAA,IACF;AACA,UAAM,QAAQ,IAAI,eAAe,MAAM;AACvC,WAAO,OAAO,OAAO;AAAA,MACnB;AAAA,MACA;AAAA,IACF,CAAC;AACD,kBAAc,QAAQ,CAAC,YAAY,iBAAiB,SAAS,KAAK,CAAC;AACnE,UAAM;AAAA,EACR,CAAC;AACH;;;AEvDA,SAAS,eAAe,OAAO,oBAAoB,SAAS;AAC1D,MAAI,MAAM,QAAQ,kBAAkB,GAAG;AACrC,uBAAmB;AAAA,MACjB,CAAC,gBAAgB,eAAe,OAAO,aAAa,OAAO;AAAA,IAC7D;AACA;AAAA,EACF;AACA,MAAI,CAAC,MAAM,MAAM,kBAAkB,GAAG;AACpC;AAAA,EACF;AACA,WAAS,IAAI,MAAM,MAAM,kBAAkB,EAAE,SAAS,GAAG,KAAK,GAAG,KAAK;AACpE,QAAI,MAAM,MAAM,kBAAkB,EAAE,CAAC,MAAM,SAAS;AAClD,YAAM,MAAM,kBAAkB,EAAE,OAAO,GAAG,CAAC;AAC3C;AAAA,IACF;AAAA,EACF;AACF;;;ACRA,SAAS,mBAAmB,SAAS;AACnC,QAAM,QAAQ;AAAA,IACZ,OAAO,CAAC;AAAA,IACR,KAAK,aAAa,WAAW,QAAQ,GAAG;AAAA,EAC1C;AACA,MAAI,WAAW,QAAQ,WAAW;AAChC,UAAM,YAAY,QAAQ;AAAA,EAC5B;AACA,SAAO;AAAA,IACL,IAAI,WAAG,KAAK,MAAM,KAAK;AAAA,IACvB,OAAO,cAAM,KAAK,MAAM,KAAK;AAAA,IAC7B,SAAS,gBAAQ,KAAK,MAAM,KAAK;AAAA,IACjC,gBAAgB,eAAe,KAAK,MAAM,KAAK;AAAA,IAC/C,SAAS,eAAQ,KAAK,MAAM,KAAK;AAAA,EACnC;AACF;;;ACvBA,SAAS,QAAQ,kBAAkB;;;ACAnC,SAAS,uBAAuB,SAAS;AACvC,QAAM,gBAAgB,KAAK,UAAU,OAAO;AAC5C,SAAO,cAAc,QAAQ,uBAAuB,CAAC,MAAM;AACzD,WAAO,EAAE,OAAO,GAAG,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE,YAAY;AAAA,EAClD,CAAC;AACH;;;ADHA,eAAe,KAAK,QAAQ,SAAS;AACnC,SAAO;AAAA,IACL;AAAA,IACA,OAAO,YAAY,WAAW,UAAU,uBAAuB,OAAO;AAAA,EACxE;AACF;;;AEPA,SAAS,UAAU,oBAAoB;AAEvC,eAAe,OAAO,QAAQ,SAAS,WAAW;AAChD,SAAO;AAAA,IACL;AAAA,IACA,OAAO,YAAY,WAAW,UAAU,uBAAuB,OAAO;AAAA,IACtE;AAAA,EACF;AACF;;;ACRA,SAAS,UAAAA,eAAc;AAEvB,eAAe,iBAAiB,OAAO,OAA
O;AAC5C,QAAM,mBAAmB,MAAMC;AAAA,IAC7B,MAAM;AAAA,IACN,OAAO,MAAM,YAAY,WAAW,uBAAuB,MAAM,OAAO,IAAI,MAAM;AAAA,IAClF,MAAM;AAAA,EACR,EAAE,MAAM,MAAM,KAAK;AACnB,MAAI,CAAC,kBAAkB;AACrB,UAAM,QAAQ,IAAI;AAAA,MAChB;AAAA,IACF;AACA,WAAO,MAAM,aAAa;AAAA,MACxB,OAAO,OAAO,OAAO,EAAE,OAAO,QAAQ,IAAI,CAAC;AAAA,IAC7C;AAAA,EACF;AACA,SAAO,MAAM,aAAa,QAAQ;AAAA,IAChC,IAAI,MAAM;AAAA,IACV,MAAM,MAAM;AAAA,IACZ,SAAS,OAAO,MAAM,YAAY,WAAW,KAAK,MAAM,MAAM,OAAO,IAAI,MAAM;AAAA,EACjF,CAAC;AACH;;;ACrBA,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AACF;AACA,SAAS,kBAAkB,SAAS;AAClC,SAAO,gBAAgB,OAAO,CAAC,WAAW,EAAE,UAAU,QAAQ,QAAQ;AACxE;;;ACPA,OAAOC,qBAAoB;AAC3B,SAAS,WAAW,SAAS;AAC3B,MAAI,QAAQ,MAAM;AAChB,QAAI,OAAO,QAAQ,SAAS,UAAU;AACpC,cAAQ;AAAA,QACN;AAAA,MACF;AAAA,IACF;AACA,WAAO,QAAQ,QAAQ,QAAQ,IAAI;AAAA,EACrC;AACA,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,QAAI,OAAO;AACX,YAAQ,YAAY,MAAM;AAC1B,YAAQ,GAAG,SAAS,CAAC,UAAU,OAAO,IAAIA,gBAAe,CAAC,KAAK,CAAC,CAAC,CAAC;AAClE,YAAQ,GAAG,QAAQ,CAAC,UAAU,QAAQ,KAAK;AAC3C,YAAQ,GAAG,OAAO,MAAM;AACtB,UAAI;AACF,aAAK,MAAM,IAAI;AACf,gBAAQ,IAAI;AAAA,MACd,SAAS,OAAP;AACA,cAAM,UAAU;AAChB,cAAM,SAAS;AACf,eAAO,IAAIA,gBAAe,CAAC,KAAK,CAAC,CAAC;AAAA,MACpC;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AACH;;;ACxBA,eAAe,WAAW,UAAU,SAAS,SAAS,UAAU,MAAM;AACpE,MAAI;AACJ,MAAI;AACF,eAAW,IAAI,IAAI,QAAQ,KAAK,kBAAkB,EAAE;AAAA,EACtD,SAAS,OAAP;AACA,aAAS,UAAU,KAAK;AAAA,MACtB,gBAAgB;AAAA,IAClB,CAAC;AACD,aAAS;AAAA,MACP,KAAK,UAAU;AAAA,QACb,OAAO,oCAAoC,QAAQ;AAAA,MACrD,CAAC;AAAA,IACH;AACA;AAAA,EACF;AACA,QAAM,iBAAiB,QAAQ,WAAW,UAAU,aAAa,QAAQ;AACzE,QAAM,sBAAsB,OAAO,SAAS;AAC5C,MAAI,gBAAgB;AAClB,QAAI,qBAAqB;AACvB,aAAO,KAAK;AAAA,IACd,OAAO;AACL,aAAO,QAAQ,mBAAmB,SAAS,QAAQ;AAAA,IACrD;AAAA,EACF;AACA,MAAI,CAAC,QAAQ,QAAQ,cAAc,KAAK,CAAC,QAAQ,QAAQ,cAAc,EAAE,WAAW,kBAAkB,GAAG;AACvG,aAAS,UAAU,KAAK;AAAA,MACtB,gBAAgB;AAAA,MAChB,QAAQ;AAAA,IACV,CAAC;AACD,aAAS;AAAA,MACP,KAAK,UAAU;AAAA,QACb,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AACA;AAAA,EACF;AACA,QAAM,iBAAiB,kBAAkB,OAAO,EAAE,KAAK,IAAI;AAC3D,MAAI,gBAAgB;AAClB,aAAS,UAAU,KAAK;AAAA,MACtB,gBAAgB;AAAA,IAClB,CAAC;AACD,aAAS;AAAA,MACP,KAAK,UAAU;AAAA,QACb,OAAO,6BAA6B;AAAA,MACtC,CAAC;AAAA,IACH;AACA;AAAA,EACF;AACA,QAAM,YAAY,QAAQ,QAAQ,gBAAgB;AAClD,QAAM,kBAAkB,QAAQ,QAAQ,qBAAqB;AAC7D,QAAM,KAAK,QAAQ,QAAQ,mBAAmB;AAC9C,UAAQ,IAAI,MAAM,GAAG,iCAAiC,KAAK;AAC3D,MAAI,aAAa;AACjB,QAAM,UAAU,WAAW,MAAM;AAC/B,iBAAa;AACb,aAAS,aAAa;AACtB,aAAS,IAAI,oBAAoB;AAAA,EACnC,GAAG,GAAG,EAAE,MAAM;AACd,MAAI;AACF,UAAM,UAAU,MAAM,WAAW,OAAO;AACxC,UAAM,SAAS,iBAAiB;AAAA,MAC9B;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA,WAAW;AAAA,IACb,CAAC;AACD,iBAAa,OAAO;AACpB,QAAI;AACF;AACF,aAAS,IAAI,MAAM;AAAA,EACrB,SAAS,OAAP;AACA,iBAAa,OAAO;AACpB,QAAI;AACF;AACF,UAAM,MAAM,MAAM,KAAK,KAAK,EAAE,CAAC;AAC/B,UAAM,eAAe,IAAI,UAAU,GAAG,IAAI,SAAS,IAAI,YAAY;AACnE,aAAS,aAAa,OAAO,IAAI,WAAW,cAAc,IAAI,SAAS;AACvE,YAAQ,IAAI,MAAM,KAAK;AACvB,aAAS;AAAA,MACP,KAAK,UAAU;AAAA,QACb,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ACtFA,SAAS,0BAA0B,SAAS,UAAU;AACpD,WAAS,UAAU,KAAK;AAAA,IACtB,gBAAgB;AAAA,EAClB,CAAC;AACD,WAAS;AAAA,IACP,KAAK,UAAU;AAAA,MACb,OAAO,kBAAkB,QAAQ,UAAU,QAAQ;AAAA,IACrD,CAAC;AAAA,EACH;AACF;;;ACNA,SAAS,qBAAqB,UAAU;AAAA,EACtC,OAAO;AAAA,EACP,qBAAqB;AAAA,EACrB,MAAM,aAAa;AACrB,IAAI,CAAC,GAAG;AACN,QAAM,8BAA8B,CAAC,SAAS,aAAa;AACzD,YAAQ;AAAA,MACN;AAAA,IACF;AACA,WAAO,mBAAmB,SAAS,QAAQ;AAAA,EAC7C;AACA,SAAO,WAAW,KAAK,MAAM,UAAU;AAAA,IACrC;AAAA,IACA,oBAAoB;AAAA,IACpB;AAAA,EACF,CAAC;AACH;;;ACZA,IAAM,WAAN,MAAe;AAAA,EACb,YAAY,SAAS;AACnB,QAAI,CAAC,WAAW,CAAC,QAAQ,QAAQ;AAC/B,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AACA,UAAM,QAAQ;AAAA,MACZ,cAAc,mBAAmB,OAAO;AAAA,MACxC,QAAQ,QAAQ;AAAA,MAChB,OAAO,CAAC;AAAA,MACR,KAAK,aAAa,QAAQ,GAAG;AAAA,IAC/B;AACA,SAAK,OAAO,KAAK,KAA
K,MAAM,QAAQ,MAAM;AAC1C,SAAK,SAAS,CAAC,cAAc,cAAc;AACzC,UAAI,OAAO,iBAAiB,UAAU;AACpC,gBAAQ;AAAA,UACN;AAAA,QACF;AAAA,MACF;AACA,aAAO,OAAO,QAAQ,QAAQ,cAAc,SAAS;AAAA,IACvD;AACA,SAAK,KAAK,MAAM,aAAa;AAC7B,SAAK,QAAQ,MAAM,aAAa;AAChC,SAAK,UAAU,MAAM,aAAa;AAClC,SAAK,iBAAiB,MAAM,aAAa;AACzC,SAAK,UAAU,MAAM,aAAa;AAClC,SAAK,mBAAmB,CAAC,aAAa;AACpC,UAAI,OAAO,SAAS,YAAY,UAAU;AACxC,gBAAQ;AAAA,UACN;AAAA,QACF;AAAA,MACF;AACA,aAAO,iBAAiB,OAAO,QAAQ;AAAA,IACzC;AAAA,EACF;AACF;", + "names": ["verify", "verify", "AggregateError"] +} diff --git a/dist/node_modules/@octokit/webhooks/package.json b/dist/node_modules/@octokit/webhooks/package.json new file mode 100644 index 0000000..4ae6b2d --- /dev/null +++ b/dist/node_modules/@octokit/webhooks/package.json @@ -0,0 +1,51 @@ +{ + "name": "@octokit/webhooks", + "version": "10.9.2", + "description": "GitHub webhook events toolset for Node.js", + "keywords": [], + "repository": "github:octokit/webhooks.js", + "license": "MIT", + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "dependencies": { + "@octokit/request-error": "^3.0.0", + "@octokit/webhooks-methods": "^3.0.0", + "@octokit/webhooks-types": "6.11.0", + "aggregate-error": "^3.1.0" + }, + "devDependencies": { + "@jest/types": "^29.0.0", + "@octokit/tsconfig": "^1.0.1", + "@octokit/webhooks-schemas": "6.11.0", + "@types/jest": "^29.0.0", + "@types/json-schema": "^7.0.7", + "@types/node": "^18.0.0", + "@types/node-fetch": "^2.5.8", + "@types/prettier": "^2.0.0", + "axios": "^1.0.0", + "esbuild": "^0.17.11", + "express": "^4.17.1", + "glob": "^9.2.1", + "jest": "^29.0.0", + "node-fetch": "^2.6.7", + "prettier": "2.8.7", + "prettier-plugin-packagejson": "^2.2.9", + "ts-jest": "^29.0.0", + "ts-node": "^10.0.0", + "typescript": "^5.0.0" + }, + "engines": { + "node": ">= 14" + }, + "publishConfig": { + "access": "public" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "source": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/@types/aws-lambda/LICENSE b/dist/node_modules/@types/aws-lambda/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/dist/node_modules/@types/aws-lambda/README.md b/dist/node_modules/@types/aws-lambda/README.md new file mode 100644 index 0000000..50c6b83 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/aws-lambda` + +# Summary +This package contains type definitions for aws-lambda (http://docs.aws.amazon.com/lambda). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/aws-lambda. + +### Additional Details + * Last updated: Tue, 05 Mar 2024 21:35:40 GMT + * Dependencies: none + +# Credits +These definitions were written by [James Darbyshire](https://github.com/darbio), [Michael Skarum](https://github.com/skarum), [Stef Heyenrath](https://github.com/StefH), [Rich Buggy](https://github.com/buggy), [wwwy3y3](https://github.com/wwwy3y3), [Ishaan Malhi](https://github.com/OrthoDex), [Michael Marner](https://github.com/MichaelMarner), [Daniel Cottone](https://github.com/daniel-cottone), [Kostya Misura](https://github.com/kostya-misura), [Markus Tacker](https://github.com/coderbyheart), [Palmi Valgeirsson](https://github.com/palmithor), [Danilo Raisi](https://github.com/daniloraisi), [Simon Buchan](https://github.com/simonbuchan), [David Hayden](https://github.com/Haydabase), [Chris Redekop](https://github.com/repl-chris), [Aneil Mallavarapu](https://github.com/aneilbaboo), [Jeremy Nagel](https://github.com/jeznag), [Louis Larry](https://github.com/louislarry), [Daniel Papukchiev](https://github.com/dpapukchiev), [Oliver Hookins](https://github.com/ohookins), [Trevor Leach](https://github.com/trevor-leach), [James Gregory](https://github.com/jagregory), [Erik Dalén](https://github.com/dalen), [Loïk Gaonac'h](https://github.com/loikg), [Roberto Zen](https://github.com/skyzenr), [Grzegorz Redlicki](https://github.com/redlickigrzegorz), [Juan Carbonel](https://github.com/juancarbonel), [Peter McIntyre](https://github.com/pwmcintyre), [Alex Bolenok](https://github.com/alex-bolenok-centralreach), [Marian Zange](https://github.com/marianzange), [Alessandro Palumbo](https://github.com/apalumbo), [Sachin Shekhar](https://github.com/SachinShekhar), [Ivan Martos](https://github.com/ivanmartos), [Zach Anthony](https://github.com/zach-anthony), [Peter Savnik](https://github.com/savnik), [Benoit Boure](https://github.com/bboure), [James Lakin](https://github.com/jamesorlakin), [Ross Gerbasi](https://github.com/aphex), [Joey Kilpatrick](https://github.com/joeykilpatrick), and [Luciano Manerich Junior](https://github.com/lmanerich). diff --git a/dist/node_modules/@types/aws-lambda/common/api-gateway.d.ts b/dist/node_modules/@types/aws-lambda/common/api-gateway.d.ts new file mode 100644 index 0000000..8cb6a39 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/common/api-gateway.d.ts @@ -0,0 +1,83 @@ +// Types shared between trigger/api-gateway-authorizer.d.ts and api-gateway-proxy.d.ts + +// Poorly documented, but API Gateway will just fail internally if +// the context type does not match this. +// Note that although non-string types will be accepted, they will be +// coerced to strings on the other side. 
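A rough sketch of the coercion behaviour described above, assuming the `APIGatewayTokenAuthorizerHandler` and `APIGatewayProxyHandler` types exported elsewhere in this package (the `tenantId`/`isAdmin` keys and handler names are purely illustrative): an authorizer may put numbers and booleans into its result context, but a downstream proxy integration reads them back as strings.

import type {
  APIGatewayProxyHandler,
  APIGatewayTokenAuthorizerHandler,
} from "aws-lambda";

// Authorizer: the result context type accepts non-string values...
export const authorizer: APIGatewayTokenAuthorizerHandler = async (event) => ({
  principalId: "user-123",
  policyDocument: {
    Version: "2012-10-17",
    Statement: [
      { Action: "execute-api:Invoke", Effect: "Allow", Resource: event.methodArn },
    ],
  },
  // Satisfies APIGatewayAuthorizerResultContext (declared below); keys are illustrative.
  context: { tenantId: 42, isAdmin: true },
});

// ...but the proxy integration receives them coerced to strings on the other side.
export const handler: APIGatewayProxyHandler = async (event) => {
  const tenantId = event.requestContext.authorizer?.tenantId; // "42" at runtime, not 42
  return { statusCode: 200, body: JSON.stringify({ tenantId }) };
};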
+export interface APIGatewayAuthorizerResultContext { + [name: string]: string | number | boolean | null | undefined; +} + +// Default authorizer type, prefer using a specific type with the "...WithAuthorizer..." variant types. +// Note that this doesn't have to be a context from a custom lambda outhorizer, AWS also has a cognito +// authorizer type and could add more, so the property won't always be a string. +export type APIGatewayEventDefaultAuthorizerContext = + | undefined + | null + | { + [name: string]: any; + }; + +export type APIGatewayEventRequestContext = APIGatewayEventRequestContextWithAuthorizer< + APIGatewayEventDefaultAuthorizerContext +>; + +// The requestContext property of both request authorizer and proxy integration events. +export interface APIGatewayEventRequestContextWithAuthorizer { + accountId: string; + apiId: string; + // This one is a bit confusing: it is not actually present in authorizer calls + // and proxy calls without an authorizer. We model this by allowing undefined in the type, + // since it ends up the same and avoids breaking users that are testing the property. + // This lets us allow parameterizing the authorizer for proxy events that know what authorizer + // context values they have. + authorizer: TAuthorizerContext; + connectedAt?: number | undefined; + connectionId?: string | undefined; + domainName?: string | undefined; + domainPrefix?: string | undefined; + eventType?: string | undefined; + extendedRequestId?: string | undefined; + protocol: string; + httpMethod: string; + identity: APIGatewayEventIdentity; + messageDirection?: string | undefined; + messageId?: string | null | undefined; + path: string; + stage: string; + requestId: string; + requestTime?: string | undefined; + requestTimeEpoch: number; + resourceId: string; + resourcePath: string; + routeKey?: string | undefined; +} + +export interface APIGatewayEventClientCertificate { + clientCertPem: string; + serialNumber: string; + subjectDN: string; + issuerDN: string; + validity: { + notAfter: string; + notBefore: string; + }; +} + +export interface APIGatewayEventIdentity { + accessKey: string | null; + accountId: string | null; + apiKey: string | null; + apiKeyId: string | null; + caller: string | null; + clientCert: APIGatewayEventClientCertificate | null; + cognitoAuthenticationProvider: string | null; + cognitoAuthenticationType: string | null; + cognitoIdentityId: string | null; + cognitoIdentityPoolId: string | null; + principalOrgId: string | null; + sourceIp: string; + user: string | null; + userAgent: string | null; + userArn: string | null; +} diff --git a/dist/node_modules/@types/aws-lambda/common/cloudfront.d.ts b/dist/node_modules/@types/aws-lambda/common/cloudfront.d.ts new file mode 100644 index 0000000..f90de09 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/common/cloudfront.d.ts @@ -0,0 +1,245 @@ +/** + * CloudFront events + * http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/lambda-event-structure.html + * Bear in mind that the "example" event structure in the page above includes + * both an S3 and a Custom origin, which is not strictly allowed. Only one + * of these per event may be present. 
+ */ +export interface CloudFrontHeaders { + [name: string]: Array<{ + key?: string | undefined; + value: string; + }>; +} + +export type CloudFrontOrigin = + | { s3: CloudFrontS3Origin; custom?: never | undefined } + | { custom: CloudFrontCustomOrigin; s3?: never | undefined }; + +export interface CloudFrontCustomOrigin { + customHeaders: CloudFrontHeaders; + domainName: string; + keepaliveTimeout: number; + path: string; + port: number; + protocol: "http" | "https"; + readTimeout: number; + sslProtocols: string[]; +} + +export type CloudFrontS3Origin = + | CloudFrontS3OriginAuthMethodNone + | CloudFrontS3OriginAuthMethodOriginAccessIdentity; + +export interface CloudFrontS3OriginBase { + authMethod: "origin-access-identity" | "none"; + customHeaders: CloudFrontHeaders; + domainName: string; + path: string; +} + +export interface CloudFrontS3OriginAuthMethodNone extends CloudFrontS3OriginBase { + authMethod: "none"; + region?: never; +} + +export interface CloudFrontS3OriginAuthMethodOriginAccessIdentity extends CloudFrontS3OriginBase { + authMethod: "origin-access-identity"; + region: string; +} + +export interface CloudFrontResponse { + status: string; + statusDescription: string; + headers: CloudFrontHeaders; +} + +export interface CloudFrontRequest { + body?: + | { + action: "read-only" | "replace"; + data: string; + encoding: "base64" | "text"; + readonly inputTruncated: boolean; + } + | undefined; + readonly clientIp: string; + readonly method: string; + uri: string; + querystring: string; + headers: CloudFrontHeaders; + origin?: CloudFrontOrigin | undefined; +} + +export interface CloudFrontEvent { + config: { + readonly distributionDomainName: string; + readonly distributionId: string; + readonly eventType: "origin-request" | "origin-response" | "viewer-request" | "viewer-response"; + readonly requestId: string; + }; +} + +/** + * Generated HTTP response in viewer request event or origin request event + * + * https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/lambda-generating-http-responses-in-requests.html#lambda-generating-http-responses-object + */ +export interface CloudFrontResultResponse { + status: string; + statusDescription?: string | undefined; + headers?: CloudFrontHeaders | undefined; + bodyEncoding?: "text" | "base64" | undefined; + body?: string | undefined; +} + +/** @see https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/functions-event-structure.html#functions-event-structure-query-header-cookie */ +export interface CloudFrontFunctionsCookies { + [key: string]: { + value: string; + attributes?: string; + multiValue?: Array<{ value: string; attributes?: string }>; + }; +} + +/** @see https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/functions-event-structure.html#functions-event-structure-query-header-cookie */ +export interface CloudFrontFunctionsQuerystring { + [key: string]: { value: string; multiValue?: Array<{ value: string }> }; +} + +/** @see https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/functions-event-structure.html#functions-event-structure-query-header-cookie */ +export interface CloudFrontFunctionsHeaders { + [key: string]: { value: string; multiValue?: Array<{ value: string }> }; +} + +/** + * @see https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/functions-event-structure.html + * + * @usage + * ```ts + * export const handler = async (event: CloudFrontFunctionsEvent) => { + * var response = event.response; + * var headers = response.headers; + * // Set the 
cache-control header + * headers["cache-control"] = { value: "public,max-age=31536000,immutable" }; + * // Return response to viewers + * return response + * }; + * ``` + */ +export interface CloudFrontFunctionsEvent { + /** + * ## Version field + * The version field contains a string that specifies the version of the + * CloudFront Functions event object. The current version is 1.0. + */ + version: string; + /** + * ## Context object + * The `context` object contains contextual information about the event. It includes the following fields: + * - `distributionDomainName` + * - `distributionId` + * - `eventType` + * - `requestId` + */ + context: { + /** The CloudFront domain name (for example, d111111abcdef8.cloudfront.net) of the distribution that’s associated with the event. */ + distributionDomainName: string; + /** The ID of the distribution (for example, EDFDVBD6EXAMPLE) that’s associated with the event. */ + distributionId: string; + /** The event type, either `viewer-request` or `viewer-response`. */ + eventType: "viewer-request" | "viewer-response"; + /** A string that uniquely identifies a CloudFront request (and its associated response). */ + requestId: string; + }; + /** + * ## Viewer object + * The `viewer` object contains an `ip` field whose value is the IP address of the viewer (client) that sent the request. + * If the viewer request came through an HTTP proxy or a load balancer, the value is the IP address of the proxy or load balancer. + */ + viewer: { + ip: string; + }; + /** + * ## Request object + * The `request` object contains a representation of a viewer-to-CloudFront HTTP request. + * In the `event` object that’s passed to your function, the `request` object represents the + * actual request that CloudFront received from the viewer. + * + * If your function code returns a `request` object to CloudFront, it must use this same structure. + * + * The `request` object contains the following fields: + * - `method` + * - `uri` + * - `querystring` + * - `headers` + * - `cookies` + */ + request: { + /** The HTTP method of the request. If your function code returns a `request`, it cannot modify this field. This is the only read-only field in the `request` object. */ + method: string; + /** + * The relative path of the requested object. If your function modifies the `uri value, note the following: + * - The new `uri` value must begin with a forward slash (`/`)`. + * - When a function changes the `uri` value, it changes the object that the viewer is requesting. + * - When a function changes the `uri` value, it doesn’t change the cache behavior for the request or the origin that an origin request is sent to. + */ + uri: string; + /** + * An object that represents the query string in the request. If the request doesn’t include a query string, + * the `request` object still includes an empty `querystring` object. + * + * The `querystring` object contains one field for each query string parameter in the request. + * Query string parameter names are converted to lowercase. + */ + querystring: CloudFrontFunctionsQuerystring; + /** + * An object that represents the HTTP headers in the request. If the request contains any `Cookie` headers, + * those headers are not part of the `headers` object. Cookies are represented separately in the `cookies` object. + * + * The `headers` object contains one field for each header in the request. Header names are converted to lowercase. 
+ */ + headers: CloudFrontFunctionsHeaders; + /** + * An object that represents the cookies in the request (`Cookie` headers). + * + * The `cookies` object contains one field for each cookie in the request. Cookie names are converted to lowercase. + */ + cookies: CloudFrontFunctionsCookies; + }; + /** + * ## Response object + * + * The `response` object contains a representation of a CloudFront-to-viewer HTTP response. + * In the `event` object that’s passed to your function, the `response` object represents CloudFront’s actual response to a viewer request. + * + * If your function code returns a `response` object, it must use this same structure. + * + * The `response` object contains the following fields: + */ + response: { + /** + * The HTTP status code of the response. This value is an integer, not a string. + * + * If the function is associated with a _viewer response_ event type, your function code cannot change + * the `statusCode` that it received. If the function is associated with a _viewer request_ event type + * and [generates an HTTP response](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/writing-function-code.html#function-code-generate-response), + * your function code can set the `statusCode`. + */ + statusCode: number; + /** The HTTP status description of the response. If your function code generates a response, this field is optional. */ + statusDescription?: string; + /** + * An object that represents the HTTP headers in the response. If the response contains any `Set-Cookie` headers, + * those `headers` are not part of the headers object. Cookies are represented separately in the `cookies` object. + * + * The `headers` object contains one field for each header in the response. Header names are converted to lowercase. + */ + headers: CloudFrontFunctionsHeaders; + /** + * An object that represents the cookies in the response (`Set-Cookie` headers). + * The `cookies` object contains one field for each cookie in the response. Cookie names are converted to lowercase. + */ + cookies: CloudFrontFunctionsCookies; + }; +} diff --git a/dist/node_modules/@types/aws-lambda/handler.d.ts b/dist/node_modules/@types/aws-lambda/handler.d.ts new file mode 100644 index 0000000..98a274a --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/handler.d.ts @@ -0,0 +1,172 @@ +/** + * The interface that AWS Lambda will invoke your handler with. + * There are more specialized types for many cases where AWS services + * invoke your lambda, but you can directly use this type for when you are invoking + * your lambda directly. + * + * See the {@link http://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-handler.html AWS documentation} + * for more information about the runtime behavior, and the + * {@link https://aws.amazon.com/blogs/compute/node-js-8-10-runtime-now-available-in-aws-lambda/ AWS Blog post} + * introducing the async handler behavior in the 8.10 runtime. 
+ * + * @example Defining a custom handler type + * import { Handler } from 'aws-lambda' + * + * interface NameEvent { + * fullName: string + * } + * interface NameResult { + * firstName: string + * middleNames: string + * lastName: string + * } + * type PersonHandler = Handler + * + * export const handler: PersonHandler = async (event) => { + * const names = event.fullName.split(' ') + * const firstName = names.shift() + * const lastName = names.pop() + * return { firstName, middleNames: names, lastName } + * } + * + * @example Logs the contents of the event object and returns the location of the logs + * import { Handler } from 'aws-lambda' + * + * export const handler: Handler = async (event, context) => { + * console.log("EVENT: \n" + JSON.stringify(event, null, 2)) + * return context.logStreamName + * } + * + * @example AWS SDK with Async Function and Promises + * import { Handler } from 'aws-lambda' + * import AWS from 'aws-sdk' + * + * const s3 = new AWS.S3() + * + * export const handler: Handler = async (event) => { + * const response = await s3.listBuckets().promise() + * return response?.Buckets.map((bucket) => bucket.Name) + * } + * + * @example HTTP Request with Callback + * import { Handler } from 'aws-lambda' + * import https from 'https' + * + * let url = "https://docs.aws.amazon.com/lambda/latest/dg/welcome.html" + * + * export const handler: Handler = (event, context, callback) => { + * https.get(url, (res) => { + * callback(null, res.statusCode) + * }).on('error', (e) => { + * callback(Error(e)) + * }) + * } + * + * @param event + * Parsed JSON data in the lambda request payload. For an AWS service triggered + * lambda this should be in the format of a type ending in Event, for example the + * S3Handler receives an event of type S3Event. + * @param context + * Runtime contextual information of the current invocation, for example the caller + * identity, available memory and time remaining, legacy completion callbacks, and + * a mutable property controlling when the lambda execution completes. + * @param callback + * NodeJS-style completion callback that the AWS Lambda runtime will provide that can + * be used to provide the lambda result payload value, or any execution error. Can + * instead return a promise that resolves with the result payload value or rejects + * with the execution error. + * @return + * A promise that resolves with the lambda result payload value, or rejects with the + * execution error. Note that if you implement your handler as an async function, + * you will automatically return a promise that will resolve with a returned value, + * or reject with a thrown value. + */ +export type Handler = ( + event: TEvent, + context: Context, + callback: Callback, + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type +) => void | Promise; + +/** + * {@link Handler} context parameter. + * See {@link https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-context.html AWS documentation}. 
+ */ +export interface Context { + callbackWaitsForEmptyEventLoop: boolean; + functionName: string; + functionVersion: string; + invokedFunctionArn: string; + memoryLimitInMB: string; + awsRequestId: string; + logGroupName: string; + logStreamName: string; + identity?: CognitoIdentity | undefined; + clientContext?: ClientContext | undefined; + + getRemainingTimeInMillis(): number; + + // Functions for compatibility with earlier Node.js Runtime v0.10.42 + // No longer documented, so they are deprecated, but they still work + // as of the 12.x runtime, so they are not removed from the types. + + /** @deprecated Use handler callback or promise result */ + done(error?: Error, result?: any): void; + /** @deprecated Use handler callback with first argument or reject a promise result */ + fail(error: Error | string): void; + /** @deprecated Use handler callback with second argument or resolve a promise result */ + succeed(messageOrObject: any): void; + // Unclear what behavior this is supposed to have, I couldn't find any still extant reference, + // and it behaves like the above, ignoring the object parameter. + /** @deprecated Use handler callback or promise result */ + succeed(message: string, object: any): void; +} + +export interface CognitoIdentity { + cognitoIdentityId: string; + cognitoIdentityPoolId: string; +} + +export interface ClientContext { + client: ClientContextClient; + Custom?: any; + env: ClientContextEnv; +} + +export interface ClientContextClient { + installationId: string; + appTitle: string; + appVersionName: string; + appVersionCode: string; + appPackageName: string; +} + +export interface ClientContextEnv { + platformVersion: string; + platform: string; + make: string; + model: string; + locale: string; +} + +/** + * NodeJS-style callback parameter for the {@link Handler} type. + * Can be used instead of returning a promise, see the + * {@link https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-handler.html AWS documentation} + * for the handler programming model. + * + * @param error + * Parameter to use to provide the error payload for a failed lambda execution. + * See {@link https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-mode-exceptions.html AWS documentation} + * for error handling. + * If an Error instance is passed, the error payload uses the `name` property as the `errorType`, + * the `message` property as the `errorMessage`, and parses the `stack` property string into + * the `trace` array. + * For other values, the `errorType` is `typeof value`, the `errorMessage` is `String(value)`, and + * `trace` is an empty array. + * + * @param result + * Parameter to use to provide the result payload for a successful lambda execution. + * Pass `null` or `undefined` for the `error` parameter to use this parameter. 
+ */ +export type Callback = (error?: Error | string | null, result?: TResult) => void; diff --git a/dist/node_modules/@types/aws-lambda/index.d.ts b/dist/node_modules/@types/aws-lambda/index.d.ts new file mode 100644 index 0000000..f1df85f --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/index.d.ts @@ -0,0 +1,52 @@ +export * from "./common/api-gateway"; +export * from "./common/cloudfront"; +export * from "./handler"; +export * from "./trigger/alb"; +// TODO: export * from "./trigger/alexa"; +export * from "./trigger/api-gateway-authorizer"; +export * from "./trigger/api-gateway-proxy"; +export * from "./trigger/appsync-resolver"; +export * from "./trigger/autoscaling"; +// CloudTrail section just describes using S3 to trigger on cloudtrail changes. +export * from "./trigger/cdk-custom-resource"; +export * from "./trigger/cloudformation-custom-resource"; +export * from "./trigger/cloudfront-request"; +export * from "./trigger/cloudfront-response"; +export * from "./trigger/cloudwatch-alarm"; +export * from "./trigger/cloudwatch-events"; +export * from "./trigger/cloudwatch-logs"; +export * from "./trigger/codebuild-cloudwatch-state"; +export * from "./trigger/codecommit"; +export * from "./trigger/codepipeline"; +export * from "./trigger/codepipeline-cloudwatch"; +export * from "./trigger/codepipeline-cloudwatch-action"; +export * from "./trigger/codepipeline-cloudwatch-pipeline"; +export * from "./trigger/codepipeline-cloudwatch-stage"; +// TODO: export * from "./trigger/cognito-sync"; +export * from "./trigger/cognito-user-pool-trigger/"; +export * from "./trigger/connect-contact-flow"; +// TODO: export * from "./trigger/config"; +export * from "./trigger/dynamodb-stream"; +export * from "./trigger/eventbridge"; +// ElastiCache section just describes using lambdas in an ElastiCache context (VPC issues, etc.) +// EC2 events are delivered using cloudwatch events... +export * from "./trigger/iot"; +export * from "./trigger/iot-authorizer"; +export * from "./trigger/kinesis-firehose-transformation"; +export * from "./trigger/kinesis-stream"; +export * from "./trigger/lambda-function-url"; +export * from "./trigger/lex"; +export * from "./trigger/lex-v2"; +// RDS events are delivered using SNS events... 
+export * from "./trigger/amplify-resolver"; +export * from "./trigger/msk"; +export * from "./trigger/s3"; +export * from "./trigger/s3-batch"; +export * from "./trigger/s3-event-notification"; +export * from "./trigger/secretsmanager"; +export * from "./trigger/self-managed-kafka"; +export * from "./trigger/ses"; +export * from "./trigger/sns"; +export * from "./trigger/sqs"; + +export as namespace AWSLambda; diff --git a/dist/node_modules/@types/aws-lambda/package.json b/dist/node_modules/@types/aws-lambda/package.json new file mode 100644 index 0000000..f2f06ee --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/package.json @@ -0,0 +1,220 @@ +{ + "name": "@types/aws-lambda", + "version": "8.10.136", + "description": "TypeScript definitions for aws-lambda", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/aws-lambda", + "license": "MIT", + "contributors": [ + { + "name": "James Darbyshire", + "githubUsername": "darbio", + "url": "https://github.com/darbio" + }, + { + "name": "Michael Skarum", + "githubUsername": "skarum", + "url": "https://github.com/skarum" + }, + { + "name": "Stef Heyenrath", + "githubUsername": "StefH", + "url": "https://github.com/StefH" + }, + { + "name": "Rich Buggy", + "githubUsername": "buggy", + "url": "https://github.com/buggy" + }, + { + "name": "wwwy3y3", + "githubUsername": "wwwy3y3", + "url": "https://github.com/wwwy3y3" + }, + { + "name": "Ishaan Malhi", + "githubUsername": "OrthoDex", + "url": "https://github.com/OrthoDex" + }, + { + "name": "Michael Marner", + "githubUsername": "MichaelMarner", + "url": "https://github.com/MichaelMarner" + }, + { + "name": "Daniel Cottone", + "githubUsername": "daniel-cottone", + "url": "https://github.com/daniel-cottone" + }, + { + "name": "Kostya Misura", + "githubUsername": "kostya-misura", + "url": "https://github.com/kostya-misura" + }, + { + "name": "Markus Tacker", + "githubUsername": "coderbyheart", + "url": "https://github.com/coderbyheart" + }, + { + "name": "Palmi Valgeirsson", + "githubUsername": "palmithor", + "url": "https://github.com/palmithor" + }, + { + "name": "Danilo Raisi", + "githubUsername": "daniloraisi", + "url": "https://github.com/daniloraisi" + }, + { + "name": "Simon Buchan", + "githubUsername": "simonbuchan", + "url": "https://github.com/simonbuchan" + }, + { + "name": "David Hayden", + "githubUsername": "Haydabase", + "url": "https://github.com/Haydabase" + }, + { + "name": "Chris Redekop", + "githubUsername": "repl-chris", + "url": "https://github.com/repl-chris" + }, + { + "name": "Aneil Mallavarapu", + "githubUsername": "aneilbaboo", + "url": "https://github.com/aneilbaboo" + }, + { + "name": "Jeremy Nagel", + "githubUsername": "jeznag", + "url": "https://github.com/jeznag" + }, + { + "name": "Louis Larry", + "githubUsername": "louislarry", + "url": "https://github.com/louislarry" + }, + { + "name": "Daniel Papukchiev", + "githubUsername": "dpapukchiev", + "url": "https://github.com/dpapukchiev" + }, + { + "name": "Oliver Hookins", + "githubUsername": "ohookins", + "url": "https://github.com/ohookins" + }, + { + "name": "Trevor Leach", + "githubUsername": "trevor-leach", + "url": "https://github.com/trevor-leach" + }, + { + "name": "James Gregory", + "githubUsername": "jagregory", + "url": "https://github.com/jagregory" + }, + { + "name": "Erik Dalén", + "githubUsername": "dalen", + "url": "https://github.com/dalen" + }, + { + "name": "Loïk Gaonac'h", + "githubUsername": "loikg", + "url": "https://github.com/loikg" + }, + { + "name": "Roberto 
Zen", + "githubUsername": "skyzenr", + "url": "https://github.com/skyzenr" + }, + { + "name": "Grzegorz Redlicki", + "githubUsername": "redlickigrzegorz", + "url": "https://github.com/redlickigrzegorz" + }, + { + "name": "Juan Carbonel", + "githubUsername": "juancarbonel", + "url": "https://github.com/juancarbonel" + }, + { + "name": "Peter McIntyre", + "githubUsername": "pwmcintyre", + "url": "https://github.com/pwmcintyre" + }, + { + "name": "Alex Bolenok", + "githubUsername": "alex-bolenok-centralreach", + "url": "https://github.com/alex-bolenok-centralreach" + }, + { + "name": "Marian Zange", + "githubUsername": "marianzange", + "url": "https://github.com/marianzange" + }, + { + "name": "Alessandro Palumbo", + "githubUsername": "apalumbo", + "url": "https://github.com/apalumbo" + }, + { + "name": "Sachin Shekhar", + "githubUsername": "SachinShekhar", + "url": "https://github.com/SachinShekhar" + }, + { + "name": "Ivan Martos", + "githubUsername": "ivanmartos", + "url": "https://github.com/ivanmartos" + }, + { + "name": "Zach Anthony", + "githubUsername": "zach-anthony", + "url": "https://github.com/zach-anthony" + }, + { + "name": "Peter Savnik", + "githubUsername": "savnik", + "url": "https://github.com/savnik" + }, + { + "name": "Benoit Boure", + "githubUsername": "bboure", + "url": "https://github.com/bboure" + }, + { + "name": "James Lakin", + "githubUsername": "jamesorlakin", + "url": "https://github.com/jamesorlakin" + }, + { + "name": "Ross Gerbasi", + "githubUsername": "aphex", + "url": "https://github.com/aphex" + }, + { + "name": "Joey Kilpatrick", + "githubUsername": "joeykilpatrick", + "url": "https://github.com/joeykilpatrick" + }, + { + "name": "Luciano Manerich Junior", + "githubUsername": "lmanerich", + "url": "https://github.com/lmanerich" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/aws-lambda" + }, + "scripts": {}, + "dependencies": {}, + "typesPublisherContentHash": "958c87d4ef0ed00c7121081f8d3190a4072d9bf959f25cb839b3c3b865f6a828", + "typeScriptVersion": "4.6" +} \ No newline at end of file diff --git a/dist/node_modules/@types/aws-lambda/trigger/alb.d.ts b/dist/node_modules/@types/aws-lambda/trigger/alb.d.ts new file mode 100644 index 0000000..d87ba78 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/alb.d.ts @@ -0,0 +1,48 @@ +import { Callback, Handler } from "../handler"; + +export type ALBHandler = Handler; +export type ALBCallback = Callback; + +// https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html +export interface ALBEventRequestContext { + elb: { + targetGroupArn: string; + }; +} + +export interface ALBEventQueryStringParameters { + [name: string]: string | undefined; +} + +export interface ALBEventHeaders { + [name: string]: string | undefined; +} + +export interface ALBEventMultiValueHeaders { + [name: string]: string[] | undefined; +} + +export interface ALBEventMultiValueQueryStringParameters { + [name: string]: string[] | undefined; +} + +export interface ALBEvent { + requestContext: ALBEventRequestContext; + httpMethod: string; + path: string; + queryStringParameters?: ALBEventQueryStringParameters | undefined; // URL encoded + headers?: ALBEventHeaders | undefined; + multiValueQueryStringParameters?: ALBEventMultiValueQueryStringParameters | undefined; // URL encoded + multiValueHeaders?: ALBEventMultiValueHeaders | undefined; + body: string | null; + isBase64Encoded: 
boolean; +} + +export interface ALBResult { + statusCode: number; + statusDescription?: string | undefined; + headers?: { [header: string]: boolean | number | string } | undefined; + multiValueHeaders?: { [header: string]: Array } | undefined; + body?: string | undefined; + isBase64Encoded?: boolean | undefined; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/amplify-resolver.d.ts b/dist/node_modules/@types/aws-lambda/trigger/amplify-resolver.d.ts new file mode 100644 index 0000000..ea84031 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/amplify-resolver.d.ts @@ -0,0 +1,38 @@ +import { Handler } from "../handler"; +import { AppSyncIdentity, AppSyncResolverEventHeaders } from "./appsync-resolver"; + +/** + * An AWS Amplify GraphQL resolver event. It differs slightly from a native ('direct') AppSync resolver event. + * + * @see https://docs.amplify.aws/cli/graphql/custom-business-logic/#structure-of-the-function-event + */ +export interface AmplifyGraphQlResolverEvent, TSource = Record> { + /** The name of the parent object type (data model) of the field being resolved. */ + typeName: string; + /** The field within the given type to resolve. */ + fieldName: string; + /** A map of GraphQL arguments passed to the field being resolved. */ + arguments: TArguments; + /** The identity used to authenticate the request to AppSync. */ + identity?: AppSyncIdentity; + /** The parent object's value if resolving a nested field. */ + source: TSource; + /** The request headers */ + request: { + headers: AppSyncResolverEventHeaders; + domainName: string | null; + }; + /** The object returned by a possible previous pipeline resolver function. */ + prev: { result: { [key: string]: any } } | null; +} + +/** + * A handler for Amplify GraphQL Lambda resolvers. The returned result will be resolved as the value (no need to convert to a JSON string). 
+ * + * @see https://docs.amplify.aws/cli/graphql/custom-business-logic/#structure-of-the-function-event + */ +export type AmplifyGraphQlResolverHandler< + TArguments = Record, + TSource = Record, + TResult = any, +> = Handler, TResult>; diff --git a/dist/node_modules/@types/aws-lambda/trigger/api-gateway-authorizer.d.ts b/dist/node_modules/@types/aws-lambda/trigger/api-gateway-authorizer.d.ts new file mode 100644 index 0000000..db55054 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/api-gateway-authorizer.d.ts @@ -0,0 +1,244 @@ +import { + APIGatewayAuthorizerResultContext, + APIGatewayEventDefaultAuthorizerContext, + APIGatewayEventRequestContextWithAuthorizer, +} from "../common/api-gateway"; +import { Callback, Handler } from "../handler"; +import { APIGatewayEventRequestContextV2 } from "./api-gateway-proxy"; + +export type APIGatewayAuthorizerHandler = Handler; +export type APIGatewayAuthorizerWithContextHandler = + Handler>; + +export type APIGatewayAuthorizerCallback = Callback; +export type APIGatewayAuthorizerWithContextCallback = + Callback>; + +export type APIGatewayTokenAuthorizerHandler = Handler; +export type APIGatewayTokenAuthorizerWithContextHandler = + Handler>; + +export type APIGatewayRequestAuthorizerHandler = Handler; +export type APIGatewayRequestAuthorizerWithContextHandler< + TAuthorizerContext extends APIGatewayAuthorizerResultContext, +> = Handler>; + +export type APIGatewayAuthorizerEvent = APIGatewayTokenAuthorizerEvent | APIGatewayRequestAuthorizerEvent; + +export interface APIGatewayTokenAuthorizerEvent { + type: "TOKEN"; + methodArn: string; + authorizationToken: string; +} + +export interface APIGatewayRequestAuthorizerEventV2 { + version: string; + type: "REQUEST"; + routeArn: string; + identitySource: string[]; + routeKey: string; + rawPath: string; + rawQueryString: string; + cookies: string[]; + headers?: APIGatewayRequestAuthorizerEventHeaders; + queryStringParameters?: APIGatewayRequestAuthorizerEventQueryStringParameters; + requestContext: APIGatewayEventRequestContextV2; + pathParameters?: APIGatewayRequestAuthorizerEventPathParameters; + stageVariables?: APIGatewayRequestAuthorizerEventStageVariables; +} + +export interface APIGatewayRequestAuthorizerEventHeaders { + [name: string]: string | undefined; +} + +export interface APIGatewayRequestAuthorizerEventMultiValueHeaders { + [name: string]: string[] | undefined; +} + +export interface APIGatewayRequestAuthorizerEventPathParameters { + [name: string]: string | undefined; +} + +export interface APIGatewayRequestAuthorizerEventQueryStringParameters { + [name: string]: string | undefined; +} + +export interface APIGatewayRequestAuthorizerEventMultiValueQueryStringParameters { + [name: string]: string[] | undefined; +} + +export interface APIGatewayRequestAuthorizerEventStageVariables { + [name: string]: string | undefined; +} + +// Note, when invoked by the tester in the AWS web console, the map values can be null, +// but they will be empty objects in the real object. +// Worse, it will include "body" and "isBase64Encoded" properties, unlike the real call! +// See https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-lambda-authorizer-input.html for the +// formal definition. 
+export interface APIGatewayRequestAuthorizerEvent { + type: "REQUEST"; + methodArn: string; + resource: string; + path: string; + httpMethod: string; + headers: APIGatewayRequestAuthorizerEventHeaders | null; + multiValueHeaders: APIGatewayRequestAuthorizerEventMultiValueHeaders | null; + pathParameters: APIGatewayRequestAuthorizerEventPathParameters | null; + queryStringParameters: APIGatewayRequestAuthorizerEventQueryStringParameters | null; + multiValueQueryStringParameters: APIGatewayRequestAuthorizerEventMultiValueQueryStringParameters | null; + stageVariables: APIGatewayRequestAuthorizerEventStageVariables | null; + requestContext: APIGatewayEventRequestContextWithAuthorizer; +} + +export interface APIGatewayAuthorizerResult { + principalId: string; + policyDocument: PolicyDocument; + context?: APIGatewayAuthorizerResultContext | null | undefined; + usageIdentifierKey?: string | null | undefined; +} + +// Separate type so the context property is required, without pulling complex type magic. +export interface APIGatewayAuthorizerWithContextResult { + principalId: string; + policyDocument: PolicyDocument; + context: TAuthorizerContext; + usageIdentifierKey?: string | null | undefined; +} + +/** + * IAM Authorizer Types + */ +export interface APIGatewayIAMAuthorizerResult { + principalId: string; + policyDocument: PolicyDocument; + context?: APIGatewayAuthorizerResultContext | null | undefined; + usageIdentifierKey?: string | null | undefined; +} + +export interface APIGatewayIAMAuthorizerWithContextResult< + TAuthorizerContext extends APIGatewayAuthorizerResultContext, +> { + principalId: string; + policyDocument: PolicyDocument; + context: TAuthorizerContext; + usageIdentifierKey?: string | null | undefined; +} + +export type APIGatewayRequestIAMAuthorizerHandlerV2 = Handler< + APIGatewayRequestAuthorizerEventV2, + APIGatewayIAMAuthorizerResult +>; + +export type APIGatewayRequestIAMAuthorizerV2WithContextHandler< + TAuthorizerContext extends APIGatewayAuthorizerResultContext, +> = Handler>; + +/** + * Simple Lambda Authorizer Types V2 spec with simple response + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-lambda-authorizer.html + */ +export interface APIGatewaySimpleAuthorizerResult { + isAuthorized: boolean; +} + +export interface APIGatewaySimpleAuthorizerWithContextResult + extends APIGatewaySimpleAuthorizerResult +{ + context: TAuthorizerContext; +} + +export type APIGatewayRequestSimpleAuthorizerHandlerV2 = Handler< + APIGatewayRequestAuthorizerEventV2, + APIGatewaySimpleAuthorizerResult +>; + +export type APIGatewayRequestSimpleAuthorizerHandlerV2WithContext = Handler< + APIGatewayRequestAuthorizerEventV2, + APIGatewaySimpleAuthorizerWithContextResult +>; + +// Legacy event / names + +/** @deprecated Use APIGatewayAuthorizerHandler or a subtype */ +export type CustomAuthorizerHandler = Handler; + +// This one is actually fine. 
+export type CustomAuthorizerCallback = APIGatewayAuthorizerCallback; + +/** @deprecated Use APIGatewayAuthorizerEvent or a subtype */ +export interface CustomAuthorizerEvent { + type: string; + methodArn: string; + authorizationToken?: string | undefined; + resource?: string | undefined; + path?: string | undefined; + httpMethod?: string | undefined; + headers?: { [name: string]: string } | undefined; + multiValueHeaders?: { [name: string]: string[] } | undefined; + pathParameters?: { [name: string]: string } | null | undefined; + queryStringParameters?: { [name: string]: string } | null | undefined; + multiValueQueryStringParameters?: { [name: string]: string[] } | null | undefined; + stageVariables?: { [name: string]: string } | undefined; + requestContext?: APIGatewayEventRequestContextWithAuthorizer | undefined; + domainName?: string | undefined; + apiId?: string | undefined; +} + +export type CustomAuthorizerResult = APIGatewayAuthorizerResult; +export type AuthResponse = APIGatewayAuthorizerResult; +export type AuthResponseContext = APIGatewayAuthorizerResultContext; + +/** + * API Gateway CustomAuthorizer AuthResponse.PolicyDocument. + * https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-lambda-authorizer-output.html + * https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#Condition + */ +export interface PolicyDocument { + Version: string; + Id?: string | undefined; + Statement: Statement[]; +} + +/** + * API Gateway CustomAuthorizer AuthResponse.PolicyDocument.Condition. + * https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-control-access-policy-language-overview.html + * https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_condition.html + */ +export interface ConditionBlock { + [condition: string]: Condition | Condition[]; +} + +export interface Condition { + [key: string]: string | string[]; +} + +/** + * API Gateway CustomAuthorizer AuthResponse.PolicyDocument.Statement. 
+ * https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-control-access-policy-language-overview.html + * https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html + */ +export type Statement = BaseStatement & StatementAction & (StatementResource | StatementPrincipal); + +export interface BaseStatement { + Effect: string; + Sid?: string | undefined; + Condition?: ConditionBlock | undefined; +} + +export type PrincipalValue = { [key: string]: string | string[] } | string | string[]; +export interface MaybeStatementPrincipal { + Principal?: PrincipalValue | undefined; + NotPrincipal?: PrincipalValue | undefined; +} +export interface MaybeStatementResource { + Resource?: string | string[] | undefined; + NotResource?: string | string[] | undefined; +} +export type StatementAction = { Action: string | string[] } | { NotAction: string | string[] }; +export type StatementResource = + & MaybeStatementPrincipal + & ({ Resource: string | string[] } | { NotResource: string | string[] }); +export type StatementPrincipal = + & MaybeStatementResource + & ({ Principal: PrincipalValue } | { NotPrincipal: PrincipalValue }); diff --git a/dist/node_modules/@types/aws-lambda/trigger/api-gateway-proxy.d.ts b/dist/node_modules/@types/aws-lambda/trigger/api-gateway-proxy.d.ts new file mode 100644 index 0000000..4543219 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/api-gateway-proxy.d.ts @@ -0,0 +1,336 @@ +import { + APIGatewayEventClientCertificate, + APIGatewayEventDefaultAuthorizerContext, + APIGatewayEventRequestContextWithAuthorizer, +} from "../common/api-gateway"; +import { Callback, CognitoIdentity, Handler } from "../handler"; + +/** + * Works with Lambda Proxy Integration for Rest API or HTTP API integration Payload Format version 1.0 + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + */ +export type APIGatewayProxyHandler = Handler; +/** + * Works with Lambda Proxy Integration for Rest API or HTTP API integration Payload Format version 1.0 + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + */ +export type APIGatewayProxyCallback = Callback; + +/** + * Works with HTTP API integration Payload Format version 2.0 + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + */ +export type APIGatewayProxyHandlerV2 = Handler>; + +/** + * Works with HTTP API integration Payload Format version 2.0 + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-integration-requests.html + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-mapping-template-reference.html + */ +export type APIGatewayProxyWebsocketHandlerV2 = Handler< + APIGatewayProxyWebsocketEventV2, + APIGatewayProxyResultV2 +>; + +/** + * Works with HTTP API integration Payload Format version 2.0 adds JWT Authroizer to RequestContext + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + */ +export type APIGatewayProxyHandlerV2WithJWTAuthorizer = Handler< + APIGatewayProxyEventV2WithJWTAuthorizer, + APIGatewayProxyResultV2 +>; + +/** + * Works with HTTP API integration Payload Format version 2.0 adds Lambda Authroizer to RequestContext + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + */ +export type 
APIGatewayProxyHandlerV2WithLambdaAuthorizer = Handler< + APIGatewayProxyEventV2WithLambdaAuthorizer, + APIGatewayProxyResultV2 +>; + +/** + * Works with HTTP API integration Payload Format version 2.0 adds IAM Authroizer to RequestContext + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + */ +export type APIGatewayProxyHandlerV2WithIAMAuthorizer = Handler< + APIGatewayProxyEventV2WithIAMAuthorizer, + APIGatewayProxyResultV2 +>; + +/** + * Works with HTTP API integration Payload Format version 2.0 + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + */ +export type APIGatewayProxyCallbackV2 = Callback; + +/** + * Works with Lambda Proxy Integration for Rest API or HTTP API integration Payload Format version 1.0 + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + */ +export type APIGatewayProxyEvent = APIGatewayProxyEventBase; + +export type APIGatewayProxyWithLambdaAuthorizerHandler = Handler< + APIGatewayProxyWithLambdaAuthorizerEvent, + APIGatewayProxyResult +>; + +export type APIGatewayProxyWithCognitoAuthorizerHandler = Handler< + APIGatewayProxyWithCognitoAuthorizerEvent, + APIGatewayProxyResult +>; + +export type APIGatewayProxyWithLambdaAuthorizerEvent = APIGatewayProxyEventBase< + APIGatewayEventLambdaAuthorizerContext +>; + +export type APIGatewayProxyWithLambdaAuthorizerEventRequestContext = + APIGatewayEventRequestContextWithAuthorizer>; + +// API Gateway proxy integration mangles the context from a custom authorizer, +// converting all number or boolean properties to string, and adding some extra properties. +export type APIGatewayEventLambdaAuthorizerContext = + & { + [P in keyof TAuthorizerContext]: TAuthorizerContext[P] extends null ? null : string; + } + & { + principalId: string; + integrationLatency: number; + }; + +export type APIGatewayProxyWithCognitoAuthorizerEvent = APIGatewayProxyEventBase; + +// All claims are coerced into strings. 
+export interface APIGatewayProxyCognitoAuthorizer { + claims: { + [name: string]: string; + }; +} + +export interface APIGatewayProxyEventHeaders { + [name: string]: string | undefined; +} + +export interface APIGatewayProxyEventMultiValueHeaders { + [name: string]: string[] | undefined; +} + +export interface APIGatewayProxyEventPathParameters { + [name: string]: string | undefined; +} + +export interface APIGatewayProxyEventQueryStringParameters { + [name: string]: string | undefined; +} + +export interface APIGatewayProxyEventMultiValueQueryStringParameters { + [name: string]: string[] | undefined; +} + +export interface APIGatewayProxyEventStageVariables { + [name: string]: string | undefined; +} + +export interface APIGatewayProxyEventBase { + body: string | null; + headers: APIGatewayProxyEventHeaders; + multiValueHeaders: APIGatewayProxyEventMultiValueHeaders; + httpMethod: string; + isBase64Encoded: boolean; + path: string; + pathParameters: APIGatewayProxyEventPathParameters | null; + queryStringParameters: APIGatewayProxyEventQueryStringParameters | null; + multiValueQueryStringParameters: APIGatewayProxyEventMultiValueQueryStringParameters | null; + stageVariables: APIGatewayProxyEventStageVariables | null; + requestContext: APIGatewayEventRequestContextWithAuthorizer; + resource: string; +} + +/** + * Works with Lambda Proxy Integration for Rest API or HTTP API integration Payload Format version 1.0 + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + */ +export interface APIGatewayProxyResult { + statusCode: number; + headers?: + | { + [header: string]: boolean | number | string; + } + | undefined; + multiValueHeaders?: + | { + [header: string]: Array; + } + | undefined; + body: string; + isBase64Encoded?: boolean | undefined; +} + +/** + * Works with HTTP API integration Payload Format version 2.0 + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + */ +export interface APIGatewayEventRequestContextV2 { + accountId: string; + apiId: string; + authentication?: { + clientCert: APIGatewayEventClientCertificate; + }; + domainName: string; + domainPrefix: string; + http: { + method: string; + path: string; + protocol: string; + sourceIp: string; + userAgent: string; + }; + requestId: string; + routeKey: string; + stage: string; + time: string; + timeEpoch: number; +} +/** + * Works with Websocket API integration Payload Format version 2.0 + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-integration-requests.html + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-websocket-api-mapping-template-reference.html + */ +export interface APIGatewayEventWebsocketRequestContextV2 { + routeKey: string; + messageId: string; + eventType: "CONNECT" | "MESSAGE" | "DISCONNECT"; + extendedRequestId: string; + requestTime: string; + messageDirection: "IN"; + stage: string; + connectedAt: number; + requestTimeEpoch: number; + requestId: string; + domainName: string; + connectionId: string; + apiId: string; +} + +/** + * Proxy Event with adaptable requestContext for different authorizer scenarios + */ +export interface APIGatewayProxyEventV2WithRequestContext { + version: string; + routeKey: string; + rawPath: string; + rawQueryString: string; + cookies?: string[]; + headers: APIGatewayProxyEventHeaders; + queryStringParameters?: APIGatewayProxyEventQueryStringParameters; + requestContext: 
TRequestContext; + body?: string; + pathParameters?: APIGatewayProxyEventPathParameters; + isBase64Encoded: boolean; + stageVariables?: APIGatewayProxyEventStageVariables; +} + +/** + * Proxy Websocket Event with adaptable requestContext for different authorizer scenarios + */ +export interface APIGatewayProxyWebsocketEventV2WithRequestContext { + requestContext: TRequestContext; + body?: string; + isBase64Encoded: boolean; + stageVariables?: APIGatewayProxyEventStageVariables; +} + +/** + * Lambda Authorizer Payload + */ +export interface APIGatewayEventRequestContextLambdaAuthorizer { + lambda: TAuthorizerContext; +} + +/** + * JWT Authorizer Payload + */ +export interface APIGatewayEventRequestContextJWTAuthorizer { + principalId: string; + integrationLatency: number; + jwt: { + claims: { [name: string]: string | number | boolean | string[] }; + scopes: string[]; + }; +} + +/** + * IAM Authorizer Payload + */ +export interface APIGatewayEventRequestContextIAMAuthorizer { + iam: { + accessKey: string; + accountId: string; + callerId: string; + cognitoIdentity: null; + principalOrgId: string; + userArn: string; + userId: string; + }; +} + +export type APIGatewayProxyEventV2WithJWTAuthorizer = APIGatewayProxyEventV2WithRequestContext< + APIGatewayEventRequestContextV2WithAuthorizer +>; + +export type APIGatewayProxyEventV2WithLambdaAuthorizer = APIGatewayProxyEventV2WithRequestContext< + APIGatewayEventRequestContextV2WithAuthorizer> +>; + +/** + * Event type when invoking Lambda function URLs with IAM authorizer + */ +export type APIGatewayProxyEventV2WithIAMAuthorizer = APIGatewayProxyEventV2WithRequestContext< + APIGatewayEventRequestContextV2WithAuthorizer +>; + +export interface APIGatewayEventRequestContextV2WithAuthorizer extends APIGatewayEventRequestContextV2 { + authorizer: TAuthorizer; +} + +/** + * Default Proxy event with no Authorizer + */ +export type APIGatewayProxyEventV2 = APIGatewayProxyEventV2WithRequestContext; + +/** + * Default Websocket Proxy event with no Authorizer + */ +export type APIGatewayProxyWebsocketEventV2 = APIGatewayProxyWebsocketEventV2WithRequestContext< + APIGatewayEventWebsocketRequestContextV2 +>; + +/** + * Works with HTTP API integration Payload Format version 2.0 + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + */ +export type APIGatewayProxyResultV2 = APIGatewayProxyStructuredResultV2 | string | T; + +/** + * Interface for structured response with `statusCode` and`headers` + * Works with HTTP API integration Payload Format version 2.0 + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + */ +export interface APIGatewayProxyStructuredResultV2 { + statusCode?: number | undefined; + headers?: + | { + [header: string]: boolean | number | string; + } + | undefined; + body?: string | undefined; + isBase64Encoded?: boolean | undefined; + cookies?: string[] | undefined; +} + +// Legacy names +export type ProxyHandler = APIGatewayProxyHandler; +export type ProxyCallback = APIGatewayProxyCallback; +export type APIGatewayEvent = APIGatewayProxyEvent; +export type ProxyResult = APIGatewayProxyResult; diff --git a/dist/node_modules/@types/aws-lambda/trigger/appsync-resolver.d.ts b/dist/node_modules/@types/aws-lambda/trigger/appsync-resolver.d.ts new file mode 100644 index 0000000..ff985e3 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/appsync-resolver.d.ts @@ -0,0 +1,116 @@ +import { Handler } from "../handler"; + 
+export type AppSyncResolverHandler | null> = Handler< + AppSyncResolverEvent, + TResult +>; + +// https:docs.aws.amazon.com/appsync/latest/devguide/tutorial-lambda-resolvers.html#advanced-use-case-batching +export type AppSyncBatchResolverHandler | null> = Handler< + Array>, + TResult[] +>; + +/** + * @deprecated Use {@link AppSyncAuthorizerHandler} + */ +export type AppSyncAuthorizerHander = AppSyncAuthorizerHandler; + +/** + * See https://docs.aws.amazon.com/appsync/latest/devguide/security-authz.html#aws-lambda-authorization + * + * @param TResolverContext type of the resolverContext object that you can return from the handler + */ +export type AppSyncAuthorizerHandler = Handler< + AppSyncAuthorizerEvent, + AppSyncAuthorizerResult +>; + +export interface AppSyncResolverEventHeaders { + [name: string]: string | undefined; +} + +export type AppSyncIdentity = + | AppSyncIdentityIAM + | AppSyncIdentityCognito + | AppSyncIdentityOIDC + | AppSyncIdentityLambda + | undefined + | null; + +/** + * See https://docs.aws.amazon.com/appsync/latest/devguide/resolver-context-reference.html + * + * @param TArguments type of the arguments + * @param TSource type of the source + */ +// Maintainer's note: Some of these properties are shared with the Amplify resolver. +// It may be worth checking if changes here may be applicable there too. +export interface AppSyncResolverEvent | null> { + arguments: TArguments; + identity?: AppSyncIdentity; + source: TSource; + request: { + headers: AppSyncResolverEventHeaders; + /** The API's custom domain if used for the request. */ + domainName: string | null; + }; + info: { + selectionSetList: string[]; + selectionSetGraphQL: string; + parentTypeName: string; + fieldName: string; + variables: { [key: string]: any }; + }; + prev: { result: { [key: string]: any } } | null; + stash: { [key: string]: any }; +} + +export interface AppSyncAuthorizerEvent { + authorizationToken: string; + requestContext: { + apiId: string; + accountId: string; + requestId: string; + queryString: string; + operationName?: string; + variables: { [key: string]: any }; + }; +} + +export interface AppSyncAuthorizerResult { + isAuthorized: boolean; + resolverContext?: TResolverContext; + deniedFields?: string[]; + ttlOverride?: number; +} +export interface AppSyncIdentityIAM { + accountId: string; + cognitoIdentityPoolId: string; + cognitoIdentityId: string; + sourceIp: string[]; + username: string; + userArn: string; + cognitoIdentityAuthType: string; + cognitoIdentityAuthProvider: string; +} + +export interface AppSyncIdentityCognito { + sub: string; + issuer: string; + username: string; + claims: any; + sourceIp: string[]; + defaultAuthStrategy: string; + groups: string[] | null; +} + +export interface AppSyncIdentityOIDC { + claims: any; + issuer: string; + sub: string; +} + +export interface AppSyncIdentityLambda { + resolverContext: any; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/autoscaling.d.ts b/dist/node_modules/@types/aws-lambda/trigger/autoscaling.d.ts new file mode 100644 index 0000000..e3ef193 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/autoscaling.d.ts @@ -0,0 +1,41 @@ +import { Callback, Handler } from "../handler"; + +export type AutoScalingScaleInHandler = Handler; +export type AutoScalingScaleInCallback = Callback; + +export type AutoScalingInstanceMarketOption = "spot" | "on-demand"; + +export type AutoScalingScaleInCause = "SCALE_IN" | "INSTANCE_REFRESH" | "MAX_INSTANCE_LIFETIME" | "REBALANCE"; + +export interface 
AutoScalingTerminationRequest { + AvailabilityZone: string; + Capacity: number; + InstanceMarketOption: AutoScalingInstanceMarketOption; +} + +export interface AutoScalingInstanceRecord { + AvailabilityZone: string; + InstanceId: string; + InstanceType: string; + InstanceMarketOption: AutoScalingInstanceMarketOption; +} + +/** + * An Auto Scaling Group may trigger a Scale In Event when you have attached + * a custom termination policy function. This event and the expected response are + * described in the Auto Scaling docs at: + * + * https://docs.aws.amazon.com/autoscaling/ec2/userguide/lambda-custom-termination-policy.html + */ +export interface AutoScalingScaleInEvent { + AutoScalingGroupARN: string; + AutoScalingGroupName: string; + CapacityToTerminate: AutoScalingTerminationRequest[]; + Instances: AutoScalingInstanceRecord[]; + Cause: AutoScalingScaleInCause; + HasMoreInstances?: boolean; +} + +export interface AutoScalingScaleInResult { + InstanceIDs: string[]; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/cdk-custom-resource.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cdk-custom-resource.d.ts new file mode 100644 index 0000000..cfb46e8 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cdk-custom-resource.d.ts @@ -0,0 +1,67 @@ +import { Callback, Handler } from "../handler"; +import { CloudFormationCustomResourceEvent } from "./cloudformation-custom-resource"; + +// The CDK docs only specify 'important' properties, but in reality the incoming event +// to the Lambda matches that of a traditional custom resource. +// This includes the ResponseURL property which should not be used as the framework +// itself will deal with delivering responses. +export type CdkCustomResourceEvent = CloudFormationCustomResourceEvent & { + /** + * **This URL should not be used.** The CDK Provider Framework will call this URL + * automatically based on the response produced by the Lambda handler. + */ + ResponseURL: string; +}; + +/** + * A custom resource based on the AWS CDK custom resource Provider Framework. + * This is not to be confused with traditional CloudFormation custom resources. + * @link https://docs.aws.amazon.com/cdk/api/latest/docs/custom-resources-readme.html#handling-lifecycle-events-onevent + */ +export type CdkCustomResourceHandler = Handler; +export type CdkCustomResourceCallback = Callback; + +export interface CdkCustomResourceResponse { + PhysicalResourceId?: string; + Data?: + | { + [Key: string]: any; + } + | undefined; + // Any extra properties will be provided to the isComplete handler for asynchronous resources. + [Key: string]: any; +} + +// IsComplete events will contain all normal request fields, as well as those returned from +// the initial onEvent handler. +export type CdkCustomResourceIsCompleteEvent = CdkCustomResourceEvent & CdkCustomResourceResponse; + +export type CdkCustomResourceIsCompleteResponse = + | CdkCustomResourceIsCompleteResponseSuccess + | CdkCustomResourceIsCompleteResponseWaiting; + +export interface CdkCustomResourceIsCompleteResponseSuccess { + IsComplete: true; + /** + * This will be merged with the `Data` property of the onEvent handler's response. + */ + Data?: + | { + [Key: string]: any; + } + | undefined; +} + +export interface CdkCustomResourceIsCompleteResponseWaiting { + IsComplete: false; +} + +/** + * An asynchronous custom resource handler. 
+ * @link https://docs.aws.amazon.com/cdk/api/latest/docs/custom-resources-readme.html#asynchronous-providers-iscomplete + */ +export type CdkCustomResourceIsCompleteHandler = Handler< + CdkCustomResourceIsCompleteEvent, + CdkCustomResourceIsCompleteResponse +>; +export type CdkCustomResourceIsCompleteCallback = Callback; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cloudformation-custom-resource.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cloudformation-custom-resource.d.ts new file mode 100644 index 0000000..813e98c --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cloudformation-custom-resource.d.ts @@ -0,0 +1,70 @@ +import { Handler } from "../handler"; + +// Note, responses are *not* lambda results, they are sent to the event ResponseURL. +export type CloudFormationCustomResourceHandler = Handler; + +export type CloudFormationCustomResourceEvent = + | CloudFormationCustomResourceCreateEvent + | CloudFormationCustomResourceUpdateEvent + | CloudFormationCustomResourceDeleteEvent; + +export type CloudFormationCustomResourceResponse = + | CloudFormationCustomResourceSuccessResponse + | CloudFormationCustomResourceFailedResponse; + +/** + * CloudFormation Custom Resource event and response + * http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/crpg-ref.html + */ +export interface CloudFormationCustomResourceEventCommon { + ServiceToken: string; + ResponseURL: string; + StackId: string; + RequestId: string; + LogicalResourceId: string; + ResourceType: string; + ResourceProperties: { + ServiceToken: string; + [Key: string]: any; + }; +} + +export interface CloudFormationCustomResourceCreateEvent extends CloudFormationCustomResourceEventCommon { + RequestType: "Create"; +} + +export interface CloudFormationCustomResourceUpdateEvent extends CloudFormationCustomResourceEventCommon { + RequestType: "Update"; + PhysicalResourceId: string; + OldResourceProperties: { + [Key: string]: any; + }; +} + +export interface CloudFormationCustomResourceDeleteEvent extends CloudFormationCustomResourceEventCommon { + RequestType: "Delete"; + PhysicalResourceId: string; +} + +export interface CloudFormationCustomResourceResponseCommon { + PhysicalResourceId: string; + StackId: string; + RequestId: string; + LogicalResourceId: string; + Data?: + | { + [Key: string]: any; + } + | undefined; + NoEcho?: boolean | undefined; +} + +export interface CloudFormationCustomResourceSuccessResponse extends CloudFormationCustomResourceResponseCommon { + Status: "SUCCESS"; + Reason?: string | undefined; +} + +export interface CloudFormationCustomResourceFailedResponse extends CloudFormationCustomResourceResponseCommon { + Status: "FAILED"; + Reason: string; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/cloudfront-request.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cloudfront-request.d.ts new file mode 100644 index 0000000..ace26c7 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cloudfront-request.d.ts @@ -0,0 +1,22 @@ +import type { CloudFrontEvent, CloudFrontRequest, CloudFrontResultResponse } from "../common/cloudfront"; +import type { Callback, Handler } from "../handler"; + +export type CloudFrontRequestHandler = Handler; +export type CloudFrontRequestCallback = Callback; + +export interface CloudFrontRequestEventRecord { + cf: CloudFrontEvent & { + request: CloudFrontRequest; + }; +} + +/** + * CloudFront viewer request or origin request event + * + * 
https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/lambda-event-structure.html#lambda-event-structure-request + */ +export interface CloudFrontRequestEvent { + Records: CloudFrontRequestEventRecord[]; +} + +export type CloudFrontRequestResult = undefined | null | CloudFrontResultResponse | CloudFrontRequest; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cloudfront-response.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cloudfront-response.d.ts new file mode 100644 index 0000000..86c1a1c --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cloudfront-response.d.ts @@ -0,0 +1,21 @@ +import { CloudFrontEvent, CloudFrontRequest, CloudFrontResponse, CloudFrontResultResponse } from "../common/cloudfront"; +import { Callback, Handler } from "../handler"; + +export type CloudFrontResponseHandler = Handler; +export type CloudFrontResponseCallback = Callback; + +/** + * CloudFront viewer response or origin response event + * + * https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/lambda-event-structure.html#lambda-event-structure-response + */ +export interface CloudFrontResponseEvent { + Records: Array<{ + cf: CloudFrontEvent & { + readonly request: Pick>; + response: CloudFrontResponse; + }; + }>; +} + +export type CloudFrontResponseResult = undefined | null | CloudFrontResultResponse; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cloudwatch-alarm.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cloudwatch-alarm.d.ts new file mode 100644 index 0000000..7b82134 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cloudwatch-alarm.d.ts @@ -0,0 +1,69 @@ +import { Handler } from "../handler"; + +export type CloudWatchAlarmHandler = Handler; + +export interface CloudWatchAlarmEvent { + source: string; + alarmArn: string; + accountId: string; + time: string; + region: string; + alarmData: CloudWatchAlarmData; +} + +export interface CloudWatchAlarmState { + value: string; + reason: string; + timestamp: string; + reasonData?: string; + actionsSuppressedBy?: string; + actionsSuppressedReason?: string; +} + +export interface CloudWatchMetric { + namespace: string; + name: string; + dimensions?: Record; +} + +export interface CloudWatchMetricStat { + metric: CloudWatchMetric; + period: number; + stat: string; + unit?: string; +} + +export interface CloudWatchAlarmMetric { + id: string; + metricStat: CloudWatchMetricStat; + returnData: boolean; +} + +export interface CloudWatchAlarmExpression { + id: string; + expression: string; + label: string; + returnData: boolean; +} + +export type CloudWatchAlarmMetricDataQuery = CloudWatchAlarmMetric | CloudWatchAlarmExpression; + +export interface CloudWatchAlarmConfiguration { + metrics: CloudWatchAlarmMetricDataQuery[]; + description?: string; +} + +export interface CloudWatchAlarmCompositeConfiguration { + alarmRule: string; + description?: string; + actionsSuppressor?: string; + actionsSuppressorWaitPeriod?: number; + actionsSuppressorExtensionPeriod?: number; +} + +export interface CloudWatchAlarmData { + alarmName: string; + state: CloudWatchAlarmState; + previousState: CloudWatchAlarmState; + configuration: CloudWatchAlarmConfiguration | CloudWatchAlarmCompositeConfiguration; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/cloudwatch-events.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cloudwatch-events.d.ts new file mode 100644 index 0000000..a857dec --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cloudwatch-events.d.ts @@ -0,0 +1,8 @@ +import { 
EventBridgeEvent, EventBridgeHandler } from "../trigger/eventbridge"; + +export type ScheduledHandler = EventBridgeHandler<"Scheduled Event", TDetail, void>; + +/** + * https://docs.aws.amazon.com/lambda/latest/dg/with-scheduled-events.html + */ +export interface ScheduledEvent extends EventBridgeEvent<"Scheduled Event", TDetail> {} diff --git a/dist/node_modules/@types/aws-lambda/trigger/cloudwatch-logs.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cloudwatch-logs.d.ts new file mode 100644 index 0000000..5fc3d99 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cloudwatch-logs.d.ts @@ -0,0 +1,37 @@ +import { Handler } from "../handler"; + +export type CloudWatchLogsHandler = Handler; + +/** + * See http://docs.aws.amazon.com/lambda/latest/dg/eventsources.html#eventsources-cloudwatch-logs + */ +export interface CloudWatchLogsEvent { + awslogs: CloudWatchLogsEventData; +} + +export interface CloudWatchLogsEventData { + data: string; +} + +export interface CloudWatchLogsDecodedData { + owner: string; + logGroup: string; + logStream: string; + subscriptionFilters: string[]; + messageType: string; + logEvents: CloudWatchLogsLogEvent[]; +} + +export interface CloudWatchLogsLogEventExtractedFields { + [name: string]: string | undefined; +} + +/** + * See http://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/SubscriptionFilters.html#LambdaFunctionExample + */ +export interface CloudWatchLogsLogEvent { + id: string; + timestamp: number; + message: string; + extractedFields?: CloudWatchLogsLogEventExtractedFields | undefined; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/codebuild-cloudwatch-state.d.ts b/dist/node_modules/@types/aws-lambda/trigger/codebuild-cloudwatch-state.d.ts new file mode 100644 index 0000000..32bccfe --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/codebuild-cloudwatch-state.d.ts @@ -0,0 +1,104 @@ +import { EventBridgeEvent, EventBridgeHandler } from "./eventbridge"; + +export type CodeBuildCloudWatchStateHandler = EventBridgeHandler< + "CodeBuild Build State Change", + CodeBuildStateEventDetail, + void +>; + +export type CodeBuildStateType = "IN_PROGRESS" | "SUCCEEDED" | "FAILED" | "STOPPED"; +export type CodeBuildPhaseType = + | "COMPLETED" + | "FINALIZING" + | "UPLOAD_ARTIFACTS" + | "POST_BUILD" + | "BUILD" + | "PRE_BUILD" + | "INSTALL" + | "QUEUED" + | "DOWNLOAD_SOURCE" + | "PROVISIONING" + | "SUBMITTED"; +export type CodeBuildPhaseStatusType = "TIMED_OUT" | "STOPPED" | "FAILED" | "SUCCEEDED" | "FAULT" | "CLIENT_ERROR"; +export type CodeBuildCacheType = "NO_CACHE" | "LOCAL" | "S3"; +export type CodeBuildSourceLocationType = + | "CODECOMMIT" + | "CODEPIPELINE" + | "GITHUB" + | "GITHUB_ENTERPRISE" + | "BITBUCKET" + | "S3" + | "NO_SOURCE"; +export type CodeBuildEnvironmentType = + | "LINUX_CONTAINER" + | "LINUX_GPU_CONTAINER" + | "WINDOWS_CONTAINER" + | "ARM_CONTAINER"; +export type CodeBuildEnvironmentPullCredentialsType = "CODEBUILD" | "SERVICE_ROLE"; +export type CodeBuildEnvironmentComputeType = + | "BUILD_GENERAL1_SMALL" + | "BUILD_GENERAL1_MEDIUM" + | "BUILD_GENERAL1_LARGE" + | "BUILD_GENERAL1_2XLARGE"; +export type CodeBuildEnvironmentVariableType = "PARAMETER_STORE" | "PLAINTEXT" | "SECRETS_MANAGER"; + +export interface CodeBuildStateEventDetail { + "build-status": CodeBuildStateType; + "project-name": string; + "build-id": string; + "current-phase": CodeBuildPhaseType; + "current-phase-context": string; + version: string; + "additional-information": { + cache: { + type: CodeBuildCacheType; + }; + "build-number": 
number; + "timeout-in-minutes": number; + "build-complete": boolean; + initiator: string; + "build-start-time": string; + source: { + buildspec: string; + location: string; + type: CodeBuildSourceLocationType; + }; + "source-version": string; + artifact: { + location: string; + }; + environment: { + image: string; + "privileged-mode": boolean; + "image-pull-credentials-type"?: CodeBuildEnvironmentPullCredentialsType | undefined; + "compute-type": CodeBuildEnvironmentComputeType; + type: CodeBuildEnvironmentType; + "environment-variables": Array<{ + name: string; + type?: CodeBuildEnvironmentVariableType | undefined; + value: string; + }>; + }; + "project-file-system-locations": []; + logs: { + "group-name": string; + "stream-name": string; + "deep-link": string; + }; + phases: Array<{ + "phase-context"?: string[] | undefined; // Not available for COMPLETED phase-type + "start-time": string; + "end-time"?: string | undefined; // Not available for COMPLETED phase-type + "duration-in-seconds"?: number | undefined; // Not available for COMPLETED phase-type + "phase-type": CodeBuildPhaseType; + "phase-status"?: CodeBuildPhaseStatusType | undefined; // Not available for COMPLETED phase-type + }>; + "queued-timeout-in-minutes": number; + }; +} + +export interface CodeBuildCloudWatchStateEvent + extends EventBridgeEvent<"CodeBuild Build State Change", CodeBuildStateEventDetail> +{ + source: "aws.codebuild"; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/codecommit.d.ts b/dist/node_modules/@types/aws-lambda/trigger/codecommit.d.ts new file mode 100644 index 0000000..c1bd1a8 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/codecommit.d.ts @@ -0,0 +1,35 @@ +import { Handler } from "../handler"; + +export type CodeCommitHandler = Handler; + +/** + * CodeCommit events + * https://docs.aws.amazon.com/lambda/latest/dg/services-codecommit.html + */ +export interface CodeCommitTrigger { + awsRegion: string; + codecommit: { + references: Array<{ + commit: string; + created?: boolean; + deleted?: boolean; + ref: string; + }>; + }; + customData?: string; + eventId: string; + eventName: string; + eventPartNumber: number; + eventSource: string; + eventSourceARN: string; + eventTime: string; + eventTotalParts: number; + eventTriggerConfigId: string; + eventTriggerName: string; + eventVersion: string; + userIdentityARN: string; +} + +export interface CodeCommitTriggerEvent { + Records: CodeCommitTrigger[]; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/codepipeline-cloudwatch-action.d.ts b/dist/node_modules/@types/aws-lambda/trigger/codepipeline-cloudwatch-action.d.ts new file mode 100644 index 0000000..d4a541e --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/codepipeline-cloudwatch-action.d.ts @@ -0,0 +1,31 @@ +import { Handler } from "../handler"; + +export type CodePipelineCloudWatchActionHandler = Handler; + +export type CodePipelineActionCategory = "Approval" | "Build" | "Deploy" | "Invoke" | "Source" | "Test"; +export type CodePipelineActionState = "STARTED" | "SUCCEEDED" | "FAILED" | "CANCELED"; + +export interface CodePipelineCloudWatchActionEvent { + version: string; + id: string; + "detail-type": "CodePipeline Action Execution State Change"; + source: "aws.codepipeline"; + account: string; + time: string; + region: string; + resources: string[]; + detail: { + pipeline: string; + version: number; + "execution-id": string; + stage: string; + action: string; + state: CodePipelineActionState; + type: { + owner: "AWS" | "Custom" | "ThirdParty"; + 
category: CodePipelineActionCategory; + provider: string; + version: number; + }; + }; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/codepipeline-cloudwatch-pipeline.d.ts b/dist/node_modules/@types/aws-lambda/trigger/codepipeline-cloudwatch-pipeline.d.ts new file mode 100644 index 0000000..fca317e --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/codepipeline-cloudwatch-pipeline.d.ts @@ -0,0 +1,32 @@ +import { Handler } from "../handler"; + +export type CodePipelineCloudWatchPipelineHandler = Handler; + +export type CodePipelineState = "STARTED" | "SUCCEEDED" | "RESUMED" | "FAILED" | "CANCELED" | "SUPERSEDED"; + +/** + * CodePipeline CloudWatch Events + * https://docs.aws.amazon.com/codepipeline/latest/userguide/detect-state-changes-cloudwatch-events.html + * + * The above CodePipelineEvent is when a lambda is invoked by a CodePipeline. + * These events are when you subscribe to CodePipeline events in CloudWatch. + * + * Their documentation says that detail.version is a string, but it is actually an integer + */ + +export interface CodePipelineCloudWatchPipelineEvent { + version: string; + id: string; + "detail-type": "CodePipeline Pipeline Execution State Change"; + source: "aws.codepipeline"; + account: string; + time: string; + region: string; + resources: string[]; + detail: { + pipeline: string; + version: number; + state: CodePipelineState; + "execution-id": string; + }; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/codepipeline-cloudwatch-stage.d.ts b/dist/node_modules/@types/aws-lambda/trigger/codepipeline-cloudwatch-stage.d.ts new file mode 100644 index 0000000..ad47d82 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/codepipeline-cloudwatch-stage.d.ts @@ -0,0 +1,23 @@ +import { Handler } from "../handler"; + +export type CodePipelineCloudWatchStageHandler = Handler; + +export type CodePipelineStageState = "STARTED" | "SUCCEEDED" | "RESUMED" | "FAILED" | "CANCELED"; + +export interface CodePipelineCloudWatchStageEvent { + version: string; + id: string; + "detail-type": "CodePipeline Stage Execution State Change"; + source: "aws.codepipeline"; + account: string; + time: string; + region: string; + resources: string[]; + detail: { + pipeline: string; + version: number; + "execution-id": string; + stage: string; + state: CodePipelineStageState; + }; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/codepipeline-cloudwatch.d.ts b/dist/node_modules/@types/aws-lambda/trigger/codepipeline-cloudwatch.d.ts new file mode 100644 index 0000000..60cde8b --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/codepipeline-cloudwatch.d.ts @@ -0,0 +1,11 @@ +import { Handler } from "../handler"; +import { CodePipelineCloudWatchActionEvent } from "./codepipeline-cloudwatch-action"; +import { CodePipelineCloudWatchPipelineEvent } from "./codepipeline-cloudwatch-pipeline"; +import { CodePipelineCloudWatchStageEvent } from "./codepipeline-cloudwatch-stage"; + +export type CodePipelineCloudWatchHandler = Handler; + +export type CodePipelineCloudWatchEvent = + | CodePipelineCloudWatchPipelineEvent + | CodePipelineCloudWatchStageEvent + | CodePipelineCloudWatchActionEvent; diff --git a/dist/node_modules/@types/aws-lambda/trigger/codepipeline.d.ts b/dist/node_modules/@types/aws-lambda/trigger/codepipeline.d.ts new file mode 100644 index 0000000..82f4444 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/codepipeline.d.ts @@ -0,0 +1,55 @@ +import { Handler } from "../handler"; + +export type CodePipelineHandler = 
Handler; + +/** + * CodePipeline events + * https://docs.aws.amazon.com/codepipeline/latest/userguide/actions-invoke-lambda-function.html + */ +export interface S3ArtifactLocation { + bucketName: string; + objectKey: string; +} +export interface S3ArtifactStore { + type: "S3"; + s3Location: S3ArtifactLocation; +} + +export type ArtifactLocation = S3ArtifactStore; + +export interface Artifact { + name: string; + revision: string | null; + location: ArtifactLocation; +} + +export interface Credentials { + accessKeyId: string; + secretAccessKey: string; + sessionToken?: string | undefined; +} + +export interface EncryptionKey { + type: string; + id: string; +} + +export interface CodePipelineEvent { + "CodePipeline.job": { + id: string; + accountId: string; + data: { + actionConfiguration: { + configuration: { + FunctionName: string; + UserParameters: string; + }; + }; + inputArtifacts: Artifact[]; + outputArtifacts: Artifact[]; + artifactCredentials: Credentials; + encryptionKey?: (EncryptionKey & { type: "KMS" }) | undefined; + continuationToken?: string | undefined; + }; + }; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/_common.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/_common.d.ts new file mode 100644 index 0000000..19c6f41 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/_common.d.ts @@ -0,0 +1,41 @@ +export interface StringMap { + [name: string]: string; +} + +export type ChallengeName = + | "PASSWORD_VERIFIER" + | "SMS_MFA" + | "DEVICE_SRP_AUTH" + | "DEVICE_PASSWORD_VERIFIER" + | "ADMIN_NO_SRP_AUTH" + | "SRP_A"; + +export interface ChallengeResult { + challengeName: ChallengeName; + challengeResult: boolean; + challengeMetadata?: undefined; +} + +export interface CustomChallengeResult { + challengeName: "CUSTOM_CHALLENGE"; + challengeResult: boolean; + challengeMetadata?: string | undefined; +} + +/** + * Common attributes shared by all User Pool Lambda Trigger Events + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-identity-pools-working-with-aws-lambda-triggers.html#cognito-user-pools-lambda-trigger-event-parameter-shared + */ +export interface BaseTriggerEvent { + version: string; + region: string; + userPoolId: string; + triggerSource: T; + userName: string; + callerContext: { + awsSdkVersion: string; + clientId: string; + }; + request: {}; + response: {}; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/create-auth-challenge.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/create-auth-challenge.d.ts new file mode 100644 index 0000000..78c02b6 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/create-auth-challenge.d.ts @@ -0,0 +1,22 @@ +import { Handler } from "../../handler"; +import { BaseTriggerEvent, ChallengeResult, CustomChallengeResult, StringMap } from "./_common"; + +/** + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-create-auth-challenge.html + */ +export interface CreateAuthChallengeTriggerEvent extends BaseTriggerEvent<"CreateAuthChallenge_Authentication"> { + request: { + userAttributes: StringMap; + challengeName: string; + session: Array; + userNotFound?: boolean | undefined; + clientMetadata?: StringMap | undefined; + }; + response: { + publicChallengeParameters: StringMap; + privateChallengeParameters: StringMap; + challengeMetadata: string; + }; +} + +export type 
CreateAuthChallengeTriggerHandler = Handler; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/custom-email-sender.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/custom-email-sender.d.ts new file mode 100644 index 0000000..be6b4df --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/custom-email-sender.d.ts @@ -0,0 +1,76 @@ +import { Handler } from "../../handler"; +import { BaseTriggerEvent, StringMap } from "./_common"; + +export interface BaseCustomEmailSenderTriggerEvent extends BaseTriggerEvent { + request: { + type: string; + code: string | null; + userAttributes: StringMap; + clientMetadata?: StringMap | undefined; + }; +} + +export type AccountTakeOverActionType = "BLOCK" | "NO_ACTION" | "MFA" | "MFA_IF_CONFIGURED" | "MFA_REQUIRED"; + +export interface CustomEmailSender_AccountTakeOverNotification_UserAttributes { + EVENT_ID: string; + USER_NAME: string; + IP_ADDRESS: string; + ACCOUNT_TAKE_OVER_ACTION: AccountTakeOverActionType; + ONE_CLICK_LINK_VALID: string; + ONE_CLICK_LINK_INVALID: string; + LOGIN_TIME: string; + FEEDBACK_TOKEN: string; + CITY?: string | undefined; + COUNTRY?: string | undefined; + DEVICE_NAME?: string | undefined; +} + +export interface CustomEmailSenderSignUpTriggerEvent + extends BaseCustomEmailSenderTriggerEvent<"CustomEmailSender_SignUp"> +{} + +export interface CustomEmailSenderResendCodeTriggerEvent + extends BaseCustomEmailSenderTriggerEvent<"CustomEmailSender_ResendCode"> +{} + +export interface CustomEmailSenderForgotPasswordTriggerEvent + extends BaseCustomEmailSenderTriggerEvent<"CustomEmailSender_ForgotPassword"> +{} + +export interface CustomEmailSenderUpdateUserAttributeTriggerEvent + extends BaseCustomEmailSenderTriggerEvent<"CustomEmailSender_UpdateUserAttribute"> +{} + +export interface CustomEmailSenderVerifyUserAttributeTriggerEvent + extends BaseCustomEmailSenderTriggerEvent<"CustomEmailSender_VerifyUserAttribute"> +{} + +export interface CustomEmailSenderAdminCreateUserTriggerEvent + extends BaseCustomEmailSenderTriggerEvent<"CustomEmailSender_AdminCreateUser"> +{} + +export interface CustomEmailSenderAccountTakeOverNotificationTriggerEvent + extends BaseTriggerEvent<"CustomEmailSender_AccountTakeOverNotification"> +{ + request: { + type: string; + code: string | null; + userAttributes: CustomEmailSender_AccountTakeOverNotification_UserAttributes; + clientMetadata?: StringMap | undefined; + }; +} + +/** + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-custom-email-sender.html + */ +export type CustomEmailSenderTriggerEvent = + | CustomEmailSenderSignUpTriggerEvent + | CustomEmailSenderResendCodeTriggerEvent + | CustomEmailSenderForgotPasswordTriggerEvent + | CustomEmailSenderUpdateUserAttributeTriggerEvent + | CustomEmailSenderVerifyUserAttributeTriggerEvent + | CustomEmailSenderAdminCreateUserTriggerEvent + | CustomEmailSenderAccountTakeOverNotificationTriggerEvent; + +export type CustomEmailSenderTriggerHandler = Handler; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/custom-message.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/custom-message.d.ts new file mode 100644 index 0000000..6908216 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/custom-message.d.ts @@ -0,0 +1,52 @@ +import { Handler } from "../../handler"; +import { BaseTriggerEvent, StringMap } from "./_common"; + +export interface 
BaseCustomMessageTriggerEvent extends BaseTriggerEvent { + request: { + userAttributes: StringMap; + codeParameter: string; + linkParameter: string; + /** + * This is null for all events other than the AdminCreateUser action. + */ + usernameParameter: string | null; + clientMetadata?: StringMap | undefined; + }; + response: { + smsMessage: string | null; + emailMessage: string | null; + emailSubject: string | null; + }; +} + +export type CustomMessageAdminCreateUserTriggerEvent = BaseCustomMessageTriggerEvent<"CustomMessage_AdminCreateUser">; + +export type CustomMessageAuthenticationTriggerEvent = BaseCustomMessageTriggerEvent<"CustomMessage_Authentication">; + +export type CustomMessageForgotPasswordTriggerEvent = BaseCustomMessageTriggerEvent<"CustomMessage_ForgotPassword">; + +export type CustomMessageResendCodeTriggerEvent = BaseCustomMessageTriggerEvent<"CustomMessage_ResendCode">; + +export type CustomMessageSignUpTriggerEvent = BaseCustomMessageTriggerEvent<"CustomMessage_SignUp">; + +export type CustomMessageUpdateUserAttributeTriggerEvent = BaseCustomMessageTriggerEvent< + "CustomMessage_UpdateUserAttribute" +>; + +export type CustomMessageVerifyUserAttributeTriggerEvent = BaseCustomMessageTriggerEvent< + "CustomMessage_VerifyUserAttribute" +>; + +/** + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-custom-message.html + */ +export type CustomMessageTriggerEvent = + | CustomMessageSignUpTriggerEvent + | CustomMessageAdminCreateUserTriggerEvent + | CustomMessageResendCodeTriggerEvent + | CustomMessageForgotPasswordTriggerEvent + | CustomMessageUpdateUserAttributeTriggerEvent + | CustomMessageVerifyUserAttributeTriggerEvent + | CustomMessageAuthenticationTriggerEvent; + +export type CustomMessageTriggerHandler = Handler; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/custom-sms-sender.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/custom-sms-sender.d.ts new file mode 100644 index 0000000..8ad3bd9 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/custom-sms-sender.d.ts @@ -0,0 +1,51 @@ +import { Handler } from "../../handler"; +import { BaseTriggerEvent, StringMap } from "./_common"; + +export interface BaseCustomSMSSenderTriggerEvent extends BaseTriggerEvent { + request: { + type: string; + code: string | null; + userAttributes: StringMap; + clientMetadata?: StringMap | undefined; + }; +} + +export interface CustomSMSSenderSignUpTriggerEvent extends BaseCustomSMSSenderTriggerEvent<"CustomSMSSender_SignUp"> {} + +export interface CustomSMSSenderResendCodeTriggerEvent + extends BaseCustomSMSSenderTriggerEvent<"CustomSMSSender_ResendCode"> +{} + +export interface CustomSMSSenderForgotPasswordTriggerEvent + extends BaseCustomSMSSenderTriggerEvent<"CustomSMSSender_ForgotPassword"> +{} + +export interface CustomSMSSenderUpdateUserAttributeTriggerEvent + extends BaseCustomSMSSenderTriggerEvent<"CustomSMSSender_UpdateUserAttribute"> +{} + +export interface CustomSMSSenderVerifyUserAttributeTriggerEvent + extends BaseCustomSMSSenderTriggerEvent<"CustomSMSSender_VerifyUserAttribute"> +{} + +export interface CustomSMSSenderAdminCreateUserTriggerEvent + extends BaseCustomSMSSenderTriggerEvent<"CustomSMSSender_AdminCreateUser"> +{} + +export interface CustomSMSSenderAuthenticationUserTriggerEvent + extends BaseCustomSMSSenderTriggerEvent<"CustomSMSSender_Authentication"> +{} + +/** + * @see 
https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-custom-sms-sender.html + */ +export type CustomSMSSenderTriggerEvent = + | CustomSMSSenderSignUpTriggerEvent + | CustomSMSSenderResendCodeTriggerEvent + | CustomSMSSenderForgotPasswordTriggerEvent + | CustomSMSSenderUpdateUserAttributeTriggerEvent + | CustomSMSSenderVerifyUserAttributeTriggerEvent + | CustomSMSSenderAdminCreateUserTriggerEvent + | CustomSMSSenderAuthenticationUserTriggerEvent; + +export type CustomSMSSenderTriggerHandler = Handler; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/define-auth-challenge.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/define-auth-challenge.d.ts new file mode 100644 index 0000000..2299fde --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/define-auth-challenge.d.ts @@ -0,0 +1,21 @@ +import { Handler } from "../../handler"; +import { BaseTriggerEvent, ChallengeResult, CustomChallengeResult, StringMap } from "./_common"; + +/** + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-define-auth-challenge.html + */ +export interface DefineAuthChallengeTriggerEvent extends BaseTriggerEvent<"DefineAuthChallenge_Authentication"> { + request: { + userAttributes: StringMap; + session: Array; + userNotFound?: boolean | undefined; + clientMetadata?: StringMap | undefined; + }; + response: { + challengeName: string; + failAuthentication: boolean; + issueTokens: boolean; + }; +} + +export type DefineAuthChallengeTriggerHandler = Handler; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/index.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/index.d.ts new file mode 100644 index 0000000..7c9cf7a --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/index.d.ts @@ -0,0 +1,128 @@ +import { Handler } from "../../handler"; + +/** + * Cognito User Pool event + * @deprecated Please use specific event types instead + * http://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-identity-pools-working-with-aws-lambda-triggers.html + */ +export interface CognitoUserPoolTriggerEvent { + version: number; + triggerSource: + | "PreSignUp_SignUp" + | "PreSignUp_ExternalProvider" + | "PostConfirmation_ConfirmSignUp" + | "PreAuthentication_Authentication" + | "PostAuthentication_Authentication" + | "CustomMessage_SignUp" + | "CustomMessage_AdminCreateUser" + | "CustomMessage_ResendCode" + | "CustomMessage_ForgotPassword" + | "CustomMessage_UpdateUserAttribute" + | "CustomMessage_VerifyUserAttribute" + | "CustomMessage_Authentication" + | "DefineAuthChallenge_Authentication" + | "CreateAuthChallenge_Authentication" + | "VerifyAuthChallengeResponse_Authentication" + | "PreSignUp_AdminCreateUser" + | "PostConfirmation_ConfirmForgotPassword" + | "TokenGeneration_HostedAuth" + | "TokenGeneration_Authentication" + | "TokenGeneration_NewPasswordChallenge" + | "TokenGeneration_AuthenticateDevice" + | "TokenGeneration_RefreshTokens" + | "UserMigration_Authentication" + | "UserMigration_ForgotPassword"; + region: string; + userPoolId: string; + userName?: string | undefined; + callerContext: { + awsSdkVersion: string; + clientId: string; + }; + request: { + userAttributes: { [key: string]: string }; + validationData?: { [key: string]: string } | undefined; + codeParameter?: string | undefined; + linkParameter?: string | undefined; + usernameParameter?: string | undefined; + 
newDeviceUsed?: boolean | undefined; + session?: + | Array<{ + challengeName: + | "CUSTOM_CHALLENGE" + | "PASSWORD_VERIFIER" + | "SMS_MFA" + | "DEVICE_SRP_AUTH" + | "DEVICE_PASSWORD_VERIFIER" + | "ADMIN_NO_SRP_AUTH" + | "SRP_A"; + challengeResult: boolean; + challengeMetadata?: string | undefined; + }> + | undefined; + challengeName?: string | undefined; + privateChallengeParameters?: { [key: string]: string } | undefined; + challengeAnswer?: string | undefined; + password?: string | undefined; + clientMetadata?: { [key: string]: string } | undefined; + userNotFound?: boolean | undefined; + }; + response: { + autoConfirmUser?: boolean | undefined; + autoVerifyPhone?: boolean | undefined; + autoVerifyEmail?: boolean | undefined; + smsMessage?: string | undefined; + emailMessage?: string | undefined; + emailSubject?: string | undefined; + challengeName?: string | undefined; + issueTokens?: boolean | undefined; + failAuthentication?: boolean | undefined; + publicChallengeParameters?: { [key: string]: string } | undefined; + privateChallengeParameters?: { [key: string]: string } | undefined; + challengeMetadata?: string | undefined; + answerCorrect?: boolean | undefined; + userAttributes?: { [key: string]: string } | undefined; + finalUserStatus?: "CONFIRMED" | "RESET_REQUIRED" | undefined; + messageAction?: "SUPPRESS" | undefined; + desiredDeliveryMediums?: Array<"EMAIL" | "SMS"> | undefined; + forceAliasCreation?: boolean | undefined; + claimsOverrideDetails?: + | { + claimsToAddOrOverride?: { [key: string]: string } | undefined; + claimsToSuppress?: string[] | undefined; + groupOverrideDetails?: + | null + | { + groupsToOverride?: string[] | undefined; + iamRolesToOverride?: string[] | undefined; + preferredRole?: string | undefined; + } + | undefined; + } + | undefined; + }; +} + +/** + * @deprecated Please use specific event types instead + */ +export type CognitoUserPoolEvent = CognitoUserPoolTriggerEvent; + +/** + * @deprecated Please use specific event handler types instead + */ +export type CognitoUserPoolTriggerHandler = Handler; + +export * from "./create-auth-challenge"; +export * from "./custom-email-sender"; +export * from "./custom-message"; +export * from "./custom-sms-sender"; +export * from "./define-auth-challenge"; +export * from "./post-authentication"; +export * from "./post-confirmation"; +export * from "./pre-authentication"; +export * from "./pre-signup"; +export * from "./pre-token-generation"; +export * from "./pre-token-generation-v2"; +export * from "./user-migration"; +export * from "./verify-auth-challenge-response"; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/post-authentication.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/post-authentication.d.ts new file mode 100644 index 0000000..4b5973f --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/post-authentication.d.ts @@ -0,0 +1,15 @@ +import { Handler } from "../../handler"; +import { BaseTriggerEvent, StringMap } from "./_common"; + +/** + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-post-authentication.html + */ +export interface PostAuthenticationTriggerEvent extends BaseTriggerEvent<"PostAuthentication_Authentication"> { + request: { + userAttributes: StringMap; + newDeviceUsed: boolean; + clientMetadata?: StringMap | undefined; + }; +} + +export type PostAuthenticationTriggerHandler = Handler; diff --git 
a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/post-confirmation.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/post-confirmation.d.ts new file mode 100644 index 0000000..2a7c5db --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/post-confirmation.d.ts @@ -0,0 +1,26 @@ +import { Handler } from "../../handler"; +import { BaseTriggerEvent, StringMap } from "./_common"; + +export interface BasePostConfirmationTriggerEvent extends BaseTriggerEvent { + request: { + userAttributes: StringMap; + clientMetadata?: StringMap | undefined; + }; +} + +export type PostConfirmationConfirmSignUpTriggerEvent = BasePostConfirmationTriggerEvent< + "PostConfirmation_ConfirmSignUp" +>; + +export type PostConfirmationConfirmForgotPassword = BasePostConfirmationTriggerEvent< + "PostConfirmation_ConfirmForgotPassword" +>; + +/** + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-post-confirmation.html + */ +export type PostConfirmationTriggerEvent = + | PostConfirmationConfirmSignUpTriggerEvent + | PostConfirmationConfirmForgotPassword; + +export type PostConfirmationTriggerHandler = Handler; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/pre-authentication.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/pre-authentication.d.ts new file mode 100644 index 0000000..c62edac --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/pre-authentication.d.ts @@ -0,0 +1,16 @@ +import { Handler } from "../../handler"; +import { BaseTriggerEvent, StringMap } from "./_common"; + +/** + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-pre-authentication.html + */ +export interface PreAuthenticationTriggerEvent extends BaseTriggerEvent<"PreAuthentication_Authentication"> { + request: { + userAttributes: StringMap; + userNotFound?: boolean | undefined; + validationData?: StringMap | undefined; + clientMetadata?: StringMap | undefined; + }; +} + +export type PreAuthenticationTriggerHandler = Handler; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/pre-signup.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/pre-signup.d.ts new file mode 100644 index 0000000..ee867bb --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/pre-signup.d.ts @@ -0,0 +1,31 @@ +import { Handler } from "../../handler"; +import { BaseTriggerEvent, StringMap } from "./_common"; + +export interface BasePreSignUpTriggerEvent extends BaseTriggerEvent { + request: { + userAttributes: StringMap; + validationData?: StringMap | undefined; + clientMetadata?: StringMap | undefined; + }; + response: { + autoConfirmUser: boolean; + autoVerifyEmail: boolean; + autoVerifyPhone: boolean; + }; +} + +export type PreSignUpEmailTriggerEvent = BasePreSignUpTriggerEvent<"PreSignUp_SignUp">; + +export type PreSignUpExternalProviderTriggerEvent = BasePreSignUpTriggerEvent<"PreSignUp_ExternalProvider">; + +export type PreSignUpAdminCreateUserTriggerEvent = BasePreSignUpTriggerEvent<"PreSignUp_AdminCreateUser">; + +/** + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-pre-sign-up.html + */ +export type PreSignUpTriggerEvent = + | PreSignUpEmailTriggerEvent + | PreSignUpExternalProviderTriggerEvent + | PreSignUpAdminCreateUserTriggerEvent; + +export type PreSignUpTriggerHandler = Handler; diff --git 
a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/pre-token-generation-v2.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/pre-token-generation-v2.d.ts new file mode 100644 index 0000000..6aada78 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/pre-token-generation-v2.d.ts @@ -0,0 +1,73 @@ +import { Handler } from "../../handler"; +import { BaseTriggerEvent, StringMap } from "./_common"; + +export interface GroupOverrideDetailsV2 { + groupsToOverride?: string[] | undefined; + iamRolesToOverride?: string[] | undefined; + preferredRole?: string | undefined; +} + +/** + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-pre-token-generation.html#cognito-user-pools-lambda-trigger-syntax-pre-token-generation + */ +export interface IdTokenGeneration { + claimsToAddOrOverride?: StringMap | undefined; + claimsToSuppress?: string[] | undefined; +} + +export interface AccessTokenGeneration { + claimsToAddOrOverride?: StringMap | undefined; + claimsToSuppress?: string[] | undefined; + scopesToAdd?: string[] | undefined; + scopesToSuppress?: string[] | undefined; +} + +export interface ClaimsAndScopeOverrideDetails { + idTokenGeneration?: IdTokenGeneration | undefined; + accessTokenGeneration?: AccessTokenGeneration | undefined; + groupOverrideDetails?: GroupOverrideDetailsV2 | undefined; +} + +export interface BasePreTokenGenerationV2TriggerEvent extends BaseTriggerEvent { + request: { + userAttributes: StringMap; + groupConfiguration: GroupOverrideDetailsV2; + scopes?: string[] | undefined; + clientMetadata?: StringMap | undefined; + }; + response: { + claimsAndScopeOverrideDetails: ClaimsAndScopeOverrideDetails; + }; +} + +export type PreTokenGenerationHostedAuthV2TriggerEvent = BasePreTokenGenerationV2TriggerEvent< + "TokenGeneration_HostedAuth" +>; + +export type PreTokenGenerationAuthenticationV2TriggerEvent = BasePreTokenGenerationV2TriggerEvent< + "TokenGeneration_Authentication" +>; + +export type PreTokenGenerationNewPasswordChallengeV2TriggerEvent = BasePreTokenGenerationV2TriggerEvent< + "TokenGeneration_NewPasswordChallenge" +>; + +export type PreTokenGenerationAuthenticateDeviceV2TriggerEvent = BasePreTokenGenerationV2TriggerEvent< + "TokenGeneration_AuthenticateDevice" +>; + +export type PreTokenGenerationRefreshTokensV2TriggerEvent = BasePreTokenGenerationV2TriggerEvent< + "TokenGeneration_RefreshTokens" +>; + +/** + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-pre-token-generation.html + */ +export type PreTokenGenerationV2TriggerEvent = + | PreTokenGenerationHostedAuthV2TriggerEvent + | PreTokenGenerationAuthenticationV2TriggerEvent + | PreTokenGenerationNewPasswordChallengeV2TriggerEvent + | PreTokenGenerationAuthenticateDeviceV2TriggerEvent + | PreTokenGenerationRefreshTokensV2TriggerEvent; + +export type PreTokenGenerationV2TriggerHandler = Handler; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/pre-token-generation.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/pre-token-generation.d.ts new file mode 100644 index 0000000..c34559c --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/pre-token-generation.d.ts @@ -0,0 +1,53 @@ +import { Handler } from "../../handler"; +import { BaseTriggerEvent, StringMap } from "./_common"; + +export interface GroupOverrideDetails { + groupsToOverride?: string[] | undefined; + 
iamRolesToOverride?: string[] | undefined; + preferredRole?: string | undefined; +} + +export interface BasePreTokenGenerationTriggerEvent extends BaseTriggerEvent { + request: { + userAttributes: StringMap; + groupConfiguration: GroupOverrideDetails; + clientMetadata?: StringMap | undefined; + }; + response: { + claimsOverrideDetails: { + claimsToAddOrOverride?: StringMap | undefined; + claimsToSuppress?: string[] | undefined; + groupOverrideDetails?: GroupOverrideDetails | undefined; + }; + }; +} + +export type PreTokenGenerationHostedAuthTriggerEvent = BasePreTokenGenerationTriggerEvent<"TokenGeneration_HostedAuth">; + +export type PreTokenGenerationAuthenticationTriggerEvent = BasePreTokenGenerationTriggerEvent< + "TokenGeneration_Authentication" +>; + +export type PreTokenGenerationNewPasswordChallengeTriggerEvent = BasePreTokenGenerationTriggerEvent< + "TokenGeneration_NewPasswordChallenge" +>; + +export type PreTokenGenerationAuthenticateDeviceTriggerEvent = BasePreTokenGenerationTriggerEvent< + "TokenGeneration_AuthenticateDevice" +>; + +export type PreTokenGenerationRefreshTokensTriggerEvent = BasePreTokenGenerationTriggerEvent< + "TokenGeneration_RefreshTokens" +>; + +/** + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-pre-token-generation.html + */ +export type PreTokenGenerationTriggerEvent = + | PreTokenGenerationHostedAuthTriggerEvent + | PreTokenGenerationAuthenticationTriggerEvent + | PreTokenGenerationNewPasswordChallengeTriggerEvent + | PreTokenGenerationAuthenticateDeviceTriggerEvent + | PreTokenGenerationRefreshTokensTriggerEvent; + +export type PreTokenGenerationTriggerHandler = Handler; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/user-migration.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/user-migration.d.ts new file mode 100644 index 0000000..d9cfaf9 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/user-migration.d.ts @@ -0,0 +1,40 @@ +import { Handler } from "../../handler"; +import { BaseTriggerEvent, StringMap } from "./_common"; + +export type UserStatus = + | "UNCONFIRMED" + | "CONFIRMED" + | "ARCHIVED" + | "COMPROMISED" + | "UNKNOWN" + | "RESET_REQUIRED" + | "FORCE_CHANGE_PASSWORD"; + +export interface BaseUserMigrationTriggerEvent extends BaseTriggerEvent { + request: { + password: string; + validationData?: StringMap | undefined; + clientMetadata?: StringMap | undefined; + }; + response: { + userAttributes: StringMap; + finalUserStatus?: UserStatus | undefined; + messageAction?: "RESEND" | "SUPPRESS" | undefined; + desiredDeliveryMediums: Array<"SMS" | "EMAIL">; + forceAliasCreation?: boolean | undefined; + enableSMSMFA?: boolean | undefined; + }; +} + +export type UserMigrationAuthenticationTriggerEvent = BaseUserMigrationTriggerEvent<"UserMigration_Authentication">; + +export type UserMigrationForgotPasswordTriggerEvent = BaseUserMigrationTriggerEvent<"UserMigration_ForgotPassword">; + +/** + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-migrate-user.html + */ +export type UserMigrationTriggerEvent = + | UserMigrationAuthenticationTriggerEvent + | UserMigrationForgotPasswordTriggerEvent; + +export type UserMigrationTriggerHandler = Handler; diff --git a/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/verify-auth-challenge-response.d.ts b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/verify-auth-challenge-response.d.ts new file mode 
100644 index 0000000..c3c4b75 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/cognito-user-pool-trigger/verify-auth-challenge-response.d.ts @@ -0,0 +1,22 @@ +import { Handler } from "../../handler"; +import { BaseTriggerEvent, StringMap } from "./_common"; + +/** + * @see https://docs.aws.amazon.com/cognito/latest/developerguide/user-pool-lambda-verify-auth-challenge-response.html + */ +export interface VerifyAuthChallengeResponseTriggerEvent + extends BaseTriggerEvent<"VerifyAuthChallengeResponse_Authentication"> +{ + request: { + userAttributes: StringMap; + privateChallengeParameters: StringMap; + challengeAnswer: string; + userNotFound?: boolean | undefined; + clientMetadata?: StringMap | undefined; + }; + response: { + answerCorrect: boolean; + }; +} + +export type VerifyAuthChallengeResponseTriggerHandler = Handler; diff --git a/dist/node_modules/@types/aws-lambda/trigger/connect-contact-flow.d.ts b/dist/node_modules/@types/aws-lambda/trigger/connect-contact-flow.d.ts new file mode 100644 index 0000000..9a26639 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/connect-contact-flow.d.ts @@ -0,0 +1,64 @@ +import { Callback, Handler } from "../handler"; + +export type ConnectContactFlowHandler = Handler; +export type ConnectContactFlowCallback = Callback; + +// Connect +// https://docs.aws.amazon.com/connect/latest/adminguide/connect-lambda-functions.html +export interface ConnectContactFlowEvent { + Details: { + ContactData: { + Attributes: { [key: string]: string }; + Channel: ConnectContactFlowChannel; + ContactId: string; + CustomerEndpoint: ConnectContactFlowEndpoint | null; + InitialContactId: string; + InitiationMethod: ConnectContactFlowInitiationMethod; + InstanceARN: string; + PreviousContactId: string; + Queue: ConnectContactFlowQueue | null; + SystemEndpoint: ConnectContactFlowEndpoint | null; + MediaStreams: { + Customer: { + Audio: CustomerAudio; + }; + }; + }; + Parameters: { [key: string]: string }; + }; + Name: "ContactFlowEvent"; +} + +export type ConnectContactFlowChannel = "VOICE" | "CHAT"; + +export type ConnectContactFlowInitiationMethod = "INBOUND" | "OUTBOUND" | "TRANSFER" | "CALLBACK" | "API"; + +export interface ConnectContactFlowEndpoint { + Address: string; + Type: "TELEPHONE_NUMBER"; +} + +export interface ConnectContactFlowQueue { + ARN: string; + Name: string; +} + +export interface ConnectContactFlowResult { + [key: string]: string | null; +} + +export type CustomerAudio = + | null // If Media Streaming has never been started. + | StartedCustomerAudio // If Media Streaming has been started, but not stopped. + | (StartedCustomerAudio & StoppedCustomerAudio); // If Media Streaming has been started and stopped. 
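Purely as an illustration (this snippet is not part of the vendored package), a minimal handler built on the ConnectContactFlow types above might look like the sketch below; the returned attribute key "greeting" is invented for the example.

import { ConnectContactFlowHandler } from "aws-lambda";

// Sketch: echo the contact channel back to the flow as a contact attribute.
export const handler: ConnectContactFlowHandler = async (event) => {
    const channel = event.Details.ContactData.Channel; // "VOICE" | "CHAT"
    // Returned key/value pairs become external attributes in the contact flow
    // ("greeting" is a made-up key used only for illustration).
    return {
        greeting: channel === "CHAT" ? "Hello from chat" : "Hello, caller",
    };
};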
+
+export interface StartedCustomerAudio {
+    StartFragmentNumber: string;
+    StartTimestamp: string;
+    StreamARN: string;
+}
+
+export interface StoppedCustomerAudio {
+    StopFragmentNumber: string;
+    StopTimestamp: string;
+}
diff --git a/dist/node_modules/@types/aws-lambda/trigger/dynamodb-stream.d.ts b/dist/node_modules/@types/aws-lambda/trigger/dynamodb-stream.d.ts
new file mode 100644
index 0000000..ac190a3
--- /dev/null
+++ b/dist/node_modules/@types/aws-lambda/trigger/dynamodb-stream.d.ts
@@ -0,0 +1,55 @@
+import { Handler } from "../handler";
+
+// eslint-disable-next-line @typescript-eslint/no-invalid-void-type
+export type DynamoDBStreamHandler = Handler<DynamoDBStreamEvent, DynamoDBBatchResponse | void>;
+
+// http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_AttributeValue.html
+export interface AttributeValue {
+    B?: string | undefined;
+    BS?: string[] | undefined;
+    BOOL?: boolean | undefined;
+    L?: AttributeValue[] | undefined;
+    M?: { [id: string]: AttributeValue } | undefined;
+    N?: string | undefined;
+    NS?: string[] | undefined;
+    NULL?: boolean | undefined;
+    S?: string | undefined;
+    SS?: string[] | undefined;
+}
+
+// http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_StreamRecord.html
+export interface StreamRecord {
+    ApproximateCreationDateTime?: number | undefined;
+    Keys?: { [key: string]: AttributeValue } | undefined;
+    NewImage?: { [key: string]: AttributeValue } | undefined;
+    OldImage?: { [key: string]: AttributeValue } | undefined;
+    SequenceNumber?: string | undefined;
+    SizeBytes?: number | undefined;
+    StreamViewType?: "KEYS_ONLY" | "NEW_IMAGE" | "OLD_IMAGE" | "NEW_AND_OLD_IMAGES" | undefined;
+}
+
+// http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_streams_Record.html
+export interface DynamoDBRecord {
+    awsRegion?: string | undefined;
+    dynamodb?: StreamRecord | undefined;
+    eventID?: string | undefined;
+    eventName?: "INSERT" | "MODIFY" | "REMOVE" | undefined;
+    eventSource?: string | undefined;
+    eventSourceARN?: string | undefined;
+    eventVersion?: string | undefined;
+    userIdentity?: any;
+}
+
+// http://docs.aws.amazon.com/lambda/latest/dg/eventsources.html#eventsources-ddb-update
+export interface DynamoDBStreamEvent {
+    Records: DynamoDBRecord[];
+}
+
+// https://docs.aws.amazon.com/lambda/latest/dg/with-ddb.html#services-ddb-batchfailurereporting
+export interface DynamoDBBatchResponse {
+    batchItemFailures: DynamoDBBatchItemFailure[];
+}
+
+export interface DynamoDBBatchItemFailure {
+    itemIdentifier: string;
+}
diff --git a/dist/node_modules/@types/aws-lambda/trigger/eventbridge.d.ts b/dist/node_modules/@types/aws-lambda/trigger/eventbridge.d.ts
new file mode 100644
index 0000000..529b7a1
--- /dev/null
+++ b/dist/node_modules/@types/aws-lambda/trigger/eventbridge.d.ts
@@ -0,0 +1,19 @@
+import { Handler } from "../handler";
+
+export type EventBridgeHandler<TDetailType extends string, TDetail, TResult> = Handler<
+    EventBridgeEvent<TDetailType, TDetail>,
+    TResult
+>;
+
+export interface EventBridgeEvent<TDetailType extends string, TDetail> {
+    id: string;
+    version: string;
+    account: string;
+    time: string;
+    region: string;
+    resources: string[];
+    source: string;
+    "detail-type": TDetailType;
+    detail: TDetail;
+    "replay-name"?: string;
+}
diff --git a/dist/node_modules/@types/aws-lambda/trigger/iot-authorizer.d.ts b/dist/node_modules/@types/aws-lambda/trigger/iot-authorizer.d.ts
new file mode 100644
index 0000000..215b980
--- /dev/null
+++ b/dist/node_modules/@types/aws-lambda/trigger/iot-authorizer.d.ts
@@ -0,0 +1,48 @@
+import { Handler } from "../handler";
+import { PolicyDocument } from "./api-gateway-authorizer";
+
+export
type IoTProtocolType = "http" | "mqtt" | "tls"; + +export type IoTCustomAuthorizerHandler = Handler; + +export interface IoTProtocolDataTLS { + serverName: string; // The server name indication (SNI) host_name string. +} + +export interface IoTProtocolDataHTTP { + headers: Record; + queryString: string; +} + +export interface IoTProtocolDataMQTT { + username?: string; + password?: string; // A base64-encoded string. + clientId: string; // Included in the event only when the device sends the value. +} + +export interface IoTCustomAuthorizerEvent { + token?: string; + signatureVerified: boolean; // Indicates whether the device gateway has validated the signature. + protocols: IoTProtocolType[]; // Indicates which protocols to expect for the request. + protocolData: { + tls?: IoTProtocolDataTLS; + http?: IoTProtocolDataHTTP; + mqtt?: IoTProtocolDataMQTT; + }; + connectionMetadata: { + id: string; // The connection ID. You can use this for logging. + }; +} + +/** + * IoT CustomAuthorizer AuthResponse.PolicyDocument. + * https://docs.aws.amazon.com/iot/latest/developerguide/config-custom-auth.html#custom-auth-lambda + * https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements.html#Condition + */ +export interface IoTCustomAuthorizerResult { + isAuthenticated: boolean; // A Boolean that determines whether client can connect. + principalId: string; // A string that identifies the connection in logs. + disconnectAfterInSeconds: number; + refreshAfterInSeconds: number; + policyDocuments: PolicyDocument[]; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/iot.d.ts b/dist/node_modules/@types/aws-lambda/trigger/iot.d.ts new file mode 100644 index 0000000..a188aab --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/iot.d.ts @@ -0,0 +1,28 @@ +import { Handler } from "../handler"; + +export type IoTHandler = Handler; + +// IoT +// https://docs.aws.amazon.com/lambda/latest/dg/services-iot.html +// IoT payload is not restriced to JSON, but JSON is highly recommended. Types as string, number or array are possible to use. + +export type IoTEvent = string | number | T; + +// PreProvisioningHook +// https://docs.aws.amazon.com/iot/latest/developerguide/pre-provisioning-hook.html +// When using AWS IoT fleet provisioning, you can set up a Lambda function to validate parameters passed from the device before allowing the device to be provisioned. 
+export type IoTPreProvisioningHookHandler = Handler; + +export interface IoTPreProvisioningHookEvent { + claimCertificateId: string; + certificateId: string; + certificatePem: string; + templateArn: string; + clientId: string; + parameters: Record; +} + +export interface IoTPreProvisioningHookResult { + allowProvisioning: boolean; + parameterOverrides: Record; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/kinesis-firehose-transformation.d.ts b/dist/node_modules/@types/aws-lambda/trigger/kinesis-firehose-transformation.d.ts new file mode 100644 index 0000000..dff5fb5 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/kinesis-firehose-transformation.d.ts @@ -0,0 +1,51 @@ +import { Callback, Handler } from "../handler"; + +export type FirehoseTransformationHandler = Handler; +export type FirehoseTransformationCallback = Callback; + +// Kinesis Data Firehose Event +// https://docs.aws.amazon.com/lambda/latest/dg/eventsources.html#eventsources-kinesis-firehose +// https://docs.aws.amazon.com/firehose/latest/dev/data-transformation.html +// https://aws.amazon.com/blogs/compute/amazon-kinesis-firehose-data-transformation-with-aws-lambda/ +// Examples in the lambda blueprints +export interface FirehoseTransformationEvent { + invocationId: string; + deliveryStreamArn: string; + sourceKinesisStreamArn?: string | undefined; + region: string; + records: FirehoseTransformationEventRecord[]; +} + +export interface FirehoseTransformationEventRecord { + recordId: string; + approximateArrivalTimestamp: number; + /** Base64 encoded */ + data: string; + kinesisRecordMetadata?: FirehoseRecordMetadata | undefined; +} + +export interface FirehoseRecordMetadata { + shardId: string; + partitionKey: string; + approximateArrivalTimestamp: number; + sequenceNumber: string; + subsequenceNumber: string; +} + +export type FirehoseRecordTransformationStatus = "Ok" | "Dropped" | "ProcessingFailed"; + +export interface FirehoseTransformationMetadata { + partitionKeys: { [name: string]: string }; +} + +export interface FirehoseTransformationResultRecord { + recordId: string; + result: FirehoseRecordTransformationStatus; + /** Encode in Base64 */ + data?: string; + metadata?: FirehoseTransformationMetadata; +} + +export interface FirehoseTransformationResult { + records: FirehoseTransformationResultRecord[]; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/kinesis-stream.d.ts b/dist/node_modules/@types/aws-lambda/trigger/kinesis-stream.d.ts new file mode 100644 index 0000000..7d9fa41 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/kinesis-stream.d.ts @@ -0,0 +1,56 @@ +import { Handler } from "../handler"; + +// eslint-disable-next-line @typescript-eslint/no-invalid-void-type +export type KinesisStreamHandler = Handler; + +export type KinesisStreamTumblingWindowHandler = Handler< + KinesisStreamTumblingWindowEvent, + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + KinesisStreamStateResponse | void +>; + +// Kinesis Streams +// https://docs.aws.amazon.com/lambda/latest/dg/eventsources.html#eventsources-kinesis-streams +export interface KinesisStreamRecordPayload { + approximateArrivalTimestamp: number; + data: string; + kinesisSchemaVersion: string; + partitionKey: string; + sequenceNumber: string; +} + +export interface KinesisStreamRecord { + awsRegion: string; + eventID: string; + eventName: string; + eventSource: string; + eventSourceARN: string; + eventVersion: string; + invokeIdentityArn: string; + kinesis: KinesisStreamRecordPayload; +} + 
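As an illustrative aside (not part of the vendored file), a consumer of these Kinesis stream types might look like the following sketch, assuming a Node.js runtime for Buffer.

import { KinesisStreamHandler } from "aws-lambda";

// Sketch: log each record's decoded payload.
export const handler: KinesisStreamHandler = async (event) => {
    for (const record of event.Records) {
        // record.kinesis.data is delivered base64-encoded.
        const payload = Buffer.from(record.kinesis.data, "base64").toString("utf8");
        console.log(record.eventID, payload);
    }
};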
+export interface KinesisStreamEvent { + Records: KinesisStreamRecord[]; +} + +// https://docs.aws.amazon.com/lambda/latest/dg/with-kinesis.html#services-kinesis-windows +export interface KinesisStreamTumblingWindowEvent extends KinesisStreamEvent { + window: { start: string; end: string }; + state?: { [key: string]: any }; + isFinalInvokeForWindow: boolean; + isWindowTerminatedEarly: boolean; +} + +export interface KinesisStreamStateResponse extends Partial { + state: { [key: string]: any }; +} + +// https://docs.aws.amazon.com/lambda/latest/dg/with-kinesis.html#services-kinesis-batchfailurereporting +export interface KinesisStreamBatchResponse { + batchItemFailures: KinesisStreamBatchItemFailure[]; +} + +export interface KinesisStreamBatchItemFailure { + itemIdentifier: string; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/lambda-function-url.d.ts b/dist/node_modules/@types/aws-lambda/trigger/lambda-function-url.d.ts new file mode 100644 index 0000000..015f0b5 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/lambda-function-url.d.ts @@ -0,0 +1,45 @@ +import { Handler } from "../handler"; +import { + APIGatewayEventRequestContextIAMAuthorizer, + APIGatewayEventRequestContextV2WithAuthorizer, + APIGatewayProxyCallbackV2, + APIGatewayProxyEventV2, + APIGatewayProxyEventV2WithRequestContext, + APIGatewayProxyResultV2, +} from "./api-gateway-proxy"; + +/** + * Default Lambda Function URL event with no Authorizer + */ +export type LambdaFunctionURLEvent = APIGatewayProxyEventV2; + +/** + * Works with Lambda Function URL format which is currently the same as HTTP API integration Payload Format version 2.0 + * @see - https://docs.aws.amazon.com/lambda/latest/dg/urls-invocation.html#urls-payloads + */ +export type LambdaFunctionURLResult = APIGatewayProxyResultV2; + +/** + * Works with Lambda Function URL format which is currently the same as HTTP API integration Payload Format version 2.0 + * @see - https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html + */ +export type LambdaFunctionURLCallback = APIGatewayProxyCallbackV2; + +/** + * Works with Lambda Function URL format which is currently the same as HTTP API integration Payload Format version 2.0 + * @see - https://docs.aws.amazon.com/lambda/latest/dg/urls-invocation.html#urls-payloads + */ +export type LambdaFunctionURLHandler = Handler>; + +export type LambdaFunctionURLEventWithIAMAuthorizer = APIGatewayProxyEventV2WithRequestContext< + APIGatewayEventRequestContextV2WithAuthorizer +>; + +/** + * Works with Lambda Function URL format which is currently the same as HTTP API integration Payload Format version 2.0 + * @see - https://docs.aws.amazon.com/lambda/latest/dg/urls-invocation.html#urls-payloads + */ +export type LambdaFunctionURLHandlerWithIAMAuthorizer = Handler< + LambdaFunctionURLEventWithIAMAuthorizer, + LambdaFunctionURLResult +>; diff --git a/dist/node_modules/@types/aws-lambda/trigger/lex-v2.d.ts b/dist/node_modules/@types/aws-lambda/trigger/lex-v2.d.ts new file mode 100644 index 0000000..d2458b5 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/lex-v2.d.ts @@ -0,0 +1,164 @@ +import { Callback, Handler } from "../handler"; + +export type LexV2Handler = Handler; +export type LexV2Callback = Callback; + +// Lex V2 +// https://docs.aws.amazon.com/lexv2/latest/dg/lambda.html +export interface LexV2Event { + messageVersion: string; + invocationSource: "DialogCodeHook" | "FulfillmentCodeHook"; + inputMode: "DTMF" | "Speech" | "Text"; + 
responseContentType: string; + sessionId: string; + inputTranscript: string; + bot: LexV2Bot; + interpretations: LexV2Interpretation[]; + proposedNextState: { + dialogAction: LexV2DialogAction; + intent: LexV2Intent; + }; + requestAttributes?: Record; + sessionState: LexV2SessionState; + transcriptions: LexV2Transcription[]; +} + +export interface LexV2Bot { + id: string; + name: string; + aliasId: string; + aliasName: string; + localeId: string; + version: string; // 'DRAFT' | `${number}` +} + +export interface LexV2Interpretation { + intent: LexV2Intent; + nluConfidence?: number; + sentimentResponse?: LexV2SentimentResponse; +} + +export interface LexV2Intent { + confirmationState: "Confirmed" | "Denied" | "None"; + name: string; + slots: LexV2Slots; + state: LexV2IntentState; + kendraResponse?: any; +} + +export type LexV2IntentState = + | "Failed" + | "Fulfilled" + | "FulfillmentInProgress" + | "InProgress" + | "ReadyForFulfillment" + | "Waiting"; + +export interface LexV2SentimentResponse { + sentiment: string; + sentimentScore: LexV2SentimentScore; +} + +export interface LexV2SentimentScore { + mixed: number; + negative: number; + neutral: number; + positive: number; +} + +export interface LexV2SessionState { + activeContexts?: LexV2ActiveContext[]; + sessionAttributes?: Record; + dialogAction?: LexV2DialogAction; + intent: LexV2Intent; + originatingRequestId: string; +} + +export interface LexV2ActiveContext { + name: string; + contextAttributes: Record; + timeToLive: { + timeToLiveInSeconds: number; + turnsToLive: number; + }; +} + +export type LevV2DialogActionWithoutSlot = + | { type: "Close" } + | { type: "ConfirmIntent" } + | { type: "Delegate" } + | { type: "ElicitIntent" }; + +export type LexV2DialogAction = + | (LevV2DialogActionWithoutSlot & { slotToElicit?: never }) + | { type: "ElicitSlot"; slotToElicit: string }; + +export type LexV2ResultDialogAction = + | (LevV2DialogActionWithoutSlot & { slotToElicit?: never }) + | { type: "ElicitSlot"; slotToElicit: string; slotElicitationStyle: "Default" | "SpellByLetter" | "SpellByWord" }; + +export interface LexV2Result { + sessionState: { + sessionAttributes?: Record; + dialogAction: LexV2ResultDialogAction; + intent?: { + name?: string; + state: LexV2IntentState; + slots?: LexV2Slots; + }; + }; + messages?: LexV2Message[]; +} + +export type LexV2Message = LexV2ContentMessage | LexV2ImageResponseCardMessage; + +export interface LexV2ContentMessage { + contentType: "CustomPayload" | "PlainText" | "SSML"; + content: string; +} + +export interface LexV2ImageResponseCardMessage { + contentType: "ImageResponseCard"; + imageResponseCard: LexV2ImageResponseCard; +} + +export interface LexV2ImageResponseCard { + title: string; + subtitle?: string; + imageUrl?: string; + buttons?: LexV2ImageResponseCardButton[]; +} + +export interface LexV2ImageResponseCardButton { + text: string; + value: string; +} + +export type LexV2Slot = LexV2ScalarSlotValue | LexV2ListSlotValue; +export type LexV2Slots = Record; + +export interface LexV2ScalarSlotValue { + shape: "Scalar"; + value: LexV2SlotValue; +} + +export interface LexV2ListSlotValue { + shape: "List"; + value: LexV2SlotValue; + values: LexV2ScalarSlotValue[]; +} + +export interface LexV2SlotValue { + interpretedValue?: string; + originalValue: string; + resolvedValues: string[]; +} + +export interface LexV2Transcription { + transcription: string; + transcriptionConfidence: number; + resolvedContext: { + intent: string; + }; + resolvedSlots: LexV2Slots; +} diff --git 
a/dist/node_modules/@types/aws-lambda/trigger/lex.d.ts b/dist/node_modules/@types/aws-lambda/trigger/lex.d.ts new file mode 100644 index 0000000..439b92d --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/lex.d.ts @@ -0,0 +1,120 @@ +import { Callback, Handler } from "../handler"; + +export type LexHandler = Handler; +export type LexCallback = Callback; + +export interface LexEventSlots { + [name: string]: string | undefined | null; +} + +export interface LexEventSessionAttributes { + [key: string]: string | undefined; +} + +export interface LexEventRequestAttributes { + [key: string]: string | undefined; +} + +// Lex +// https://docs.aws.amazon.com/lambda/latest/dg/invoking-lambda-function.html#supported-event-source-lex +export interface LexEvent { + currentIntent: { + name: string; + slots: LexEventSlots; + slotDetails: LexSlotDetails; + confirmationStatus: "None" | "Confirmed" | "Denied"; + }; + bot: { + name: string; + alias: string; + version: string; + }; + userId: string; + inputTranscript: string; + invocationSource: "DialogCodeHook" | "FulfillmentCodeHook"; + outputDialogMode: "Text" | "Voice"; + messageVersion: "1.0"; + sessionAttributes: LexEventSessionAttributes; + requestAttributes: LexEventRequestAttributes | null; +} + +export interface LexSlotResolution { + value: string; +} + +export interface LexSlotDetail { + // "at least 1 but no more than 5 items" + resolutions: [LexSlotResolution, LexSlotResolution?, LexSlotResolution?, LexSlotResolution?, LexSlotResolution?]; + originalValue: string; +} + +export interface LexSlotDetails { + [name: string]: LexSlotDetail; +} + +export interface LexGenericAttachment { + title: string; + subTitle: string; + imageUrl: string; + attachmentLinkUrl: string; + buttons: Array<{ + text: string; + value: string; + }>; +} + +export interface LexDialogActionBase { + type: "Close" | "ElicitIntent" | "ElicitSlot" | "ConfirmIntent"; + message?: + | { + contentType: "PlainText" | "SSML" | "CustomPayload"; + content: string; + } + | undefined; + responseCard?: + | { + version: number; + contentType: "application/vnd.amazonaws.card.generic"; + genericAttachments: LexGenericAttachment[]; + } + | undefined; +} + +export interface LexDialogActionClose extends LexDialogActionBase { + type: "Close"; + fulfillmentState: "Fulfilled" | "Failed"; +} + +export interface LexDialogActionElicitIntent extends LexDialogActionBase { + type: "ElicitIntent"; +} + +export interface LexDialogActionElicitSlot extends LexDialogActionBase { + type: "ElicitSlot"; + intentName: string; + slots: { [name: string]: string | null }; + slotToElicit: string; +} + +export interface LexDialogActionConfirmIntent extends LexDialogActionBase { + type: "ConfirmIntent"; + intentName: string; + slots: { [name: string]: string | null }; +} + +export interface LexDialogActionDelegate { + type: "Delegate"; + slots: { [name: string]: string | null }; +} + +export type LexDialogAction = + | LexDialogActionClose + | LexDialogActionElicitIntent + | LexDialogActionElicitSlot + | LexDialogActionConfirmIntent + | LexDialogActionDelegate; + +export interface LexResult { + sessionAttributes?: { [key: string]: string } | undefined; + dialogAction: LexDialogAction; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/msk.d.ts b/dist/node_modules/@types/aws-lambda/trigger/msk.d.ts new file mode 100644 index 0000000..af5d471 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/msk.d.ts @@ -0,0 +1,28 @@ +import { Handler } from "../handler"; + +export type MSKHandler = 
Handler; + +export interface MSKRecordHeader { + [headerKey: string]: number[]; +} + +export interface MSKRecord { + topic: string; + partition: number; + offset: number; + timestamp: number; + timestampType: "CREATE_TIME" | "LOG_APPEND_TIME"; + key: string; + value: string; + headers: MSKRecordHeader[]; +} + +// https://docs.aws.amazon.com/lambda/latest/dg/with-msk.html +export interface MSKEvent { + eventSource: "aws:kafka"; + eventSourceArn: string; + bootstrapServers: string; + records: { + [topic: string]: MSKRecord[]; + }; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/s3-batch.d.ts b/dist/node_modules/@types/aws-lambda/trigger/s3-batch.d.ts new file mode 100644 index 0000000..07fdf5b --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/s3-batch.d.ts @@ -0,0 +1,41 @@ +import { Callback, Handler } from "../handler"; + +/** + * S3 Batch Operations event + * https://docs.aws.amazon.com/AmazonS3/latest/dev/batch-ops-invoke-lambda.html + */ +export type S3BatchHandler = Handler; +export type S3BatchCallback = Callback; + +export interface S3BatchEvent { + invocationSchemaVersion: string; + invocationId: string; + job: S3BatchEventJob; + tasks: S3BatchEventTask[]; +} + +export interface S3BatchEventJob { + id: string; +} + +export interface S3BatchEventTask { + taskId: string; + s3Key: string; + s3VersionId: string | null; + s3BucketArn: string; +} + +export interface S3BatchResult { + invocationSchemaVersion: string; + treatMissingKeysAs: S3BatchResultResultCode; + invocationId: string; + results: S3BatchResultResult[]; +} + +export type S3BatchResultResultCode = "Succeeded" | "TemporaryFailure" | "PermanentFailure"; + +export interface S3BatchResultResult { + taskId: string; + resultCode: S3BatchResultResultCode; + resultString: string; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/s3-event-notification.d.ts b/dist/node_modules/@types/aws-lambda/trigger/s3-event-notification.d.ts new file mode 100644 index 0000000..f7df4c2 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/s3-event-notification.d.ts @@ -0,0 +1,290 @@ +import { Handler } from "../handler"; +import { EventBridgeEvent } from "./eventbridge"; + +export interface S3ObjectAccessTierChangedNotificationEventDetail { + version: "0"; + bucket: { + name: string; + }; + object: { + key: string; + size: number; + etag: string; + "version-id": string; + }; + "request-id": string; + requester: string; + "destination-access-tier": "ARCHIVE_ACCESS" | "DEEP_ARCHIVE_ACCESS"; // https://docs.aws.amazon.com/AmazonS3/latest/API/API_Tiering.html +} + +export interface S3ObjectACLUpdatedNotificationEventDetail { + version: "0"; + bucket: { + name: string; + }; + object: { + key: string; + etag: string; + "version-id": string; + }; + "request-id": string; + requester: string; + "source-ip-address": string; +} + +export interface S3ObjectCreatedNotificationEventDetail { + version: "0"; + bucket: { + name: string; + }; + object: { + key: string; + size: number; + etag: string; + "version-id": string; + sequencer: string; + }; + "request-id": string; + requester: string; + "source-ip-address": string; + reason: "PutObject" | "POST Object" | "CopyObject" | "CompleteMultipartUpload"; +} + +export interface S3ObjectDeletedNotificationEventLifecycleExpirationDetail { + version: "0"; + bucket: { + name: string; + }; + object: { + key: string; + etag: string; + "version-id": string; + sequencer: string; + }; + "request-id": string; + requester: string; + reason: "Lifecycle Expiration"; + "deletion-type": 
"Permanently Deleted" | "Delete Marker Created"; +} + +export interface S3ObjectDeletedNotificationEventDeleteObjectDetail { + version: "0"; + bucket: { + name: string; + }; + object: { + key: string; + etag: string; + "version-id": string; + sequencer: string; + }; + "request-id": string; + requester: string; + "source-ip-address": string; + reason: "DeleteObject"; + "deletion-type": "Permanently Deleted" | "Delete Marker Created"; +} + +export type S3ObjectDeletedNotificationEventDetail = + | S3ObjectDeletedNotificationEventLifecycleExpirationDetail + | S3ObjectDeletedNotificationEventDeleteObjectDetail; + +export interface S3ObjectRestoreCompletedNotificationEventDetail { + version: "0"; + bucket: { + name: string; + }; + object: { + key: string; + size: number; + "version-id": string; + etag: string; + }; + "request-id": string; + requester: "s3.amazonaws.com"; + "restore-expiry-time": string; + "source-storage-class": + | "STANDARD" + | "REDUCED_REDUNDANCY" + | "STANDARD_IA" + | "ONEZONE_IA" + | "INTELLIGENT_TIERING" + | "GLACIER" + | "DEEP_ARCHIVE" + | "OUTPOSTS" + | "GLACIER_IR"; // https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html#API_PutObject_RequestSyntax +} + +export interface S3ObjectRestoreExpiredNotificationEventDetail { + version: "0"; + bucket: { + name: string; + }; + object: { + key: string; + "version-id": string; + etag: string; + }; + "request-id": string; + requester: "s3.amazonaws.com"; +} + +export interface S3ObjectRestoreInitiatedNotificationEventDetail { + version: "0"; + bucket: { + name: string; + }; + object: { + key: string; + size: number; + "version-id": string; + etag: string; + }; + "request-id": string; + requester: string; + "source-ip-address": string; + "source-storage-class": + | "STANDARD" + | "REDUCED_REDUNDANCY" + | "STANDARD_IA" + | "ONEZONE_IA" + | "INTELLIGENT_TIERING" + | "GLACIER" + | "DEEP_ARCHIVE" + | "OUTPOSTS" + | "GLACIER_IR"; // https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html#API_PutObject_RequestSyntax +} + +export interface S3ObjectStorageClassChangedNotificationEventDetail { + version: "0"; + bucket: { + name: string; + }; + object: { + key: string; + size: number; + "version-id": string; + etag: string; + }; + "request-id": string; + requester: string; + "destination-storage-class": + | "STANDARD" + | "REDUCED_REDUNDANCY" + | "STANDARD_IA" + | "ONEZONE_IA" + | "INTELLIGENT_TIERING" + | "GLACIER" + | "DEEP_ARCHIVE" + | "OUTPOSTS" + | "GLACIER_IR"; // https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html#API_PutObject_RequestSyntax +} + +export interface S3ObjectTagsAddedNotificationEventDetail { + version: "0"; + bucket: { + name: string; + }; + object: { + key: string; + "version-id": string; + etag: string; + }; + "request-id": string; + requester: string; + "source-ip-address": string; +} + +export interface S3ObjectTagsDeletedNotificationEventDetail { + version: "0"; + bucket: { + name: string; + }; + object: { + key: string; + "version-id": string; + etag: string; + }; + "request-id": string; + requester: string; + "source-ip-address": string; +} + +export interface S3ObjectAccessTierChangedNotificationEvent + extends EventBridgeEvent<"Object Access Tier Changed", S3ObjectAccessTierChangedNotificationEventDetail> +{ + source: "aws.s3"; +} + +export interface S3ObjectACLUpdatedNotificationEvent + extends EventBridgeEvent<"Object ACL Updated", S3ObjectACLUpdatedNotificationEventDetail> +{ + source: "aws.s3"; +} + +export interface S3ObjectCreatedNotificationEvent + extends 
EventBridgeEvent<"Object Created", S3ObjectCreatedNotificationEventDetail> +{ + source: "aws.s3"; +} + +export interface S3ObjectDeletedNotificationEvent + extends EventBridgeEvent<"Object Deleted", S3ObjectDeletedNotificationEventDetail> +{ + source: "aws.s3"; +} + +export interface S3ObjectRestoreCompletedNotificationEvent + extends EventBridgeEvent<"Object Restore Completed", S3ObjectRestoreCompletedNotificationEventDetail> +{ + source: "aws.s3"; +} + +export interface S3ObjectRestoreExpiredNotificationEvent + extends EventBridgeEvent<"Object Restore Expired", S3ObjectRestoreExpiredNotificationEventDetail> +{ + source: "aws.s3"; +} + +export interface S3ObjectRestoreInitiatedNotificationEvent + extends EventBridgeEvent<"Object Restore Initiated", S3ObjectRestoreInitiatedNotificationEventDetail> +{ + source: "aws.s3"; +} + +export interface S3ObjectStorageClassChangedNotificationEvent + extends EventBridgeEvent<"Object Storage Class Changed", S3ObjectStorageClassChangedNotificationEventDetail> +{ + source: "aws.s3"; +} + +export interface S3ObjectTagsAddedNotificationEvent + extends EventBridgeEvent<"Object Tags Added", S3ObjectTagsAddedNotificationEventDetail> +{ + source: "aws.s3"; +} + +export interface S3ObjectTagsDeletedNotificationEvent + extends EventBridgeEvent<"Object Tags Deleted", S3ObjectTagsDeletedNotificationEventDetail> +{ + source: "aws.s3"; +} + +export type S3NotificationEvent = + | S3ObjectAccessTierChangedNotificationEvent + | S3ObjectACLUpdatedNotificationEvent + | S3ObjectCreatedNotificationEvent + | S3ObjectDeletedNotificationEvent + | S3ObjectRestoreCompletedNotificationEvent + | S3ObjectRestoreExpiredNotificationEvent + | S3ObjectRestoreInitiatedNotificationEvent + | S3ObjectStorageClassChangedNotificationEvent + | S3ObjectTagsAddedNotificationEvent + | S3ObjectTagsDeletedNotificationEvent; + +export type S3NotificationEventBridgeHandler = Handler< + EventBridgeEvent & { + source: "aws.s3"; + }, + void +>; diff --git a/dist/node_modules/@types/aws-lambda/trigger/s3.d.ts b/dist/node_modules/@types/aws-lambda/trigger/s3.d.ts new file mode 100644 index 0000000..f062504 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/s3.d.ts @@ -0,0 +1,60 @@ +import { Handler } from "../handler"; + +export type S3Handler = Handler; + +/** + * S3Create event + * https://docs.aws.amazon.com/AmazonS3/latest/dev/notification-content-structure.html + */ + +export interface S3EventRecordGlacierRestoreEventData { + lifecycleRestorationExpiryTime: string; + lifecycleRestoreStorageClass: string; +} + +export interface S3EventRecordGlacierEventData { + restoreEventData: S3EventRecordGlacierRestoreEventData; +} + +export interface S3EventRecord { + eventVersion: string; + eventSource: string; + awsRegion: string; + eventTime: string; + eventName: string; + userIdentity: { + principalId: string; + }; + requestParameters: { + sourceIPAddress: string; + }; + responseElements: { + "x-amz-request-id": string; + "x-amz-id-2": string; + }; + s3: { + s3SchemaVersion: string; + configurationId: string; + bucket: { + name: string; + ownerIdentity: { + principalId: string; + }; + arn: string; + }; + object: { + key: string; + size: number; + eTag: string; + versionId?: string | undefined; + sequencer: string; + }; + }; + glacierEventData?: S3EventRecordGlacierEventData | undefined; +} + +export interface S3Event { + Records: S3EventRecord[]; +} + +export type S3CreateEvent = S3Event; // old name diff --git a/dist/node_modules/@types/aws-lambda/trigger/secretsmanager.d.ts 
b/dist/node_modules/@types/aws-lambda/trigger/secretsmanager.d.ts new file mode 100644 index 0000000..1d3299f --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/secretsmanager.d.ts @@ -0,0 +1,15 @@ +import { Handler } from "../handler"; + +export type SecretsManagerRotationHandler = Handler; + +export type SecretsManagerRotationEventStep = "createSecret" | "setSecret" | "testSecret" | "finishSecret"; + +/** + * Secrets Manager Rotation event + * https://docs.aws.amazon.com/secretsmanager/latest/userguide/rotating-secrets-lambda-function-overview.html + */ +export interface SecretsManagerRotationEvent { + Step: SecretsManagerRotationEventStep; + SecretId: string; + ClientRequestToken: string; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/self-managed-kafka.d.ts b/dist/node_modules/@types/aws-lambda/trigger/self-managed-kafka.d.ts new file mode 100644 index 0000000..27dc2fc --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/self-managed-kafka.d.ts @@ -0,0 +1,27 @@ +import { Handler } from "../handler"; + +export type SelfManagedKafkaHandler = Handler; + +export interface SelfManagedKafkaRecordHeader { + [headerKey: string]: number[]; +} + +export interface SelfManagedKafkaRecord { + topic: string; + partition: number; + offset: number; + timestamp: number; + timestampType: "CREATE_TIME" | "LOG_APPEND_TIME"; + key: string; + value: string; + headers: SelfManagedKafkaRecordHeader[]; +} + +// https://docs.aws.amazon.com/lambda/latest/dg/with-kafka.html +export interface SelfManagedKafkaEvent { + eventSource: "SelfManagedKafka"; + bootstrapServers: string; + records: { + [topic: string]: SelfManagedKafkaRecord[]; + }; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/ses.d.ts b/dist/node_modules/@types/aws-lambda/trigger/ses.d.ts new file mode 100644 index 0000000..ff7a594 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/ses.d.ts @@ -0,0 +1,109 @@ +import { Handler } from "../handler"; + +export type SESHandler = Handler; + +// SES event +export interface SESMailHeader { + name: string; + value: string; +} + +export interface SESMailCommonHeaders { + returnPath: string; + from?: string[] | undefined; + date: string; + to?: string[] | undefined; + cc?: string[] | undefined; + bcc?: string[] | undefined; + sender?: string[] | undefined; + replyTo?: string[] | undefined; + messageId: string; + subject?: string | undefined; +} + +export interface SESMail { + timestamp: string; + source: string; + messageId: string; + destination: string[]; + headersTruncated: boolean; + headers: SESMailHeader[]; + commonHeaders: SESMailCommonHeaders; +} + +export interface SESReceiptStatus { + status: "PASS" | "FAIL" | "GRAY" | "PROCESSING_FAILED" | "DISABLED"; +} + +export interface SESReceiptS3Action { + type: "S3"; + topicArn?: string | undefined; + bucketName: string; + objectKey: string; +} + +export interface SESReceiptSnsAction { + type: "SNS"; + topicArn: string; +} + +export interface SESReceiptBounceAction { + type: "Bounce"; + topicArn?: string | undefined; + smtpReplyCode: string; + statusCode: string; + message: string; + sender: string; +} + +export interface SESReceiptLambdaAction { + type: "Lambda"; + topicArn?: string | undefined; + functionArn: string; + invocationType: string; +} + +export interface SESReceiptStopAction { + type: "Stop"; + topicArn?: string | undefined; +} + +export interface SESReceiptWorkMailAction { + type: "WorkMail"; + topicArn?: string | undefined; + organizationArn: string; +} + +export interface SESReceipt { 
+ timestamp: string; + processingTimeMillis: number; + recipients: string[]; + spamVerdict: SESReceiptStatus; + virusVerdict: SESReceiptStatus; + spfVerdict: SESReceiptStatus; + dkimVerdict: SESReceiptStatus; + dmarcVerdict: SESReceiptStatus; + dmarcPolicy?: "none" | "quarantine" | "reject" | undefined; + action: + | SESReceiptS3Action + | SESReceiptSnsAction + | SESReceiptBounceAction + | SESReceiptLambdaAction + | SESReceiptStopAction + | SESReceiptWorkMailAction; +} + +export interface SESMessage { + mail: SESMail; + receipt: SESReceipt; +} + +export interface SESEventRecord { + eventSource: string; + eventVersion: string; + ses: SESMessage; +} + +export interface SESEvent { + Records: SESEventRecord[]; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/sns.d.ts b/dist/node_modules/@types/aws-lambda/trigger/sns.d.ts new file mode 100644 index 0000000..c6d2cae --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/sns.d.ts @@ -0,0 +1,39 @@ +import { Handler } from "../handler"; + +export type SNSHandler = Handler; + +// SNS "event" +export interface SNSMessageAttribute { + Type: string; + Value: string; +} + +export interface SNSMessageAttributes { + [name: string]: SNSMessageAttribute; +} + +export interface SNSMessage { + SignatureVersion: string; + Timestamp: string; + Signature: string; + SigningCertUrl: string; + MessageId: string; + Message: string; + MessageAttributes: SNSMessageAttributes; + Type: string; + UnsubscribeUrl: string; + TopicArn: string; + Subject?: string; + Token?: string; +} + +export interface SNSEventRecord { + EventVersion: string; + EventSubscriptionArn: string; + EventSource: string; + Sns: SNSMessage; +} + +export interface SNSEvent { + Records: SNSEventRecord[]; +} diff --git a/dist/node_modules/@types/aws-lambda/trigger/sqs.d.ts b/dist/node_modules/@types/aws-lambda/trigger/sqs.d.ts new file mode 100644 index 0000000..07f8850 --- /dev/null +++ b/dist/node_modules/@types/aws-lambda/trigger/sqs.d.ts @@ -0,0 +1,57 @@ +import { Handler } from "../handler"; + +// eslint-disable-next-line @typescript-eslint/no-invalid-void-type +export type SQSHandler = Handler; + +// SQS +// https://docs.aws.amazon.com/lambda/latest/dg/invoking-lambda-function.html#supported-event-source-sqs +export interface SQSRecord { + messageId: string; + receiptHandle: string; + body: string; + attributes: SQSRecordAttributes; + messageAttributes: SQSMessageAttributes; + md5OfBody: string; + eventSource: string; + eventSourceARN: string; + awsRegion: string; +} + +export interface SQSEvent { + Records: SQSRecord[]; +} + +export interface SQSRecordAttributes { + AWSTraceHeader?: string | undefined; + ApproximateReceiveCount: string; + SentTimestamp: string; + SenderId: string; + ApproximateFirstReceiveTimestamp: string; + SequenceNumber?: string | undefined; + MessageGroupId?: string | undefined; + MessageDeduplicationId?: string | undefined; + DeadLetterQueueSourceArn?: string | undefined; // Undocumented, but used by AWS to support their re-drive functionality in the console +} + +export type SQSMessageAttributeDataType = "String" | "Number" | "Binary" | string; + +export interface SQSMessageAttribute { + stringValue?: string | undefined; + binaryValue?: string | undefined; + stringListValues?: string[] | undefined; // Not implemented. Reserved for future use. + binaryListValues?: string[] | undefined; // Not implemented. Reserved for future use. 
+ dataType: SQSMessageAttributeDataType; +} + +export interface SQSMessageAttributes { + [name: string]: SQSMessageAttribute; +} + +// https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#services-sqs-batchfailurereporting +export interface SQSBatchResponse { + batchItemFailures: SQSBatchItemFailure[]; +} + +export interface SQSBatchItemFailure { + itemIdentifier: string; +} diff --git a/dist/node_modules/@types/btoa-lite/LICENSE b/dist/node_modules/@types/btoa-lite/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/dist/node_modules/@types/btoa-lite/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/dist/node_modules/@types/btoa-lite/README.md b/dist/node_modules/@types/btoa-lite/README.md new file mode 100644 index 0000000..04748af --- /dev/null +++ b/dist/node_modules/@types/btoa-lite/README.md @@ -0,0 +1,23 @@ +# Installation +> `npm install --save @types/btoa-lite` + +# Summary +This package contains type definitions for btoa-lite (https://github.com/hughsk/btoa-lite). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/btoa-lite. +## [index.d.ts](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/btoa-lite/index.d.ts) +````ts +type EncodedData = string; +declare function btoa(decodedData: string): EncodedData; + +export = btoa; + +```` + +### Additional Details + * Last updated: Mon, 06 Nov 2023 22:41:05 GMT + * Dependencies: none + +# Credits +These definitions were written by [Gregor Martynus](https://github.com/gr2m). 
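// Editorial usage sketch (not part of the vendored packages): partial-batch failure reporting
// with the SQS typings added earlier in this diff (sqs.d.ts), assuming SQSHandler permits
// returning an SQSBatchResponse as in upstream @types/aws-lambda. With ReportBatchItemFailures
// enabled on the event source mapping, only the listed messages become visible again. The
// JSON.parse call stands in for real per-message processing.
import { SQSBatchItemFailure, SQSHandler } from "aws-lambda";

export const sqsWorker: SQSHandler = async (event) => {
    const batchItemFailures: SQSBatchItemFailure[] = [];
    for (const record of event.Records) {
        try {
            JSON.parse(record.body); // placeholder for real work
        } catch {
            // Report only the failed message so the rest of the batch is not retried.
            batchItemFailures.push({ itemIdentifier: record.messageId });
        }
    }
    return { batchItemFailures };
};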
diff --git a/dist/node_modules/@types/btoa-lite/index.d.ts b/dist/node_modules/@types/btoa-lite/index.d.ts new file mode 100644 index 0000000..01aee0a --- /dev/null +++ b/dist/node_modules/@types/btoa-lite/index.d.ts @@ -0,0 +1,4 @@ +type EncodedData = string; +declare function btoa(decodedData: string): EncodedData; + +export = btoa; diff --git a/dist/node_modules/@types/btoa-lite/package.json b/dist/node_modules/@types/btoa-lite/package.json new file mode 100644 index 0000000..68449d4 --- /dev/null +++ b/dist/node_modules/@types/btoa-lite/package.json @@ -0,0 +1,25 @@ +{ + "name": "@types/btoa-lite", + "version": "1.0.2", + "description": "TypeScript definitions for btoa-lite", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/btoa-lite", + "license": "MIT", + "contributors": [ + { + "name": "Gregor Martynus", + "githubUsername": "gr2m", + "url": "https://github.com/gr2m" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/btoa-lite" + }, + "scripts": {}, + "dependencies": {}, + "typesPublisherContentHash": "b050fa116b0b808a46d58a4bcab62f433189e4877c0237bcb8a4172c2c08fbfd", + "typeScriptVersion": "4.5" +} \ No newline at end of file diff --git a/dist/node_modules/@types/jsonwebtoken/LICENSE b/dist/node_modules/@types/jsonwebtoken/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/dist/node_modules/@types/jsonwebtoken/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/dist/node_modules/@types/jsonwebtoken/README.md b/dist/node_modules/@types/jsonwebtoken/README.md new file mode 100644 index 0000000..ee6e760 --- /dev/null +++ b/dist/node_modules/@types/jsonwebtoken/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/jsonwebtoken` + +# Summary +This package contains type definitions for jsonwebtoken (https://github.com/auth0/node-jsonwebtoken). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/jsonwebtoken. 
+ +### Additional Details + * Last updated: Mon, 26 Feb 2024 18:35:29 GMT + * Dependencies: [@types/node](https://npmjs.com/package/@types/node) + +# Credits +These definitions were written by [Maxime LUCE](https://github.com/SomaticIT), [Daniel Heim](https://github.com/danielheim), [Brice BERNARD](https://github.com/brikou), [Veli-Pekka Kestilä](https://github.com/vpk), [Daniel Parker](https://github.com/GeneralistDev), [Kjell Dießel](https://github.com/kettil), [Robert Gajda](https://github.com/RunAge), [Nico Flaig](https://github.com/nflaig), [Linus Unnebäck](https://github.com/LinusU), [Ivan Sieder](https://github.com/ivansieder), [Piotr Błażejewicz](https://github.com/peterblazejewicz), and [Nandor Kraszlan](https://github.com/nandi95). diff --git a/dist/node_modules/@types/jsonwebtoken/index.d.ts b/dist/node_modules/@types/jsonwebtoken/index.d.ts new file mode 100644 index 0000000..e1bbb18 --- /dev/null +++ b/dist/node_modules/@types/jsonwebtoken/index.d.ts @@ -0,0 +1,260 @@ +/// + +import { KeyObject } from "crypto"; + +export class JsonWebTokenError extends Error { + inner: Error; + + constructor(message: string, error?: Error); +} + +export class TokenExpiredError extends JsonWebTokenError { + expiredAt: Date; + + constructor(message: string, expiredAt: Date); +} + +/** + * Thrown if current time is before the nbf claim. + */ +export class NotBeforeError extends JsonWebTokenError { + date: Date; + + constructor(message: string, date: Date); +} + +export interface SignOptions { + /** + * Signature algorithm. Could be one of these values : + * - HS256: HMAC using SHA-256 hash algorithm (default) + * - HS384: HMAC using SHA-384 hash algorithm + * - HS512: HMAC using SHA-512 hash algorithm + * - RS256: RSASSA using SHA-256 hash algorithm + * - RS384: RSASSA using SHA-384 hash algorithm + * - RS512: RSASSA using SHA-512 hash algorithm + * - ES256: ECDSA using P-256 curve and SHA-256 hash algorithm + * - ES384: ECDSA using P-384 curve and SHA-384 hash algorithm + * - ES512: ECDSA using P-521 curve and SHA-512 hash algorithm + * - none: No digital signature or MAC value included + */ + algorithm?: Algorithm | undefined; + keyid?: string | undefined; + /** expressed in seconds or a string describing a time span [zeit/ms](https://github.com/zeit/ms.js). Eg: 60, "2 days", "10h", "7d" */ + expiresIn?: string | number; + /** expressed in seconds or a string describing a time span [zeit/ms](https://github.com/zeit/ms.js). Eg: 60, "2 days", "10h", "7d" */ + notBefore?: string | number | undefined; + audience?: string | string[] | undefined; + subject?: string | undefined; + issuer?: string | undefined; + jwtid?: string | undefined; + mutatePayload?: boolean | undefined; + noTimestamp?: boolean | undefined; + header?: JwtHeader | undefined; + encoding?: string | undefined; + allowInsecureKeySizes?: boolean | undefined; + allowInvalidAsymmetricKeyTypes?: boolean | undefined; +} + +export interface VerifyOptions { + algorithms?: Algorithm[] | undefined; + audience?: string | RegExp | Array | undefined; + clockTimestamp?: number | undefined; + clockTolerance?: number | undefined; + /** return an object with the decoded `{ payload, header, signature }` instead of only the usual content of the payload. */ + complete?: boolean | undefined; + issuer?: string | string[] | undefined; + ignoreExpiration?: boolean | undefined; + ignoreNotBefore?: boolean | undefined; + jwtid?: string | undefined; + /** + * If you want to check `nonce` claim, provide a string value here. 
+ * It is used on Open ID for the ID Tokens. ([Open ID implementation notes](https://openid.net/specs/openid-connect-core-1_0.html#NonceNotes)) + */ + nonce?: string | undefined; + subject?: string | undefined; + maxAge?: string | number | undefined; + allowInvalidAsymmetricKeyTypes?: boolean | undefined; +} + +export interface DecodeOptions { + complete?: boolean | undefined; + json?: boolean | undefined; +} +export type VerifyErrors = + | JsonWebTokenError + | NotBeforeError + | TokenExpiredError; +export type VerifyCallback = ( + error: VerifyErrors | null, + decoded: T | undefined, +) => void; + +export type SignCallback = ( + error: Error | null, + encoded: string | undefined, +) => void; + +// standard names https://www.rfc-editor.org/rfc/rfc7515.html#section-4.1 +export interface JwtHeader { + alg: string | Algorithm; + typ?: string | undefined; + cty?: string | undefined; + crit?: Array> | undefined; + kid?: string | undefined; + jku?: string | undefined; + x5u?: string | string[] | undefined; + "x5t#S256"?: string | undefined; + x5t?: string | undefined; + x5c?: string | string[] | undefined; +} + +// standard claims https://datatracker.ietf.org/doc/html/rfc7519#section-4.1 +export interface JwtPayload { + [key: string]: any; + iss?: string | undefined; + sub?: string | undefined; + aud?: string | string[] | undefined; + exp?: number | undefined; + nbf?: number | undefined; + iat?: number | undefined; + jti?: string | undefined; +} + +export interface Jwt { + header: JwtHeader; + payload: JwtPayload | string; + signature: string; +} + +// https://github.com/auth0/node-jsonwebtoken#algorithms-supported +export type Algorithm = + | "HS256" + | "HS384" + | "HS512" + | "RS256" + | "RS384" + | "RS512" + | "ES256" + | "ES384" + | "ES512" + | "PS256" + | "PS384" + | "PS512" + | "none"; + +export type SigningKeyCallback = ( + error: Error | null, + signingKey?: Secret, +) => void; + +export type GetPublicKeyOrSecret = ( + header: JwtHeader, + callback: SigningKeyCallback, +) => void; + +export type Secret = + | string + | Buffer + | KeyObject + | { key: string | Buffer; passphrase: string }; + +/** + * Synchronously sign the given payload into a JSON Web Token string + * payload - Payload to sign, could be an literal, buffer or string + * secretOrPrivateKey - Either the secret for HMAC algorithms, or the PEM encoded private key for RSA and ECDSA. + * [options] - Options for the signature + * returns - The JSON Web Token string + */ +export function sign( + payload: string | Buffer | object, + secretOrPrivateKey: Secret, + options?: SignOptions, +): string; +export function sign( + payload: string | Buffer | object, + secretOrPrivateKey: null, + options?: SignOptions & { algorithm: "none" }, +): string; + +/** + * Sign the given payload into a JSON Web Token string + * payload - Payload to sign, could be an literal, buffer or string + * secretOrPrivateKey - Either the secret for HMAC algorithms, or the PEM encoded private key for RSA and ECDSA. 
+ * [options] - Options for the signature + * callback - Callback to get the encoded token on + */ +export function sign( + payload: string | Buffer | object, + secretOrPrivateKey: Secret, + callback: SignCallback, +): void; +export function sign( + payload: string | Buffer | object, + secretOrPrivateKey: Secret, + options: SignOptions, + callback: SignCallback, +): void; +export function sign( + payload: string | Buffer | object, + secretOrPrivateKey: null, + options: SignOptions & { algorithm: "none" }, + callback: SignCallback, +): void; + +/** + * Synchronously verify given token using a secret or a public key to get a decoded token + * token - JWT string to verify + * secretOrPublicKey - Either the secret for HMAC algorithms, or the PEM encoded public key for RSA and ECDSA. + * [options] - Options for the verification + * returns - The decoded token. + */ +export function verify(token: string, secretOrPublicKey: Secret, options: VerifyOptions & { complete: true }): Jwt; +export function verify( + token: string, + secretOrPublicKey: Secret, + options?: VerifyOptions & { complete?: false }, +): JwtPayload | string; +export function verify(token: string, secretOrPublicKey: Secret, options?: VerifyOptions): Jwt | JwtPayload | string; + +/** + * Asynchronously verify given token using a secret or a public key to get a decoded token + * token - JWT string to verify + * secretOrPublicKey - A string or buffer containing either the secret for HMAC algorithms, + * or the PEM encoded public key for RSA and ECDSA. If jwt.verify is called asynchronous, + * secretOrPublicKey can be a function that should fetch the secret or public key + * [options] - Options for the verification + * callback - Callback to get the decoded token on + */ +export function verify( + token: string, + secretOrPublicKey: Secret | GetPublicKeyOrSecret, + callback?: VerifyCallback, +): void; +export function verify( + token: string, + secretOrPublicKey: Secret | GetPublicKeyOrSecret, + options: VerifyOptions & { complete: true }, + callback?: VerifyCallback, +): void; +export function verify( + token: string, + secretOrPublicKey: Secret | GetPublicKeyOrSecret, + options?: VerifyOptions & { complete?: false }, + callback?: VerifyCallback, +): void; +export function verify( + token: string, + secretOrPublicKey: Secret | GetPublicKeyOrSecret, + options?: VerifyOptions, + callback?: VerifyCallback, +): void; + +/** + * Returns the decoded payload without verifying if the signature is valid. 
+ * token - JWT string to decode + * [options] - Options for decoding + * returns - The decoded Token + */ +export function decode(token: string, options: DecodeOptions & { complete: true }): null | Jwt; +export function decode(token: string, options: DecodeOptions & { json: true }): null | JwtPayload; +export function decode(token: string, options?: DecodeOptions): null | JwtPayload | string; diff --git a/dist/node_modules/@types/jsonwebtoken/package.json b/dist/node_modules/@types/jsonwebtoken/package.json new file mode 100644 index 0000000..b063c6d --- /dev/null +++ b/dist/node_modules/@types/jsonwebtoken/package.json @@ -0,0 +1,82 @@ +{ + "name": "@types/jsonwebtoken", + "version": "9.0.6", + "description": "TypeScript definitions for jsonwebtoken", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/jsonwebtoken", + "license": "MIT", + "contributors": [ + { + "name": "Maxime LUCE", + "githubUsername": "SomaticIT", + "url": "https://github.com/SomaticIT" + }, + { + "name": "Daniel Heim", + "githubUsername": "danielheim", + "url": "https://github.com/danielheim" + }, + { + "name": "Brice BERNARD", + "githubUsername": "brikou", + "url": "https://github.com/brikou" + }, + { + "name": "Veli-Pekka Kestilä", + "githubUsername": "vpk", + "url": "https://github.com/vpk" + }, + { + "name": "Daniel Parker", + "githubUsername": "GeneralistDev", + "url": "https://github.com/GeneralistDev" + }, + { + "name": "Kjell Dießel", + "githubUsername": "kettil", + "url": "https://github.com/kettil" + }, + { + "name": "Robert Gajda", + "githubUsername": "RunAge", + "url": "https://github.com/RunAge" + }, + { + "name": "Nico Flaig", + "githubUsername": "nflaig", + "url": "https://github.com/nflaig" + }, + { + "name": "Linus Unnebäck", + "githubUsername": "LinusU", + "url": "https://github.com/LinusU" + }, + { + "name": "Ivan Sieder", + "githubUsername": "ivansieder", + "url": "https://github.com/ivansieder" + }, + { + "name": "Piotr Błażejewicz", + "githubUsername": "peterblazejewicz", + "url": "https://github.com/peterblazejewicz" + }, + { + "name": "Nandor Kraszlan", + "githubUsername": "nandi95", + "url": "https://github.com/nandi95" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/jsonwebtoken" + }, + "scripts": {}, + "dependencies": { + "@types/node": "*" + }, + "typesPublisherContentHash": "77d3bf562214bf5a3dea7e1a5f31b746c79aa56e8119f765369ce8bda9514204", + "typeScriptVersion": "4.6" +} \ No newline at end of file diff --git a/dist/node_modules/@types/node/LICENSE b/dist/node_modules/@types/node/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/dist/node_modules/@types/node/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. 
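// Editorial usage sketch (not part of the vendored packages): the sign/verify/decode overloads
// declared in the jsonwebtoken typings earlier in this diff. The secret, payload, and expiry
// values are illustrative assumptions only.
import * as jwt from "jsonwebtoken";

const secret = "replace-with-a-real-secret"; // HS256 (HMAC) secret, illustrative only
const token = jwt.sign({ sub: "user-123" }, secret, { expiresIn: "1h" });

// Without { complete: true } the non-complete overload applies and returns JwtPayload | string.
const payload = jwt.verify(token, secret);
if (typeof payload !== "string") {
    console.log(payload.sub, payload.exp); // standard claims from JwtPayload
}

// decode() only parses the token; it performs no signature verification.
const unverified = jwt.decode(token, { json: true });
console.log(unverified?.iat);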
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/dist/node_modules/@types/node/README.md b/dist/node_modules/@types/node/README.md new file mode 100644 index 0000000..65d43f6 --- /dev/null +++ b/dist/node_modules/@types/node/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/node` + +# Summary +This package contains type definitions for node (https://nodejs.org/). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node. + +### Additional Details + * Last updated: Wed, 06 Mar 2024 17:07:21 GMT + * Dependencies: [undici-types](https://npmjs.com/package/undici-types) + +# Credits +These definitions were written by [Microsoft TypeScript](https://github.com/Microsoft), [Alberto Schiabel](https://github.com/jkomyno), [Alvis HT Tang](https://github.com/alvis), [Andrew Makarov](https://github.com/r3nya), [Benjamin Toueg](https://github.com/btoueg), [Chigozirim C.](https://github.com/smac89), [David Junger](https://github.com/touffy), [Deividas Bakanas](https://github.com/DeividasBakanas), [Eugene Y. Q. Shen](https://github.com/eyqs), [Hannes Magnusson](https://github.com/Hannes-Magnusson-CK), [Huw](https://github.com/hoo29), [Kelvin Jin](https://github.com/kjin), [Klaus Meinhardt](https://github.com/ajafff), [Lishude](https://github.com/islishude), [Mariusz Wiktorczyk](https://github.com/mwiktorczyk), [Mohsen Azimi](https://github.com/mohsen1), [Nikita Galkin](https://github.com/galkin), [Parambir Singh](https://github.com/parambirs), [Sebastian Silbermann](https://github.com/eps1lon), [Thomas den Hollander](https://github.com/ThomasdenH), [Wilco Bakker](https://github.com/WilcoBakker), [wwwy3y3](https://github.com/wwwy3y3), [Samuel Ainsworth](https://github.com/samuela), [Kyle Uehlein](https://github.com/kuehlein), [Thanik Bhongbhibhat](https://github.com/bhongy), [Marcin Kopacz](https://github.com/chyzwar), [Trivikram Kamat](https://github.com/trivikr), [Junxiao Shi](https://github.com/yoursunny), [Ilia Baryshnikov](https://github.com/qwelias), [ExE Boss](https://github.com/ExE-Boss), [Piotr Błażejewicz](https://github.com/peterblazejewicz), [Anna Henningsen](https://github.com/addaleax), [Victor Perin](https://github.com/victorperin), [Yongsheng Zhang](https://github.com/ZYSzys), [NodeJS Contributors](https://github.com/NodeJS), [Linus Unnebäck](https://github.com/LinusU), [wafuwafu13](https://github.com/wafuwafu13), [Matteo Collina](https://github.com/mcollina), and [Dmitry Semigradsky](https://github.com/Semigradsky). diff --git a/dist/node_modules/@types/node/assert.d.ts b/dist/node_modules/@types/node/assert.d.ts new file mode 100644 index 0000000..cd82143 --- /dev/null +++ b/dist/node_modules/@types/node/assert.d.ts @@ -0,0 +1,996 @@ +/** + * The `node:assert` module provides a set of assertion functions for verifying + * invariants. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/assert.js) + */ +declare module "assert" { + /** + * An alias of {@link ok}. + * @since v0.5.9 + * @param value The input that is checked for being truthy. 
+ */ + function assert(value: unknown, message?: string | Error): asserts value; + namespace assert { + /** + * Indicates the failure of an assertion. All errors thrown by the `node:assert`module will be instances of the `AssertionError` class. + */ + class AssertionError extends Error { + /** + * Set to the `actual` argument for methods such as {@link assert.strictEqual()}. + */ + actual: unknown; + /** + * Set to the `expected` argument for methods such as {@link assert.strictEqual()}. + */ + expected: unknown; + /** + * Set to the passed in operator value. + */ + operator: string; + /** + * Indicates if the message was auto-generated (`true`) or not. + */ + generatedMessage: boolean; + /** + * Value is always `ERR_ASSERTION` to show that the error is an assertion error. + */ + code: "ERR_ASSERTION"; + constructor(options?: { + /** If provided, the error message is set to this value. */ + message?: string | undefined; + /** The `actual` property on the error instance. */ + actual?: unknown | undefined; + /** The `expected` property on the error instance. */ + expected?: unknown | undefined; + /** The `operator` property on the error instance. */ + operator?: string | undefined; + /** If provided, the generated stack trace omits frames before this function. */ + // eslint-disable-next-line @typescript-eslint/ban-types + stackStartFn?: Function | undefined; + }); + } + /** + * This feature is deprecated and will be removed in a future version. + * Please consider using alternatives such as the `mock` helper function. + * @since v14.2.0, v12.19.0 + * @deprecated Deprecated + */ + class CallTracker { + /** + * The wrapper function is expected to be called exactly `exact` times. If the + * function has not been called exactly `exact` times when `tracker.verify()` is called, then `tracker.verify()` will throw an + * error. + * + * ```js + * import assert from 'node:assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func); + * ``` + * @since v14.2.0, v12.19.0 + * @param [fn='A no-op function'] + * @param [exact=1] + * @return that wraps `fn`. + */ + calls(exact?: number): () => void; + calls any>(fn?: Func, exact?: number): Func; + /** + * Example: + * + * ```js + * import assert from 'node:assert'; + * + * const tracker = new assert.CallTracker(); + * + * function func() {} + * const callsfunc = tracker.calls(func); + * callsfunc(1, 2, 3); + * + * assert.deepStrictEqual(tracker.getCalls(callsfunc), + * [{ thisArg: undefined, arguments: [1, 2, 3] }]); + * ``` + * @since v18.8.0, v16.18.0 + * @param fn + * @return An Array with all the calls to a tracked function. + */ + getCalls(fn: Function): CallTrackerCall[]; + /** + * The arrays contains information about the expected and actual number of calls of + * the functions that have not been called the expected number of times. + * + * ```js + * import assert from 'node:assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). 
+ * const callsfunc = tracker.calls(func, 2); + * + * // Returns an array containing information on callsfunc() + * console.log(tracker.report()); + * // [ + * // { + * // message: 'Expected the func function to be executed 2 time(s) but was + * // executed 0 time(s).', + * // actual: 0, + * // expected: 2, + * // operator: 'func', + * // stack: stack trace + * // } + * // ] + * ``` + * @since v14.2.0, v12.19.0 + * @return An Array of objects containing information about the wrapper functions returned by `calls`. + */ + report(): CallTrackerReportInformation[]; + /** + * Reset calls of the call tracker. + * If a tracked function is passed as an argument, the calls will be reset for it. + * If no arguments are passed, all tracked functions will be reset. + * + * ```js + * import assert from 'node:assert'; + * + * const tracker = new assert.CallTracker(); + * + * function func() {} + * const callsfunc = tracker.calls(func); + * + * callsfunc(); + * // Tracker was called once + * assert.strictEqual(tracker.getCalls(callsfunc).length, 1); + * + * tracker.reset(callsfunc); + * assert.strictEqual(tracker.getCalls(callsfunc).length, 0); + * ``` + * @since v18.8.0, v16.18.0 + * @param fn a tracked function to reset. + */ + reset(fn?: Function): void; + /** + * Iterates through the list of functions passed to `tracker.calls()` and will throw an error for functions that + * have not been called the expected number of times. + * + * ```js + * import assert from 'node:assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func, 2); + * + * callsfunc(); + * + * // Will throw an error since callsfunc() was only called once. + * tracker.verify(); + * ``` + * @since v14.2.0, v12.19.0 + */ + verify(): void; + } + interface CallTrackerCall { + thisArg: object; + arguments: unknown[]; + } + interface CallTrackerReportInformation { + message: string; + /** The actual number of times the function was called. */ + actual: number; + /** The number of times the function was expected to be called. */ + expected: number; + /** The name of the function that is wrapped. */ + operator: string; + /** A stack trace of the function. */ + stack: object; + } + type AssertPredicate = RegExp | (new() => object) | ((thrown: unknown) => boolean) | object | Error; + /** + * Throws an `AssertionError` with the provided error message or a default + * error message. If the `message` parameter is an instance of an `Error` then + * it will be thrown instead of the `AssertionError`. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.fail(); + * // AssertionError [ERR_ASSERTION]: Failed + * + * assert.fail('boom'); + * // AssertionError [ERR_ASSERTION]: boom + * + * assert.fail(new TypeError('need array')); + * // TypeError: need array + * ``` + * + * Using `assert.fail()` with more than two arguments is possible but deprecated. + * See below for further details. + * @since v0.1.21 + * @param [message='Failed'] + */ + function fail(message?: string | Error): never; + /** @deprecated since v10.0.0 - use fail([message]) or other assert functions instead. */ + function fail( + actual: unknown, + expected: unknown, + message?: string | Error, + operator?: string, + // eslint-disable-next-line @typescript-eslint/ban-types + stackStartFn?: Function, + ): never; + /** + * Tests if `value` is truthy. 
It is equivalent to`assert.equal(!!value, true, message)`. + * + * If `value` is not truthy, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is `undefined`, a default + * error message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * If no arguments are passed in at all `message` will be set to the string:`` 'No value argument passed to `assert.ok()`' ``. + * + * Be aware that in the `repl` the error message will be different to the one + * thrown in a file! See below for further details. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.ok(true); + * // OK + * assert.ok(1); + * // OK + * + * assert.ok(); + * // AssertionError: No value argument passed to `assert.ok()` + * + * assert.ok(false, 'it\'s false'); + * // AssertionError: it's false + * + * // In the repl: + * assert.ok(typeof 123 === 'string'); + * // AssertionError: false == true + * + * // In a file (e.g. test.js): + * assert.ok(typeof 123 === 'string'); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(typeof 123 === 'string') + * + * assert.ok(false); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(false) + * + * assert.ok(0); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(0) + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * // Using `assert()` works the same: + * assert(0); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert(0) + * ``` + * @since v0.1.21 + */ + function ok(value: unknown, message?: string | Error): asserts value; + /** + * **Strict assertion mode** + * + * An alias of {@link strictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link strictEqual} instead. + * + * Tests shallow, coercive equality between the `actual` and `expected` parameters + * using the [`==` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Equality). `NaN` is specially handled + * and treated as being identical if both sides are `NaN`. + * + * ```js + * import assert from 'node:assert'; + * + * assert.equal(1, 1); + * // OK, 1 == 1 + * assert.equal(1, '1'); + * // OK, 1 == '1' + * assert.equal(NaN, NaN); + * // OK + * + * assert.equal(1, 2); + * // AssertionError: 1 == 2 + * assert.equal({ a: { b: 1 } }, { a: { b: 1 } }); + * // AssertionError: { a: { b: 1 } } == { a: { b: 1 } } + * ``` + * + * If the values are not equal, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is undefined, a default + * error message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * @since v0.1.21 + */ + function equal(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link notStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link notStrictEqual} instead. + * + * Tests shallow, coercive inequality with the [`!=` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Inequality). `NaN` is + * specially handled and treated as being identical if both sides are `NaN`. 
+ * + * ```js + * import assert from 'node:assert'; + * + * assert.notEqual(1, 2); + * // OK + * + * assert.notEqual(1, 1); + * // AssertionError: 1 != 1 + * + * assert.notEqual(1, '1'); + * // AssertionError: 1 != '1' + * ``` + * + * If the values are equal, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is undefined, a default error + * message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * @since v0.1.21 + */ + function notEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link deepStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link deepStrictEqual} instead. + * + * Tests for deep equality between the `actual` and `expected` parameters. Consider + * using {@link deepStrictEqual} instead. {@link deepEqual} can have + * surprising results. + * + * _Deep equality_ means that the enumerable "own" properties of child objects + * are also recursively evaluated by the following rules. + * @since v0.1.21 + */ + function deepEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link notDeepStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link notDeepStrictEqual} instead. + * + * Tests for any deep inequality. Opposite of {@link deepEqual}. + * + * ```js + * import assert from 'node:assert'; + * + * const obj1 = { + * a: { + * b: 1, + * }, + * }; + * const obj2 = { + * a: { + * b: 2, + * }, + * }; + * const obj3 = { + * a: { + * b: 1, + * }, + * }; + * const obj4 = { __proto__: obj1 }; + * + * assert.notDeepEqual(obj1, obj1); + * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } } + * + * assert.notDeepEqual(obj1, obj2); + * // OK + * + * assert.notDeepEqual(obj1, obj3); + * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } } + * + * assert.notDeepEqual(obj1, obj4); + * // OK + * ``` + * + * If the values are deeply equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. If the`message` parameter is undefined, a default + * error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function notDeepEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Tests strict equality between the `actual` and `expected` parameters as + * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is). + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.strictEqual(1, 2); + * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal: + * // + * // 1 !== 2 + * + * assert.strictEqual(1, 1); + * // OK + * + * assert.strictEqual('Hello foobar', 'Hello World!'); + * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal: + * // + actual - expected + * // + * // + 'Hello foobar' + * // - 'Hello World!' 
+ * // ^ + * + * const apples = 1; + * const oranges = 2; + * assert.strictEqual(apples, oranges, `apples ${apples} !== oranges ${oranges}`); + * // AssertionError [ERR_ASSERTION]: apples 1 !== oranges 2 + * + * assert.strictEqual(1, '1', new TypeError('Inputs are not identical')); + * // TypeError: Inputs are not identical + * ``` + * + * If the values are not strictly equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. If the`message` parameter is undefined, a + * default error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function strictEqual(actual: unknown, expected: T, message?: string | Error): asserts actual is T; + /** + * Tests strict inequality between the `actual` and `expected` parameters as + * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is). + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.notStrictEqual(1, 2); + * // OK + * + * assert.notStrictEqual(1, 1); + * // AssertionError [ERR_ASSERTION]: Expected "actual" to be strictly unequal to: + * // + * // 1 + * + * assert.notStrictEqual(1, '1'); + * // OK + * ``` + * + * If the values are strictly equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. If the`message` parameter is undefined, a + * default error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function notStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Tests for deep equality between the `actual` and `expected` parameters. + * "Deep" equality means that the enumerable "own" properties of child objects + * are recursively evaluated also by the following rules. + * @since v1.2.0 + */ + function deepStrictEqual(actual: unknown, expected: T, message?: string | Error): asserts actual is T; + /** + * Tests for deep strict inequality. Opposite of {@link deepStrictEqual}. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.notDeepStrictEqual({ a: 1 }, { a: '1' }); + * // OK + * ``` + * + * If the values are deeply and strictly equal, an `AssertionError` is thrown + * with a `message` property set equal to the value of the `message` parameter. If + * the `message` parameter is undefined, a default error message is assigned. If + * the `message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v1.2.0 + */ + function notDeepStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Expects the function `fn` to throw an error. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function, + * a validation object where each property will be tested for strict deep equality, + * or an instance of error where each property will be tested for strict deep + * equality including the non-enumerable `message` and `name` properties. When + * using an object, it is also possible to use a regular expression, when + * validating against a string property. See below for examples. 
+ * + * If specified, `message` will be appended to the message provided by the`AssertionError` if the `fn` call fails to throw or in case the error validation + * fails. + * + * Custom validation object/error instance: + * + * ```js + * import assert from 'node:assert/strict'; + * + * const err = new TypeError('Wrong value'); + * err.code = 404; + * err.foo = 'bar'; + * err.info = { + * nested: true, + * baz: 'text', + * }; + * err.reg = /abc/i; + * + * assert.throws( + * () => { + * throw err; + * }, + * { + * name: 'TypeError', + * message: 'Wrong value', + * info: { + * nested: true, + * baz: 'text', + * }, + * // Only properties on the validation object will be tested for. + * // Using nested objects requires all properties to be present. Otherwise + * // the validation is going to fail. + * }, + * ); + * + * // Using regular expressions to validate error properties: + * assert.throws( + * () => { + * throw err; + * }, + * { + * // The `name` and `message` properties are strings and using regular + * // expressions on those will match against the string. If they fail, an + * // error is thrown. + * name: /^TypeError$/, + * message: /Wrong/, + * foo: 'bar', + * info: { + * nested: true, + * // It is not possible to use regular expressions for nested properties! + * baz: 'text', + * }, + * // The `reg` property contains a regular expression and only if the + * // validation object contains an identical regular expression, it is going + * // to pass. + * reg: /abc/i, + * }, + * ); + * + * // Fails due to the different `message` and `name` properties: + * assert.throws( + * () => { + * const otherErr = new Error('Not found'); + * // Copy all enumerable properties from `err` to `otherErr`. + * for (const [key, value] of Object.entries(err)) { + * otherErr[key] = value; + * } + * throw otherErr; + * }, + * // The error's `message` and `name` properties will also be checked when using + * // an error as validation object. + * err, + * ); + * ``` + * + * Validate instanceof using constructor: + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * Error, + * ); + * ``` + * + * Validate error message using [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions): + * + * Using a regular expression runs `.toString` on the error object, and will + * therefore also include the error name. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * /^Error: Wrong value$/, + * ); + * ``` + * + * Custom error validation: + * + * The function must return `true` to indicate all internal validations passed. + * It will otherwise fail with an `AssertionError`. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * (err) => { + * assert(err instanceof Error); + * assert(/value/.test(err)); + * // Avoid returning anything from validation functions besides `true`. + * // Otherwise, it's not clear what part of the validation failed. Instead, + * // throw an error about the specific validation that failed (as done in this + * // example) and add as much helpful debugging information to that error as + * // possible. + * return true; + * }, + * 'unexpected error', + * ); + * ``` + * + * `error` cannot be a string. 
If a string is provided as the second + * argument, then `error` is assumed to be omitted and the string will be used for`message` instead. This can lead to easy-to-miss mistakes. Using the same + * message as the thrown error message is going to result in an`ERR_AMBIGUOUS_ARGUMENT` error. Please read the example below carefully if using + * a string as the second argument gets considered: + * + * ```js + * import assert from 'node:assert/strict'; + * + * function throwingFirst() { + * throw new Error('First'); + * } + * + * function throwingSecond() { + * throw new Error('Second'); + * } + * + * function notThrowing() {} + * + * // The second argument is a string and the input function threw an Error. + * // The first case will not throw as it does not match for the error message + * // thrown by the input function! + * assert.throws(throwingFirst, 'Second'); + * // In the next example the message has no benefit over the message from the + * // error and since it is not clear if the user intended to actually match + * // against the error message, Node.js throws an `ERR_AMBIGUOUS_ARGUMENT` error. + * assert.throws(throwingSecond, 'Second'); + * // TypeError [ERR_AMBIGUOUS_ARGUMENT] + * + * // The string is only used (as message) in case the function does not throw: + * assert.throws(notThrowing, 'Second'); + * // AssertionError [ERR_ASSERTION]: Missing expected exception: Second + * + * // If it was intended to match for the error message do this instead: + * // It does not throw because the error messages match. + * assert.throws(throwingSecond, /Second$/); + * + * // If the error message does not match, an AssertionError is thrown. + * assert.throws(throwingFirst, /Second$/); + * // AssertionError [ERR_ASSERTION] + * ``` + * + * Due to the confusing error-prone notation, avoid a string as the second + * argument. + * @since v0.1.21 + */ + function throws(block: () => unknown, message?: string | Error): void; + function throws(block: () => unknown, error: AssertPredicate, message?: string | Error): void; + /** + * Asserts that the function `fn` does not throw an error. + * + * Using `assert.doesNotThrow()` is actually not useful because there + * is no benefit in catching an error and then rethrowing it. Instead, consider + * adding a comment next to the specific code path that should not throw and keep + * error messages as expressive as possible. + * + * When `assert.doesNotThrow()` is called, it will immediately call the `fn`function. + * + * If an error is thrown and it is the same type as that specified by the `error`parameter, then an `AssertionError` is thrown. If the error is of a + * different type, or if the `error` parameter is undefined, the error is + * propagated back to the caller. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), or a validation + * function. See {@link throws} for more details. 
+ * + * The following, for instance, will throw the `TypeError` because there is no + * matching error type in the assertion: + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * SyntaxError, + * ); + * ``` + * + * However, the following will result in an `AssertionError` with the message + * 'Got unwanted exception...': + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * TypeError, + * ); + * ``` + * + * If an `AssertionError` is thrown and a value is provided for the `message`parameter, the value of `message` will be appended to the `AssertionError` message: + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * /Wrong value/, + * 'Whoops', + * ); + * // Throws: AssertionError: Got unwanted exception: Whoops + * ``` + * @since v0.1.21 + */ + function doesNotThrow(block: () => unknown, message?: string | Error): void; + function doesNotThrow(block: () => unknown, error: AssertPredicate, message?: string | Error): void; + /** + * Throws `value` if `value` is not `undefined` or `null`. This is useful when + * testing the `error` argument in callbacks. The stack trace contains all frames + * from the error passed to `ifError()` including the potential new frames for`ifError()` itself. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.ifError(null); + * // OK + * assert.ifError(0); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 0 + * assert.ifError('error'); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 'error' + * assert.ifError(new Error()); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: Error + * + * // Create some random error frames. + * let err; + * (function errorFrame() { + * err = new Error('test error'); + * })(); + * + * (function ifErrorFrame() { + * assert.ifError(err); + * })(); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: test error + * // at ifErrorFrame + * // at errorFrame + * ``` + * @since v0.1.97 + */ + function ifError(value: unknown): asserts value is null | undefined; + /** + * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately + * calls the function and awaits the returned promise to complete. It will then + * check that the promise is rejected. + * + * If `asyncFn` is a function and it throws an error synchronously,`assert.rejects()` will return a rejected `Promise` with that error. If the + * function does not return a promise, `assert.rejects()` will return a rejected`Promise` with an `ERR_INVALID_RETURN_VALUE` error. In both cases the error + * handler is skipped. + * + * Besides the async nature to await the completion behaves identically to {@link throws}. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function, + * an object where each property will be tested for, or an instance of error where + * each property will be tested for including the non-enumerable `message` and`name` properties. + * + * If specified, `message` will be the message provided by the `AssertionError` if the `asyncFn` fails to reject. 
+ * + * ```js + * import assert from 'node:assert/strict'; + * + * await assert.rejects( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * { + * name: 'TypeError', + * message: 'Wrong value', + * }, + * ); + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * await assert.rejects( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * (err) => { + * assert.strictEqual(err.name, 'TypeError'); + * assert.strictEqual(err.message, 'Wrong value'); + * return true; + * }, + * ); + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.rejects( + * Promise.reject(new Error('Wrong value')), + * Error, + * ).then(() => { + * // ... + * }); + * ``` + * + * `error` cannot be a string. If a string is provided as the second + * argument, then `error` is assumed to be omitted and the string will be used for`message` instead. This can lead to easy-to-miss mistakes. Please read the + * example in {@link throws} carefully if using a string as the second + * argument gets considered. + * @since v10.0.0 + */ + function rejects(block: (() => Promise) | Promise, message?: string | Error): Promise; + function rejects( + block: (() => Promise) | Promise, + error: AssertPredicate, + message?: string | Error, + ): Promise; + /** + * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately + * calls the function and awaits the returned promise to complete. It will then + * check that the promise is not rejected. + * + * If `asyncFn` is a function and it throws an error synchronously,`assert.doesNotReject()` will return a rejected `Promise` with that error. If + * the function does not return a promise, `assert.doesNotReject()` will return a + * rejected `Promise` with an `ERR_INVALID_RETURN_VALUE` error. In both cases + * the error handler is skipped. + * + * Using `assert.doesNotReject()` is actually not useful because there is little + * benefit in catching a rejection and then rejecting it again. Instead, consider + * adding a comment next to the specific code path that should not reject and keep + * error messages as expressive as possible. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), or a validation + * function. See {@link throws} for more details. + * + * Besides the async nature to await the completion behaves identically to {@link doesNotThrow}. + * + * ```js + * import assert from 'node:assert/strict'; + * + * await assert.doesNotReject( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * SyntaxError, + * ); + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotReject(Promise.reject(new TypeError('Wrong value'))) + * .then(() => { + * // ... + * }); + * ``` + * @since v10.0.0 + */ + function doesNotReject( + block: (() => Promise) | Promise, + message?: string | Error, + ): Promise; + function doesNotReject( + block: (() => Promise) | Promise, + error: AssertPredicate, + message?: string | Error, + ): Promise; + /** + * Expects the `string` input to match the regular expression. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.match('I will fail', /pass/); + * // AssertionError [ERR_ASSERTION]: The input did not match the regular ... + * + * assert.match(123, /pass/); + * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string. 
+ * + * assert.match('I will pass', /pass/); + * // OK + * ``` + * + * If the values do not match, or if the `string` argument is of another type than`string`, an `AssertionError` is thrown with a `message` property set equal + * to the value of the `message` parameter. If the `message` parameter is + * undefined, a default error message is assigned. If the `message` parameter is an + * instance of an `Error` then it will be thrown instead of the `AssertionError`. + * @since v13.6.0, v12.16.0 + */ + function match(value: string, regExp: RegExp, message?: string | Error): void; + /** + * Expects the `string` input not to match the regular expression. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotMatch('I will fail', /fail/); + * // AssertionError [ERR_ASSERTION]: The input was expected to not match the ... + * + * assert.doesNotMatch(123, /pass/); + * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string. + * + * assert.doesNotMatch('I will pass', /different/); + * // OK + * ``` + * + * If the values do match, or if the `string` argument is of another type than`string`, an `AssertionError` is thrown with a `message` property set equal + * to the value of the `message` parameter. If the `message` parameter is + * undefined, a default error message is assigned. If the `message` parameter is an + * instance of an `Error` then it will be thrown instead of the `AssertionError`. + * @since v13.6.0, v12.16.0 + */ + function doesNotMatch(value: string, regExp: RegExp, message?: string | Error): void; + const strict: + & Omit< + typeof assert, + | "equal" + | "notEqual" + | "deepEqual" + | "notDeepEqual" + | "ok" + | "strictEqual" + | "deepStrictEqual" + | "ifError" + | "strict" + > + & { + (value: unknown, message?: string | Error): asserts value; + equal: typeof strictEqual; + notEqual: typeof notStrictEqual; + deepEqual: typeof deepStrictEqual; + notDeepEqual: typeof notDeepStrictEqual; + // Mapped types and assertion functions are incompatible? + // TS2775: Assertions require every name in the call target + // to be declared with an explicit type annotation. + ok: typeof ok; + strictEqual: typeof strictEqual; + deepStrictEqual: typeof deepStrictEqual; + ifError: typeof ifError; + strict: typeof strict; + }; + } + export = assert; +} +declare module "node:assert" { + import assert = require("assert"); + export = assert; +} diff --git a/dist/node_modules/@types/node/assert/strict.d.ts b/dist/node_modules/@types/node/assert/strict.d.ts new file mode 100644 index 0000000..f333913 --- /dev/null +++ b/dist/node_modules/@types/node/assert/strict.d.ts @@ -0,0 +1,8 @@ +declare module "assert/strict" { + import { strict } from "node:assert"; + export = strict; +} +declare module "node:assert/strict" { + import { strict } from "node:assert"; + export = strict; +} diff --git a/dist/node_modules/@types/node/async_hooks.d.ts b/dist/node_modules/@types/node/async_hooks.d.ts new file mode 100644 index 0000000..0667a61 --- /dev/null +++ b/dist/node_modules/@types/node/async_hooks.d.ts @@ -0,0 +1,539 @@ +/** + * We strongly discourage the use of the `async_hooks` API. + * Other APIs that can cover most of its use cases include: + * + * * `AsyncLocalStorage` tracks async context + * * `process.getActiveResourcesInfo()` tracks active resources + * + * The `node:async_hooks` module provides an API to track asynchronous resources. 
+ * It can be accessed using: + * + * ```js + * import async_hooks from 'node:async_hooks'; + * ``` + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/async_hooks.js) + */ +declare module "async_hooks" { + /** + * ```js + * import { executionAsyncId } from 'node:async_hooks'; + * import fs from 'node:fs'; + * + * console.log(executionAsyncId()); // 1 - bootstrap + * const path = '.'; + * fs.open(path, 'r', (err, fd) => { + * console.log(executionAsyncId()); // 6 - open() + * }); + * ``` + * + * The ID returned from `executionAsyncId()` is related to execution timing, not + * causality (which is covered by `triggerAsyncId()`): + * + * ```js + * const server = net.createServer((conn) => { + * // Returns the ID of the server, not of the new connection, because the + * // callback runs in the execution scope of the server's MakeCallback(). + * async_hooks.executionAsyncId(); + * + * }).listen(port, () => { + * // Returns the ID of a TickObject (process.nextTick()) because all + * // callbacks passed to .listen() are wrapped in a nextTick(). + * async_hooks.executionAsyncId(); + * }); + * ``` + * + * Promise contexts may not get precise `executionAsyncIds` by default. + * See the section on `promise execution tracking`. + * @since v8.1.0 + * @return The `asyncId` of the current execution context. Useful to track when something calls. + */ + function executionAsyncId(): number; + /** + * Resource objects returned by `executionAsyncResource()` are most often internal + * Node.js handle objects with undocumented APIs. Using any functions or properties + * on the object is likely to crash your application and should be avoided. + * + * Using `executionAsyncResource()` in the top-level execution context will + * return an empty object as there is no handle or request object to use, + * but having an object representing the top-level can be helpful. + * + * ```js + * import { open } from 'node:fs'; + * import { executionAsyncId, executionAsyncResource } from 'node:async_hooks'; + * + * console.log(executionAsyncId(), executionAsyncResource()); // 1 {} + * open(new URL(import.meta.url), 'r', (err, fd) => { + * console.log(executionAsyncId(), executionAsyncResource()); // 7 FSReqWrap + * }); + * ``` + * + * This can be used to implement continuation local storage without the + * use of a tracking `Map` to store the metadata: + * + * ```js + * import { createServer } from 'node:http'; + * import { + * executionAsyncId, + * executionAsyncResource, + * createHook, + * } from 'async_hooks'; + * const sym = Symbol('state'); // Private symbol to avoid pollution + * + * createHook({ + * init(asyncId, type, triggerAsyncId, resource) { + * const cr = executionAsyncResource(); + * if (cr) { + * resource[sym] = cr[sym]; + * } + * }, + * }).enable(); + * + * const server = createServer((req, res) => { + * executionAsyncResource()[sym] = { state: req.url }; + * setTimeout(function() { + * res.end(JSON.stringify(executionAsyncResource()[sym])); + * }, 100); + * }).listen(3000); + * ``` + * @since v13.9.0, v12.17.0 + * @return The resource representing the current execution. Useful to store data within the resource. + */ + function executionAsyncResource(): object; + /** + * ```js + * const server = net.createServer((conn) => { + * // The resource that caused (or triggered) this callback to be called + * // was that of the new connection. Thus the return value of triggerAsyncId() + * // is the asyncId of "conn". 
+ * async_hooks.triggerAsyncId(); + * + * }).listen(port, () => { + * // Even though all callbacks passed to .listen() are wrapped in a nextTick() + * // the callback itself exists because the call to the server's .listen() + * // was made. So the return value would be the ID of the server. + * async_hooks.triggerAsyncId(); + * }); + * ``` + * + * Promise contexts may not get valid `triggerAsyncId`s by default. See + * the section on `promise execution tracking`. + * @return The ID of the resource responsible for calling the callback that is currently being executed. + */ + function triggerAsyncId(): number; + interface HookCallbacks { + /** + * Called when a class is constructed that has the possibility to emit an asynchronous event. + * @param asyncId a unique ID for the async resource + * @param type the type of the async resource + * @param triggerAsyncId the unique ID of the async resource in whose execution context this async resource was created + * @param resource reference to the resource representing the async operation, needs to be released during destroy + */ + init?(asyncId: number, type: string, triggerAsyncId: number, resource: object): void; + /** + * When an asynchronous operation is initiated or completes a callback is called to notify the user. + * The before callback is called just before said callback is executed. + * @param asyncId the unique identifier assigned to the resource about to execute the callback. + */ + before?(asyncId: number): void; + /** + * Called immediately after the callback specified in before is completed. + * @param asyncId the unique identifier assigned to the resource which has executed the callback. + */ + after?(asyncId: number): void; + /** + * Called when a promise has resolve() called. This may not be in the same execution id + * as the promise itself. + * @param asyncId the unique id for the promise that was resolve()d. + */ + promiseResolve?(asyncId: number): void; + /** + * Called after the resource corresponding to asyncId is destroyed + * @param asyncId a unique ID for the async resource + */ + destroy?(asyncId: number): void; + } + interface AsyncHook { + /** + * Enable the callbacks for a given AsyncHook instance. If no callbacks are provided enabling is a noop. + */ + enable(): this; + /** + * Disable the callbacks for a given AsyncHook instance from the global pool of AsyncHook callbacks to be executed. Once a hook has been disabled it will not be called again until enabled. + */ + disable(): this; + } + /** + * Registers functions to be called for different lifetime events of each async + * operation. + * + * The callbacks `init()`/`before()`/`after()`/`destroy()` are called for the + * respective asynchronous event during a resource's lifetime. + * + * All callbacks are optional. For example, if only resource cleanup needs to + * be tracked, then only the `destroy` callback needs to be passed. The + * specifics of all functions that can be passed to `callbacks` is in the `Hook Callbacks` section. 
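+ *
+ * The returned `AsyncHook` instance does not fire callbacks until `enable()` is
+ * called, and it can be turned off again with `disable()`. A minimal sketch
+ * (illustrative only, not taken from the Node.js documentation; `fs.writeSync()`
+ * is used because `console.log()` would itself create async resources and
+ * re-enter the hook):
+ *
+ * ```js
+ * import { createHook } from 'node:async_hooks';
+ * import { writeSync } from 'node:fs';
+ *
+ * // Log every resource initialization to stdout while the hook is enabled.
+ * const hook = createHook({
+ *   init(asyncId, type) {
+ *     writeSync(1, `init ${type} (${asyncId})\n`);
+ *   },
+ * }).enable();
+ *
+ * setTimeout(() => {
+ *   // Stop receiving callbacks once tracing is no longer needed.
+ *   hook.disable();
+ * }, 100);
+ * ```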
+ * + * ```js + * import { createHook } from 'node:async_hooks'; + * + * const asyncHook = createHook({ + * init(asyncId, type, triggerAsyncId, resource) { }, + * destroy(asyncId) { }, + * }); + * ``` + * + * The callbacks will be inherited via the prototype chain: + * + * ```js + * class MyAsyncCallbacks { + * init(asyncId, type, triggerAsyncId, resource) { } + * destroy(asyncId) {} + * } + * + * class MyAddedCallbacks extends MyAsyncCallbacks { + * before(asyncId) { } + * after(asyncId) { } + * } + * + * const asyncHook = async_hooks.createHook(new MyAddedCallbacks()); + * ``` + * + * Because promises are asynchronous resources whose lifecycle is tracked + * via the async hooks mechanism, the `init()`, `before()`, `after()`, and`destroy()` callbacks _must not_ be async functions that return promises. + * @since v8.1.0 + * @param callbacks The `Hook Callbacks` to register + * @return Instance used for disabling and enabling hooks + */ + function createHook(callbacks: HookCallbacks): AsyncHook; + interface AsyncResourceOptions { + /** + * The ID of the execution context that created this async event. + * @default executionAsyncId() + */ + triggerAsyncId?: number | undefined; + /** + * Disables automatic `emitDestroy` when the object is garbage collected. + * This usually does not need to be set (even if `emitDestroy` is called + * manually), unless the resource's `asyncId` is retrieved and the + * sensitive API's `emitDestroy` is called with it. + * @default false + */ + requireManualDestroy?: boolean | undefined; + } + /** + * The class `AsyncResource` is designed to be extended by the embedder's async + * resources. Using this, users can easily trigger the lifetime events of their + * own resources. + * + * The `init` hook will trigger when an `AsyncResource` is instantiated. + * + * The following is an overview of the `AsyncResource` API. + * + * ```js + * import { AsyncResource, executionAsyncId } from 'node:async_hooks'; + * + * // AsyncResource() is meant to be extended. Instantiating a + * // new AsyncResource() also triggers init. If triggerAsyncId is omitted then + * // async_hook.executionAsyncId() is used. + * const asyncResource = new AsyncResource( + * type, { triggerAsyncId: executionAsyncId(), requireManualDestroy: false }, + * ); + * + * // Run a function in the execution context of the resource. This will + * // * establish the context of the resource + * // * trigger the AsyncHooks before callbacks + * // * call the provided function `fn` with the supplied arguments + * // * trigger the AsyncHooks after callbacks + * // * restore the original execution context + * asyncResource.runInAsyncScope(fn, thisArg, ...args); + * + * // Call AsyncHooks destroy callbacks. + * asyncResource.emitDestroy(); + * + * // Return the unique ID assigned to the AsyncResource instance. + * asyncResource.asyncId(); + * + * // Return the trigger ID for the AsyncResource instance. + * asyncResource.triggerAsyncId(); + * ``` + */ + class AsyncResource { + /** + * AsyncResource() is meant to be extended. Instantiating a + * new AsyncResource() also triggers init. If triggerAsyncId is omitted then + * async_hook.executionAsyncId() is used. + * @param type The type of async event. 
+ * @param triggerAsyncId The ID of the execution context that created + * this async event (default: `executionAsyncId()`), or an + * AsyncResourceOptions object (since v9.3.0) + */ + constructor(type: string, triggerAsyncId?: number | AsyncResourceOptions); + /** + * Binds the given function to the current execution context. + * @since v14.8.0, v12.19.0 + * @param fn The function to bind to the current execution context. + * @param type An optional name to associate with the underlying `AsyncResource`. + */ + static bind any, ThisArg>( + fn: Func, + type?: string, + thisArg?: ThisArg, + ): Func; + /** + * Binds the given function to execute to this `AsyncResource`'s scope. + * @since v14.8.0, v12.19.0 + * @param fn The function to bind to the current `AsyncResource`. + */ + bind any>(fn: Func): Func; + /** + * Call the provided function with the provided arguments in the execution context + * of the async resource. This will establish the context, trigger the AsyncHooks + * before callbacks, call the function, trigger the AsyncHooks after callbacks, and + * then restore the original execution context. + * @since v9.6.0 + * @param fn The function to call in the execution context of this async resource. + * @param thisArg The receiver to be used for the function call. + * @param args Optional arguments to pass to the function. + */ + runInAsyncScope( + fn: (this: This, ...args: any[]) => Result, + thisArg?: This, + ...args: any[] + ): Result; + /** + * Call all `destroy` hooks. This should only ever be called once. An error will + * be thrown if it is called more than once. This **must** be manually called. If + * the resource is left to be collected by the GC then the `destroy` hooks will + * never be called. + * @return A reference to `asyncResource`. + */ + emitDestroy(): this; + /** + * @return The unique `asyncId` assigned to the resource. + */ + asyncId(): number; + /** + * @return The same `triggerAsyncId` that is passed to the `AsyncResource` constructor. + */ + triggerAsyncId(): number; + } + /** + * This class creates stores that stay coherent through asynchronous operations. + * + * While you can create your own implementation on top of the `node:async_hooks`module, `AsyncLocalStorage` should be preferred as it is a performant and memory + * safe implementation that involves significant optimizations that are non-obvious + * to implement. + * + * The following example uses `AsyncLocalStorage` to build a simple logger + * that assigns IDs to incoming HTTP requests and includes them in messages + * logged within each request. + * + * ```js + * import http from 'node:http'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const asyncLocalStorage = new AsyncLocalStorage(); + * + * function logWithId(msg) { + * const id = asyncLocalStorage.getStore(); + * console.log(`${id !== undefined ? id : '-'}:`, msg); + * } + * + * let idSeq = 0; + * http.createServer((req, res) => { + * asyncLocalStorage.run(idSeq++, () => { + * logWithId('start'); + * // Imagine any chain of async operations here + * setImmediate(() => { + * logWithId('finish'); + * res.end(); + * }); + * }); + * }).listen(8080); + * + * http.get('http://localhost:8080'); + * http.get('http://localhost:8080'); + * // Prints: + * // 0: start + * // 1: start + * // 0: finish + * // 1: finish + * ``` + * + * Each instance of `AsyncLocalStorage` maintains an independent storage context. + * Multiple instances can safely exist simultaneously without risk of interfering + * with each other's data. 
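+ *
+ * A smaller sketch outside of an HTTP server (illustrative only; the store is a
+ * plain object chosen for the example):
+ *
+ * ```js
+ * import { AsyncLocalStorage } from 'node:async_hooks';
+ *
+ * const als = new AsyncLocalStorage();
+ *
+ * als.getStore(); // Returns undefined outside of any run()
+ *
+ * als.run({ requestId: 'abc-123' }, () => {
+ *   als.getStore(); // Returns { requestId: 'abc-123' }
+ *   queueMicrotask(() => {
+ *     als.getStore(); // Still returns { requestId: 'abc-123' }
+ *   });
+ * });
+ * ```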
+ * @since v13.10.0, v12.17.0
+ */
+ class AsyncLocalStorage<T> {
+ /**
+ * Binds the given function to the current execution context.
+ * @since v19.8.0
+ * @experimental
+ * @param fn The function to bind to the current execution context.
+ * @return A new function that calls `fn` within the captured execution context.
+ */
+ static bind<Func extends (...args: any[]) => any>(fn: Func): Func;
+ /**
+ * Captures the current execution context and returns a function that accepts a
+ * function as an argument. Whenever the returned function is called, it
+ * calls the function passed to it within the captured context.
+ *
+ * ```js
+ * const asyncLocalStorage = new AsyncLocalStorage();
+ * const runInAsyncScope = asyncLocalStorage.run(123, () => AsyncLocalStorage.snapshot());
+ * const result = asyncLocalStorage.run(321, () => runInAsyncScope(() => asyncLocalStorage.getStore()));
+ * console.log(result); // returns 123
+ * ```
+ *
+ * AsyncLocalStorage.snapshot() can replace the use of AsyncResource for simple
+ * async context tracking purposes, for example:
+ *
+ * ```js
+ * class Foo {
+ * #runInAsyncScope = AsyncLocalStorage.snapshot();
+ *
+ * get() { return this.#runInAsyncScope(() => asyncLocalStorage.getStore()); }
+ * }
+ *
+ * const foo = asyncLocalStorage.run(123, () => new Foo());
+ * console.log(asyncLocalStorage.run(321, () => foo.get())); // returns 123
+ * ```
+ * @since v19.8.0
+ * @experimental
+ * @return A new function with the signature `(fn: (...args) : R, ...args) : R`.
+ */
+ static snapshot(): <R, TArgs extends any[]>(fn: (...args: TArgs) => R, ...args: TArgs) => R;
+ /**
+ * Disables the instance of `AsyncLocalStorage`. All subsequent calls
+ * to `asyncLocalStorage.getStore()` will return `undefined` until `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()` is called again.
+ *
+ * When calling `asyncLocalStorage.disable()`, all current contexts linked to the
+ * instance will be exited.
+ *
+ * Calling `asyncLocalStorage.disable()` is required before the `asyncLocalStorage` can be garbage collected. This does not apply to stores
+ * provided by the `asyncLocalStorage`, as those objects are garbage collected
+ * along with the corresponding async resources.
+ *
+ * Use this method when the `asyncLocalStorage` is not in use anymore
+ * in the current process.
+ * @since v13.10.0, v12.17.0
+ * @experimental
+ */
+ disable(): void;
+ /**
+ * Returns the current store.
+ * If called outside of an asynchronous context initialized by
+ * calling `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()`, it
+ * returns `undefined`.
+ * @since v13.10.0, v12.17.0
+ */
+ getStore(): T | undefined;
+ /**
+ * Runs a function synchronously within a context and returns its
+ * return value. The store is not accessible outside of the callback function.
+ * The store is accessible to any asynchronous operations created within the
+ * callback.
+ *
+ * The optional `args` are passed to the callback function.
+ *
+ * If the callback function throws an error, the error is thrown by `run()` too.
+ * The stacktrace is not impacted by this call and the context is exited.
+ * + * Example: + * + * ```js + * const store = { id: 2 }; + * try { + * asyncLocalStorage.run(store, () => { + * asyncLocalStorage.getStore(); // Returns the store object + * setTimeout(() => { + * asyncLocalStorage.getStore(); // Returns the store object + * }, 200); + * throw new Error(); + * }); + * } catch (e) { + * asyncLocalStorage.getStore(); // Returns undefined + * // The error will be caught here + * } + * ``` + * @since v13.10.0, v12.17.0 + */ + run(store: T, callback: () => R): R; + run(store: T, callback: (...args: TArgs) => R, ...args: TArgs): R; + /** + * Runs a function synchronously outside of a context and returns its + * return value. The store is not accessible within the callback function or + * the asynchronous operations created within the callback. Any `getStore()`call done within the callback function will always return `undefined`. + * + * The optional `args` are passed to the callback function. + * + * If the callback function throws an error, the error is thrown by `exit()` too. + * The stacktrace is not impacted by this call and the context is re-entered. + * + * Example: + * + * ```js + * // Within a call to run + * try { + * asyncLocalStorage.getStore(); // Returns the store object or value + * asyncLocalStorage.exit(() => { + * asyncLocalStorage.getStore(); // Returns undefined + * throw new Error(); + * }); + * } catch (e) { + * asyncLocalStorage.getStore(); // Returns the same object or value + * // The error will be caught here + * } + * ``` + * @since v13.10.0, v12.17.0 + * @experimental + */ + exit(callback: (...args: TArgs) => R, ...args: TArgs): R; + /** + * Transitions into the context for the remainder of the current + * synchronous execution and then persists the store through any following + * asynchronous calls. + * + * Example: + * + * ```js + * const store = { id: 1 }; + * // Replaces previous store with the given store object + * asyncLocalStorage.enterWith(store); + * asyncLocalStorage.getStore(); // Returns the store object + * someAsyncOperation(() => { + * asyncLocalStorage.getStore(); // Returns the same object + * }); + * ``` + * + * This transition will continue for the _entire_ synchronous execution. + * This means that if, for example, the context is entered within an event + * handler subsequent event handlers will also run within that context unless + * specifically bound to another context with an `AsyncResource`. That is why`run()` should be preferred over `enterWith()` unless there are strong reasons + * to use the latter method. + * + * ```js + * const store = { id: 1 }; + * + * emitter.on('my-event', () => { + * asyncLocalStorage.enterWith(store); + * }); + * emitter.on('my-event', () => { + * asyncLocalStorage.getStore(); // Returns the same object + * }); + * + * asyncLocalStorage.getStore(); // Returns undefined + * emitter.emit('my-event'); + * asyncLocalStorage.getStore(); // Returns the same object + * ``` + * @since v13.11.0, v12.17.0 + * @experimental + */ + enterWith(store: T): void; + } +} +declare module "node:async_hooks" { + export * from "async_hooks"; +} diff --git a/dist/node_modules/@types/node/buffer.d.ts b/dist/node_modules/@types/node/buffer.d.ts new file mode 100644 index 0000000..255e268 --- /dev/null +++ b/dist/node_modules/@types/node/buffer.d.ts @@ -0,0 +1,2363 @@ +/** + * `Buffer` objects are used to represent a fixed-length sequence of bytes. Many + * Node.js APIs support `Buffer`s. 
+ * + * The `Buffer` class is a subclass of JavaScript's [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) class and + * extends it with methods that cover additional use cases. Node.js APIs accept + * plain [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) s wherever `Buffer`s are supported as well. + * + * While the `Buffer` class is available within the global scope, it is still + * recommended to explicitly reference it via an import or require statement. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Creates a zero-filled Buffer of length 10. + * const buf1 = Buffer.alloc(10); + * + * // Creates a Buffer of length 10, + * // filled with bytes which all have the value `1`. + * const buf2 = Buffer.alloc(10, 1); + * + * // Creates an uninitialized buffer of length 10. + * // This is faster than calling Buffer.alloc() but the returned + * // Buffer instance might contain old data that needs to be + * // overwritten using fill(), write(), or other functions that fill the Buffer's + * // contents. + * const buf3 = Buffer.allocUnsafe(10); + * + * // Creates a Buffer containing the bytes [1, 2, 3]. + * const buf4 = Buffer.from([1, 2, 3]); + * + * // Creates a Buffer containing the bytes [1, 1, 1, 1] – the entries + * // are all truncated using `(value & 255)` to fit into the range 0–255. + * const buf5 = Buffer.from([257, 257.5, -255, '1']); + * + * // Creates a Buffer containing the UTF-8-encoded bytes for the string 'tést': + * // [0x74, 0xc3, 0xa9, 0x73, 0x74] (in hexadecimal notation) + * // [116, 195, 169, 115, 116] (in decimal notation) + * const buf6 = Buffer.from('tést'); + * + * // Creates a Buffer containing the Latin-1 bytes [0x74, 0xe9, 0x73, 0x74]. + * const buf7 = Buffer.from('tést', 'latin1'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/buffer.js) + */ +declare module "buffer" { + import { BinaryLike } from "node:crypto"; + import { ReadableStream as WebReadableStream } from "node:stream/web"; + /** + * This function returns `true` if `input` contains only valid UTF-8-encoded data, + * including the case in which `input` is empty. + * + * Throws if the `input` is a detached array buffer. + * @since v19.4.0, v18.14.0 + * @param input The input to validate. + */ + export function isUtf8(input: Buffer | ArrayBuffer | NodeJS.TypedArray): boolean; + /** + * This function returns `true` if `input` contains only valid ASCII-encoded data, + * including the case in which `input` is empty. + * + * Throws if the `input` is a detached array buffer. + * @since v19.6.0, v18.15.0 + * @param input The input to validate. + */ + export function isAscii(input: Buffer | ArrayBuffer | NodeJS.TypedArray): boolean; + export const INSPECT_MAX_BYTES: number; + export const kMaxLength: number; + export const kStringMaxLength: number; + export const constants: { + MAX_LENGTH: number; + MAX_STRING_LENGTH: number; + }; + export type TranscodeEncoding = + | "ascii" + | "utf8" + | "utf-8" + | "utf16le" + | "utf-16le" + | "ucs2" + | "ucs-2" + | "latin1" + | "binary"; + /** + * Re-encodes the given `Buffer` or `Uint8Array` instance from one character + * encoding to another. Returns a new `Buffer` instance. + * + * Throws if the `fromEnc` or `toEnc` specify invalid character encodings or if + * conversion from `fromEnc` to `toEnc` is not permitted. 
+ * + * Encodings supported by `buffer.transcode()` are: `'ascii'`, `'utf8'`,`'utf16le'`, `'ucs2'`, `'latin1'`, and `'binary'`. + * + * The transcoding process will use substitution characters if a given byte + * sequence cannot be adequately represented in the target encoding. For instance: + * + * ```js + * import { Buffer, transcode } from 'node:buffer'; + * + * const newBuf = transcode(Buffer.from('€'), 'utf8', 'ascii'); + * console.log(newBuf.toString('ascii')); + * // Prints: '?' + * ``` + * + * Because the Euro (`€`) sign is not representable in US-ASCII, it is replaced + * with `?` in the transcoded `Buffer`. + * @since v7.1.0 + * @param source A `Buffer` or `Uint8Array` instance. + * @param fromEnc The current encoding. + * @param toEnc To target encoding. + */ + export function transcode(source: Uint8Array, fromEnc: TranscodeEncoding, toEnc: TranscodeEncoding): Buffer; + export const SlowBuffer: { + /** @deprecated since v6.0.0, use `Buffer.allocUnsafeSlow()` */ + new(size: number): Buffer; + prototype: Buffer; + }; + /** + * Resolves a `'blob:nodedata:...'` an associated `Blob` object registered using + * a prior call to `URL.createObjectURL()`. + * @since v16.7.0 + * @experimental + * @param id A `'blob:nodedata:...` URL string returned by a prior call to `URL.createObjectURL()`. + */ + export function resolveObjectURL(id: string): Blob | undefined; + export { Buffer }; + /** + * @experimental + */ + export interface BlobOptions { + /** + * One of either `'transparent'` or `'native'`. When set to `'native'`, line endings in string source parts + * will be converted to the platform native line-ending as specified by `require('node:os').EOL`. + */ + endings?: "transparent" | "native"; + /** + * The Blob content-type. The intent is for `type` to convey + * the MIME media type of the data, however no validation of the type format + * is performed. + */ + type?: string | undefined; + } + /** + * A [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) encapsulates immutable, raw data that can be safely shared across + * multiple worker threads. + * @since v15.7.0, v14.18.0 + */ + export class Blob { + /** + * The total size of the `Blob` in bytes. + * @since v15.7.0, v14.18.0 + */ + readonly size: number; + /** + * The content-type of the `Blob`. + * @since v15.7.0, v14.18.0 + */ + readonly type: string; + /** + * Creates a new `Blob` object containing a concatenation of the given sources. + * + * {ArrayBuffer}, {TypedArray}, {DataView}, and {Buffer} sources are copied into + * the 'Blob' and can therefore be safely modified after the 'Blob' is created. + * + * String sources are also copied into the `Blob`. + */ + constructor(sources: Array, options?: BlobOptions); + /** + * Returns a promise that fulfills with an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) containing a copy of + * the `Blob` data. + * @since v15.7.0, v14.18.0 + */ + arrayBuffer(): Promise; + /** + * Creates and returns a new `Blob` containing a subset of this `Blob` objects + * data. The original `Blob` is not altered. + * @since v15.7.0, v14.18.0 + * @param start The starting index. + * @param end The ending index. + * @param type The content-type for the new `Blob` + */ + slice(start?: number, end?: number, type?: string): Blob; + /** + * Returns a promise that fulfills with the contents of the `Blob` decoded as a + * UTF-8 string. 
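+ *
+ * A minimal sketch (illustrative only; the blob contents are arbitrary):
+ *
+ * ```js
+ * import { Blob } from 'node:buffer';
+ *
+ * const blob = new Blob(['Hello, ', 'world!'], { type: 'text/plain' });
+ *
+ * console.log(await blob.text());
+ * // Prints: Hello, world!
+ * ```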
+ * @since v15.7.0, v14.18.0 + */ + text(): Promise; + /** + * Returns a new `ReadableStream` that allows the content of the `Blob` to be read. + * @since v16.7.0 + */ + stream(): WebReadableStream; + } + export interface FileOptions { + /** + * One of either `'transparent'` or `'native'`. When set to `'native'`, line endings in string source parts will be + * converted to the platform native line-ending as specified by `require('node:os').EOL`. + */ + endings?: "native" | "transparent"; + /** The File content-type. */ + type?: string; + /** The last modified date of the file. `Default`: Date.now(). */ + lastModified?: number; + } + /** + * A [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) provides information about files. + * @since v19.2.0, v18.13.0 + */ + export class File extends Blob { + constructor(sources: Array, fileName: string, options?: FileOptions); + /** + * The name of the `File`. + * @since v19.2.0, v18.13.0 + */ + readonly name: string; + /** + * The last modified date of the `File`. + * @since v19.2.0, v18.13.0 + */ + readonly lastModified: number; + } + export import atob = globalThis.atob; + export import btoa = globalThis.btoa; + import { Blob as NodeBlob } from "buffer"; + // This conditional type will be the existing global Blob in a browser, or + // the copy below in a Node environment. + type __Blob = typeof globalThis extends { onmessage: any; Blob: any } ? {} : NodeBlob; + global { + namespace NodeJS { + export { BufferEncoding }; + } + // Buffer class + type BufferEncoding = + | "ascii" + | "utf8" + | "utf-8" + | "utf16le" + | "utf-16le" + | "ucs2" + | "ucs-2" + | "base64" + | "base64url" + | "latin1" + | "binary" + | "hex"; + type WithImplicitCoercion = + | T + | { + valueOf(): T; + }; + /** + * Raw data is stored in instances of the Buffer class. + * A Buffer is similar to an array of integers but corresponds to a raw memory allocation outside the V8 heap. A Buffer cannot be resized. + * Valid string encodings: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'base64url'|'binary'(deprecated)|'hex' + */ + interface BufferConstructor { + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + * @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead. + */ + new(str: string, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`). + */ + new(size: number): Buffer; + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. + */ + new(array: Uint8Array): Buffer; + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}/{SharedArrayBuffer}. + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + * @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead. + */ + new(arrayBuffer: ArrayBuffer | SharedArrayBuffer): Buffer; + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. + */ + new(array: readonly any[]): Buffer; + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. 
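+ *
+ * A minimal sketch of the recommended replacement, `Buffer.from(buffer)`, which
+ * also copies the data rather than sharing it (illustrative only):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const original = Buffer.from('buffer');
+ * const copy = Buffer.from(original);
+ *
+ * copy[0] = 0x61;
+ * console.log(original.toString()); // Prints: buffer
+ * console.log(copy.toString());     // Prints: auffer
+ * ```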
+ *
+ * @param buffer The buffer to copy.
+ * @deprecated since v10.0.0 - Use `Buffer.from(buffer)` instead.
+ */
+ new(buffer: Buffer): Buffer;
+ /**
+ * Allocates a new `Buffer` using an `array` of bytes in the range `0` – `255`.
+ * Array entries outside that range will be truncated to fit into it.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'.
+ * const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]);
+ * ```
+ *
+ * If `array` is an `Array`\-like object (that is, one with a `length` property of
+ * type `number`), it is treated as if it is an array, unless it is a `Buffer` or
+ * a `Uint8Array`. This means all other `TypedArray` variants get treated as an `Array`. To create a `Buffer` from the bytes backing a `TypedArray`, use `Buffer.copyBytesFrom()`.
+ *
+ * A `TypeError` will be thrown if `array` is not an `Array` or another type
+ * appropriate for `Buffer.from()` variants.
+ *
+ * `Buffer.from(array)` and `Buffer.from(string)` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does.
+ * @since v5.10.0
+ */
+ from(
+ arrayBuffer: WithImplicitCoercion<ArrayBuffer | SharedArrayBuffer>,
+ byteOffset?: number,
+ length?: number,
+ ): Buffer;
+ /**
+ * Creates a new Buffer using the passed {data}
+ * @param data data to create a new Buffer
+ */
+ from(data: Uint8Array | readonly number[]): Buffer;
+ from(data: WithImplicitCoercion<Uint8Array | readonly number[] | string>): Buffer;
+ /**
+ * Creates a new Buffer containing the given JavaScript string {str}.
+ * If provided, the {encoding} parameter identifies the character encoding.
+ * If not provided, {encoding} defaults to 'utf8'.
+ */
+ from(
+ str:
+ | WithImplicitCoercion<string>
+ | {
+ [Symbol.toPrimitive](hint: "string"): string;
+ },
+ encoding?: BufferEncoding,
+ ): Buffer;
+ /**
+ * Creates a new Buffer using the passed {items}
+ * @param items Numbers to store in the new Buffer
+ */
+ of(...items: number[]): Buffer;
+ /**
+ * Returns `true` if `obj` is a `Buffer`, `false` otherwise.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * Buffer.isBuffer(Buffer.alloc(10)); // true
+ * Buffer.isBuffer(Buffer.from('foo')); // true
+ * Buffer.isBuffer('a string'); // false
+ * Buffer.isBuffer([]); // false
+ * Buffer.isBuffer(new Uint8Array(1024)); // false
+ * ```
+ * @since v0.1.101
+ */
+ isBuffer(obj: any): obj is Buffer;
+ /**
+ * Returns `true` if `encoding` is the name of a supported character encoding,
+ * or `false` otherwise.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * console.log(Buffer.isEncoding('utf8'));
+ * // Prints: true
+ *
+ * console.log(Buffer.isEncoding('hex'));
+ * // Prints: true
+ *
+ * console.log(Buffer.isEncoding('utf/8'));
+ * // Prints: false
+ *
+ * console.log(Buffer.isEncoding(''));
+ * // Prints: false
+ * ```
+ * @since v0.9.1
+ * @param encoding A character encoding name to check.
+ */
+ isEncoding(encoding: string): encoding is BufferEncoding;
+ /**
+ * Returns the byte length of a string when encoded using `encoding`.
+ * This is not the same as [`String.prototype.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), which does not account
+ * for the encoding that is used to convert the string into bytes.
+ *
+ * For `'base64'`, `'base64url'`, and `'hex'`, this function assumes valid input.
+ * For strings that contain non-base64/hex-encoded data (e.g. whitespace), the
+ * return value might be greater than the length of a `Buffer` created from the
+ * string.
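+ *
+ * For example (an illustrative sketch, not from the upstream docs), whitespace around a base64
+ * string inflates the reported length compared to the decoded `Buffer`:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const padded = '  aGVsbG8=  '; // base64 for 'hello', surrounded by spaces
+ *
+ * console.log(Buffer.byteLength(padded, 'base64'));
+ * console.log(Buffer.from(padded, 'base64').length);
+ * // The first value can be greater than the second, because byteLength()
+ * // assumes the input contains only valid base64 characters.
+ * ```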
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const str = '\u00bd + \u00bc = \u00be'; + * + * console.log(`${str}: ${str.length} characters, ` + + * `${Buffer.byteLength(str, 'utf8')} bytes`); + * // Prints: ½ + ¼ = ¾: 9 characters, 12 bytes + * ``` + * + * When `string` is a + * `Buffer`/[`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView)/[`TypedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/- + * Reference/Global_Objects/TypedArray)/[`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer)/[`SharedArrayBuffer`](https://develop- + * er.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer), the byte length as reported by `.byteLength`is returned. + * @since v0.1.90 + * @param string A value to calculate the length of. + * @param [encoding='utf8'] If `string` is a string, this is its encoding. + * @return The number of bytes contained within `string`. + */ + byteLength( + string: string | Buffer | NodeJS.ArrayBufferView | ArrayBuffer | SharedArrayBuffer, + encoding?: BufferEncoding, + ): number; + /** + * Returns a new `Buffer` which is the result of concatenating all the `Buffer`instances in the `list` together. + * + * If the list has no items, or if the `totalLength` is 0, then a new zero-length`Buffer` is returned. + * + * If `totalLength` is not provided, it is calculated from the `Buffer` instances + * in `list` by adding their lengths. + * + * If `totalLength` is provided, it is coerced to an unsigned integer. If the + * combined length of the `Buffer`s in `list` exceeds `totalLength`, the result is + * truncated to `totalLength`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a single `Buffer` from a list of three `Buffer` instances. + * + * const buf1 = Buffer.alloc(10); + * const buf2 = Buffer.alloc(14); + * const buf3 = Buffer.alloc(18); + * const totalLength = buf1.length + buf2.length + buf3.length; + * + * console.log(totalLength); + * // Prints: 42 + * + * const bufA = Buffer.concat([buf1, buf2, buf3], totalLength); + * + * console.log(bufA); + * // Prints: + * console.log(bufA.length); + * // Prints: 42 + * ``` + * + * `Buffer.concat()` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does. + * @since v0.7.11 + * @param list List of `Buffer` or {@link Uint8Array} instances to concatenate. + * @param totalLength Total length of the `Buffer` instances in `list` when concatenated. + */ + concat(list: readonly Uint8Array[], totalLength?: number): Buffer; + /** + * Copies the underlying memory of `view` into a new `Buffer`. + * + * ```js + * const u16 = new Uint16Array([0, 0xffff]); + * const buf = Buffer.copyBytesFrom(u16, 1, 1); + * u16[1] = 0; + * console.log(buf.length); // 2 + * console.log(buf[0]); // 255 + * console.log(buf[1]); // 255 + * ``` + * @since v19.8.0 + * @param view The {TypedArray} to copy. + * @param [offset=0] The starting offset within `view`. + * @param [length=view.length - offset] The number of elements from `view` to copy. + */ + copyBytesFrom(view: NodeJS.TypedArray, offset?: number, length?: number): Buffer; + /** + * Compares `buf1` to `buf2`, typically for the purpose of sorting arrays of`Buffer` instances. This is equivalent to calling `buf1.compare(buf2)`. 
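+ *
+ * As a small additional sketch (not part of the upstream docs), the static form and the
+ * instance form agree on the result:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const a = Buffer.from('abc');
+ * const b = Buffer.from('abd');
+ *
+ * console.log(Buffer.compare(a, b));
+ * // Prints: -1 ('abc' sorts before 'abd')
+ * console.log(Buffer.compare(a, b) === a.compare(b));
+ * // Prints: true
+ * ```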
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('1234'); + * const buf2 = Buffer.from('0123'); + * const arr = [buf1, buf2]; + * + * console.log(arr.sort(Buffer.compare)); + * // Prints: [ , ] + * // (This result is equal to: [buf2, buf1].) + * ``` + * @since v0.11.13 + * @return Either `-1`, `0`, or `1`, depending on the result of the comparison. See `compare` for details. + */ + compare(buf1: Uint8Array, buf2: Uint8Array): -1 | 0 | 1; + /** + * Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the`Buffer` will be zero-filled. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(5); + * + * console.log(buf); + * // Prints: + * ``` + * + * If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. + * + * If `fill` is specified, the allocated `Buffer` will be initialized by calling `buf.fill(fill)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(5, 'a'); + * + * console.log(buf); + * // Prints: + * ``` + * + * If both `fill` and `encoding` are specified, the allocated `Buffer` will be + * initialized by calling `buf.fill(fill, encoding)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); + * + * console.log(buf); + * // Prints: + * ``` + * + * Calling `Buffer.alloc()` can be measurably slower than the alternative `Buffer.allocUnsafe()` but ensures that the newly created `Buffer` instance + * contents will never contain sensitive data from previous allocations, including + * data that might not have been allocated for `Buffer`s. + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + * @param [fill=0] A value to pre-fill the new `Buffer` with. + * @param [encoding='utf8'] If `fill` is a string, this is its encoding. + */ + alloc(size: number, fill?: string | Uint8Array | number, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. + * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `Buffer.alloc()` instead to initialize`Buffer` instances with zeroes. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(10); + * + * console.log(buf); + * // Prints (contents may vary): + * + * buf.fill(0); + * + * console.log(buf); + * // Prints: + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * + * The `Buffer` module pre-allocates an internal `Buffer` instance of + * size `Buffer.poolSize` that is used as a pool for the fast allocation of new `Buffer` instances created using `Buffer.allocUnsafe()`, `Buffer.from(array)`, + * and `Buffer.concat()` only when `size` is less than `Buffer.poolSize >>> 1` (floor of `Buffer.poolSize` divided by two). + * + * Use of this pre-allocated internal memory pool is a key difference between + * calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. + * Specifically, `Buffer.alloc(size, fill)` will _never_ use the internal `Buffer`pool, while `Buffer.allocUnsafe(size).fill(fill)`_will_ use the internal`Buffer` pool if `size` is less + * than or equal to half `Buffer.poolSize`. 
The + * difference is subtle but can be important when an application requires the + * additional performance that `Buffer.allocUnsafe()` provides. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + */ + allocUnsafe(size: number): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. A zero-length `Buffer` is created if + * `size` is 0. + * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `buf.fill(0)` to initialize + * such `Buffer` instances with zeroes. + * + * When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, + * allocations under 4 KiB are sliced from a single pre-allocated `Buffer`. This + * allows applications to avoid the garbage collection overhead of creating many + * individually allocated `Buffer` instances. This approach improves both + * performance and memory usage by eliminating the need to track and clean up as + * many individual `ArrayBuffer` objects. + * + * However, in the case where a developer may need to retain a small chunk of + * memory from a pool for an indeterminate amount of time, it may be appropriate + * to create an un-pooled `Buffer` instance using `Buffer.allocUnsafeSlow()` and + * then copying out the relevant bits. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Need to keep around a few small chunks of memory. + * const store = []; + * + * socket.on('readable', () => { + * let data; + * while (null !== (data = readable.read())) { + * // Allocate for retained data. + * const sb = Buffer.allocUnsafeSlow(10); + * + * // Copy the data into the new allocation. + * data.copy(sb, 0, 0, 10); + * + * store.push(sb); + * } + * }); + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.12.0 + * @param size The desired length of the new `Buffer`. + */ + allocUnsafeSlow(size: number): Buffer; + /** + * This is the size (in bytes) of pre-allocated internal `Buffer` instances used + * for pooling. This value may be modified. + * @since v0.11.3 + */ + poolSize: number; + } + interface Buffer extends Uint8Array { + /** + * Writes `string` to `buf` at `offset` according to the character encoding in`encoding`. The `length` parameter is the number of bytes to write. If `buf` did + * not contain enough space to fit the entire string, only part of `string` will be + * written. However, partially encoded characters will not be written. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(256); + * + * const len = buf.write('\u00bd + \u00bc = \u00be', 0); + * + * console.log(`${len} bytes: ${buf.toString('utf8', 0, len)}`); + * // Prints: 12 bytes: ½ + ¼ = ¾ + * + * const buffer = Buffer.alloc(10); + * + * const length = buffer.write('abcd', 8); + * + * console.log(`${length} bytes: ${buffer.toString('utf8', 8, 10)}`); + * // Prints: 2 bytes : ab + * ``` + * @since v0.1.90 + * @param string String to write to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write `string`. + * @param [length=buf.length - offset] Maximum number of bytes to write (written bytes will not exceed `buf.length - offset`). + * @param [encoding='utf8'] The character encoding of `string`. + * @return Number of bytes written. 
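+ *
+ * An extra sketch (not from the upstream docs) of the `length` argument capping the write:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.alloc(4);
+ *
+ * // Ask for 'abcdef' but allow at most 3 bytes to be written.
+ * const written = buf.write('abcdef', 0, 3);
+ *
+ * console.log(written);
+ * // Prints: 3
+ * console.log(buf.toString('utf8', 0, written));
+ * // Prints: abc
+ * ```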
+ */ + write(string: string, encoding?: BufferEncoding): number; + write(string: string, offset: number, encoding?: BufferEncoding): number; + write(string: string, offset: number, length: number, encoding?: BufferEncoding): number; + /** + * Decodes `buf` to a string according to the specified character encoding in`encoding`. `start` and `end` may be passed to decode only a subset of `buf`. + * + * If `encoding` is `'utf8'` and a byte sequence in the input is not valid UTF-8, + * then each invalid byte is replaced with the replacement character `U+FFFD`. + * + * The maximum length of a string instance (in UTF-16 code units) is available + * as {@link constants.MAX_STRING_LENGTH}. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * console.log(buf1.toString('utf8')); + * // Prints: abcdefghijklmnopqrstuvwxyz + * console.log(buf1.toString('utf8', 0, 5)); + * // Prints: abcde + * + * const buf2 = Buffer.from('tést'); + * + * console.log(buf2.toString('hex')); + * // Prints: 74c3a97374 + * console.log(buf2.toString('utf8', 0, 3)); + * // Prints: té + * console.log(buf2.toString(undefined, 0, 3)); + * // Prints: té + * ``` + * @since v0.1.90 + * @param [encoding='utf8'] The character encoding to use. + * @param [start=0] The byte offset to start decoding at. + * @param [end=buf.length] The byte offset to stop decoding at (not inclusive). + */ + toString(encoding?: BufferEncoding, start?: number, end?: number): string; + /** + * Returns a JSON representation of `buf`. [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) implicitly calls + * this function when stringifying a `Buffer` instance. + * + * `Buffer.from()` accepts objects in the format returned from this method. + * In particular, `Buffer.from(buf.toJSON())` works like `Buffer.from(buf)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]); + * const json = JSON.stringify(buf); + * + * console.log(json); + * // Prints: {"type":"Buffer","data":[1,2,3,4,5]} + * + * const copy = JSON.parse(json, (key, value) => { + * return value && value.type === 'Buffer' ? + * Buffer.from(value) : + * value; + * }); + * + * console.log(copy); + * // Prints: + * ``` + * @since v0.9.2 + */ + toJSON(): { + type: "Buffer"; + data: number[]; + }; + /** + * Returns `true` if both `buf` and `otherBuffer` have exactly the same bytes,`false` otherwise. Equivalent to `buf.compare(otherBuffer) === 0`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('ABC'); + * const buf2 = Buffer.from('414243', 'hex'); + * const buf3 = Buffer.from('ABCD'); + * + * console.log(buf1.equals(buf2)); + * // Prints: true + * console.log(buf1.equals(buf3)); + * // Prints: false + * ``` + * @since v0.11.13 + * @param otherBuffer A `Buffer` or {@link Uint8Array} with which to compare `buf`. + */ + equals(otherBuffer: Uint8Array): boolean; + /** + * Compares `buf` with `target` and returns a number indicating whether `buf`comes before, after, or is the same as `target` in sort order. + * Comparison is based on the actual sequence of bytes in each `Buffer`. + * + * * `0` is returned if `target` is the same as `buf` + * * `1` is returned if `target` should come _before_`buf` when sorted. + * * `-1` is returned if `target` should come _after_`buf` when sorted. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('ABC'); + * const buf2 = Buffer.from('BCD'); + * const buf3 = Buffer.from('ABCD'); + * + * console.log(buf1.compare(buf1)); + * // Prints: 0 + * console.log(buf1.compare(buf2)); + * // Prints: -1 + * console.log(buf1.compare(buf3)); + * // Prints: -1 + * console.log(buf2.compare(buf1)); + * // Prints: 1 + * console.log(buf2.compare(buf3)); + * // Prints: 1 + * console.log([buf1, buf2, buf3].sort(Buffer.compare)); + * // Prints: [ , , ] + * // (This result is equal to: [buf1, buf3, buf2].) + * ``` + * + * The optional `targetStart`, `targetEnd`, `sourceStart`, and `sourceEnd`arguments can be used to limit the comparison to specific ranges within `target`and `buf` respectively. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8, 9]); + * const buf2 = Buffer.from([5, 6, 7, 8, 9, 1, 2, 3, 4]); + * + * console.log(buf1.compare(buf2, 5, 9, 0, 4)); + * // Prints: 0 + * console.log(buf1.compare(buf2, 0, 6, 4)); + * // Prints: -1 + * console.log(buf1.compare(buf2, 5, 6, 5)); + * // Prints: 1 + * ``` + * + * `ERR_OUT_OF_RANGE` is thrown if `targetStart < 0`, `sourceStart < 0`,`targetEnd > target.byteLength`, or `sourceEnd > source.byteLength`. + * @since v0.11.13 + * @param target A `Buffer` or {@link Uint8Array} with which to compare `buf`. + * @param [targetStart=0] The offset within `target` at which to begin comparison. + * @param [targetEnd=target.length] The offset within `target` at which to end comparison (not inclusive). + * @param [sourceStart=0] The offset within `buf` at which to begin comparison. + * @param [sourceEnd=buf.length] The offset within `buf` at which to end comparison (not inclusive). + */ + compare( + target: Uint8Array, + targetStart?: number, + targetEnd?: number, + sourceStart?: number, + sourceEnd?: number, + ): -1 | 0 | 1; + /** + * Copies data from a region of `buf` to a region in `target`, even if the `target`memory region overlaps with `buf`. + * + * [`TypedArray.prototype.set()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/set) performs the same operation, and is available + * for all TypedArrays, including Node.js `Buffer`s, although it takes + * different function arguments. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create two `Buffer` instances. + * const buf1 = Buffer.allocUnsafe(26); + * const buf2 = Buffer.allocUnsafe(26).fill('!'); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * // Copy `buf1` bytes 16 through 19 into `buf2` starting at byte 8 of `buf2`. + * buf1.copy(buf2, 8, 16, 20); + * // This is equivalent to: + * // buf2.set(buf1.subarray(16, 20), 8); + * + * console.log(buf2.toString('ascii', 0, 25)); + * // Prints: !!!!!!!!qrst!!!!!!!!!!!!! + * ``` + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a `Buffer` and copy data from one region to an overlapping region + * // within the same `Buffer`. + * + * const buf = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf[i] = i + 97; + * } + * + * buf.copy(buf, 0, 4, 10); + * + * console.log(buf.toString()); + * // Prints: efghijghijklmnopqrstuvwxyz + * ``` + * @since v0.1.90 + * @param target A `Buffer` or {@link Uint8Array} to copy into. + * @param [targetStart=0] The offset within `target` at which to begin writing. 
+ * @param [sourceStart=0] The offset within `buf` from which to begin copying. + * @param [sourceEnd=buf.length] The offset within `buf` at which to stop copying (not inclusive). + * @return The number of bytes copied. + */ + copy(target: Uint8Array, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * This method is not compatible with the `Uint8Array.prototype.slice()`, + * which is a superclass of `Buffer`. To copy the slice, use`Uint8Array.prototype.slice()`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * const copiedBuf = Uint8Array.prototype.slice.call(buf); + * copiedBuf[0]++; + * console.log(copiedBuf.toString()); + * // Prints: cuffer + * + * console.log(buf.toString()); + * // Prints: buffer + * + * // With buf.slice(), the original buffer is modified. + * const notReallyCopiedBuf = buf.slice(); + * notReallyCopiedBuf[0]++; + * console.log(notReallyCopiedBuf.toString()); + * // Prints: cuffer + * console.log(buf.toString()); + * // Also prints: cuffer (!) + * ``` + * @since v0.3.0 + * @deprecated Use `subarray` instead. + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). + */ + slice(start?: number, end?: number): Buffer; + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * Specifying `end` greater than `buf.length` will return the same result as + * that of `end` equal to `buf.length`. + * + * This method is inherited from [`TypedArray.prototype.subarray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray). + * + * Modifying the new `Buffer` slice will modify the memory in the original `Buffer`because the allocated memory of the two objects overlap. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a `Buffer` with the ASCII alphabet, take a slice, and modify one byte + * // from the original `Buffer`. + * + * const buf1 = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * const buf2 = buf1.subarray(0, 3); + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: abc + * + * buf1[0] = 33; + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: !bc + * ``` + * + * Specifying negative indexes causes the slice to be generated relative to the + * end of `buf` rather than the beginning. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * console.log(buf.subarray(-6, -1).toString()); + * // Prints: buffe + * // (Equivalent to buf.subarray(0, 5).) + * + * console.log(buf.subarray(-6, -2).toString()); + * // Prints: buff + * // (Equivalent to buf.subarray(0, 4).) + * + * console.log(buf.subarray(-5, -2).toString()); + * // Prints: uff + * // (Equivalent to buf.subarray(1, 4).) + * ``` + * @since v3.0.0 + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). + */ + subarray(start?: number, end?: number): Buffer; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. 
+ * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigInt64BE(0x0102030405060708n, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigInt64BE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigInt64LE(0x0102030405060708n, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigInt64LE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. + * + * This function is also available under the `writeBigUint64BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigUInt64BE(0xdecafafecacefaden, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigUInt64BE(value: bigint, offset?: number): number; + /** + * @alias Buffer.writeBigUInt64BE + * @since v14.10.0, v12.19.0 + */ + writeBigUint64BE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigUInt64LE(0xdecafafecacefaden, 0); + * + * console.log(buf); + * // Prints: + * ``` + * + * This function is also available under the `writeBigUint64LE` alias. + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigUInt64LE(value: bigint, offset?: number): number; + /** + * @alias Buffer.writeBigUInt64LE + * @since v14.10.0, v12.19.0 + */ + writeBigUint64LE(value: bigint, offset?: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than an unsigned integer. + * + * This function is also available under the `writeUintLE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeUIntLE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. 
Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeUIntLE(value: number, offset: number, byteLength: number): number; + /** + * @alias Buffer.writeUIntLE + * @since v14.9.0, v12.19.0 + */ + writeUintLE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than an unsigned integer. + * + * This function is also available under the `writeUintBE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeUIntBE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeUIntBE(value: number, offset: number, byteLength: number): number; + /** + * @alias Buffer.writeUIntBE + * @since v14.9.0, v12.19.0 + */ + writeUintBE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than a signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeIntLE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeIntLE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined when`value` is anything other than a + * signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeIntBE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeIntBE(value: number, offset: number, byteLength: number): number; + /** + * Reads an unsigned, big-endian 64-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readBigUint64BE` alias. 
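+ *
+ * A brief sketch of the alias (not part of the upstream docs): both names read the same value.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01]);
+ *
+ * console.log(buf.readBigUInt64BE(0) === buf.readBigUint64BE(0));
+ * // Prints: true (both read 1n)
+ * ```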
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + * + * console.log(buf.readBigUInt64BE(0)); + * // Prints: 4294967295n + * ``` + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigUInt64BE(offset?: number): bigint; + /** + * @alias Buffer.readBigUInt64BE + * @since v14.10.0, v12.19.0 + */ + readBigUint64BE(offset?: number): bigint; + /** + * Reads an unsigned, little-endian 64-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readBigUint64LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + * + * console.log(buf.readBigUInt64LE(0)); + * // Prints: 18446744069414584320n + * ``` + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigUInt64LE(offset?: number): bigint; + /** + * @alias Buffer.readBigUInt64LE + * @since v14.10.0, v12.19.0 + */ + readBigUint64LE(offset?: number): bigint; + /** + * Reads a signed, big-endian 64-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed + * values. + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigInt64BE(offset?: number): bigint; + /** + * Reads a signed, little-endian 64-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed + * values. + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigInt64LE(offset?: number): bigint; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as an unsigned, little-endian integer supporting + * up to 48 bits of accuracy. + * + * This function is also available under the `readUintLE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readUIntLE(0, 6).toString(16)); + * // Prints: ab9078563412 + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readUIntLE(offset: number, byteLength: number): number; + /** + * @alias Buffer.readUIntLE + * @since v14.9.0, v12.19.0 + */ + readUintLE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as an unsigned big-endian integer supporting + * up to 48 bits of accuracy. + * + * This function is also available under the `readUintBE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readUIntBE(0, 6).toString(16)); + * // Prints: 1234567890ab + * console.log(buf.readUIntBE(1, 6).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. 
Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readUIntBE(offset: number, byteLength: number): number; + /** + * @alias Buffer.readUIntBE + * @since v14.9.0, v12.19.0 + */ + readUintBE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as a little-endian, two's complement signed value + * supporting up to 48 bits of accuracy. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readIntLE(0, 6).toString(16)); + * // Prints: -546f87a9cbee + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readIntLE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as a big-endian, two's complement signed value + * supporting up to 48 bits of accuracy. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readIntBE(0, 6).toString(16)); + * // Prints: 1234567890ab + * console.log(buf.readIntBE(1, 6).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * console.log(buf.readIntBE(1, 0).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readIntBE(offset: number, byteLength: number): number; + /** + * Reads an unsigned 8-bit integer from `buf` at the specified `offset`. + * + * This function is also available under the `readUint8` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, -2]); + * + * console.log(buf.readUInt8(0)); + * // Prints: 1 + * console.log(buf.readUInt8(1)); + * // Prints: 254 + * console.log(buf.readUInt8(2)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`. + */ + readUInt8(offset?: number): number; + /** + * @alias Buffer.readUInt8 + * @since v14.9.0, v12.19.0 + */ + readUint8(offset?: number): number; + /** + * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint16LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56]); + * + * console.log(buf.readUInt16LE(0).toString(16)); + * // Prints: 3412 + * console.log(buf.readUInt16LE(1).toString(16)); + * // Prints: 5634 + * console.log(buf.readUInt16LE(2).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readUInt16LE(offset?: number): number; + /** + * @alias Buffer.readUInt16LE + * @since v14.9.0, v12.19.0 + */ + readUint16LE(offset?: number): number; + /** + * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified`offset`. 
+ * + * This function is also available under the `readUint16BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56]); + * + * console.log(buf.readUInt16BE(0).toString(16)); + * // Prints: 1234 + * console.log(buf.readUInt16BE(1).toString(16)); + * // Prints: 3456 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readUInt16BE(offset?: number): number; + /** + * @alias Buffer.readUInt16BE + * @since v14.9.0, v12.19.0 + */ + readUint16BE(offset?: number): number; + /** + * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint32LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + * + * console.log(buf.readUInt32LE(0).toString(16)); + * // Prints: 78563412 + * console.log(buf.readUInt32LE(1).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readUInt32LE(offset?: number): number; + /** + * @alias Buffer.readUInt32LE + * @since v14.9.0, v12.19.0 + */ + readUint32LE(offset?: number): number; + /** + * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint32BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + * + * console.log(buf.readUInt32BE(0).toString(16)); + * // Prints: 12345678 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readUInt32BE(offset?: number): number; + /** + * @alias Buffer.readUInt32BE + * @since v14.9.0, v12.19.0 + */ + readUint32BE(offset?: number): number; + /** + * Reads a signed 8-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([-1, 5]); + * + * console.log(buf.readInt8(0)); + * // Prints: -1 + * console.log(buf.readInt8(1)); + * // Prints: 5 + * console.log(buf.readInt8(2)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`. + */ + readInt8(offset?: number): number; + /** + * Reads a signed, little-endian 16-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 5]); + * + * console.log(buf.readInt16LE(0)); + * // Prints: 1280 + * console.log(buf.readInt16LE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readInt16LE(offset?: number): number; + /** + * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 5]); + * + * console.log(buf.readInt16BE(0)); + * // Prints: 5 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readInt16BE(offset?: number): number; + /** + * Reads a signed, little-endian 32-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 0, 0, 5]); + * + * console.log(buf.readInt32LE(0)); + * // Prints: 83886080 + * console.log(buf.readInt32LE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readInt32LE(offset?: number): number; + /** + * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 0, 0, 5]); + * + * console.log(buf.readInt32BE(0)); + * // Prints: 5 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readInt32BE(offset?: number): number; + /** + * Reads a 32-bit, little-endian float from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4]); + * + * console.log(buf.readFloatLE(0)); + * // Prints: 1.539989614439558e-36 + * console.log(buf.readFloatLE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readFloatLE(offset?: number): number; + /** + * Reads a 32-bit, big-endian float from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4]); + * + * console.log(buf.readFloatBE(0)); + * // Prints: 2.387939260590663e-38 + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readFloatBE(offset?: number): number; + /** + * Reads a 64-bit, little-endian double from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + * + * console.log(buf.readDoubleLE(0)); + * // Prints: 5.447603722011605e-270 + * console.log(buf.readDoubleLE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`. + */ + readDoubleLE(offset?: number): number; + /** + * Reads a 64-bit, big-endian double from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + * + * console.log(buf.readDoubleBE(0)); + * // Prints: 8.20788039913184e-304 + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`. 
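+ *
+ * An additional round-trip sketch (not part of the upstream docs): a double written
+ * big-endian reads back unchanged.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ *
+ * buf.writeDoubleBE(123.456, 0);
+ *
+ * console.log(buf.readDoubleBE(0));
+ * // Prints: 123.456
+ * ```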
+ */ + readDoubleBE(offset?: number): number; + reverse(): this; + /** + * Interprets `buf` as an array of unsigned 16-bit integers and swaps the + * byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 2. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap16(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap16(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * + * One convenient use of `buf.swap16()` is to perform a fast in-place conversion + * between UTF-16 little-endian and UTF-16 big-endian: + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('This is little-endian UTF-16', 'utf16le'); + * buf.swap16(); // Convert to big-endian UTF-16 text. + * ``` + * @since v5.10.0 + * @return A reference to `buf`. + */ + swap16(): Buffer; + /** + * Interprets `buf` as an array of unsigned 32-bit integers and swaps the + * byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 4. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap32(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap32(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * @since v5.10.0 + * @return A reference to `buf`. + */ + swap32(): Buffer; + /** + * Interprets `buf` as an array of 64-bit numbers and swaps byte order _in-place_. + * Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 8. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap64(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap64(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * @since v6.3.0 + * @return A reference to `buf`. + */ + swap64(): Buffer; + /** + * Writes `value` to `buf` at the specified `offset`. `value` must be a + * valid unsigned 8-bit integer. Behavior is undefined when `value` is anything + * other than an unsigned 8-bit integer. + * + * This function is also available under the `writeUint8` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt8(0x3, 0); + * buf.writeUInt8(0x4, 1); + * buf.writeUInt8(0x23, 2); + * buf.writeUInt8(0x42, 3); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`. + * @return `offset` plus the number of bytes written. + */ + writeUInt8(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt8 + * @since v14.9.0, v12.19.0 + */ + writeUint8(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid unsigned 16-bit integer. Behavior is undefined when `value` is + * anything other than an unsigned 16-bit integer. + * + * This function is also available under the `writeUint16LE` alias. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt16LE(0xdead, 0); + * buf.writeUInt16LE(0xbeef, 2); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeUInt16LE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt16LE + * @since v14.9.0, v12.19.0 + */ + writeUint16LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid unsigned 16-bit integer. Behavior is undefined when `value`is anything other than an + * unsigned 16-bit integer. + * + * This function is also available under the `writeUint16BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt16BE(0xdead, 0); + * buf.writeUInt16BE(0xbeef, 2); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeUInt16BE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt16BE + * @since v14.9.0, v12.19.0 + */ + writeUint16BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid unsigned 32-bit integer. Behavior is undefined when `value` is + * anything other than an unsigned 32-bit integer. + * + * This function is also available under the `writeUint32LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt32LE(0xfeedface, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeUInt32LE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt32LE + * @since v14.9.0, v12.19.0 + */ + writeUint32LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid unsigned 32-bit integer. Behavior is undefined when `value`is anything other than an + * unsigned 32-bit integer. + * + * This function is also available under the `writeUint32BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt32BE(0xfeedface, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeUInt32BE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt32BE + * @since v14.9.0, v12.19.0 + */ + writeUint32BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset`. `value` must be a valid + * signed 8-bit integer. 
Behavior is undefined when `value` is anything other than + * a signed 8-bit integer. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt8(2, 0); + * buf.writeInt8(-2, 1); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`. + * @return `offset` plus the number of bytes written. + */ + writeInt8(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid signed 16-bit integer. Behavior is undefined when `value` is + * anything other than a signed 16-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt16LE(0x0304, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeInt16LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid signed 16-bit integer. Behavior is undefined when `value` is + * anything other than a signed 16-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt16BE(0x0102, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeInt16BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid signed 32-bit integer. Behavior is undefined when `value` is + * anything other than a signed 32-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeInt32LE(0x05060708, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeInt32LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid signed 32-bit integer. Behavior is undefined when `value` is + * anything other than a signed 32-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeInt32BE(0x01020304, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeInt32BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. Behavior is + * undefined when `value` is anything other than a JavaScript number. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeFloatLE(0xcafebabe, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeFloatLE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. Behavior is + * undefined when `value` is anything other than a JavaScript number. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeFloatBE(0xcafebabe, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeFloatBE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a JavaScript number. Behavior is undefined when `value` is anything + * other than a JavaScript number. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeDoubleLE(123.456, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeDoubleLE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a JavaScript number. Behavior is undefined when `value` is anything + * other than a JavaScript number. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeDoubleBE(123.456, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeDoubleBE(value: number, offset?: number): number; + /** + * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, + * the entire `buf` will be filled: + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Fill a `Buffer` with the ASCII character 'h'. 
+ * + * const b = Buffer.allocUnsafe(50).fill('h'); + * + * console.log(b.toString()); + * // Prints: hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh + * + * // Fill a buffer with empty string + * const c = Buffer.allocUnsafe(5).fill(''); + * + * console.log(c.fill('')); + * // Prints: + * ``` + * + * `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or + * integer. If the resulting integer is greater than `255` (decimal), `buf` will be + * filled with `value & 255`. + * + * If the final write of a `fill()` operation falls on a multi-byte character, + * then only the bytes of that character that fit into `buf` are written: + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Fill a `Buffer` with character that takes up two bytes in UTF-8. + * + * console.log(Buffer.allocUnsafe(5).fill('\u0222')); + * // Prints: + * ``` + * + * If `value` contains invalid characters, it is truncated; if no valid + * fill data remains, an exception is thrown: + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(5); + * + * console.log(buf.fill('a')); + * // Prints: + * console.log(buf.fill('aazz', 'hex')); + * // Prints: + * console.log(buf.fill('zz', 'hex')); + * // Throws an exception. + * ``` + * @since v0.5.0 + * @param value The value with which to fill `buf`. Empty value (string, Uint8Array, Buffer) is coerced to `0`. + * @param [offset=0] Number of bytes to skip before starting to fill `buf`. + * @param [end=buf.length] Where to stop filling `buf` (not inclusive). + * @param [encoding='utf8'] The encoding for `value` if `value` is a string. + * @return A reference to `buf`. + */ + fill(value: string | Uint8Array | number, offset?: number, end?: number, encoding?: BufferEncoding): this; + /** + * If `value` is: + * + * * a string, `value` is interpreted according to the character encoding in`encoding`. + * * a `Buffer` or [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array), `value` will be used in its entirety. + * To compare a partial `Buffer`, use `buf.subarray`. + * * a number, `value` will be interpreted as an unsigned 8-bit integer + * value between `0` and `255`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('this is a buffer'); + * + * console.log(buf.indexOf('this')); + * // Prints: 0 + * console.log(buf.indexOf('is')); + * // Prints: 2 + * console.log(buf.indexOf(Buffer.from('a buffer'))); + * // Prints: 8 + * console.log(buf.indexOf(97)); + * // Prints: 8 (97 is the decimal ASCII value for 'a') + * console.log(buf.indexOf(Buffer.from('a buffer example'))); + * // Prints: -1 + * console.log(buf.indexOf(Buffer.from('a buffer example').slice(0, 8))); + * // Prints: 8 + * + * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + * + * console.log(utf16Buffer.indexOf('\u03a3', 0, 'utf16le')); + * // Prints: 4 + * console.log(utf16Buffer.indexOf('\u03a3', -4, 'utf16le')); + * // Prints: 6 + * ``` + * + * If `value` is not a string, number, or `Buffer`, this method will throw a`TypeError`. If `value` is a number, it will be coerced to a valid byte value, + * an integer between 0 and 255. + * + * If `byteOffset` is not a number, it will be coerced to a number. If the result + * of coercion is `NaN` or `0`, then the entire buffer will be searched. This + * behavior matches [`String.prototype.indexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/indexOf). 
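Because `byteOffset` restricts where the search begins, `indexOf()` can be called in a loop to find every occurrence of a value; a small illustrative sketch:

```js
import { Buffer } from 'node:buffer';

// Locate every occurrence of a byte sequence by advancing byteOffset
// past the previous match.
const buf = Buffer.from('one fish two fish');
const needle = Buffer.from('fish');

let index = buf.indexOf(needle);
while (index !== -1) {
  console.log(index); // Prints: 4, then 13
  index = buf.indexOf(needle, index + 1);
}
```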
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const b = Buffer.from('abcdef'); + * + * // Passing a value that's a number, but not a valid byte. + * // Prints: 2, equivalent to searching for 99 or 'c'. + * console.log(b.indexOf(99.9)); + * console.log(b.indexOf(256 + 99)); + * + * // Passing a byteOffset that coerces to NaN or 0. + * // Prints: 1, searching the whole buffer. + * console.log(b.indexOf('b', undefined)); + * console.log(b.indexOf('b', {})); + * console.log(b.indexOf('b', null)); + * console.log(b.indexOf('b', [])); + * ``` + * + * If `value` is an empty string or empty `Buffer` and `byteOffset` is less + * than `buf.length`, `byteOffset` will be returned. If `value` is empty and`byteOffset` is at least `buf.length`, `buf.length` will be returned. + * @since v1.5.0 + * @param value What to search for. + * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`. + * @return The index of the first occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`. + */ + indexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; + /** + * Identical to `buf.indexOf()`, except the last occurrence of `value` is found + * rather than the first occurrence. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('this buffer is a buffer'); + * + * console.log(buf.lastIndexOf('this')); + * // Prints: 0 + * console.log(buf.lastIndexOf('buffer')); + * // Prints: 17 + * console.log(buf.lastIndexOf(Buffer.from('buffer'))); + * // Prints: 17 + * console.log(buf.lastIndexOf(97)); + * // Prints: 15 (97 is the decimal ASCII value for 'a') + * console.log(buf.lastIndexOf(Buffer.from('yolo'))); + * // Prints: -1 + * console.log(buf.lastIndexOf('buffer', 5)); + * // Prints: 5 + * console.log(buf.lastIndexOf('buffer', 4)); + * // Prints: -1 + * + * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + * + * console.log(utf16Buffer.lastIndexOf('\u03a3', undefined, 'utf16le')); + * // Prints: 6 + * console.log(utf16Buffer.lastIndexOf('\u03a3', -5, 'utf16le')); + * // Prints: 4 + * ``` + * + * If `value` is not a string, number, or `Buffer`, this method will throw a`TypeError`. If `value` is a number, it will be coerced to a valid byte value, + * an integer between 0 and 255. + * + * If `byteOffset` is not a number, it will be coerced to a number. Any arguments + * that coerce to `NaN`, like `{}` or `undefined`, will search the whole buffer. + * This behavior matches [`String.prototype.lastIndexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/lastIndexOf). + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const b = Buffer.from('abcdef'); + * + * // Passing a value that's a number, but not a valid byte. + * // Prints: 2, equivalent to searching for 99 or 'c'. + * console.log(b.lastIndexOf(99.9)); + * console.log(b.lastIndexOf(256 + 99)); + * + * // Passing a byteOffset that coerces to NaN. + * // Prints: 1, searching the whole buffer. + * console.log(b.lastIndexOf('b', undefined)); + * console.log(b.lastIndexOf('b', {})); + * + * // Passing a byteOffset that coerces to 0. + * // Prints: -1, equivalent to passing 0. 
+ * console.log(b.lastIndexOf('b', null)); + * console.log(b.lastIndexOf('b', [])); + * ``` + * + * If `value` is an empty string or empty `Buffer`, `byteOffset` will be returned. + * @since v6.0.0 + * @param value What to search for. + * @param [byteOffset=buf.length - 1] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`. + * @return The index of the last occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`. + */ + lastIndexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; + /** + * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `[index, byte]` pairs from the contents + * of `buf`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Log the entire contents of a `Buffer`. + * + * const buf = Buffer.from('buffer'); + * + * for (const pair of buf.entries()) { + * console.log(pair); + * } + * // Prints: + * // [0, 98] + * // [1, 117] + * // [2, 102] + * // [3, 102] + * // [4, 101] + * // [5, 114] + * ``` + * @since v1.1.0 + */ + entries(): IterableIterator<[number, number]>; + /** + * Equivalent to `buf.indexOf() !== -1`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('this is a buffer'); + * + * console.log(buf.includes('this')); + * // Prints: true + * console.log(buf.includes('is')); + * // Prints: true + * console.log(buf.includes(Buffer.from('a buffer'))); + * // Prints: true + * console.log(buf.includes(97)); + * // Prints: true (97 is the decimal ASCII value for 'a') + * console.log(buf.includes(Buffer.from('a buffer example'))); + * // Prints: false + * console.log(buf.includes(Buffer.from('a buffer example').slice(0, 8))); + * // Prints: true + * console.log(buf.includes('this', 4)); + * // Prints: false + * ``` + * @since v5.3.0 + * @param value What to search for. + * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is its encoding. + * @return `true` if `value` was found in `buf`, `false` otherwise. + */ + includes(value: string | number | Buffer, byteOffset?: number, encoding?: BufferEncoding): boolean; + /** + * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `buf` keys (indices). + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * for (const key of buf.keys()) { + * console.log(key); + * } + * // Prints: + * // 0 + * // 1 + * // 2 + * // 3 + * // 4 + * // 5 + * ``` + * @since v1.1.0 + */ + keys(): IterableIterator; + /** + * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) for `buf` values (bytes). This function is + * called automatically when a `Buffer` is used in a `for..of` statement. 
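The `entries()`, `keys()`, and `values()` iterators compose naturally with destructuring and spreading; a brief sketch:

```js
import { Buffer } from 'node:buffer';

const buf = Buffer.from('abc');

// [index, byte] pairs from entries().
for (const [index, byte] of buf.entries()) {
  console.log(index, byte.toString(16)); // Prints: 0 61, 1 62, 2 63
}

console.log([...buf.keys()]);   // Prints: [ 0, 1, 2 ]
console.log([...buf.values()]); // Prints: [ 97, 98, 99 ]
```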
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * for (const value of buf.values()) { + * console.log(value); + * } + * // Prints: + * // 98 + * // 117 + * // 102 + * // 102 + * // 101 + * // 114 + * + * for (const value of buf) { + * console.log(value); + * } + * // Prints: + * // 98 + * // 117 + * // 102 + * // 102 + * // 101 + * // 114 + * ``` + * @since v1.1.0 + */ + values(): IterableIterator; + } + var Buffer: BufferConstructor; + /** + * Decodes a string of Base64-encoded data into bytes, and encodes those bytes + * into a string using Latin-1 (ISO-8859-1). + * + * The `data` may be any JavaScript-value that can be coerced into a string. + * + * **This function is only provided for compatibility with legacy web platform APIs** + * **and should never be used in new code, because they use strings to represent** + * **binary data and predate the introduction of typed arrays in JavaScript.** + * **For code running using Node.js APIs, converting between base64-encoded strings** + * **and binary data should be performed using `Buffer.from(str, 'base64')` and`buf.toString('base64')`.** + * @since v15.13.0, v14.17.0 + * @legacy Use `Buffer.from(data, 'base64')` instead. + * @param data The Base64-encoded input string. + */ + function atob(data: string): string; + /** + * Decodes a string into bytes using Latin-1 (ISO-8859), and encodes those bytes + * into a string using Base64. + * + * The `data` may be any JavaScript-value that can be coerced into a string. + * + * **This function is only provided for compatibility with legacy web platform APIs** + * **and should never be used in new code, because they use strings to represent** + * **binary data and predate the introduction of typed arrays in JavaScript.** + * **For code running using Node.js APIs, converting between base64-encoded strings** + * **and binary data should be performed using `Buffer.from(str, 'base64')` and`buf.toString('base64')`.** + * @since v15.13.0, v14.17.0 + * @legacy Use `buf.toString('base64')` instead. + * @param data An ASCII (Latin1) string. + */ + function btoa(data: string): string; + interface Blob extends __Blob {} + /** + * `Blob` class is a global reference for `require('node:buffer').Blob` + * https://nodejs.org/api/buffer.html#class-blob + * @since v18.0.0 + */ + var Blob: typeof globalThis extends { + onmessage: any; + Blob: infer T; + } ? T + : typeof NodeBlob; + } +} +declare module "node:buffer" { + export * from "buffer"; +} diff --git a/dist/node_modules/@types/node/child_process.d.ts b/dist/node_modules/@types/node/child_process.d.ts new file mode 100644 index 0000000..e5078b8 --- /dev/null +++ b/dist/node_modules/@types/node/child_process.d.ts @@ -0,0 +1,1542 @@ +/** + * The `node:child_process` module provides the ability to spawn subprocesses in + * a manner that is similar, but not identical, to [`popen(3)`](http://man7.org/linux/man-pages/man3/popen.3.html). 
This capability + * is primarily provided by the {@link spawn} function: + * + * ```js + * const { spawn } = require('node:child_process'); + * const ls = spawn('ls', ['-lh', '/usr']); + * + * ls.stdout.on('data', (data) => { + * console.log(`stdout: ${data}`); + * }); + * + * ls.stderr.on('data', (data) => { + * console.error(`stderr: ${data}`); + * }); + * + * ls.on('close', (code) => { + * console.log(`child process exited with code ${code}`); + * }); + * ``` + * + * By default, pipes for `stdin`, `stdout`, and `stderr` are established between + * the parent Node.js process and the spawned subprocess. These pipes have + * limited (and platform-specific) capacity. If the subprocess writes to + * stdout in excess of that limit without the output being captured, the + * subprocess blocks waiting for the pipe buffer to accept more data. This is + * identical to the behavior of pipes in the shell. Use the `{ stdio: 'ignore' }`option if the output will not be consumed. + * + * The command lookup is performed using the `options.env.PATH` environment + * variable if `env` is in the `options` object. Otherwise, `process.env.PATH` is + * used. If `options.env` is set without `PATH`, lookup on Unix is performed + * on a default search path search of `/usr/bin:/bin` (see your operating system's + * manual for execvpe/execvp), on Windows the current processes environment + * variable `PATH` is used. + * + * On Windows, environment variables are case-insensitive. Node.js + * lexicographically sorts the `env` keys and uses the first one that + * case-insensitively matches. Only first (in lexicographic order) entry will be + * passed to the subprocess. This might lead to issues on Windows when passing + * objects to the `env` option that have multiple variants of the same key, such as`PATH` and `Path`. + * + * The {@link spawn} method spawns the child process asynchronously, + * without blocking the Node.js event loop. The {@link spawnSync} function provides equivalent functionality in a synchronous manner that blocks + * the event loop until the spawned process either exits or is terminated. + * + * For convenience, the `node:child_process` module provides a handful of + * synchronous and asynchronous alternatives to {@link spawn} and {@link spawnSync}. Each of these alternatives are implemented on + * top of {@link spawn} or {@link spawnSync}. + * + * * {@link exec}: spawns a shell and runs a command within that + * shell, passing the `stdout` and `stderr` to a callback function when + * complete. + * * {@link execFile}: similar to {@link exec} except + * that it spawns the command directly without first spawning a shell by + * default. + * * {@link fork}: spawns a new Node.js process and invokes a + * specified module with an IPC communication channel established that allows + * sending messages between parent and child. + * * {@link execSync}: a synchronous version of {@link exec} that will block the Node.js event loop. + * * {@link execFileSync}: a synchronous version of {@link execFile} that will block the Node.js event loop. + * + * For certain use cases, such as automating shell scripts, the `synchronous counterparts` may be more convenient. In many cases, however, + * the synchronous methods can have significant impact on performance due to + * stalling the event loop while spawned processes complete. 
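For the synchronous counterparts mentioned above, a minimal sketch of `spawnSync()` (the blocking analogue of `spawn()`); the command shown is just an illustrative choice:

```js
const { spawnSync } = require('node:child_process');

// Blocks the event loop until the child exits: convenient in build scripts,
// usually a bad idea inside a server.
const result = spawnSync('node', ['--version'], { encoding: 'utf8' });

console.log(result.status);        // Prints: 0
console.log(result.stdout.trim()); // Prints the installed Node.js version
```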
+ * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/child_process.js) + */ +declare module "child_process" { + import { ObjectEncodingOptions } from "node:fs"; + import { Abortable, EventEmitter } from "node:events"; + import * as net from "node:net"; + import { Pipe, Readable, Stream, Writable } from "node:stream"; + import { URL } from "node:url"; + type Serializable = string | object | number | boolean | bigint; + type SendHandle = net.Socket | net.Server; + /** + * Instances of the `ChildProcess` represent spawned child processes. + * + * Instances of `ChildProcess` are not intended to be created directly. Rather, + * use the {@link spawn}, {@link exec},{@link execFile}, or {@link fork} methods to create + * instances of `ChildProcess`. + * @since v2.2.0 + */ + class ChildProcess extends EventEmitter { + /** + * A `Writable Stream` that represents the child process's `stdin`. + * + * If a child process waits to read all of its input, the child will not continue + * until this stream has been closed via `end()`. + * + * If the child was spawned with `stdio[0]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stdin` is an alias for `subprocess.stdio[0]`. Both properties will + * refer to the same value. + * + * The `subprocess.stdin` property can be `null` or `undefined`if the child process could not be successfully spawned. + * @since v0.1.90 + */ + stdin: Writable | null; + /** + * A `Readable Stream` that represents the child process's `stdout`. + * + * If the child was spawned with `stdio[1]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stdout` is an alias for `subprocess.stdio[1]`. Both properties will + * refer to the same value. + * + * ```js + * const { spawn } = require('node:child_process'); + * + * const subprocess = spawn('ls'); + * + * subprocess.stdout.on('data', (data) => { + * console.log(`Received chunk ${data}`); + * }); + * ``` + * + * The `subprocess.stdout` property can be `null` or `undefined`if the child process could not be successfully spawned. + * @since v0.1.90 + */ + stdout: Readable | null; + /** + * A `Readable Stream` that represents the child process's `stderr`. + * + * If the child was spawned with `stdio[2]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stderr` is an alias for `subprocess.stdio[2]`. Both properties will + * refer to the same value. + * + * The `subprocess.stderr` property can be `null` or `undefined`if the child process could not be successfully spawned. + * @since v0.1.90 + */ + stderr: Readable | null; + /** + * The `subprocess.channel` property is a reference to the child's IPC channel. If + * no IPC channel exists, this property is `undefined`. + * @since v7.1.0 + */ + readonly channel?: Pipe | null | undefined; + /** + * A sparse array of pipes to the child process, corresponding with positions in + * the `stdio` option passed to {@link spawn} that have been set + * to the value `'pipe'`. `subprocess.stdio[0]`, `subprocess.stdio[1]`, and`subprocess.stdio[2]` are also available as `subprocess.stdin`,`subprocess.stdout`, and `subprocess.stderr`, + * respectively. + * + * In the following example, only the child's fd `1` (stdout) is configured as a + * pipe, so only the parent's `subprocess.stdio[1]` is a stream, all other values + * in the array are `null`. 
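A minimal sketch of the `subprocess.stdin`/`subprocess.stdout` pair described above, using a child that simply echoes its input:

```js
const { spawn } = require('node:child_process');

// The child copies its stdin to its stdout.
const subprocess = spawn('node', ['-e', 'process.stdin.pipe(process.stdout)']);

subprocess.stdout.on('data', (data) => {
  console.log(`echoed back: ${data}`); // Prints: echoed back: hello
});

// Write one chunk and close stdin so the child can exit.
subprocess.stdin.end('hello');
```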
+ * + * ```js + * const assert = require('node:assert'); + * const fs = require('node:fs'); + * const child_process = require('node:child_process'); + * + * const subprocess = child_process.spawn('ls', { + * stdio: [ + * 0, // Use parent's stdin for child. + * 'pipe', // Pipe child's stdout to parent. + * fs.openSync('err.out', 'w'), // Direct child's stderr to a file. + * ], + * }); + * + * assert.strictEqual(subprocess.stdio[0], null); + * assert.strictEqual(subprocess.stdio[0], subprocess.stdin); + * + * assert(subprocess.stdout); + * assert.strictEqual(subprocess.stdio[1], subprocess.stdout); + * + * assert.strictEqual(subprocess.stdio[2], null); + * assert.strictEqual(subprocess.stdio[2], subprocess.stderr); + * ``` + * + * The `subprocess.stdio` property can be `undefined` if the child process could + * not be successfully spawned. + * @since v0.7.10 + */ + readonly stdio: [ + Writable | null, + // stdin + Readable | null, + // stdout + Readable | null, + // stderr + Readable | Writable | null | undefined, + // extra + Readable | Writable | null | undefined, // extra + ]; + /** + * The `subprocess.killed` property indicates whether the child process + * successfully received a signal from `subprocess.kill()`. The `killed` property + * does not indicate that the child process has been terminated. + * @since v0.5.10 + */ + readonly killed: boolean; + /** + * Returns the process identifier (PID) of the child process. If the child process + * fails to spawn due to errors, then the value is `undefined` and `error` is + * emitted. + * + * ```js + * const { spawn } = require('node:child_process'); + * const grep = spawn('grep', ['ssh']); + * + * console.log(`Spawned child pid: ${grep.pid}`); + * grep.stdin.end(); + * ``` + * @since v0.1.90 + */ + readonly pid?: number | undefined; + /** + * The `subprocess.connected` property indicates whether it is still possible to + * send and receive messages from a child process. When `subprocess.connected` is`false`, it is no longer possible to send or receive messages. + * @since v0.7.2 + */ + readonly connected: boolean; + /** + * The `subprocess.exitCode` property indicates the exit code of the child process. + * If the child process is still running, the field will be `null`. + */ + readonly exitCode: number | null; + /** + * The `subprocess.signalCode` property indicates the signal received by + * the child process if any, else `null`. + */ + readonly signalCode: NodeJS.Signals | null; + /** + * The `subprocess.spawnargs` property represents the full list of command-line + * arguments the child process was launched with. + */ + readonly spawnargs: string[]; + /** + * The `subprocess.spawnfile` property indicates the executable file name of + * the child process that is launched. + * + * For {@link fork}, its value will be equal to `process.execPath`. + * For {@link spawn}, its value will be the name of + * the executable file. + * For {@link exec}, its value will be the name of the shell + * in which the child process is launched. + */ + readonly spawnfile: string; + /** + * The `subprocess.kill()` method sends a signal to the child process. If no + * argument is given, the process will be sent the `'SIGTERM'` signal. See [`signal(7)`](http://man7.org/linux/man-pages/man7/signal.7.html) for a list of available signals. This function + * returns `true` if [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) succeeds, and `false` otherwise. 
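A short sketch of how the read-only bookkeeping properties above relate to one another:

```js
const { spawn } = require('node:child_process');

const subprocess = spawn('node', ['--version'], { stdio: 'ignore' });

console.log(subprocess.spawnfile); // Prints: node
console.log(subprocess.spawnargs); // Prints: [ 'node', '--version' ]
console.log(subprocess.pid);       // Prints: the child's PID (a number)

subprocess.on('exit', () => {
  // Populated once the child terminates.
  console.log(subprocess.exitCode, subprocess.signalCode); // Prints: 0 null
});
```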
+ * + * ```js + * const { spawn } = require('node:child_process'); + * const grep = spawn('grep', ['ssh']); + * + * grep.on('close', (code, signal) => { + * console.log( + * `child process terminated due to receipt of signal ${signal}`); + * }); + * + * // Send SIGHUP to process. + * grep.kill('SIGHUP'); + * ``` + * + * The `ChildProcess` object may emit an `'error'` event if the signal + * cannot be delivered. Sending a signal to a child process that has already exited + * is not an error but may have unforeseen consequences. Specifically, if the + * process identifier (PID) has been reassigned to another process, the signal will + * be delivered to that process instead which can have unexpected results. + * + * While the function is called `kill`, the signal delivered to the child process + * may not actually terminate the process. + * + * See [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) for reference. + * + * On Windows, where POSIX signals do not exist, the `signal` argument will be + * ignored, and the process will be killed forcefully and abruptly (similar to`'SIGKILL'`). + * See `Signal Events` for more details. + * + * On Linux, child processes of child processes will not be terminated + * when attempting to kill their parent. This is likely to happen when running a + * new process in a shell or with the use of the `shell` option of `ChildProcess`: + * + * ```js + * 'use strict'; + * const { spawn } = require('node:child_process'); + * + * const subprocess = spawn( + * 'sh', + * [ + * '-c', + * `node -e "setInterval(() => { + * console.log(process.pid, 'is alive') + * }, 500);"`, + * ], { + * stdio: ['inherit', 'inherit', 'inherit'], + * }, + * ); + * + * setTimeout(() => { + * subprocess.kill(); // Does not terminate the Node.js process in the shell. + * }, 2000); + * ``` + * @since v0.1.90 + */ + kill(signal?: NodeJS.Signals | number): boolean; + /** + * Calls {@link ChildProcess.kill} with `'SIGTERM'`. + * @since v20.5.0 + */ + [Symbol.dispose](): void; + /** + * When an IPC channel has been established between the parent and child ( + * i.e. when using {@link fork}), the `subprocess.send()` method can + * be used to send messages to the child process. When the child process is a + * Node.js instance, these messages can be received via the `'message'` event. + * + * The message goes through serialization and parsing. The resulting + * message might not be the same as what is originally sent. + * + * For example, in the parent script: + * + * ```js + * const cp = require('node:child_process'); + * const n = cp.fork(`${__dirname}/sub.js`); + * + * n.on('message', (m) => { + * console.log('PARENT got message:', m); + * }); + * + * // Causes the child to print: CHILD got message: { hello: 'world' } + * n.send({ hello: 'world' }); + * ``` + * + * And then the child script, `'sub.js'` might look like this: + * + * ```js + * process.on('message', (m) => { + * console.log('CHILD got message:', m); + * }); + * + * // Causes the parent to print: PARENT got message: { foo: 'bar', baz: null } + * process.send({ foo: 'bar', baz: NaN }); + * ``` + * + * Child Node.js processes will have a `process.send()` method of their own + * that allows the child to send messages back to the parent. + * + * There is a special case when sending a `{cmd: 'NODE_foo'}` message. Messages + * containing a `NODE_` prefix in the `cmd` property are reserved for use within + * Node.js core and will not be emitted in the child's `'message'` event. 
Rather, such messages are emitted using the`'internalMessage'` event and are consumed internally by Node.js. + * Applications should avoid using such messages or listening for`'internalMessage'` events as it is subject to change without notice. + * + * The optional `sendHandle` argument that may be passed to `subprocess.send()` is + * for passing a TCP server or socket object to the child process. The child will + * receive the object as the second argument passed to the callback function + * registered on the `'message'` event. Any data that is received + * and buffered in the socket will not be sent to the child. + * + * The optional `callback` is a function that is invoked after the message is + * sent but before the child may have received it. The function is called with a + * single argument: `null` on success, or an `Error` object on failure. + * + * If no `callback` function is provided and the message cannot be sent, an`'error'` event will be emitted by the `ChildProcess` object. This can + * happen, for instance, when the child process has already exited. + * + * `subprocess.send()` will return `false` if the channel has closed or when the + * backlog of unsent messages exceeds a threshold that makes it unwise to send + * more. Otherwise, the method returns `true`. The `callback` function can be + * used to implement flow control. + * + * #### Example: sending a server object + * + * The `sendHandle` argument can be used, for instance, to pass the handle of + * a TCP server object to the child process as illustrated in the example below: + * + * ```js + * const subprocess = require('node:child_process').fork('subprocess.js'); + * + * // Open up the server object and send the handle. + * const server = require('node:net').createServer(); + * server.on('connection', (socket) => { + * socket.end('handled by parent'); + * }); + * server.listen(1337, () => { + * subprocess.send('server', server); + * }); + * ``` + * + * The child would then receive the server object as: + * + * ```js + * process.on('message', (m, server) => { + * if (m === 'server') { + * server.on('connection', (socket) => { + * socket.end('handled by child'); + * }); + * } + * }); + * ``` + * + * Once the server is now shared between the parent and child, some connections + * can be handled by the parent and some by the child. + * + * While the example above uses a server created using the `node:net` module,`node:dgram` module servers use exactly the same workflow with the exceptions of + * listening on a `'message'` event instead of `'connection'` and using`server.bind()` instead of `server.listen()`. This is, however, only + * supported on Unix platforms. + * + * #### Example: sending a socket object + * + * Similarly, the `sendHandler` argument can be used to pass the handle of a + * socket to the child process. The example below spawns two children that each + * handle connections with "normal" or "special" priority: + * + * ```js + * const { fork } = require('node:child_process'); + * const normal = fork('subprocess.js', ['normal']); + * const special = fork('subprocess.js', ['special']); + * + * // Open up the server and send sockets to child. Use pauseOnConnect to prevent + * // the sockets from being read before they are sent to the child process. + * const server = require('node:net').createServer({ pauseOnConnect: true }); + * server.on('connection', (socket) => { + * + * // If this is special priority... 
+ * if (socket.remoteAddress === '74.125.127.100') { + * special.send('socket', socket); + * return; + * } + * // This is normal priority. + * normal.send('socket', socket); + * }); + * server.listen(1337); + * ``` + * + * The `subprocess.js` would receive the socket handle as the second argument + * passed to the event callback function: + * + * ```js + * process.on('message', (m, socket) => { + * if (m === 'socket') { + * if (socket) { + * // Check that the client socket exists. + * // It is possible for the socket to be closed between the time it is + * // sent and the time it is received in the child process. + * socket.end(`Request handled with ${process.argv[2]} priority`); + * } + * } + * }); + * ``` + * + * Do not use `.maxConnections` on a socket that has been passed to a subprocess. + * The parent cannot track when the socket is destroyed. + * + * Any `'message'` handlers in the subprocess should verify that `socket` exists, + * as the connection may have been closed during the time it takes to send the + * connection to the child. + * @since v0.5.9 + * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. `options` supports the following properties: + */ + send(message: Serializable, callback?: (error: Error | null) => void): boolean; + send(message: Serializable, sendHandle?: SendHandle, callback?: (error: Error | null) => void): boolean; + send( + message: Serializable, + sendHandle?: SendHandle, + options?: MessageOptions, + callback?: (error: Error | null) => void, + ): boolean; + /** + * Closes the IPC channel between parent and child, allowing the child to exit + * gracefully once there are no other connections keeping it alive. After calling + * this method the `subprocess.connected` and `process.connected` properties in + * both the parent and child (respectively) will be set to `false`, and it will be + * no longer possible to pass messages between the processes. + * + * The `'disconnect'` event will be emitted when there are no messages in the + * process of being received. This will most often be triggered immediately after + * calling `subprocess.disconnect()`. + * + * When the child process is a Node.js instance (e.g. spawned using {@link fork}), the `process.disconnect()` method can be invoked + * within the child process to close the IPC channel as well. + * @since v0.7.2 + */ + disconnect(): void; + /** + * By default, the parent will wait for the detached child to exit. To prevent the + * parent from waiting for a given `subprocess` to exit, use the`subprocess.unref()` method. Doing so will cause the parent's event loop to not + * include the child in its reference count, allowing the parent to exit + * independently of the child, unless there is an established IPC channel between + * the child and the parent. + * + * ```js + * const { spawn } = require('node:child_process'); + * + * const subprocess = spawn(process.argv[0], ['child_program.js'], { + * detached: true, + * stdio: 'ignore', + * }); + * + * subprocess.unref(); + * ``` + * @since v0.7.10 + */ + unref(): void; + /** + * Calling `subprocess.ref()` after making a call to `subprocess.unref()` will + * restore the removed reference count for the child process, forcing the parent + * to wait for the child to exit before exiting itself. 
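Putting `send()`, its completion callback, and `disconnect()` together in one sketch; `worker.js` is a hypothetical module (assumed to echo each message back with `process.send()`), used only for illustration:

```js
const { fork } = require('node:child_process');

// Hypothetical worker: assumed to reply to each message via process.send().
const child = fork('worker.js');

child.send({ cmd: 'start' }, (err) => {
  // null on success; an Error if the message could not be delivered.
  if (err) console.error('send failed:', err);
});

child.on('message', (reply) => {
  console.log('reply from child:', reply);
  child.disconnect(); // no more messages: let the child exit gracefully
});
```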
+ * + * ```js + * const { spawn } = require('node:child_process'); + * + * const subprocess = spawn(process.argv[0], ['child_program.js'], { + * detached: true, + * stdio: 'ignore', + * }); + * + * subprocess.unref(); + * subprocess.ref(); + * ``` + * @since v0.7.10 + */ + ref(): void; + /** + * events.EventEmitter + * 1. close + * 2. disconnect + * 3. error + * 4. exit + * 5. message + * 6. spawn + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + addListener(event: "disconnect", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + addListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + addListener(event: "spawn", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close", code: number | null, signal: NodeJS.Signals | null): boolean; + emit(event: "disconnect"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "exit", code: number | null, signal: NodeJS.Signals | null): boolean; + emit(event: "message", message: Serializable, sendHandle: SendHandle): boolean; + emit(event: "spawn", listener: () => void): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + on(event: "disconnect", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + on(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + on(event: "spawn", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + once(event: "disconnect", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + once(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + once(event: "spawn", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependListener(event: "disconnect", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + prependListener(event: "spawn", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "close", + listener: (code: number | null, signal: NodeJS.Signals | null) => void, + ): this; + prependOnceListener(event: "disconnect", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener( + event: "exit", + listener: (code: number | null, signal: 
NodeJS.Signals | null) => void, + ): this; + prependOnceListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + prependOnceListener(event: "spawn", listener: () => void): this; + } + // return this object when stdio option is undefined or not specified + interface ChildProcessWithoutNullStreams extends ChildProcess { + stdin: Writable; + stdout: Readable; + stderr: Readable; + readonly stdio: [ + Writable, + Readable, + Readable, + // stderr + Readable | Writable | null | undefined, + // extra, no modification + Readable | Writable | null | undefined, // extra, no modification + ]; + } + // return this object when stdio option is a tuple of 3 + interface ChildProcessByStdio + extends ChildProcess + { + stdin: I; + stdout: O; + stderr: E; + readonly stdio: [ + I, + O, + E, + Readable | Writable | null | undefined, + // extra, no modification + Readable | Writable | null | undefined, // extra, no modification + ]; + } + interface MessageOptions { + keepOpen?: boolean | undefined; + } + type IOType = "overlapped" | "pipe" | "ignore" | "inherit"; + type StdioOptions = IOType | Array; + type SerializationType = "json" | "advanced"; + interface MessagingOptions extends Abortable { + /** + * Specify the kind of serialization used for sending messages between processes. + * @default 'json' + */ + serialization?: SerializationType | undefined; + /** + * The signal value to be used when the spawned process will be killed by the abort signal. + * @default 'SIGTERM' + */ + killSignal?: NodeJS.Signals | number | undefined; + /** + * In milliseconds the maximum amount of time the process is allowed to run. + */ + timeout?: number | undefined; + } + interface ProcessEnvOptions { + uid?: number | undefined; + gid?: number | undefined; + cwd?: string | URL | undefined; + env?: NodeJS.ProcessEnv | undefined; + } + interface CommonOptions extends ProcessEnvOptions { + /** + * @default false + */ + windowsHide?: boolean | undefined; + /** + * @default 0 + */ + timeout?: number | undefined; + } + interface CommonSpawnOptions extends CommonOptions, MessagingOptions, Abortable { + argv0?: string | undefined; + /** + * Can be set to 'pipe', 'inherit', 'overlapped', or 'ignore', or an array of these strings. + * If passed as an array, the first element is used for `stdin`, the second for + * `stdout`, and the third for `stderr`. A fourth element can be used to + * specify the `stdio` behavior beyond the standard streams. See + * {@link ChildProcess.stdio} for more information. + * + * @default 'pipe' + */ + stdio?: StdioOptions | undefined; + shell?: boolean | string | undefined; + windowsVerbatimArguments?: boolean | undefined; + } + interface SpawnOptions extends CommonSpawnOptions { + detached?: boolean | undefined; + } + interface SpawnOptionsWithoutStdio extends SpawnOptions { + stdio?: StdioPipeNamed | StdioPipe[] | undefined; + } + type StdioNull = "inherit" | "ignore" | Stream; + type StdioPipeNamed = "pipe" | "overlapped"; + type StdioPipe = undefined | null | StdioPipeNamed; + interface SpawnOptionsWithStdioTuple< + Stdin extends StdioNull | StdioPipe, + Stdout extends StdioNull | StdioPipe, + Stderr extends StdioNull | StdioPipe, + > extends SpawnOptions { + stdio: [Stdin, Stdout, Stderr]; + } + /** + * The `child_process.spawn()` method spawns a new process using the given`command`, with command-line arguments in `args`. If omitted, `args` defaults + * to an empty array. 
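A sketch of how a `SpawnOptionsWithStdioTuple` value narrows the stream properties in practice, together with a couple of the typed events listed above:

```js
const { spawn } = require('node:child_process');

// ['ignore', 'pipe', 'inherit']: stdin is null, stdout is a Readable,
// and stderr goes straight to the parent's stderr (so it is null here).
const subprocess = spawn('node', ['--version'], {
  stdio: ['ignore', 'pipe', 'inherit'],
});

console.log(subprocess.stdin); // Prints: null
subprocess.stdout.on('data', (data) => console.log(data.toString().trim()));

subprocess.once('spawn', () => console.log('child started'));
subprocess.once('close', (code, signal) => console.log('close:', code, signal)); // close: 0 null
```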
+ * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * + * A third argument may be used to specify additional options, with these defaults: + * + * ```js + * const defaults = { + * cwd: undefined, + * env: process.env, + * }; + * ``` + * + * Use `cwd` to specify the working directory from which the process is spawned. + * If not given, the default is to inherit the current working directory. If given, + * but the path does not exist, the child process emits an `ENOENT` error + * and exits immediately. `ENOENT` is also emitted when the command + * does not exist. + * + * Use `env` to specify environment variables that will be visible to the new + * process, the default is `process.env`. + * + * `undefined` values in `env` will be ignored. + * + * Example of running `ls -lh /usr`, capturing `stdout`, `stderr`, and the + * exit code: + * + * ```js + * const { spawn } = require('node:child_process'); + * const ls = spawn('ls', ['-lh', '/usr']); + * + * ls.stdout.on('data', (data) => { + * console.log(`stdout: ${data}`); + * }); + * + * ls.stderr.on('data', (data) => { + * console.error(`stderr: ${data}`); + * }); + * + * ls.on('close', (code) => { + * console.log(`child process exited with code ${code}`); + * }); + * ``` + * + * Example: A very elaborate way to run `ps ax | grep ssh` + * + * ```js + * const { spawn } = require('node:child_process'); + * const ps = spawn('ps', ['ax']); + * const grep = spawn('grep', ['ssh']); + * + * ps.stdout.on('data', (data) => { + * grep.stdin.write(data); + * }); + * + * ps.stderr.on('data', (data) => { + * console.error(`ps stderr: ${data}`); + * }); + * + * ps.on('close', (code) => { + * if (code !== 0) { + * console.log(`ps process exited with code ${code}`); + * } + * grep.stdin.end(); + * }); + * + * grep.stdout.on('data', (data) => { + * console.log(data.toString()); + * }); + * + * grep.stderr.on('data', (data) => { + * console.error(`grep stderr: ${data}`); + * }); + * + * grep.on('close', (code) => { + * if (code !== 0) { + * console.log(`grep process exited with code ${code}`); + * } + * }); + * ``` + * + * Example of checking for failed `spawn`: + * + * ```js + * const { spawn } = require('node:child_process'); + * const subprocess = spawn('bad_command'); + * + * subprocess.on('error', (err) => { + * console.error('Failed to start subprocess.'); + * }); + * ``` + * + * Certain platforms (macOS, Linux) will use the value of `argv[0]` for the process + * title while others (Windows, SunOS) will use `command`. + * + * Node.js overwrites `argv[0]` with `process.execPath` on startup, so`process.argv[0]` in a Node.js child process will not match the `argv0`parameter passed to `spawn` from the parent. Retrieve + * it with the`process.argv0` property instead. 
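A brief sketch of the `cwd` and `env` options described above (`'/tmp'` assumes a Unix-like system, and only the variables listed are visible to the child):

```js
const { spawn } = require('node:child_process');

const subprocess = spawn(
  'node',
  ['-e', 'console.log(process.cwd(), process.env.GREETING)'],
  {
    cwd: '/tmp', // working directory for the child ('/tmp' assumes Unix)
    env: { PATH: process.env.PATH, GREETING: 'hi' }, // minimal environment
  },
);

subprocess.stdout.on('data', (data) => {
  console.log(data.toString().trim()); // Prints the child's cwd followed by 'hi'
});
```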
+ * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { spawn } = require('node:child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const grep = spawn('grep', ['ssh'], { signal }); + * grep.on('error', (err) => { + * // This will be called with err being an AbortError if the controller aborts + * }); + * controller.abort(); // Stops the child process + * ``` + * @since v0.1.90 + * @param command The command to run. + * @param args List of string arguments. + */ + function spawn(command: string, options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn(command: string, options: SpawnOptions): ChildProcess; + // overloads of spawn with 'args' + function spawn( + command: string, + args?: readonly string[], + options?: SpawnOptionsWithoutStdio, + ): ChildProcessWithoutNullStreams; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn( + command: string, + args: readonly string[], + options: SpawnOptionsWithStdioTuple, + ): ChildProcessByStdio; + function spawn(command: string, args: readonly string[], options: SpawnOptions): ChildProcess; + interface ExecOptions extends CommonOptions { + shell?: string | undefined; + signal?: AbortSignal | undefined; + maxBuffer?: number | undefined; + killSignal?: NodeJS.Signals | number | undefined; + } + interface ExecOptionsWithStringEncoding extends ExecOptions { + encoding: BufferEncoding; + } + interface ExecOptionsWithBufferEncoding extends ExecOptions { + encoding: BufferEncoding | null; // specify `null`. 
+ } + interface ExecException extends Error { + cmd?: string | undefined; + killed?: boolean | undefined; + code?: number | undefined; + signal?: NodeJS.Signals | undefined; + stdout?: string; + stderr?: string; + } + /** + * Spawns a shell then executes the `command` within that shell, buffering any + * generated output. The `command` string passed to the exec function is processed + * directly by the shell and special characters (vary based on [shell](https://en.wikipedia.org/wiki/List_of_command-line_interpreters)) + * need to be dealt with accordingly: + * + * ```js + * const { exec } = require('node:child_process'); + * + * exec('"/path/to/test file/test.sh" arg1 arg2'); + * // Double quotes are used so that the space in the path is not interpreted as + * // a delimiter of multiple arguments. + * + * exec('echo "The \\$HOME variable is $HOME"'); + * // The $HOME variable is escaped in the first instance, but not in the second. + * ``` + * + * **Never pass unsanitized user input to this function. Any input containing shell** + * **metacharacters may be used to trigger arbitrary command execution.** + * + * If a `callback` function is provided, it is called with the arguments`(error, stdout, stderr)`. On success, `error` will be `null`. On error,`error` will be an instance of `Error`. The + * `error.code` property will be + * the exit code of the process. By convention, any exit code other than `0`indicates an error. `error.signal` will be the signal that terminated the + * process. + * + * The `stdout` and `stderr` arguments passed to the callback will contain the + * stdout and stderr output of the child process. By default, Node.js will decode + * the output as UTF-8 and pass strings to the callback. The `encoding` option + * can be used to specify the character encoding used to decode the stdout and + * stderr output. If `encoding` is `'buffer'`, or an unrecognized character + * encoding, `Buffer` objects will be passed to the callback instead. + * + * ```js + * const { exec } = require('node:child_process'); + * exec('cat *.js missing_file | wc -l', (error, stdout, stderr) => { + * if (error) { + * console.error(`exec error: ${error}`); + * return; + * } + * console.log(`stdout: ${stdout}`); + * console.error(`stderr: ${stderr}`); + * }); + * ``` + * + * If `timeout` is greater than `0`, the parent will send the signal + * identified by the `killSignal` property (the default is `'SIGTERM'`) if the + * child runs longer than `timeout` milliseconds. + * + * Unlike the [`exec(3)`](http://man7.org/linux/man-pages/man3/exec.3.html) POSIX system call, `child_process.exec()` does not replace + * the existing process and uses a shell to execute the command. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned`ChildProcess` instance is attached to the `Promise` as a `child` property. In + * case of an error (including any error resulting in an exit code other than 0), a + * rejected promise is returned, with the same `error` object given in the + * callback, but with two additional properties `stdout` and `stderr`. 
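A sketch of the `ExecOptions` fields and the `encoding`-dependent callback types described above; the limits are arbitrary illustrative values:

```js
const { exec } = require('node:child_process');

exec('node --version', {
  timeout: 5_000,         // send killSignal if the child runs longer than 5 s
  killSignal: 'SIGKILL',
  maxBuffer: 1024 * 1024, // error out if stdout/stderr exceed 1 MiB
}, (error, stdout, stderr) => {
  if (error) {
    console.error('exec failed:', error.code ?? error.signal);
    return;
  }
  console.log(stdout.trim());
});

// With encoding: 'buffer', the callback receives Buffer instead of string.
exec('node --version', { encoding: 'buffer' }, (error, stdout) => {
  if (error) throw error;
  console.log(Buffer.isBuffer(stdout)); // Prints: true
});
```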
+ * + * ```js + * const util = require('node:util'); + * const exec = util.promisify(require('node:child_process').exec); + * + * async function lsExample() { + * const { stdout, stderr } = await exec('ls'); + * console.log('stdout:', stdout); + * console.error('stderr:', stderr); + * } + * lsExample(); + * ``` + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { exec } = require('node:child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = exec('grep ssh', { signal }, (error) => { + * console.error(error); // an AbortError + * }); + * controller.abort(); + * ``` + * @since v0.1.90 + * @param command The command to run, with space-separated arguments. + * @param callback called with the output when process terminates. + */ + function exec( + command: string, + callback?: (error: ExecException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`. + function exec( + command: string, + options: { + encoding: "buffer" | null; + } & ExecOptions, + callback?: (error: ExecException | null, stdout: Buffer, stderr: Buffer) => void, + ): ChildProcess; + // `options` with well known `encoding` means stdout/stderr are definitely `string`. + function exec( + command: string, + options: { + encoding: BufferEncoding; + } & ExecOptions, + callback?: (error: ExecException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`. + // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`. + function exec( + command: string, + options: { + encoding: BufferEncoding; + } & ExecOptions, + callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void, + ): ChildProcess; + // `options` without an `encoding` means stdout/stderr are definitely `string`. + function exec( + command: string, + options: ExecOptions, + callback?: (error: ExecException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // fallback if nothing else matches. Worst case is always `string | Buffer`. 
+ function exec( + command: string, + options: (ObjectEncodingOptions & ExecOptions) | undefined | null, + callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void, + ): ChildProcess; + interface PromiseWithChild extends Promise { + child: ChildProcess; + } + namespace exec { + function __promisify__(command: string): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + command: string, + options: { + encoding: "buffer" | null; + } & ExecOptions, + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + command: string, + options: { + encoding: BufferEncoding; + } & ExecOptions, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + command: string, + options: ExecOptions, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + command: string, + options?: (ObjectEncodingOptions & ExecOptions) | null, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + } + interface ExecFileOptions extends CommonOptions, Abortable { + maxBuffer?: number | undefined; + killSignal?: NodeJS.Signals | number | undefined; + windowsVerbatimArguments?: boolean | undefined; + shell?: boolean | string | undefined; + signal?: AbortSignal | undefined; + } + interface ExecFileOptionsWithStringEncoding extends ExecFileOptions { + encoding: BufferEncoding; + } + interface ExecFileOptionsWithBufferEncoding extends ExecFileOptions { + encoding: "buffer" | null; + } + interface ExecFileOptionsWithOtherEncoding extends ExecFileOptions { + encoding: BufferEncoding; + } + type ExecFileException = + & Omit + & Omit + & { code?: string | number | undefined | null }; + /** + * The `child_process.execFile()` function is similar to {@link exec} except that it does not spawn a shell by default. Rather, the specified + * executable `file` is spawned directly as a new process making it slightly more + * efficient than {@link exec}. + * + * The same options as {@link exec} are supported. Since a shell is + * not spawned, behaviors such as I/O redirection and file globbing are not + * supported. + * + * ```js + * const { execFile } = require('node:child_process'); + * const child = execFile('node', ['--version'], (error, stdout, stderr) => { + * if (error) { + * throw error; + * } + * console.log(stdout); + * }); + * ``` + * + * The `stdout` and `stderr` arguments passed to the callback will contain the + * stdout and stderr output of the child process. By default, Node.js will decode + * the output as UTF-8 and pass strings to the callback. The `encoding` option + * can be used to specify the character encoding used to decode the stdout and + * stderr output. If `encoding` is `'buffer'`, or an unrecognized character + * encoding, `Buffer` objects will be passed to the callback instead. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned`ChildProcess` instance is attached to the `Promise` as a `child` property. In + * case of an error (including any error resulting in an exit code other than 0), a + * rejected promise is returned, with the same `error` object given in the + * callback, but with two additional properties `stdout` and `stderr`. 
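As noted above, the promisified form attaches the underlying `ChildProcess` to the returned promise as `child`; a small sketch using `util.promisify()`:

```js
const util = require('node:util');
const { execFile } = require('node:child_process');

const execFileAsync = util.promisify(execFile);

async function main() {
  const promise = execFileAsync('node', ['--version']);

  // The ChildProcess instance is available before the promise settles.
  console.log(typeof promise.child.pid); // Prints: number

  const { stdout } = await promise;
  console.log(stdout.trim());
}
main();
```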
+ * + * ```js + * const util = require('node:util'); + * const execFile = util.promisify(require('node:child_process').execFile); + * async function getVersion() { + * const { stdout } = await execFile('node', ['--version']); + * console.log(stdout); + * } + * getVersion(); + * ``` + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { execFile } = require('node:child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = execFile('node', ['--version'], { signal }, (error) => { + * console.error(error); // an AbortError + * }); + * controller.abort(); + * ``` + * @since v0.1.91 + * @param file The name or path of the executable file to run. + * @param args List of string arguments. + * @param callback Called with the output when process terminates. + */ + function execFile(file: string): ChildProcess; + function execFile( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): ChildProcess; + function execFile(file: string, args?: readonly string[] | null): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): ChildProcess; + // no `options` definitely means stdout/stderr are `string`. + function execFile( + file: string, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`. + function execFile( + file: string, + options: ExecFileOptionsWithBufferEncoding, + callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithBufferEncoding, + callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void, + ): ChildProcess; + // `options` with well known `encoding` means stdout/stderr are definitely `string`. + function execFile( + file: string, + options: ExecFileOptionsWithStringEncoding, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithStringEncoding, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`. + // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`. 
+ function execFile( + file: string, + options: ExecFileOptionsWithOtherEncoding, + callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithOtherEncoding, + callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void, + ): ChildProcess; + // `options` without an `encoding` means stdout/stderr are definitely `string`. + function execFile( + file: string, + options: ExecFileOptions, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptions, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // fallback if nothing else matches. Worst case is always `string | Buffer`. + function execFile( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + callback: + | ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void) + | undefined + | null, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + callback: + | ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void) + | undefined + | null, + ): ChildProcess; + namespace execFile { + function __promisify__(file: string): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithBufferEncoding, + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithBufferEncoding, + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithStringEncoding, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithStringEncoding, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithOtherEncoding, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithOtherEncoding, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + options: ExecFileOptions, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptions, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | 
null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + } + interface ForkOptions extends ProcessEnvOptions, MessagingOptions, Abortable { + execPath?: string | undefined; + execArgv?: string[] | undefined; + silent?: boolean | undefined; + /** + * Can be set to 'pipe', 'inherit', 'overlapped', or 'ignore', or an array of these strings. + * If passed as an array, the first element is used for `stdin`, the second for + * `stdout`, and the third for `stderr`. A fourth element can be used to + * specify the `stdio` behavior beyond the standard streams. See + * {@link ChildProcess.stdio} for more information. + * + * @default 'pipe' + */ + stdio?: StdioOptions | undefined; + detached?: boolean | undefined; + windowsVerbatimArguments?: boolean | undefined; + } + /** + * The `child_process.fork()` method is a special case of {@link spawn} used specifically to spawn new Node.js processes. + * Like {@link spawn}, a `ChildProcess` object is returned. The + * returned `ChildProcess` will have an additional communication channel + * built-in that allows messages to be passed back and forth between the parent and + * child. See `subprocess.send()` for details. + * + * Keep in mind that spawned Node.js child processes are + * independent of the parent with exception of the IPC communication channel + * that is established between the two. Each process has its own memory, with + * their own V8 instances. Because of the additional resource allocations + * required, spawning a large number of child Node.js processes is not + * recommended. + * + * By default, `child_process.fork()` will spawn new Node.js instances using the `process.execPath` of the parent process. The `execPath` property in the`options` object allows for an alternative + * execution path to be used. + * + * Node.js processes launched with a custom `execPath` will communicate with the + * parent process using the file descriptor (fd) identified using the + * environment variable `NODE_CHANNEL_FD` on the child process. + * + * Unlike the [`fork(2)`](http://man7.org/linux/man-pages/man2/fork.2.html) POSIX system call, `child_process.fork()` does not clone the + * current process. + * + * The `shell` option available in {@link spawn} is not supported by`child_process.fork()` and will be ignored if set. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * if (process.argv[2] === 'child') { + * setTimeout(() => { + * console.log(`Hello from ${process.argv[2]}!`); + * }, 1_000); + * } else { + * const { fork } = require('node:child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = fork(__filename, ['child'], { signal }); + * child.on('error', (err) => { + * // This will be called with err being an AbortError if the controller aborts + * }); + * controller.abort(); // Stops the child process + * } + * ``` + * @since v0.5.0 + * @param modulePath The module to run in the child. + * @param args List of string arguments. 
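+     *
+     * A minimal sketch of the two-way message channel described above,
+     * using a single file for both the parent and the child:
+     *
+     * ```js
+     * const { fork } = require('node:child_process');
+     *
+     * if (process.argv[2] === 'child') {
+     *   process.on('message', (msg) => process.send(`got: ${msg}`));
+     * } else {
+     *   const child = fork(__filename, ['child']);
+     *   child.on('message', (reply) => {
+     *     console.log(reply); // Prints: got: ping
+     *     child.disconnect();
+     *   });
+     *   child.send('ping');
+     * }
+     * ```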
+ */ + function fork(modulePath: string, options?: ForkOptions): ChildProcess; + function fork(modulePath: string, args?: readonly string[], options?: ForkOptions): ChildProcess; + interface SpawnSyncOptions extends CommonSpawnOptions { + input?: string | NodeJS.ArrayBufferView | undefined; + maxBuffer?: number | undefined; + encoding?: BufferEncoding | "buffer" | null | undefined; + } + interface SpawnSyncOptionsWithStringEncoding extends SpawnSyncOptions { + encoding: BufferEncoding; + } + interface SpawnSyncOptionsWithBufferEncoding extends SpawnSyncOptions { + encoding?: "buffer" | null | undefined; + } + interface SpawnSyncReturns { + pid: number; + output: Array; + stdout: T; + stderr: T; + status: number | null; + signal: NodeJS.Signals | null; + error?: Error | undefined; + } + /** + * The `child_process.spawnSync()` method is generally identical to {@link spawn} with the exception that the function will not return + * until the child process has fully closed. When a timeout has been encountered + * and `killSignal` is sent, the method won't return until the process has + * completely exited. If the process intercepts and handles the `SIGTERM` signal + * and doesn't exit, the parent process will wait until the child process has + * exited. + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * @since v0.11.12 + * @param command The command to run. + * @param args List of string arguments. + */ + function spawnSync(command: string): SpawnSyncReturns; + function spawnSync(command: string, options: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns; + function spawnSync(command: string, options: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns; + function spawnSync(command: string, options?: SpawnSyncOptions): SpawnSyncReturns; + function spawnSync(command: string, args: readonly string[]): SpawnSyncReturns; + function spawnSync( + command: string, + args: readonly string[], + options: SpawnSyncOptionsWithStringEncoding, + ): SpawnSyncReturns; + function spawnSync( + command: string, + args: readonly string[], + options: SpawnSyncOptionsWithBufferEncoding, + ): SpawnSyncReturns; + function spawnSync( + command: string, + args?: readonly string[], + options?: SpawnSyncOptions, + ): SpawnSyncReturns; + interface CommonExecOptions extends CommonOptions { + input?: string | NodeJS.ArrayBufferView | undefined; + /** + * Can be set to 'pipe', 'inherit, or 'ignore', or an array of these strings. + * If passed as an array, the first element is used for `stdin`, the second for + * `stdout`, and the third for `stderr`. A fourth element can be used to + * specify the `stdio` behavior beyond the standard streams. See + * {@link ChildProcess.stdio} for more information. 
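+         *
+         * For example, to ignore `stdin`, capture `stdout`, and stream `stderr`
+         * straight to the parent's terminal (a sketch using {@link execSync},
+         * which accepts these options):
+         *
+         * ```js
+         * const { execSync } = require('node:child_process');
+         * const out = execSync('node --version', {
+         *   stdio: ['ignore', 'pipe', 'inherit'],
+         * });
+         * console.log(out.toString());
+         * ```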
+ * + * @default 'pipe' + */ + stdio?: StdioOptions | undefined; + killSignal?: NodeJS.Signals | number | undefined; + maxBuffer?: number | undefined; + encoding?: BufferEncoding | "buffer" | null | undefined; + } + interface ExecSyncOptions extends CommonExecOptions { + shell?: string | undefined; + } + interface ExecSyncOptionsWithStringEncoding extends ExecSyncOptions { + encoding: BufferEncoding; + } + interface ExecSyncOptionsWithBufferEncoding extends ExecSyncOptions { + encoding?: "buffer" | null | undefined; + } + /** + * The `child_process.execSync()` method is generally identical to {@link exec} with the exception that the method will not return + * until the child process has fully closed. When a timeout has been encountered + * and `killSignal` is sent, the method won't return until the process has + * completely exited. If the child process intercepts and handles the `SIGTERM`signal and doesn't exit, the parent process will wait until the child process + * has exited. + * + * If the process times out or has a non-zero exit code, this method will throw. + * The `Error` object will contain the entire result from {@link spawnSync}. + * + * **Never pass unsanitized user input to this function. Any input containing shell** + * **metacharacters may be used to trigger arbitrary command execution.** + * @since v0.11.12 + * @param command The command to run. + * @return The stdout from the command. + */ + function execSync(command: string): Buffer; + function execSync(command: string, options: ExecSyncOptionsWithStringEncoding): string; + function execSync(command: string, options: ExecSyncOptionsWithBufferEncoding): Buffer; + function execSync(command: string, options?: ExecSyncOptions): string | Buffer; + interface ExecFileSyncOptions extends CommonExecOptions { + shell?: boolean | string | undefined; + } + interface ExecFileSyncOptionsWithStringEncoding extends ExecFileSyncOptions { + encoding: BufferEncoding; + } + interface ExecFileSyncOptionsWithBufferEncoding extends ExecFileSyncOptions { + encoding?: "buffer" | null; // specify `null`. + } + /** + * The `child_process.execFileSync()` method is generally identical to {@link execFile} with the exception that the method will not + * return until the child process has fully closed. When a timeout has been + * encountered and `killSignal` is sent, the method won't return until the process + * has completely exited. + * + * If the child process intercepts and handles the `SIGTERM` signal and + * does not exit, the parent process will still wait until the child process has + * exited. + * + * If the process times out or has a non-zero exit code, this method will throw an `Error` that will include the full result of the underlying {@link spawnSync}. + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * @since v0.11.12 + * @param file The name or path of the executable file to run. + * @param args List of string arguments. + * @return The stdout from the command. 
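+     *
+     * For example (a sketch; as with {@link execSync}, the result is a string
+     * when an `encoding` is given and a `Buffer` otherwise):
+     *
+     * ```js
+     * const { execFileSync } = require('node:child_process');
+     * const version = execFileSync('node', ['--version'], { encoding: 'utf8' });
+     * console.log(version.trim());
+     * ```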
+ */ + function execFileSync(file: string): Buffer; + function execFileSync(file: string, options: ExecFileSyncOptionsWithStringEncoding): string; + function execFileSync(file: string, options: ExecFileSyncOptionsWithBufferEncoding): Buffer; + function execFileSync(file: string, options?: ExecFileSyncOptions): string | Buffer; + function execFileSync(file: string, args: readonly string[]): Buffer; + function execFileSync( + file: string, + args: readonly string[], + options: ExecFileSyncOptionsWithStringEncoding, + ): string; + function execFileSync( + file: string, + args: readonly string[], + options: ExecFileSyncOptionsWithBufferEncoding, + ): Buffer; + function execFileSync(file: string, args?: readonly string[], options?: ExecFileSyncOptions): string | Buffer; +} +declare module "node:child_process" { + export * from "child_process"; +} diff --git a/dist/node_modules/@types/node/cluster.d.ts b/dist/node_modules/@types/node/cluster.d.ts new file mode 100644 index 0000000..39cd56a --- /dev/null +++ b/dist/node_modules/@types/node/cluster.d.ts @@ -0,0 +1,432 @@ +/** + * Clusters of Node.js processes can be used to run multiple instances of Node.js + * that can distribute workloads among their application threads. When process + * isolation is not needed, use the `worker_threads` module instead, which + * allows running multiple application threads within a single Node.js instance. + * + * The cluster module allows easy creation of child processes that all share + * server ports. + * + * ```js + * import cluster from 'node:cluster'; + * import http from 'node:http'; + * import { availableParallelism } from 'node:os'; + * import process from 'node:process'; + * + * const numCPUs = availableParallelism(); + * + * if (cluster.isPrimary) { + * console.log(`Primary ${process.pid} is running`); + * + * // Fork workers. + * for (let i = 0; i < numCPUs; i++) { + * cluster.fork(); + * } + * + * cluster.on('exit', (worker, code, signal) => { + * console.log(`worker ${worker.process.pid} died`); + * }); + * } else { + * // Workers can share any TCP connection + * // In this case it is an HTTP server + * http.createServer((req, res) => { + * res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * + * console.log(`Worker ${process.pid} started`); + * } + * ``` + * + * Running Node.js will now share port 8000 between the workers: + * + * ```console + * $ node server.js + * Primary 3596 is running + * Worker 4324 started + * Worker 4520 started + * Worker 6056 started + * Worker 5644 started + * ``` + * + * On Windows, it is not yet possible to set up a named pipe server in a worker. 
+ * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/cluster.js) + */ +declare module "cluster" { + import * as child from "node:child_process"; + import EventEmitter = require("node:events"); + import * as net from "node:net"; + type SerializationType = "json" | "advanced"; + export interface ClusterSettings { + execArgv?: string[] | undefined; // default: process.execArgv + exec?: string | undefined; + args?: string[] | undefined; + silent?: boolean | undefined; + stdio?: any[] | undefined; + uid?: number | undefined; + gid?: number | undefined; + inspectPort?: number | (() => number) | undefined; + serialization?: SerializationType | undefined; + cwd?: string | undefined; + windowsHide?: boolean | undefined; + } + export interface Address { + address: string; + port: number; + addressType: number | "udp4" | "udp6"; // 4, 6, -1, "udp4", "udp6" + } + /** + * A `Worker` object contains all public information and method about a worker. + * In the primary it can be obtained using `cluster.workers`. In a worker + * it can be obtained using `cluster.worker`. + * @since v0.7.0 + */ + export class Worker extends EventEmitter { + /** + * Each new worker is given its own unique id, this id is stored in the`id`. + * + * While a worker is alive, this is the key that indexes it in`cluster.workers`. + * @since v0.8.0 + */ + id: number; + /** + * All workers are created using `child_process.fork()`, the returned object + * from this function is stored as `.process`. In a worker, the global `process`is stored. + * + * See: `Child Process module`. + * + * Workers will call `process.exit(0)` if the `'disconnect'` event occurs + * on `process` and `.exitedAfterDisconnect` is not `true`. This protects against + * accidental disconnection. + * @since v0.7.0 + */ + process: child.ChildProcess; + /** + * Send a message to a worker or primary, optionally with a handle. + * + * In the primary, this sends a message to a specific worker. It is identical to `ChildProcess.send()`. + * + * In a worker, this sends a message to the primary. It is identical to`process.send()`. + * + * This example will echo back all messages from the primary: + * + * ```js + * if (cluster.isPrimary) { + * const worker = cluster.fork(); + * worker.send('hi there'); + * + * } else if (cluster.isWorker) { + * process.on('message', (msg) => { + * process.send(msg); + * }); + * } + * ``` + * @since v0.7.0 + * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. `options` supports the following properties: + */ + send(message: child.Serializable, callback?: (error: Error | null) => void): boolean; + send( + message: child.Serializable, + sendHandle: child.SendHandle, + callback?: (error: Error | null) => void, + ): boolean; + send( + message: child.Serializable, + sendHandle: child.SendHandle, + options?: child.MessageOptions, + callback?: (error: Error | null) => void, + ): boolean; + /** + * This function will kill the worker. In the primary worker, it does this by + * disconnecting the `worker.process`, and once disconnected, killing with`signal`. In the worker, it does it by killing the process with `signal`. + * + * The `kill()` function kills the worker process without waiting for a graceful + * disconnect, it has the same behavior as `worker.process.kill()`. + * + * This method is aliased as `worker.destroy()` for backwards compatibility. + * + * In a worker, `process.kill()` exists, but it is not this function; + * it is `kill()`. 
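+         *
+         * A small sketch of the primary-side behavior described above:
+         *
+         * ```js
+         * const cluster = require('node:cluster');
+         *
+         * if (cluster.isPrimary) {
+         *   const worker = cluster.fork();
+         *   worker.on('exit', (code, signal) => {
+         *     console.log('worker exited', code, signal);
+         *   });
+         *   // Disconnect the worker, then deliver the signal.
+         *   worker.kill('SIGTERM');
+         * }
+         * ```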
+ * @since v0.9.12 + * @param [signal='SIGTERM'] Name of the kill signal to send to the worker process. + */ + kill(signal?: string): void; + destroy(signal?: string): void; + /** + * In a worker, this function will close all servers, wait for the `'close'` event + * on those servers, and then disconnect the IPC channel. + * + * In the primary, an internal message is sent to the worker causing it to call`.disconnect()` on itself. + * + * Causes `.exitedAfterDisconnect` to be set. + * + * After a server is closed, it will no longer accept new connections, + * but connections may be accepted by any other listening worker. Existing + * connections will be allowed to close as usual. When no more connections exist, + * see `server.close()`, the IPC channel to the worker will close allowing it + * to die gracefully. + * + * The above applies _only_ to server connections, client connections are not + * automatically closed by workers, and disconnect does not wait for them to close + * before exiting. + * + * In a worker, `process.disconnect` exists, but it is not this function; + * it is `disconnect()`. + * + * Because long living server connections may block workers from disconnecting, it + * may be useful to send a message, so application specific actions may be taken to + * close them. It also may be useful to implement a timeout, killing a worker if + * the `'disconnect'` event has not been emitted after some time. + * + * ```js + * if (cluster.isPrimary) { + * const worker = cluster.fork(); + * let timeout; + * + * worker.on('listening', (address) => { + * worker.send('shutdown'); + * worker.disconnect(); + * timeout = setTimeout(() => { + * worker.kill(); + * }, 2000); + * }); + * + * worker.on('disconnect', () => { + * clearTimeout(timeout); + * }); + * + * } else if (cluster.isWorker) { + * const net = require('node:net'); + * const server = net.createServer((socket) => { + * // Connections never end + * }); + * + * server.listen(8000); + * + * process.on('message', (msg) => { + * if (msg === 'shutdown') { + * // Initiate graceful close of any connections to server + * } + * }); + * } + * ``` + * @since v0.7.7 + * @return A reference to `worker`. + */ + disconnect(): void; + /** + * This function returns `true` if the worker is connected to its primary via its + * IPC channel, `false` otherwise. A worker is connected to its primary after it + * has been created. It is disconnected after the `'disconnect'` event is emitted. + * @since v0.11.14 + */ + isConnected(): boolean; + /** + * This function returns `true` if the worker's process has terminated (either + * because of exiting or being signaled). Otherwise, it returns `false`. + * + * ```js + * import cluster from 'node:cluster'; + * import http from 'node:http'; + * import { availableParallelism } from 'node:os'; + * import process from 'node:process'; + * + * const numCPUs = availableParallelism(); + * + * if (cluster.isPrimary) { + * console.log(`Primary ${process.pid} is running`); + * + * // Fork workers. + * for (let i = 0; i < numCPUs; i++) { + * cluster.fork(); + * } + * + * cluster.on('fork', (worker) => { + * console.log('worker is dead:', worker.isDead()); + * }); + * + * cluster.on('exit', (worker, code, signal) => { + * console.log('worker is dead:', worker.isDead()); + * }); + * } else { + * // Workers can share any TCP connection. In this case, it is an HTTP server. 
+ * http.createServer((req, res) => { + * res.writeHead(200); + * res.end(`Current process\n ${process.pid}`); + * process.kill(process.pid); + * }).listen(8000); + * } + * ``` + * @since v0.11.14 + */ + isDead(): boolean; + /** + * This property is `true` if the worker exited due to `.disconnect()`. + * If the worker exited any other way, it is `false`. If the + * worker has not exited, it is `undefined`. + * + * The boolean `worker.exitedAfterDisconnect` allows distinguishing between + * voluntary and accidental exit, the primary may choose not to respawn a worker + * based on this value. + * + * ```js + * cluster.on('exit', (worker, code, signal) => { + * if (worker.exitedAfterDisconnect === true) { + * console.log('Oh, it was just voluntary – no need to worry'); + * } + * }); + * + * // kill worker + * worker.kill(); + * ``` + * @since v6.0.0 + */ + exitedAfterDisconnect: boolean; + /** + * events.EventEmitter + * 1. disconnect + * 2. error + * 3. exit + * 4. listening + * 5. message + * 6. online + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "disconnect", listener: () => void): this; + addListener(event: "error", listener: (error: Error) => void): this; + addListener(event: "exit", listener: (code: number, signal: string) => void): this; + addListener(event: "listening", listener: (address: Address) => void): this; + addListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + addListener(event: "online", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "disconnect"): boolean; + emit(event: "error", error: Error): boolean; + emit(event: "exit", code: number, signal: string): boolean; + emit(event: "listening", address: Address): boolean; + emit(event: "message", message: any, handle: net.Socket | net.Server): boolean; + emit(event: "online"): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "disconnect", listener: () => void): this; + on(event: "error", listener: (error: Error) => void): this; + on(event: "exit", listener: (code: number, signal: string) => void): this; + on(event: "listening", listener: (address: Address) => void): this; + on(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + on(event: "online", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "disconnect", listener: () => void): this; + once(event: "error", listener: (error: Error) => void): this; + once(event: "exit", listener: (code: number, signal: string) => void): this; + once(event: "listening", listener: (address: Address) => void): this; + once(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. 
+ once(event: "online", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "disconnect", listener: () => void): this; + prependListener(event: "error", listener: (error: Error) => void): this; + prependListener(event: "exit", listener: (code: number, signal: string) => void): this; + prependListener(event: "listening", listener: (address: Address) => void): this; + prependListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + prependListener(event: "online", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "disconnect", listener: () => void): this; + prependOnceListener(event: "error", listener: (error: Error) => void): this; + prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this; + prependOnceListener(event: "listening", listener: (address: Address) => void): this; + prependOnceListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + prependOnceListener(event: "online", listener: () => void): this; + } + export interface Cluster extends EventEmitter { + disconnect(callback?: () => void): void; + fork(env?: any): Worker; + /** @deprecated since v16.0.0 - use isPrimary. */ + readonly isMaster: boolean; + readonly isPrimary: boolean; + readonly isWorker: boolean; + schedulingPolicy: number; + readonly settings: ClusterSettings; + /** @deprecated since v16.0.0 - use setupPrimary. */ + setupMaster(settings?: ClusterSettings): void; + /** + * `setupPrimary` is used to change the default 'fork' behavior. Once called, the settings will be present in cluster.settings. + */ + setupPrimary(settings?: ClusterSettings): void; + readonly worker?: Worker | undefined; + readonly workers?: NodeJS.Dict | undefined; + readonly SCHED_NONE: number; + readonly SCHED_RR: number; + /** + * events.EventEmitter + * 1. disconnect + * 2. exit + * 3. fork + * 4. listening + * 5. message + * 6. online + * 7. setup + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "disconnect", listener: (worker: Worker) => void): this; + addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + addListener(event: "fork", listener: (worker: Worker) => void): this; + addListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; + addListener( + event: "message", + listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void, + ): this; // the handle is a net.Socket or net.Server object, or undefined. 
+ addListener(event: "online", listener: (worker: Worker) => void): this; + addListener(event: "setup", listener: (settings: ClusterSettings) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "disconnect", worker: Worker): boolean; + emit(event: "exit", worker: Worker, code: number, signal: string): boolean; + emit(event: "fork", worker: Worker): boolean; + emit(event: "listening", worker: Worker, address: Address): boolean; + emit(event: "message", worker: Worker, message: any, handle: net.Socket | net.Server): boolean; + emit(event: "online", worker: Worker): boolean; + emit(event: "setup", settings: ClusterSettings): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "disconnect", listener: (worker: Worker) => void): this; + on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + on(event: "fork", listener: (worker: Worker) => void): this; + on(event: "listening", listener: (worker: Worker, address: Address) => void): this; + on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + on(event: "online", listener: (worker: Worker) => void): this; + on(event: "setup", listener: (settings: ClusterSettings) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "disconnect", listener: (worker: Worker) => void): this; + once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + once(event: "fork", listener: (worker: Worker) => void): this; + once(event: "listening", listener: (worker: Worker, address: Address) => void): this; + once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + once(event: "online", listener: (worker: Worker) => void): this; + once(event: "setup", listener: (settings: ClusterSettings) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "disconnect", listener: (worker: Worker) => void): this; + prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + prependListener(event: "fork", listener: (worker: Worker) => void): this; + prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; + // the handle is a net.Socket or net.Server object, or undefined. + prependListener( + event: "message", + listener: (worker: Worker, message: any, handle?: net.Socket | net.Server) => void, + ): this; + prependListener(event: "online", listener: (worker: Worker) => void): this; + prependListener(event: "setup", listener: (settings: ClusterSettings) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): this; + prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + prependOnceListener(event: "fork", listener: (worker: Worker) => void): this; + prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; + // the handle is a net.Socket or net.Server object, or undefined. 
+ prependOnceListener( + event: "message", + listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void, + ): this; + prependOnceListener(event: "online", listener: (worker: Worker) => void): this; + prependOnceListener(event: "setup", listener: (settings: ClusterSettings) => void): this; + } + const cluster: Cluster; + export default cluster; +} +declare module "node:cluster" { + export * from "cluster"; + export { default as default } from "cluster"; +} diff --git a/dist/node_modules/@types/node/console.d.ts b/dist/node_modules/@types/node/console.d.ts new file mode 100644 index 0000000..bcc3450 --- /dev/null +++ b/dist/node_modules/@types/node/console.d.ts @@ -0,0 +1,415 @@ +/** + * The `node:console` module provides a simple debugging console that is similar to + * the JavaScript console mechanism provided by web browsers. + * + * The module exports two specific components: + * + * * A `Console` class with methods such as `console.log()`, `console.error()`, and`console.warn()` that can be used to write to any Node.js stream. + * * A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling`require('node:console')`. + * + * _**Warning**_: The global console object's methods are neither consistently + * synchronous like the browser APIs they resemble, nor are they consistently + * asynchronous like all other Node.js streams. See the `note on process I/O` for + * more information. + * + * Example using the global `console`: + * + * ```js + * console.log('hello world'); + * // Prints: hello world, to stdout + * console.log('hello %s', 'world'); + * // Prints: hello world, to stdout + * console.error(new Error('Whoops, something bad happened')); + * // Prints error message and stack trace to stderr: + * // Error: Whoops, something bad happened + * // at [eval]:5:15 + * // at Script.runInThisContext (node:vm:132:18) + * // at Object.runInThisContext (node:vm:309:38) + * // at node:internal/process/execution:77:19 + * // at [eval]-wrapper:6:22 + * // at evalScript (node:internal/process/execution:76:60) + * // at node:internal/main/eval_string:23:3 + * + * const name = 'Will Robinson'; + * console.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to stderr + * ``` + * + * Example using the `Console` class: + * + * ```js + * const out = getStreamSomehow(); + * const err = getStreamSomehow(); + * const myConsole = new console.Console(out, err); + * + * myConsole.log('hello world'); + * // Prints: hello world, to out + * myConsole.log('hello %s', 'world'); + * // Prints: hello world, to out + * myConsole.error(new Error('Whoops, something bad happened')); + * // Prints: [Error: Whoops, something bad happened], to err + * + * const name = 'Will Robinson'; + * myConsole.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to err + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/console.js) + */ +declare module "console" { + import console = require("node:console"); + export = console; +} +declare module "node:console" { + import { InspectOptions } from "node:util"; + global { + // This needs to be global to avoid TS2403 in case lib.dom.d.ts is present in the same build + interface Console { + Console: console.ConsoleConstructor; + /** + * `console.assert()` writes a message if `value` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy) or omitted. 
It only + * writes a message and does not otherwise affect execution. The output always + * starts with `"Assertion failed"`. If provided, `message` is formatted using `util.format()`. + * + * If `value` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy), nothing happens. + * + * ```js + * console.assert(true, 'does nothing'); + * + * console.assert(false, 'Whoops %s work', 'didn\'t'); + * // Assertion failed: Whoops didn't work + * + * console.assert(); + * // Assertion failed + * ``` + * @since v0.1.101 + * @param value The value tested for being truthy. + * @param message All arguments besides `value` are used as error message. + */ + assert(value: any, message?: string, ...optionalParams: any[]): void; + /** + * When `stdout` is a TTY, calling `console.clear()` will attempt to clear the + * TTY. When `stdout` is not a TTY, this method does nothing. + * + * The specific operation of `console.clear()` can vary across operating systems + * and terminal types. For most Linux operating systems, `console.clear()`operates similarly to the `clear` shell command. On Windows, `console.clear()`will clear only the output in the + * current terminal viewport for the Node.js + * binary. + * @since v8.3.0 + */ + clear(): void; + /** + * Maintains an internal counter specific to `label` and outputs to `stdout` the + * number of times `console.count()` has been called with the given `label`. + * + * ```js + * > console.count() + * default: 1 + * undefined + * > console.count('default') + * default: 2 + * undefined + * > console.count('abc') + * abc: 1 + * undefined + * > console.count('xyz') + * xyz: 1 + * undefined + * > console.count('abc') + * abc: 2 + * undefined + * > console.count() + * default: 3 + * undefined + * > + * ``` + * @since v8.3.0 + * @param [label='default'] The display label for the counter. + */ + count(label?: string): void; + /** + * Resets the internal counter specific to `label`. + * + * ```js + * > console.count('abc'); + * abc: 1 + * undefined + * > console.countReset('abc'); + * undefined + * > console.count('abc'); + * abc: 1 + * undefined + * > + * ``` + * @since v8.3.0 + * @param [label='default'] The display label for the counter. + */ + countReset(label?: string): void; + /** + * The `console.debug()` function is an alias for {@link log}. + * @since v8.0.0 + */ + debug(message?: any, ...optionalParams: any[]): void; + /** + * Uses `util.inspect()` on `obj` and prints the resulting string to `stdout`. + * This function bypasses any custom `inspect()` function defined on `obj`. + * @since v0.1.101 + */ + dir(obj: any, options?: InspectOptions): void; + /** + * This method calls `console.log()` passing it the arguments received. + * This method does not produce any XML formatting. + * @since v8.0.0 + */ + dirxml(...data: any[]): void; + /** + * Prints to `stderr` with newline. Multiple arguments can be passed, with the + * first used as the primary message and all additional used as substitution + * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) (the arguments are all passed to `util.format()`). + * + * ```js + * const code = 5; + * console.error('error #%d', code); + * // Prints: error #5, to stderr + * console.error('error', code); + * // Prints: error 5, to stderr + * ``` + * + * If formatting elements (e.g. `%d`) are not found in the first string then `util.inspect()` is called on each argument and the resulting string + * values are concatenated. See `util.format()` for more information. 
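+             *
+             * A short sketch of that fallback:
+             *
+             * ```js
+             * console.error('request failed', { status: 500, retryable: false });
+             * // Prints: request failed { status: 500, retryable: false }, to stderr
+             * ```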
+ * @since v0.1.100 + */ + error(message?: any, ...optionalParams: any[]): void; + /** + * Increases indentation of subsequent lines by spaces for `groupIndentation`length. + * + * If one or more `label`s are provided, those are printed first without the + * additional indentation. + * @since v8.5.0 + */ + group(...label: any[]): void; + /** + * An alias for {@link group}. + * @since v8.5.0 + */ + groupCollapsed(...label: any[]): void; + /** + * Decreases indentation of subsequent lines by spaces for `groupIndentation`length. + * @since v8.5.0 + */ + groupEnd(): void; + /** + * The `console.info()` function is an alias for {@link log}. + * @since v0.1.100 + */ + info(message?: any, ...optionalParams: any[]): void; + /** + * Prints to `stdout` with newline. Multiple arguments can be passed, with the + * first used as the primary message and all additional used as substitution + * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) (the arguments are all passed to `util.format()`). + * + * ```js + * const count = 5; + * console.log('count: %d', count); + * // Prints: count: 5, to stdout + * console.log('count:', count); + * // Prints: count: 5, to stdout + * ``` + * + * See `util.format()` for more information. + * @since v0.1.100 + */ + log(message?: any, ...optionalParams: any[]): void; + /** + * Try to construct a table with the columns of the properties of `tabularData`(or use `properties`) and rows of `tabularData` and log it. Falls back to just + * logging the argument if it can't be parsed as tabular. + * + * ```js + * // These can't be parsed as tabular data + * console.table(Symbol()); + * // Symbol() + * + * console.table(undefined); + * // undefined + * + * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]); + * // ┌─────────┬─────┬─────┐ + * // │ (index) │ a │ b │ + * // ├─────────┼─────┼─────┤ + * // │ 0 │ 1 │ 'Y' │ + * // │ 1 │ 'Z' │ 2 │ + * // └─────────┴─────┴─────┘ + * + * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']); + * // ┌─────────┬─────┐ + * // │ (index) │ a │ + * // ├─────────┼─────┤ + * // │ 0 │ 1 │ + * // │ 1 │ 'Z' │ + * // └─────────┴─────┘ + * ``` + * @since v10.0.0 + * @param properties Alternate properties for constructing the table. + */ + table(tabularData: any, properties?: readonly string[]): void; + /** + * Starts a timer that can be used to compute the duration of an operation. Timers + * are identified by a unique `label`. Use the same `label` when calling {@link timeEnd} to stop the timer and output the elapsed time in + * suitable time units to `stdout`. For example, if the elapsed + * time is 3869ms, `console.timeEnd()` displays "3.869s". + * @since v0.1.104 + * @param [label='default'] + */ + time(label?: string): void; + /** + * Stops a timer that was previously started by calling {@link time} and + * prints the result to `stdout`: + * + * ```js + * console.time('bunch-of-stuff'); + * // Do a bunch of stuff. + * console.timeEnd('bunch-of-stuff'); + * // Prints: bunch-of-stuff: 225.438ms + * ``` + * @since v0.1.104 + * @param [label='default'] + */ + timeEnd(label?: string): void; + /** + * For a timer that was previously started by calling {@link time}, prints + * the elapsed time and other `data` arguments to `stdout`: + * + * ```js + * console.time('process'); + * const value = expensiveProcess1(); // Returns 42 + * console.timeLog('process', value); + * // Prints "process: 365.227ms 42". 
+ * doExpensiveProcess2(value); + * console.timeEnd('process'); + * ``` + * @since v10.7.0 + * @param [label='default'] + */ + timeLog(label?: string, ...data: any[]): void; + /** + * Prints to `stderr` the string `'Trace: '`, followed by the `util.format()` formatted message and stack trace to the current position in the code. + * + * ```js + * console.trace('Show me'); + * // Prints: (stack trace will vary based on where trace is called) + * // Trace: Show me + * // at repl:2:9 + * // at REPLServer.defaultEval (repl.js:248:27) + * // at bound (domain.js:287:14) + * // at REPLServer.runBound [as eval] (domain.js:300:12) + * // at REPLServer. (repl.js:412:12) + * // at emitOne (events.js:82:20) + * // at REPLServer.emit (events.js:169:7) + * // at REPLServer.Interface._onLine (readline.js:210:10) + * // at REPLServer.Interface._line (readline.js:549:8) + * // at REPLServer.Interface._ttyWrite (readline.js:826:14) + * ``` + * @since v0.1.104 + */ + trace(message?: any, ...optionalParams: any[]): void; + /** + * The `console.warn()` function is an alias for {@link error}. + * @since v0.1.100 + */ + warn(message?: any, ...optionalParams: any[]): void; + // --- Inspector mode only --- + /** + * This method does not display anything unless used in the inspector. + * Starts a JavaScript CPU profile with an optional label. + */ + profile(label?: string): void; + /** + * This method does not display anything unless used in the inspector. + * Stops the current JavaScript CPU profiling session if one has been started and prints the report to the Profiles panel of the inspector. + */ + profileEnd(label?: string): void; + /** + * This method does not display anything unless used in the inspector. + * Adds an event with the label `label` to the Timeline panel of the inspector. + */ + timeStamp(label?: string): void; + } + /** + * The `console` module provides a simple debugging console that is similar to the + * JavaScript console mechanism provided by web browsers. + * + * The module exports two specific components: + * + * * A `Console` class with methods such as `console.log()`, `console.error()` and`console.warn()` that can be used to write to any Node.js stream. + * * A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling`require('console')`. + * + * _**Warning**_: The global console object's methods are neither consistently + * synchronous like the browser APIs they resemble, nor are they consistently + * asynchronous like all other Node.js streams. See the `note on process I/O` for + * more information. + * + * Example using the global `console`: + * + * ```js + * console.log('hello world'); + * // Prints: hello world, to stdout + * console.log('hello %s', 'world'); + * // Prints: hello world, to stdout + * console.error(new Error('Whoops, something bad happened')); + * // Prints error message and stack trace to stderr: + * // Error: Whoops, something bad happened + * // at [eval]:5:15 + * // at Script.runInThisContext (node:vm:132:18) + * // at Object.runInThisContext (node:vm:309:38) + * // at node:internal/process/execution:77:19 + * // at [eval]-wrapper:6:22 + * // at evalScript (node:internal/process/execution:76:60) + * // at node:internal/main/eval_string:23:3 + * + * const name = 'Will Robinson'; + * console.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! 
Danger!, to stderr + * ``` + * + * Example using the `Console` class: + * + * ```js + * const out = getStreamSomehow(); + * const err = getStreamSomehow(); + * const myConsole = new console.Console(out, err); + * + * myConsole.log('hello world'); + * // Prints: hello world, to out + * myConsole.log('hello %s', 'world'); + * // Prints: hello world, to out + * myConsole.error(new Error('Whoops, something bad happened')); + * // Prints: [Error: Whoops, something bad happened], to err + * + * const name = 'Will Robinson'; + * myConsole.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to err + * ``` + * @see [source](https://github.com/nodejs/node/blob/v16.4.2/lib/console.js) + */ + namespace console { + interface ConsoleConstructorOptions { + stdout: NodeJS.WritableStream; + stderr?: NodeJS.WritableStream | undefined; + ignoreErrors?: boolean | undefined; + colorMode?: boolean | "auto" | undefined; + inspectOptions?: InspectOptions | undefined; + /** + * Set group indentation + * @default 2 + */ + groupIndentation?: number | undefined; + } + interface ConsoleConstructor { + prototype: Console; + new(stdout: NodeJS.WritableStream, stderr?: NodeJS.WritableStream, ignoreErrors?: boolean): Console; + new(options: ConsoleConstructorOptions): Console; + } + } + var console: Console; + } + export = globalThis.console; +} diff --git a/dist/node_modules/@types/node/constants.d.ts b/dist/node_modules/@types/node/constants.d.ts new file mode 100644 index 0000000..c3ac2b8 --- /dev/null +++ b/dist/node_modules/@types/node/constants.d.ts @@ -0,0 +1,19 @@ +/** @deprecated since v6.3.0 - use constants property exposed by the relevant module instead. */ +declare module "constants" { + import { constants as osConstants, SignalConstants } from "node:os"; + import { constants as cryptoConstants } from "node:crypto"; + import { constants as fsConstants } from "node:fs"; + + const exp: + & typeof osConstants.errno + & typeof osConstants.priority + & SignalConstants + & typeof cryptoConstants + & typeof fsConstants; + export = exp; +} + +declare module "node:constants" { + import constants = require("constants"); + export = constants; +} diff --git a/dist/node_modules/@types/node/crypto.d.ts b/dist/node_modules/@types/node/crypto.d.ts new file mode 100644 index 0000000..718697b --- /dev/null +++ b/dist/node_modules/@types/node/crypto.d.ts @@ -0,0 +1,4487 @@ +/** + * The `node:crypto` module provides cryptographic functionality that includes a + * set of wrappers for OpenSSL's hash, HMAC, cipher, decipher, sign, and verify + * functions. + * + * ```js + * const { createHmac } = await import('node:crypto'); + * + * const secret = 'abcdefg'; + * const hash = createHmac('sha256', secret) + * .update('I love cupcakes') + * .digest('hex'); + * console.log(hash); + * // Prints: + * // c0fa1bc00531bd78ef38c628449c5102aeabd49b5dc3a2a516ea6ea959d6658e + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/crypto.js) + */ +declare module "crypto" { + import * as stream from "node:stream"; + import { PeerCertificate } from "node:tls"; + /** + * SPKAC is a Certificate Signing Request mechanism originally implemented by + * Netscape and was specified formally as part of HTML5's `keygen` element. + * + * `` is deprecated since [HTML 5.2](https://www.w3.org/TR/html52/changes.html#features-removed) and new projects + * should not use this element anymore. + * + * The `node:crypto` module provides the `Certificate` class for working with SPKAC + * data. 
The most common usage is handling output generated by the HTML5`` element. Node.js uses [OpenSSL's SPKAC + * implementation](https://www.openssl.org/docs/man3.0/man1/openssl-spkac.html) internally. + * @since v0.11.8 + */ + class Certificate { + /** + * ```js + * const { Certificate } = await import('node:crypto'); + * const spkac = getSpkacSomehow(); + * const challenge = Certificate.exportChallenge(spkac); + * console.log(challenge.toString('utf8')); + * // Prints: the challenge as a UTF8 string + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return The challenge component of the `spkac` data structure, which includes a public key and a challenge. + */ + static exportChallenge(spkac: BinaryLike): Buffer; + /** + * ```js + * const { Certificate } = await import('node:crypto'); + * const spkac = getSpkacSomehow(); + * const publicKey = Certificate.exportPublicKey(spkac); + * console.log(publicKey); + * // Prints: the public key as + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return The public key component of the `spkac` data structure, which includes a public key and a challenge. + */ + static exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer; + /** + * ```js + * import { Buffer } from 'node:buffer'; + * const { Certificate } = await import('node:crypto'); + * + * const spkac = getSpkacSomehow(); + * console.log(Certificate.verifySpkac(Buffer.from(spkac))); + * // Prints: true or false + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return `true` if the given `spkac` data structure is valid, `false` otherwise. + */ + static verifySpkac(spkac: NodeJS.ArrayBufferView): boolean; + /** + * @deprecated + * @param spkac + * @returns The challenge component of the `spkac` data structure, + * which includes a public key and a challenge. + */ + exportChallenge(spkac: BinaryLike): Buffer; + /** + * @deprecated + * @param spkac + * @param encoding The encoding of the spkac string. + * @returns The public key component of the `spkac` data structure, + * which includes a public key and a challenge. + */ + exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer; + /** + * @deprecated + * @param spkac + * @returns `true` if the given `spkac` data structure is valid, + * `false` otherwise. + */ + verifySpkac(spkac: NodeJS.ArrayBufferView): boolean; + } + namespace constants { + // https://nodejs.org/dist/latest-v20.x/docs/api/crypto.html#crypto-constants + const OPENSSL_VERSION_NUMBER: number; + /** Applies multiple bug workarounds within OpenSSL. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html for detail. */ + const SSL_OP_ALL: number; + /** Instructs OpenSSL to allow a non-[EC]DHE-based key exchange mode for TLS v1.3 */ + const SSL_OP_ALLOW_NO_DHE_KEX: number; + /** Allows legacy insecure renegotiation between OpenSSL and unpatched clients or servers. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */ + const SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: number; + /** Attempts to use the server's preferences instead of the client's when selecting a cipher. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */ + const SSL_OP_CIPHER_SERVER_PREFERENCE: number; + /** Instructs OpenSSL to use Cisco's "speshul" version of DTLS_BAD_VER. */ + const SSL_OP_CISCO_ANYCONNECT: number; + /** Instructs OpenSSL to turn on cookie exchange. 
*/ + const SSL_OP_COOKIE_EXCHANGE: number; + /** Instructs OpenSSL to add server-hello extension from an early version of the cryptopro draft. */ + const SSL_OP_CRYPTOPRO_TLSEXT_BUG: number; + /** Instructs OpenSSL to disable a SSL 3.0/TLS 1.0 vulnerability workaround added in OpenSSL 0.9.6d. */ + const SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: number; + /** Allows initial connection to servers that do not support RI. */ + const SSL_OP_LEGACY_SERVER_CONNECT: number; + /** Instructs OpenSSL to disable support for SSL/TLS compression. */ + const SSL_OP_NO_COMPRESSION: number; + /** Instructs OpenSSL to disable encrypt-then-MAC. */ + const SSL_OP_NO_ENCRYPT_THEN_MAC: number; + const SSL_OP_NO_QUERY_MTU: number; + /** Instructs OpenSSL to disable renegotiation. */ + const SSL_OP_NO_RENEGOTIATION: number; + /** Instructs OpenSSL to always start a new session when performing renegotiation. */ + const SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: number; + /** Instructs OpenSSL to turn off SSL v2 */ + const SSL_OP_NO_SSLv2: number; + /** Instructs OpenSSL to turn off SSL v3 */ + const SSL_OP_NO_SSLv3: number; + /** Instructs OpenSSL to disable use of RFC4507bis tickets. */ + const SSL_OP_NO_TICKET: number; + /** Instructs OpenSSL to turn off TLS v1 */ + const SSL_OP_NO_TLSv1: number; + /** Instructs OpenSSL to turn off TLS v1.1 */ + const SSL_OP_NO_TLSv1_1: number; + /** Instructs OpenSSL to turn off TLS v1.2 */ + const SSL_OP_NO_TLSv1_2: number; + /** Instructs OpenSSL to turn off TLS v1.3 */ + const SSL_OP_NO_TLSv1_3: number; + /** Instructs OpenSSL server to prioritize ChaCha20-Poly1305 when the client does. This option has no effect if `SSL_OP_CIPHER_SERVER_PREFERENCE` is not enabled. */ + const SSL_OP_PRIORITIZE_CHACHA: number; + /** Instructs OpenSSL to disable version rollback attack detection. */ + const SSL_OP_TLS_ROLLBACK_BUG: number; + const ENGINE_METHOD_RSA: number; + const ENGINE_METHOD_DSA: number; + const ENGINE_METHOD_DH: number; + const ENGINE_METHOD_RAND: number; + const ENGINE_METHOD_EC: number; + const ENGINE_METHOD_CIPHERS: number; + const ENGINE_METHOD_DIGESTS: number; + const ENGINE_METHOD_PKEY_METHS: number; + const ENGINE_METHOD_PKEY_ASN1_METHS: number; + const ENGINE_METHOD_ALL: number; + const ENGINE_METHOD_NONE: number; + const DH_CHECK_P_NOT_SAFE_PRIME: number; + const DH_CHECK_P_NOT_PRIME: number; + const DH_UNABLE_TO_CHECK_GENERATOR: number; + const DH_NOT_SUITABLE_GENERATOR: number; + const RSA_PKCS1_PADDING: number; + const RSA_SSLV23_PADDING: number; + const RSA_NO_PADDING: number; + const RSA_PKCS1_OAEP_PADDING: number; + const RSA_X931_PADDING: number; + const RSA_PKCS1_PSS_PADDING: number; + /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the digest size when signing or verifying. */ + const RSA_PSS_SALTLEN_DIGEST: number; + /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the maximum permissible value when signing data. */ + const RSA_PSS_SALTLEN_MAX_SIGN: number; + /** Causes the salt length for RSA_PKCS1_PSS_PADDING to be determined automatically when verifying a signature. */ + const RSA_PSS_SALTLEN_AUTO: number; + const POINT_CONVERSION_COMPRESSED: number; + const POINT_CONVERSION_UNCOMPRESSED: number; + const POINT_CONVERSION_HYBRID: number; + /** Specifies the built-in default cipher list used by Node.js (colon-separated values). */ + const defaultCoreCipherList: string; + /** Specifies the active default cipher list used by the current Node.js process (colon-separated values). 
*/ + const defaultCipherList: string; + } + interface HashOptions extends stream.TransformOptions { + /** + * For XOF hash functions such as `shake256`, the + * outputLength option can be used to specify the desired output length in bytes. + */ + outputLength?: number | undefined; + } + /** @deprecated since v10.0.0 */ + const fips: boolean; + /** + * Creates and returns a `Hash` object that can be used to generate hash digests + * using the given `algorithm`. Optional `options` argument controls stream + * behavior. For XOF hash functions such as `'shake256'`, the `outputLength` option + * can be used to specify the desired output length in bytes. + * + * The `algorithm` is dependent on the available algorithms supported by the + * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc. + * On recent releases of OpenSSL, `openssl list -digest-algorithms` will + * display the available digest algorithms. + * + * Example: generating the sha256 sum of a file + * + * ```js + * import { + * createReadStream, + * } from 'node:fs'; + * import { argv } from 'node:process'; + * const { + * createHash, + * } = await import('node:crypto'); + * + * const filename = argv[2]; + * + * const hash = createHash('sha256'); + * + * const input = createReadStream(filename); + * input.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = input.read(); + * if (data) + * hash.update(data); + * else { + * console.log(`${hash.digest('hex')} ${filename}`); + * } + * }); + * ``` + * @since v0.1.92 + * @param options `stream.transform` options + */ + function createHash(algorithm: string, options?: HashOptions): Hash; + /** + * Creates and returns an `Hmac` object that uses the given `algorithm` and `key`. + * Optional `options` argument controls stream behavior. + * + * The `algorithm` is dependent on the available algorithms supported by the + * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc. + * On recent releases of OpenSSL, `openssl list -digest-algorithms` will + * display the available digest algorithms. + * + * The `key` is the HMAC key used to generate the cryptographic HMAC hash. If it is + * a `KeyObject`, its type must be `secret`. If it is a string, please consider `caveats when using strings as inputs to cryptographic APIs`. If it was + * obtained from a cryptographically secure source of entropy, such as {@link randomBytes} or {@link generateKey}, its length should not + * exceed the block size of `algorithm` (e.g., 512 bits for SHA-256). + * + * Example: generating the sha256 HMAC of a file + * + * ```js + * import { + * createReadStream, + * } from 'node:fs'; + * import { argv } from 'node:process'; + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const filename = argv[2]; + * + * const hmac = createHmac('sha256', 'a secret'); + * + * const input = createReadStream(filename); + * input.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. 
+ * const data = input.read(); + * if (data) + * hmac.update(data); + * else { + * console.log(`${hmac.digest('hex')} ${filename}`); + * } + * }); + * ``` + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createHmac(algorithm: string, key: BinaryLike | KeyObject, options?: stream.TransformOptions): Hmac; + // https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings + type BinaryToTextEncoding = "base64" | "base64url" | "hex" | "binary"; + type CharacterEncoding = "utf8" | "utf-8" | "utf16le" | "utf-16le" | "latin1"; + type LegacyCharacterEncoding = "ascii" | "binary" | "ucs2" | "ucs-2"; + type Encoding = BinaryToTextEncoding | CharacterEncoding | LegacyCharacterEncoding; + type ECDHKeyFormat = "compressed" | "uncompressed" | "hybrid"; + /** + * The `Hash` class is a utility for creating hash digests of data. It can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where data is written + * to produce a computed hash digest on the readable side, or + * * Using the `hash.update()` and `hash.digest()` methods to produce the + * computed hash. + * + * The {@link createHash} method is used to create `Hash` instances. `Hash`objects are not to be created directly using the `new` keyword. + * + * Example: Using `Hash` objects as streams: + * + * ```js + * const { + * createHash, + * } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * hash.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = hash.read(); + * if (data) { + * console.log(data.toString('hex')); + * // Prints: + * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50 + * } + * }); + * + * hash.write('some data to hash'); + * hash.end(); + * ``` + * + * Example: Using `Hash` and piped streams: + * + * ```js + * import { createReadStream } from 'node:fs'; + * import { stdout } from 'node:process'; + * const { createHash } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * const input = createReadStream('test.js'); + * input.pipe(hash).setEncoding('hex').pipe(stdout); + * ``` + * + * Example: Using the `hash.update()` and `hash.digest()` methods: + * + * ```js + * const { + * createHash, + * } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * hash.update('some data to hash'); + * console.log(hash.digest('hex')); + * // Prints: + * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50 + * ``` + * @since v0.1.92 + */ + class Hash extends stream.Transform { + private constructor(); + /** + * Creates a new `Hash` object that contains a deep copy of the internal state + * of the current `Hash` object. + * + * The optional `options` argument controls stream behavior. For XOF hash + * functions such as `'shake256'`, the `outputLength` option can be used to + * specify the desired output length in bytes. + * + * An error is thrown when an attempt is made to copy the `Hash` object after + * its `hash.digest()` method has been called. + * + * ```js + * // Calculate a rolling hash. + * const { + * createHash, + * } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * hash.update('one'); + * console.log(hash.copy().digest('hex')); + * + * hash.update('two'); + * console.log(hash.copy().digest('hex')); + * + * hash.update('three'); + * console.log(hash.copy().digest('hex')); + * + * // Etc. 
+ * ``` + * @since v13.1.0 + * @param options `stream.transform` options + */ + copy(options?: HashOptions): Hash; + /** + * Updates the hash content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Hash; + update(data: string, inputEncoding: Encoding): Hash; + /** + * Calculates the digest of all of the data passed to be hashed (using the `hash.update()` method). + * If `encoding` is provided a string will be returned; otherwise + * a `Buffer` is returned. + * + * The `Hash` object can not be used again after `hash.digest()` method has been + * called. Multiple calls will cause an error to be thrown. + * @since v0.1.92 + * @param encoding The `encoding` of the return value. + */ + digest(): Buffer; + digest(encoding: BinaryToTextEncoding): string; + } + /** + * The `Hmac` class is a utility for creating cryptographic HMAC digests. It can + * be used in one of two ways: + * + * * As a `stream` that is both readable and writable, where data is written + * to produce a computed HMAC digest on the readable side, or + * * Using the `hmac.update()` and `hmac.digest()` methods to produce the + * computed HMAC digest. + * + * The {@link createHmac} method is used to create `Hmac` instances. `Hmac`objects are not to be created directly using the `new` keyword. + * + * Example: Using `Hmac` objects as streams: + * + * ```js + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * hmac.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = hmac.read(); + * if (data) { + * console.log(data.toString('hex')); + * // Prints: + * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e + * } + * }); + * + * hmac.write('some data to hash'); + * hmac.end(); + * ``` + * + * Example: Using `Hmac` and piped streams: + * + * ```js + * import { createReadStream } from 'node:fs'; + * import { stdout } from 'node:process'; + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * const input = createReadStream('test.js'); + * input.pipe(hmac).pipe(stdout); + * ``` + * + * Example: Using the `hmac.update()` and `hmac.digest()` methods: + * + * ```js + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * hmac.update('some data to hash'); + * console.log(hmac.digest('hex')); + * // Prints: + * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e + * ``` + * @since v0.1.94 + */ + class Hmac extends stream.Transform { + private constructor(); + /** + * Updates the `Hmac` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the `data` string. 
*/ + update(data: BinaryLike): Hmac; + update(data: string, inputEncoding: Encoding): Hmac; + /** + * Calculates the HMAC digest of all of the data passed using `hmac.update()`. + * If `encoding` is + * provided a string is returned; otherwise a `Buffer` is returned. + * + * The `Hmac` object can not be used again after `hmac.digest()` has been + * called. Multiple calls to `hmac.digest()` will result in an error being thrown. + * @since v0.1.94 + * @param encoding The `encoding` of the return value. + */ + digest(): Buffer; + digest(encoding: BinaryToTextEncoding): string; + } + type KeyObjectType = "secret" | "public" | "private"; + interface KeyExportOptions<T extends KeyFormat> { + type: "pkcs1" | "spki" | "pkcs8" | "sec1"; + format: T; + cipher?: string | undefined; + passphrase?: string | Buffer | undefined; + } + interface JwkKeyExportOptions { + format: "jwk"; + } + interface JsonWebKey { + crv?: string | undefined; + d?: string | undefined; + dp?: string | undefined; + dq?: string | undefined; + e?: string | undefined; + k?: string | undefined; + kty?: string | undefined; + n?: string | undefined; + p?: string | undefined; + q?: string | undefined; + qi?: string | undefined; + x?: string | undefined; + y?: string | undefined; + [key: string]: unknown; + } + interface AsymmetricKeyDetails { + /** + * Key size in bits (RSA, DSA). + */ + modulusLength?: number | undefined; + /** + * Public exponent (RSA). + */ + publicExponent?: bigint | undefined; + /** + * Name of the message digest (RSA-PSS). + */ + hashAlgorithm?: string | undefined; + /** + * Name of the message digest used by MGF1 (RSA-PSS). + */ + mgf1HashAlgorithm?: string | undefined; + /** + * Minimal salt length in bytes (RSA-PSS). + */ + saltLength?: number | undefined; + /** + * Size of q in bits (DSA). + */ + divisorLength?: number | undefined; + /** + * Name of the curve (EC). + */ + namedCurve?: string | undefined; + } + /** + * Node.js uses a `KeyObject` class to represent a symmetric or asymmetric key, + * and each kind of key exposes different functions. The {@link createSecretKey}, {@link createPublicKey} and {@link createPrivateKey} methods are used to create `KeyObject`instances. `KeyObject` + * objects are not to be created directly using the `new`keyword. + * + * Most applications should consider using the new `KeyObject` API instead of + * passing keys as strings or `Buffer`s due to improved security features. + * + * `KeyObject` instances can be passed to other threads via `postMessage()`. + * The receiver obtains a cloned `KeyObject`, and the `KeyObject` does not need to + * be listed in the `transferList` argument. + * @since v11.6.0 + */ + class KeyObject { + private constructor(); + /** + * Example: Converting a `CryptoKey` instance to a `KeyObject`: + * + * ```js + * const { KeyObject } = await import('node:crypto'); + * const { subtle } = globalThis.crypto; + * + * const key = await subtle.generateKey({ + * name: 'HMAC', + * hash: 'SHA-256', + * length: 256, + * }, true, ['sign', 'verify']); + * + * const keyObject = KeyObject.from(key); + * console.log(keyObject.symmetricKeySize); + * // Prints: 32 (symmetric key size in bytes) + * ``` + * @since v15.0.0 + */ + static from(key: webcrypto.CryptoKey): KeyObject; + /** + * For asymmetric keys, this property represents the type of the key.
Supported key + * types are: + * + * * `'rsa'` (OID 1.2.840.113549.1.1.1) + * * `'rsa-pss'` (OID 1.2.840.113549.1.1.10) + * * `'dsa'` (OID 1.2.840.10040.4.1) + * * `'ec'` (OID 1.2.840.10045.2.1) + * * `'x25519'` (OID 1.3.101.110) + * * `'x448'` (OID 1.3.101.111) + * * `'ed25519'` (OID 1.3.101.112) + * * `'ed448'` (OID 1.3.101.113) + * * `'dh'` (OID 1.2.840.113549.1.3.1) + * + * This property is `undefined` for unrecognized `KeyObject` types and symmetric + * keys. + * @since v11.6.0 + */ + asymmetricKeyType?: KeyType | undefined; + /** + * For asymmetric keys, this property represents the size of the embedded key in + * bytes. This property is `undefined` for symmetric keys. + */ + asymmetricKeySize?: number | undefined; + /** + * This property exists only on asymmetric keys. Depending on the type of the key, + * this object contains information about the key. None of the information obtained + * through this property can be used to uniquely identify a key or to compromise + * the security of the key. + * + * For RSA-PSS keys, if the key material contains a `RSASSA-PSS-params` sequence, + * the `hashAlgorithm`, `mgf1HashAlgorithm`, and `saltLength` properties will be + * set. + * + * Other key details might be exposed via this API using additional attributes. + * @since v15.7.0 + */ + asymmetricKeyDetails?: AsymmetricKeyDetails | undefined; + /** + * For symmetric keys, the following encoding options can be used: + * + * For public keys, the following encoding options can be used: + * + * For private keys, the following encoding options can be used: + * + * The result type depends on the selected encoding format, when PEM the + * result is a string, when DER it will be a buffer containing the data + * encoded as DER, when [JWK](https://tools.ietf.org/html/rfc7517) it will be an object. + * + * When [JWK](https://tools.ietf.org/html/rfc7517) encoding format was selected, all other encoding options are + * ignored. + * + * PKCS#1, SEC1, and PKCS#8 type keys can be encrypted by using a combination of + * the `cipher` and `format` options. The PKCS#8 `type` can be used with any`format` to encrypt any key algorithm (RSA, EC, or DH) by specifying a`cipher`. PKCS#1 and SEC1 can only be + * encrypted by specifying a `cipher`when the PEM `format` is used. For maximum compatibility, use PKCS#8 for + * encrypted private keys. Since PKCS#8 defines its own + * encryption mechanism, PEM-level encryption is not supported when encrypting + * a PKCS#8 key. See [RFC 5208](https://www.rfc-editor.org/rfc/rfc5208.txt) for PKCS#8 encryption and [RFC 1421](https://www.rfc-editor.org/rfc/rfc1421.txt) for + * PKCS#1 and SEC1 encryption. + * @since v11.6.0 + */ + export(options: KeyExportOptions<"pem">): string | Buffer; + export(options?: KeyExportOptions<"der">): Buffer; + export(options?: JwkKeyExportOptions): JsonWebKey; + /** + * Returns `true` or `false` depending on whether the keys have exactly the same + * type, value, and parameters. This method is not [constant time](https://en.wikipedia.org/wiki/Timing_attack). + * @since v17.7.0, v16.15.0 + * @param otherKeyObject A `KeyObject` with which to compare `keyObject`. + */ + equals(otherKeyObject: KeyObject): boolean; + /** + * For secret keys, this property represents the size of the key in bytes. This + * property is `undefined` for asymmetric keys. 
+ * @since v11.6.0 + */ + symmetricKeySize?: number | undefined; + /** + * Depending on the type of this `KeyObject`, this property is either`'secret'` for secret (symmetric) keys, `'public'` for public (asymmetric) keys + * or `'private'` for private (asymmetric) keys. + * @since v11.6.0 + */ + type: KeyObjectType; + } + type CipherCCMTypes = "aes-128-ccm" | "aes-192-ccm" | "aes-256-ccm" | "chacha20-poly1305"; + type CipherGCMTypes = "aes-128-gcm" | "aes-192-gcm" | "aes-256-gcm"; + type CipherOCBTypes = "aes-128-ocb" | "aes-192-ocb" | "aes-256-ocb"; + type BinaryLike = string | NodeJS.ArrayBufferView; + type CipherKey = BinaryLike | KeyObject; + interface CipherCCMOptions extends stream.TransformOptions { + authTagLength: number; + } + interface CipherGCMOptions extends stream.TransformOptions { + authTagLength?: number | undefined; + } + interface CipherOCBOptions extends stream.TransformOptions { + authTagLength: number; + } + /** + * Creates and returns a `Cipher` object that uses the given `algorithm` and`password`. + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to set the length of the authentication + * tag that will be returned by `getAuthTag()` and defaults to 16 bytes. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `password` is used to derive the cipher key and initialization vector (IV). + * The value must be either a `'latin1'` encoded string, a `Buffer`, a`TypedArray`, or a `DataView`. + * + * **This function is semantically insecure for all** + * **supported ciphers and fatally flawed for ciphers in counter mode (such as CTR,** + * **GCM, or CCM).** + * + * The implementation of `crypto.createCipher()` derives keys using the OpenSSL + * function [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) with the digest algorithm set to MD5, one + * iteration, and no salt. The lack of salt allows dictionary attacks as the same + * password always creates the same key. The low iteration count and + * non-cryptographically secure hash algorithm allow passwords to be tested very + * rapidly. + * + * In line with OpenSSL's recommendation to use a more modern algorithm instead of [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) it is recommended that + * developers derive a key and IV on + * their own using {@link scrypt} and to use {@link createCipheriv} to create the `Cipher` object. Users should not use ciphers with counter mode + * (e.g. CTR, GCM, or CCM) in `crypto.createCipher()`. A warning is emitted when + * they are used in order to avoid the risk of IV reuse that causes + * vulnerabilities. For the case when IV is reused in GCM, see [Nonce-Disrespecting Adversaries](https://github.com/nonce-disrespect/nonce-disrespect) for details. + * @since v0.1.94 + * @deprecated Since v10.0.0 - Use {@link createCipheriv} instead. 
+ * @param options `stream.transform` options + */ + function createCipher(algorithm: CipherCCMTypes, password: BinaryLike, options: CipherCCMOptions): CipherCCM; + /** @deprecated since v10.0.0 use `createCipheriv()` */ + function createCipher(algorithm: CipherGCMTypes, password: BinaryLike, options?: CipherGCMOptions): CipherGCM; + /** @deprecated since v10.0.0 use `createCipheriv()` */ + function createCipher(algorithm: string, password: BinaryLike, options?: stream.TransformOptions): Cipher; + /** + * Creates and returns a `Cipher` object, with the given `algorithm`, `key` and + * initialization vector (`iv`). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to set the length of the authentication + * tag that will be returned by `getAuthTag()` and defaults to 16 bytes. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded + * strings,`Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be + * a `KeyObject` of type `secret`. If the cipher does not need + * an initialization vector, `iv` may be `null`. + * + * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * Initialization vectors should be unpredictable and unique; ideally, they will be + * cryptographically random. They do not have to be secret: IVs are typically just + * added to ciphertext messages unencrypted. It may sound contradictory that + * something has to be unpredictable and unique, but does not have to be secret; + * remember that an attacker must not be able to predict ahead of time what a + * given IV will be. + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createCipheriv( + algorithm: CipherCCMTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherCCMOptions, + ): CipherCCM; + function createCipheriv( + algorithm: CipherOCBTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherOCBOptions, + ): CipherOCB; + function createCipheriv( + algorithm: CipherGCMTypes, + key: CipherKey, + iv: BinaryLike, + options?: CipherGCMOptions, + ): CipherGCM; + function createCipheriv( + algorithm: string, + key: CipherKey, + iv: BinaryLike | null, + options?: stream.TransformOptions, + ): Cipher; + /** + * Instances of the `Cipher` class are used to encrypt data. The class can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where plain unencrypted + * data is written to produce encrypted data on the readable side, or + * * Using the `cipher.update()` and `cipher.final()` methods to produce + * the encrypted data. + * + * The {@link createCipher} or {@link createCipheriv} methods are + * used to create `Cipher` instances. `Cipher` objects are not to be created + * directly using the `new` keyword. 
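+ * + * Example: Authenticated encryption with AES-256-GCM (a minimal sketch; key storage and IV transport are assumed to be handled elsewhere): + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * randomBytes, + * createCipheriv, + * createDecipheriv, + * } = await import('node:crypto'); + * + * const key = randomBytes(32); // 256-bit key + * const iv = randomBytes(12); // 96-bit IV, a common choice for GCM + * + * const cipher = createCipheriv('aes-256-gcm', key, iv); + * const ciphertext = Buffer.concat([cipher.update('some clear text data', 'utf8'), cipher.final()]); + * // The authentication tag only becomes available after `final()` has been called. + * const tag = cipher.getAuthTag(); + * + * const decipher = createDecipheriv('aes-256-gcm', key, iv); + * decipher.setAuthTag(tag); // Must be set before `decipher.final()` is called. + * const plaintext = Buffer.concat([decipher.update(ciphertext), decipher.final()]); + * console.log(plaintext.toString('utf8')); + * // Prints: some clear text data + * ```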
+ * + * Example: Using `Cipher` objects as streams: + * + * ```js + * const { + * scrypt, + * randomFill, + * createCipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * // Once we have the key and iv, we can create and use the cipher... + * const cipher = createCipheriv(algorithm, key, iv); + * + * let encrypted = ''; + * cipher.setEncoding('hex'); + * + * cipher.on('data', (chunk) => encrypted += chunk); + * cipher.on('end', () => console.log(encrypted)); + * + * cipher.write('some clear text data'); + * cipher.end(); + * }); + * }); + * ``` + * + * Example: Using `Cipher` and piped streams: + * + * ```js + * import { + * createReadStream, + * createWriteStream, + * } from 'node:fs'; + * + * import { + * pipeline, + * } from 'node:stream'; + * + * const { + * scrypt, + * randomFill, + * createCipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * const cipher = createCipheriv(algorithm, key, iv); + * + * const input = createReadStream('test.js'); + * const output = createWriteStream('test.enc'); + * + * pipeline(input, cipher, output, (err) => { + * if (err) throw err; + * }); + * }); + * }); + * ``` + * + * Example: Using the `cipher.update()` and `cipher.final()` methods: + * + * ```js + * const { + * scrypt, + * randomFill, + * createCipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * const cipher = createCipheriv(algorithm, key, iv); + * + * let encrypted = cipher.update('some clear text data', 'utf8', 'hex'); + * encrypted += cipher.final('hex'); + * console.log(encrypted); + * }); + * }); + * ``` + * @since v0.1.94 + */ + class Cipher extends stream.Transform { + private constructor(); + /** + * Updates the cipher with `data`. If the `inputEncoding` argument is given, + * the `data`argument is a string using the specified encoding. If the `inputEncoding`argument is not given, `data` must be a `Buffer`, `TypedArray`, or`DataView`. If `data` is a `Buffer`, + * `TypedArray`, or `DataView`, then`inputEncoding` is ignored. + * + * The `outputEncoding` specifies the output format of the enciphered + * data. If the `outputEncoding`is specified, a string using the specified encoding is returned. If no`outputEncoding` is provided, a `Buffer` is returned. 
+ * + * The `cipher.update()` method can be called multiple times with new data until `cipher.final()` is called. Calling `cipher.update()` after `cipher.final()` will result in an error being + * thrown. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the data. + * @param outputEncoding The `encoding` of the return value. + */ + update(data: BinaryLike): Buffer; + update(data: string, inputEncoding: Encoding): Buffer; + update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string; + update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string; + /** + * Once the `cipher.final()` method has been called, the `Cipher` object can no + * longer be used to encrypt data. Attempts to call `cipher.final()` more than + * once will result in an error being thrown. + * @since v0.1.94 + * @param outputEncoding The `encoding` of the return value. + * @return Any remaining enciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned. + */ + final(): Buffer; + final(outputEncoding: BufferEncoding): string; + /** + * When using block encryption algorithms, the `Cipher` class will automatically + * add padding to the input data to the appropriate block size. To disable the + * default padding call `cipher.setAutoPadding(false)`. + * + * When `autoPadding` is `false`, the length of the entire input data must be a + * multiple of the cipher's block size or `cipher.final()` will throw an error. + * Disabling automatic padding is useful for non-standard padding, for instance + * using `0x0` instead of PKCS padding. + * + * The `cipher.setAutoPadding()` method must be called before `cipher.final()`. + * @since v0.7.1 + * @param [autoPadding=true] + * @return for method chaining. + */ + setAutoPadding(autoPadding?: boolean): this; + } + interface CipherCCM extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options: { + plaintextLength: number; + }, + ): this; + getAuthTag(): Buffer; + } + interface CipherGCM extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + getAuthTag(): Buffer; + } + interface CipherOCB extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + getAuthTag(): Buffer; + } + /** + * Creates and returns a `Decipher` object that uses the given `algorithm` and`password` (key). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * **This function is semantically insecure for all** + * **supported ciphers and fatally flawed for ciphers in counter mode (such as CTR,** + * **GCM, or CCM).** + * + * The implementation of `crypto.createDecipher()` derives keys using the OpenSSL + * function [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) with the digest algorithm set to MD5, one + * iteration, and no salt. The lack of salt allows dictionary attacks as the same + * password always creates the same key. The low iteration count and + * non-cryptographically secure hash algorithm allow passwords to be tested very + * rapidly. 
+ * + * In line with OpenSSL's recommendation to use a more modern algorithm instead of [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) it is recommended that + * developers derive a key and IV on + * their own using {@link scrypt} and to use {@link createDecipheriv} to create the `Decipher` object. + * @since v0.1.94 + * @deprecated Since v10.0.0 - Use {@link createDecipheriv} instead. + * @param options `stream.transform` options + */ + function createDecipher(algorithm: CipherCCMTypes, password: BinaryLike, options: CipherCCMOptions): DecipherCCM; + /** @deprecated since v10.0.0 use `createDecipheriv()` */ + function createDecipher(algorithm: CipherGCMTypes, password: BinaryLike, options?: CipherGCMOptions): DecipherGCM; + /** @deprecated since v10.0.0 use `createDecipheriv()` */ + function createDecipher(algorithm: string, password: BinaryLike, options?: stream.TransformOptions): Decipher; + /** + * Creates and returns a `Decipher` object that uses the given `algorithm`, `key`and initialization vector (`iv`). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to restrict accepted authentication tags + * to those with the specified length. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded + * strings,`Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be + * a `KeyObject` of type `secret`. If the cipher does not need + * an initialization vector, `iv` may be `null`. + * + * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * Initialization vectors should be unpredictable and unique; ideally, they will be + * cryptographically random. They do not have to be secret: IVs are typically just + * added to ciphertext messages unencrypted. It may sound contradictory that + * something has to be unpredictable and unique, but does not have to be secret; + * remember that an attacker must not be able to predict ahead of time what a given + * IV will be. + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createDecipheriv( + algorithm: CipherCCMTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherCCMOptions, + ): DecipherCCM; + function createDecipheriv( + algorithm: CipherOCBTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherOCBOptions, + ): DecipherOCB; + function createDecipheriv( + algorithm: CipherGCMTypes, + key: CipherKey, + iv: BinaryLike, + options?: CipherGCMOptions, + ): DecipherGCM; + function createDecipheriv( + algorithm: string, + key: CipherKey, + iv: BinaryLike | null, + options?: stream.TransformOptions, + ): Decipher; + /** + * Instances of the `Decipher` class are used to decrypt data. 
The class can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where plain encrypted + * data is written to produce unencrypted data on the readable side, or + * * Using the `decipher.update()` and `decipher.final()` methods to + * produce the unencrypted data. + * + * The {@link createDecipher} or {@link createDecipheriv} methods are + * used to create `Decipher` instances. `Decipher` objects are not to be created + * directly using the `new` keyword. + * + * Example: Using `Decipher` objects as streams: + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * scryptSync, + * createDecipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Key length is dependent on the algorithm. In this case for aes192, it is + * // 24 bytes (192 bits). + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * let decrypted = ''; + * decipher.on('readable', () => { + * let chunk; + * while (null !== (chunk = decipher.read())) { + * decrypted += chunk.toString('utf8'); + * } + * }); + * decipher.on('end', () => { + * console.log(decrypted); + * // Prints: some clear text data + * }); + * + * // Encrypted with same algorithm, key and iv. + * const encrypted = + * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa'; + * decipher.write(encrypted, 'hex'); + * decipher.end(); + * ``` + * + * Example: Using `Decipher` and piped streams: + * + * ```js + * import { + * createReadStream, + * createWriteStream, + * } from 'node:fs'; + * import { Buffer } from 'node:buffer'; + * const { + * scryptSync, + * createDecipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * const input = createReadStream('test.enc'); + * const output = createWriteStream('test.js'); + * + * input.pipe(decipher).pipe(output); + * ``` + * + * Example: Using the `decipher.update()` and `decipher.final()` methods: + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * scryptSync, + * createDecipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * // Encrypted using same algorithm, key and iv. + * const encrypted = + * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa'; + * let decrypted = decipher.update(encrypted, 'hex', 'utf8'); + * decrypted += decipher.final('utf8'); + * console.log(decrypted); + * // Prints: some clear text data + * ``` + * @since v0.1.94 + */ + class Decipher extends stream.Transform { + private constructor(); + /** + * Updates the decipher with `data`. 
If the `inputEncoding` argument is given, + * the `data`argument is a string using the specified encoding. If the `inputEncoding`argument is not given, `data` must be a `Buffer`. If `data` is a `Buffer` then `inputEncoding` is + * ignored. + * + * The `outputEncoding` specifies the output format of the enciphered + * data. If the `outputEncoding`is specified, a string using the specified encoding is returned. If no`outputEncoding` is provided, a `Buffer` is returned. + * + * The `decipher.update()` method can be called multiple times with new data until `decipher.final()` is called. Calling `decipher.update()` after `decipher.final()` will result in an error + * being thrown. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the `data` string. + * @param outputEncoding The `encoding` of the return value. + */ + update(data: NodeJS.ArrayBufferView): Buffer; + update(data: string, inputEncoding: Encoding): Buffer; + update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string; + update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string; + /** + * Once the `decipher.final()` method has been called, the `Decipher` object can + * no longer be used to decrypt data. Attempts to call `decipher.final()` more + * than once will result in an error being thrown. + * @since v0.1.94 + * @param outputEncoding The `encoding` of the return value. + * @return Any remaining deciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned. + */ + final(): Buffer; + final(outputEncoding: BufferEncoding): string; + /** + * When data has been encrypted without standard block padding, calling`decipher.setAutoPadding(false)` will disable automatic padding to prevent `decipher.final()` from checking for and + * removing padding. + * + * Turning auto padding off will only work if the input data's length is a + * multiple of the ciphers block size. + * + * The `decipher.setAutoPadding()` method must be called before `decipher.final()`. + * @since v0.7.1 + * @param [autoPadding=true] + * @return for method chaining. + */ + setAutoPadding(auto_padding?: boolean): this; + } + interface DecipherCCM extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options: { + plaintextLength: number; + }, + ): this; + } + interface DecipherGCM extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + } + interface DecipherOCB extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + } + interface PrivateKeyInput { + key: string | Buffer; + format?: KeyFormat | undefined; + type?: "pkcs1" | "pkcs8" | "sec1" | undefined; + passphrase?: string | Buffer | undefined; + encoding?: string | undefined; + } + interface PublicKeyInput { + key: string | Buffer; + format?: KeyFormat | undefined; + type?: "pkcs1" | "spki" | undefined; + encoding?: string | undefined; + } + /** + * Asynchronously generates a new random secret key of the given `length`. The`type` will determine which validations will be performed on the `length`. 
+ * + * ```js + * const { + * generateKey, + * } = await import('node:crypto'); + * + * generateKey('hmac', { length: 512 }, (err, key) => { + * if (err) throw err; + * console.log(key.export().toString('hex')); // 46e..........620 + * }); + * ``` + * + * The size of a generated HMAC key should not exceed the block size of the + * underlying hash function. See {@link createHmac} for more information. + * @since v15.0.0 + * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`. + */ + function generateKey( + type: "hmac" | "aes", + options: { + length: number; + }, + callback: (err: Error | null, key: KeyObject) => void, + ): void; + /** + * Synchronously generates a new random secret key of the given `length`. The`type` will determine which validations will be performed on the `length`. + * + * ```js + * const { + * generateKeySync, + * } = await import('node:crypto'); + * + * const key = generateKeySync('hmac', { length: 512 }); + * console.log(key.export().toString('hex')); // e89..........41e + * ``` + * + * The size of a generated HMAC key should not exceed the block size of the + * underlying hash function. See {@link createHmac} for more information. + * @since v15.0.0 + * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`. + */ + function generateKeySync( + type: "hmac" | "aes", + options: { + length: number; + }, + ): KeyObject; + interface JsonWebKeyInput { + key: JsonWebKey; + format: "jwk"; + } + /** + * Creates and returns a new key object containing a private key. If `key` is a + * string or `Buffer`, `format` is assumed to be `'pem'`; otherwise, `key`must be an object with the properties described above. + * + * If the private key is encrypted, a `passphrase` must be specified. The length + * of the passphrase is limited to 1024 bytes. + * @since v11.6.0 + */ + function createPrivateKey(key: PrivateKeyInput | string | Buffer | JsonWebKeyInput): KeyObject; + /** + * Creates and returns a new key object containing a public key. If `key` is a + * string or `Buffer`, `format` is assumed to be `'pem'`; if `key` is a `KeyObject`with type `'private'`, the public key is derived from the given private key; + * otherwise, `key` must be an object with the properties described above. + * + * If the format is `'pem'`, the `'key'` may also be an X.509 certificate. + * + * Because public keys can be derived from private keys, a private key may be + * passed instead of a public key. In that case, this function behaves as if {@link createPrivateKey} had been called, except that the type of the + * returned `KeyObject` will be `'public'` and that the private key cannot be + * extracted from the returned `KeyObject`. Similarly, if a `KeyObject` with type`'private'` is given, a new `KeyObject` with type `'public'` will be returned + * and it will be impossible to extract the private key from the returned object. + * @since v11.6.0 + */ + function createPublicKey(key: PublicKeyInput | string | Buffer | KeyObject | JsonWebKeyInput): KeyObject; + /** + * Creates and returns a new key object containing a secret key for symmetric + * encryption or `Hmac`. + * @since v11.6.0 + * @param encoding The string encoding when `key` is a string. + */ + function createSecretKey(key: NodeJS.ArrayBufferView): KeyObject; + function createSecretKey(key: string, encoding: BufferEncoding): KeyObject; + /** + * Creates and returns a `Sign` object that uses the given `algorithm`. 
Use {@link getHashes} to obtain the names of the available digest algorithms. + * Optional `options` argument controls the `stream.Writable` behavior. + * + * In some cases, a `Sign` instance can be created using the name of a signature + * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. This will use + * the corresponding digest algorithm. This does not work for all signature + * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest + * algorithm names. + * @since v0.1.92 + * @param options `stream.Writable` options + */ + function createSign(algorithm: string, options?: stream.WritableOptions): Sign; + type DSAEncoding = "der" | "ieee-p1363"; + interface SigningOptions { + /** + * @see crypto.constants.RSA_PKCS1_PADDING + */ + padding?: number | undefined; + saltLength?: number | undefined; + dsaEncoding?: DSAEncoding | undefined; + } + interface SignPrivateKeyInput extends PrivateKeyInput, SigningOptions {} + interface SignKeyObjectInput extends SigningOptions { + key: KeyObject; + } + interface VerifyPublicKeyInput extends PublicKeyInput, SigningOptions {} + interface VerifyKeyObjectInput extends SigningOptions { + key: KeyObject; + } + interface VerifyJsonWebKeyInput extends JsonWebKeyInput, SigningOptions {} + type KeyLike = string | Buffer | KeyObject; + /** + * The `Sign` class is a utility for generating signatures. It can be used in one + * of two ways: + * + * * As a writable `stream`, where data to be signed is written and the `sign.sign()` method is used to generate and return the signature, or + * * Using the `sign.update()` and `sign.sign()` methods to produce the + * signature. + * + * The {@link createSign} method is used to create `Sign` instances. The + * argument is the string name of the hash function to use. `Sign` objects are not + * to be created directly using the `new` keyword. + * + * Example: Using `Sign` and `Verify` objects as streams: + * + * ```js + * const { + * generateKeyPairSync, + * createSign, + * createVerify, + * } = await import('node:crypto'); + * + * const { privateKey, publicKey } = generateKeyPairSync('ec', { + * namedCurve: 'sect239k1', + * }); + * + * const sign = createSign('SHA256'); + * sign.write('some data to sign'); + * sign.end(); + * const signature = sign.sign(privateKey, 'hex'); + * + * const verify = createVerify('SHA256'); + * verify.write('some data to sign'); + * verify.end(); + * console.log(verify.verify(publicKey, signature, 'hex')); + * // Prints: true + * ``` + * + * Example: Using the `sign.update()` and `verify.update()` methods: + * + * ```js + * const { + * generateKeyPairSync, + * createSign, + * createVerify, + * } = await import('node:crypto'); + * + * const { privateKey, publicKey } = generateKeyPairSync('rsa', { + * modulusLength: 2048, + * }); + * + * const sign = createSign('SHA256'); + * sign.update('some data to sign'); + * sign.end(); + * const signature = sign.sign(privateKey); + * + * const verify = createVerify('SHA256'); + * verify.update('some data to sign'); + * verify.end(); + * console.log(verify.verify(publicKey, signature)); + * // Prints: true + * ``` + * @since v0.1.92 + */ + class Sign extends stream.Writable { + private constructor(); + /** + * Updates the `Sign` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. 
+ * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): this; + update(data: string, inputEncoding: Encoding): this; + /** + * Calculates the signature on all the data passed through using either `sign.update()` or `sign.write()`. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the following additional properties can be passed: + * + * If `outputEncoding` is provided a string is returned; otherwise a `Buffer` is returned. + * + * The `Sign` object can not be again used after `sign.sign()` method has been + * called. Multiple calls to `sign.sign()` will result in an error being thrown. + * @since v0.1.92 + */ + sign(privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput): Buffer; + sign( + privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput, + outputFormat: BinaryToTextEncoding, + ): string; + } + /** + * Creates and returns a `Verify` object that uses the given algorithm. + * Use {@link getHashes} to obtain an array of names of the available + * signing algorithms. Optional `options` argument controls the`stream.Writable` behavior. + * + * In some cases, a `Verify` instance can be created using the name of a signature + * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. This will use + * the corresponding digest algorithm. This does not work for all signature + * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest + * algorithm names. + * @since v0.1.92 + * @param options `stream.Writable` options + */ + function createVerify(algorithm: string, options?: stream.WritableOptions): Verify; + /** + * The `Verify` class is a utility for verifying signatures. It can be used in one + * of two ways: + * + * * As a writable `stream` where written data is used to validate against the + * supplied signature, or + * * Using the `verify.update()` and `verify.verify()` methods to verify + * the signature. + * + * The {@link createVerify} method is used to create `Verify` instances.`Verify` objects are not to be created directly using the `new` keyword. + * + * See `Sign` for examples. + * @since v0.1.92 + */ + class Verify extends stream.Writable { + private constructor(); + /** + * Updates the `Verify` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `inputEncoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Verify; + update(data: string, inputEncoding: Encoding): Verify; + /** + * Verifies the provided data using the given `object` and `signature`. + * + * If `object` is not a `KeyObject`, this function behaves as if`object` had been passed to {@link createPublicKey}. If it is an + * object, the following additional properties can be passed: + * + * The `signature` argument is the previously calculated signature for the data, in + * the `signatureEncoding`. + * If a `signatureEncoding` is specified, the `signature` is expected to be a + * string; otherwise `signature` is expected to be a `Buffer`,`TypedArray`, or `DataView`. 
+ * + * The `verify` object can not be used again after `verify.verify()` has been + * called. Multiple calls to `verify.verify()` will result in an error being + * thrown. + * + * Because public keys can be derived from private keys, a private key may + * be passed instead of a public key. + * @since v0.1.92 + */ + verify( + object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: NodeJS.ArrayBufferView, + ): boolean; + verify( + object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: string, + signature_format?: BinaryToTextEncoding, + ): boolean; + } + /** + * Creates a `DiffieHellman` key exchange object using the supplied `prime` and an + * optional specific `generator`. + * + * The `generator` argument can be a number, string, or `Buffer`. If`generator` is not specified, the value `2` is used. + * + * If `primeEncoding` is specified, `prime` is expected to be a string; otherwise + * a `Buffer`, `TypedArray`, or `DataView` is expected. + * + * If `generatorEncoding` is specified, `generator` is expected to be a string; + * otherwise a number, `Buffer`, `TypedArray`, or `DataView` is expected. + * @since v0.11.12 + * @param primeEncoding The `encoding` of the `prime` string. + * @param [generator=2] + * @param generatorEncoding The `encoding` of the `generator` string. + */ + function createDiffieHellman(primeLength: number, generator?: number): DiffieHellman; + function createDiffieHellman( + prime: ArrayBuffer | NodeJS.ArrayBufferView, + generator?: number | ArrayBuffer | NodeJS.ArrayBufferView, + ): DiffieHellman; + function createDiffieHellman( + prime: ArrayBuffer | NodeJS.ArrayBufferView, + generator: string, + generatorEncoding: BinaryToTextEncoding, + ): DiffieHellman; + function createDiffieHellman( + prime: string, + primeEncoding: BinaryToTextEncoding, + generator?: number | ArrayBuffer | NodeJS.ArrayBufferView, + ): DiffieHellman; + function createDiffieHellman( + prime: string, + primeEncoding: BinaryToTextEncoding, + generator: string, + generatorEncoding: BinaryToTextEncoding, + ): DiffieHellman; + /** + * The `DiffieHellman` class is a utility for creating Diffie-Hellman key + * exchanges. + * + * Instances of the `DiffieHellman` class can be created using the {@link createDiffieHellman} function. + * + * ```js + * import assert from 'node:assert'; + * + * const { + * createDiffieHellman, + * } = await import('node:crypto'); + * + * // Generate Alice's keys... + * const alice = createDiffieHellman(2048); + * const aliceKey = alice.generateKeys(); + * + * // Generate Bob's keys... + * const bob = createDiffieHellman(alice.getPrime(), alice.getGenerator()); + * const bobKey = bob.generateKeys(); + * + * // Exchange and generate the secret... + * const aliceSecret = alice.computeSecret(bobKey); + * const bobSecret = bob.computeSecret(aliceKey); + * + * // OK + * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex')); + * ``` + * @since v0.5.0 + */ + class DiffieHellman { + private constructor(); + /** + * Generates private and public Diffie-Hellman key values unless they have been + * generated or computed already, and returns + * the public key in the specified `encoding`. This key should be + * transferred to the other party. + * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned. + * + * This function is a thin wrapper around [`DH_generate_key()`](https://www.openssl.org/docs/man3.0/man3/DH_generate_key.html). 
In particular, + * once a private key has been generated or set, calling this function only updates + * the public key but does not generate a new private key. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + generateKeys(): Buffer; + generateKeys(encoding: BinaryToTextEncoding): string; + /** + * Computes the shared secret using `otherPublicKey` as the other + * party's public key and returns the computed shared secret. The supplied + * key is interpreted using the specified `inputEncoding`, and secret is + * encoded using specified `outputEncoding`. + * If the `inputEncoding` is not + * provided, `otherPublicKey` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * If `outputEncoding` is given a string is returned; otherwise, a `Buffer` is returned. + * @since v0.5.0 + * @param inputEncoding The `encoding` of an `otherPublicKey` string. + * @param outputEncoding The `encoding` of the return value. + */ + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, inputEncoding?: null, outputEncoding?: null): Buffer; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding, outputEncoding?: null): Buffer; + computeSecret( + otherPublicKey: NodeJS.ArrayBufferView, + inputEncoding: null, + outputEncoding: BinaryToTextEncoding, + ): string; + computeSecret( + otherPublicKey: string, + inputEncoding: BinaryToTextEncoding, + outputEncoding: BinaryToTextEncoding, + ): string; + /** + * Returns the Diffie-Hellman prime in the specified `encoding`. + * If `encoding` is provided a string is + * returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPrime(): Buffer; + getPrime(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman generator in the specified `encoding`. + * If `encoding` is provided a string is + * returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getGenerator(): Buffer; + getGenerator(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman public key in the specified `encoding`. + * If `encoding` is provided a + * string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPublicKey(): Buffer; + getPublicKey(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman private key in the specified `encoding`. + * If `encoding` is provided a + * string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPrivateKey(): Buffer; + getPrivateKey(encoding: BinaryToTextEncoding): string; + /** + * Sets the Diffie-Hellman public key. If the `encoding` argument is provided,`publicKey` is expected + * to be a string. If no `encoding` is provided, `publicKey` is expected + * to be a `Buffer`, `TypedArray`, or `DataView`. + * @since v0.5.0 + * @param encoding The `encoding` of the `publicKey` string. + */ + setPublicKey(publicKey: NodeJS.ArrayBufferView): void; + setPublicKey(publicKey: string, encoding: BufferEncoding): void; + /** + * Sets the Diffie-Hellman private key. If the `encoding` argument is provided,`privateKey` is expected + * to be a string. If no `encoding` is provided, `privateKey` is expected + * to be a `Buffer`, `TypedArray`, or `DataView`. + * + * This function does not automatically compute the associated public key. 
Either `diffieHellman.setPublicKey()` or `diffieHellman.generateKeys()` can be + * used to manually provide the public key or to automatically derive it. + * @since v0.5.0 + * @param encoding The `encoding` of the `privateKey` string. + */ + setPrivateKey(privateKey: NodeJS.ArrayBufferView): void; + setPrivateKey(privateKey: string, encoding: BufferEncoding): void; + /** + * A bit field containing any warnings and/or errors resulting from a check + * performed during initialization of the `DiffieHellman` object. + * + * The following values are valid for this property (as defined in `node:constants` module): + * + * * `DH_CHECK_P_NOT_SAFE_PRIME` + * * `DH_CHECK_P_NOT_PRIME` + * * `DH_UNABLE_TO_CHECK_GENERATOR` + * * `DH_NOT_SUITABLE_GENERATOR` + * @since v0.11.12 + */ + verifyError: number; + } + /** + * The `DiffieHellmanGroup` class takes a well-known modp group as its argument. + * It works the same as `DiffieHellman`, except that it does not allow changing its keys after creation. + * In other words, it does not implement `setPublicKey()` or `setPrivateKey()` methods. + * + * ```js + * const { createDiffieHellmanGroup } = await import('node:crypto'); + * const dh = createDiffieHellmanGroup('modp1'); + * ``` + * The name (e.g. `'modp1'`) is taken from [RFC 2412](https://www.rfc-editor.org/rfc/rfc2412.txt) (modp1 and 2) and [RFC 3526](https://www.rfc-editor.org/rfc/rfc3526.txt): + * ```bash + * $ perl -ne 'print "$1\n" if /"(modp\d+)"/' src/node_crypto_groups.h + * modp1 # 768 bits + * modp2 # 1024 bits + * modp5 # 1536 bits + * modp14 # 2048 bits + * modp15 # etc. + * modp16 + * modp17 + * modp18 + * ``` + * @since v0.7.5 + */ + const DiffieHellmanGroup: DiffieHellmanGroupConstructor; + interface DiffieHellmanGroupConstructor { + new(name: string): DiffieHellmanGroup; + (name: string): DiffieHellmanGroup; + readonly prototype: DiffieHellmanGroup; + } + type DiffieHellmanGroup = Omit<DiffieHellman, "setPublicKey" | "setPrivateKey">; + /** + * Creates a predefined `DiffieHellmanGroup` key exchange object. The + * supported groups are listed in the documentation for `DiffieHellmanGroup`. + * + * The returned object mimics the interface of objects created by {@link createDiffieHellman}, but will not allow changing + * the keys (with `diffieHellman.setPublicKey()`, for example). The + * advantage of using this method is that the parties do not have to + * generate nor exchange a group modulus beforehand, saving both processor + * and communication time. + * + * Example (obtaining a shared secret): + * + * ```js + * const { + * getDiffieHellman, + * } = await import('node:crypto'); + * const alice = getDiffieHellman('modp14'); + * const bob = getDiffieHellman('modp14'); + * + * alice.generateKeys(); + * bob.generateKeys(); + * + * const aliceSecret = alice.computeSecret(bob.getPublicKey(), null, 'hex'); + * const bobSecret = bob.computeSecret(alice.getPublicKey(), null, 'hex'); + * + * // aliceSecret and bobSecret should be the same + * console.log(aliceSecret === bobSecret); + * ``` + * @since v0.7.5 + */ + function getDiffieHellman(groupName: string): DiffieHellmanGroup; + /** + * An alias for {@link getDiffieHellman} + * @since v0.9.3 + */ + function createDiffieHellmanGroup(name: string): DiffieHellmanGroup; + /** + * Provides an asynchronous Password-Based Key Derivation Function 2 (PBKDF2) + * implementation. A selected HMAC digest algorithm specified by `digest` is + * applied to derive a key of the requested byte length (`keylen`) from the`password`, `salt` and `iterations`.
+ * + * The supplied `callback` function is called with two arguments: `err` and`derivedKey`. If an error occurs while deriving the key, `err` will be set; + * otherwise `err` will be `null`. By default, the successfully generated`derivedKey` will be passed to the callback as a `Buffer`. An error will be + * thrown if any of the input arguments specify invalid values or types. + * + * The `iterations` argument must be a number set as high as possible. The + * higher the number of iterations, the more secure the derived key will be, + * but will take a longer amount of time to complete. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * ```js + * const { + * pbkdf2, + * } = await import('node:crypto'); + * + * pbkdf2('secret', 'salt', 100000, 64, 'sha512', (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...08d59ae' + * }); + * ``` + * + * An array of supported digest functions can be retrieved using {@link getHashes}. + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * @since v0.5.5 + */ + function pbkdf2( + password: BinaryLike, + salt: BinaryLike, + iterations: number, + keylen: number, + digest: string, + callback: (err: Error | null, derivedKey: Buffer) => void, + ): void; + /** + * Provides a synchronous Password-Based Key Derivation Function 2 (PBKDF2) + * implementation. A selected HMAC digest algorithm specified by `digest` is + * applied to derive a key of the requested byte length (`keylen`) from the`password`, `salt` and `iterations`. + * + * If an error occurs an `Error` will be thrown, otherwise the derived key will be + * returned as a `Buffer`. + * + * The `iterations` argument must be a number set as high as possible. The + * higher the number of iterations, the more secure the derived key will be, + * but will take a longer amount of time to complete. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * ```js + * const { + * pbkdf2Sync, + * } = await import('node:crypto'); + * + * const key = pbkdf2Sync('secret', 'salt', 100000, 64, 'sha512'); + * console.log(key.toString('hex')); // '3745e48...08d59ae' + * ``` + * + * An array of supported digest functions can be retrieved using {@link getHashes}. + * @since v0.9.3 + */ + function pbkdf2Sync( + password: BinaryLike, + salt: BinaryLike, + iterations: number, + keylen: number, + digest: string, + ): Buffer; + /** + * Generates cryptographically strong pseudorandom data. The `size` argument + * is a number indicating the number of bytes to generate. + * + * If a `callback` function is provided, the bytes are generated asynchronously + * and the `callback` function is invoked with two arguments: `err` and `buf`. 
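+ *
+ * (A common use, sketched here as an illustration and not taken from the
+ * original text, is generating a random salt for the PBKDF2 and scrypt
+ * functions in this module; the 16-byte length follows the NIST recommendation
+ * cited above.)
+ *
+ * ```js
+ * const { randomBytes, pbkdf2 } = await import('node:crypto');
+ *
+ * const salt = randomBytes(16);
+ * pbkdf2('secret', salt, 100000, 64, 'sha512', (err, derivedKey) => {
+ *   if (err) throw err;
+ *   console.log(derivedKey.toString('hex'));
+ * });
+ * ```
+ *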
+ * If an error occurs, `err` will be an `Error` object; otherwise it is `null`. The`buf` argument is a `Buffer` containing the generated bytes. + * + * ```js + * // Asynchronous + * const { + * randomBytes, + * } = await import('node:crypto'); + * + * randomBytes(256, (err, buf) => { + * if (err) throw err; + * console.log(`${buf.length} bytes of random data: ${buf.toString('hex')}`); + * }); + * ``` + * + * If the `callback` function is not provided, the random bytes are generated + * synchronously and returned as a `Buffer`. An error will be thrown if + * there is a problem generating the bytes. + * + * ```js + * // Synchronous + * const { + * randomBytes, + * } = await import('node:crypto'); + * + * const buf = randomBytes(256); + * console.log( + * `${buf.length} bytes of random data: ${buf.toString('hex')}`); + * ``` + * + * The `crypto.randomBytes()` method will not complete until there is + * sufficient entropy available. + * This should normally never take longer than a few milliseconds. The only time + * when generating the random bytes may conceivably block for a longer period of + * time is right after boot, when the whole system is still low on entropy. + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * + * The asynchronous version of `crypto.randomBytes()` is carried out in a single + * threadpool request. To minimize threadpool task length variation, partition + * large `randomBytes` requests when doing so as part of fulfilling a client + * request. + * @since v0.5.8 + * @param size The number of bytes to generate. The `size` must not be larger than `2**31 - 1`. + * @return if the `callback` function is not provided. + */ + function randomBytes(size: number): Buffer; + function randomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; + function pseudoRandomBytes(size: number): Buffer; + function pseudoRandomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; + /** + * Return a random integer `n` such that `min <= n < max`. This + * implementation avoids [modulo bias](https://en.wikipedia.org/wiki/Fisher%E2%80%93Yates_shuffle#Modulo_bias). + * + * The range (`max - min`) must be less than 2**48. `min` and `max` must + * be [safe integers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isSafeInteger). + * + * If the `callback` function is not provided, the random integer is + * generated synchronously. + * + * ```js + * // Asynchronous + * const { + * randomInt, + * } = await import('node:crypto'); + * + * randomInt(3, (err, n) => { + * if (err) throw err; + * console.log(`Random number chosen from (0, 1, 2): ${n}`); + * }); + * ``` + * + * ```js + * // Synchronous + * const { + * randomInt, + * } = await import('node:crypto'); + * + * const n = randomInt(3); + * console.log(`Random number chosen from (0, 1, 2): ${n}`); + * ``` + * + * ```js + * // With `min` argument + * const { + * randomInt, + * } = await import('node:crypto'); + * + * const n = randomInt(1, 7); + * console.log(`The dice rolled: ${n}`); + * ``` + * @since v14.10.0, v12.19.0 + * @param [min=0] Start of random range (inclusive). + * @param max End of random range (exclusive). + * @param callback `function(err, n) {}`. 
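+ *
+ * A further illustrative sketch (not part of the original text): picking an
+ * unbiased random element from an array.
+ *
+ * ```js
+ * const {
+ *   randomInt,
+ * } = await import('node:crypto');
+ *
+ * const moves = ['rock', 'paper', 'scissors'];
+ * console.log(moves[randomInt(moves.length)]);
+ * ```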
+ */ + function randomInt(max: number): number; + function randomInt(min: number, max: number): number; + function randomInt(max: number, callback: (err: Error | null, value: number) => void): void; + function randomInt(min: number, max: number, callback: (err: Error | null, value: number) => void): void; + /** + * Synchronous version of {@link randomFill}. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFillSync } = await import('node:crypto'); + * + * const buf = Buffer.alloc(10); + * console.log(randomFillSync(buf).toString('hex')); + * + * randomFillSync(buf, 5); + * console.log(buf.toString('hex')); + * + * // The above is equivalent to the following: + * randomFillSync(buf, 5, 5); + * console.log(buf.toString('hex')); + * ``` + * + * Any `ArrayBuffer`, `TypedArray` or `DataView` instance may be passed as`buffer`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFillSync } = await import('node:crypto'); + * + * const a = new Uint32Array(10); + * console.log(Buffer.from(randomFillSync(a).buffer, + * a.byteOffset, a.byteLength).toString('hex')); + * + * const b = new DataView(new ArrayBuffer(10)); + * console.log(Buffer.from(randomFillSync(b).buffer, + * b.byteOffset, b.byteLength).toString('hex')); + * + * const c = new ArrayBuffer(10); + * console.log(Buffer.from(randomFillSync(c)).toString('hex')); + * ``` + * @since v7.10.0, v6.13.0 + * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`. + * @param [offset=0] + * @param [size=buffer.length - offset] + * @return The object passed as `buffer` argument. + */ + function randomFillSync(buffer: T, offset?: number, size?: number): T; + /** + * This function is similar to {@link randomBytes} but requires the first + * argument to be a `Buffer` that will be filled. It also + * requires that a callback is passed in. + * + * If the `callback` function is not provided, an error will be thrown. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFill } = await import('node:crypto'); + * + * const buf = Buffer.alloc(10); + * randomFill(buf, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * + * randomFill(buf, 5, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * + * // The above is equivalent to the following: + * randomFill(buf, 5, 5, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * ``` + * + * Any `ArrayBuffer`, `TypedArray`, or `DataView` instance may be passed as`buffer`. + * + * While this includes instances of `Float32Array` and `Float64Array`, this + * function should not be used to generate random floating-point numbers. The + * result may contain `+Infinity`, `-Infinity`, and `NaN`, and even if the array + * contains finite numbers only, they are not drawn from a uniform random + * distribution and have no meaningful lower or upper bounds. 
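+ *
+ * If uniformly distributed floating-point values are needed, one workaround (a
+ * sketch, not part of the original text) is to derive them from random integer
+ * bits instead:
+ *
+ * ```js
+ * const { randomBytes } = await import('node:crypto');
+ *
+ * // 48 random bits mapped to a uniform double in [0, 1).
+ * const uniform = randomBytes(6).readUIntBE(0, 6) / 2 ** 48;
+ * console.log(uniform);
+ * ```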
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFill } = await import('node:crypto'); + * + * const a = new Uint32Array(10); + * randomFill(a, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength) + * .toString('hex')); + * }); + * + * const b = new DataView(new ArrayBuffer(10)); + * randomFill(b, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength) + * .toString('hex')); + * }); + * + * const c = new ArrayBuffer(10); + * randomFill(c, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf).toString('hex')); + * }); + * ``` + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * + * The asynchronous version of `crypto.randomFill()` is carried out in a single + * threadpool request. To minimize threadpool task length variation, partition + * large `randomFill` requests when doing so as part of fulfilling a client + * request. + * @since v7.10.0, v6.13.0 + * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`. + * @param [offset=0] + * @param [size=buffer.length - offset] + * @param callback `function(err, buf) {}`. + */ + function randomFill( + buffer: T, + callback: (err: Error | null, buf: T) => void, + ): void; + function randomFill( + buffer: T, + offset: number, + callback: (err: Error | null, buf: T) => void, + ): void; + function randomFill( + buffer: T, + offset: number, + size: number, + callback: (err: Error | null, buf: T) => void, + ): void; + interface ScryptOptions { + cost?: number | undefined; + blockSize?: number | undefined; + parallelization?: number | undefined; + N?: number | undefined; + r?: number | undefined; + p?: number | undefined; + maxmem?: number | undefined; + } + /** + * Provides an asynchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based + * key derivation function that is designed to be expensive computationally and + * memory-wise in order to make brute-force attacks unrewarding. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * The `callback` function is called with two arguments: `err` and `derivedKey`.`err` is an exception object when key derivation fails, otherwise `err` is`null`. `derivedKey` is passed to the + * callback as a `Buffer`. + * + * An exception is thrown when any of the input arguments specify invalid values + * or types. + * + * ```js + * const { + * scrypt, + * } = await import('node:crypto'); + * + * // Using the factory defaults. + * scrypt('password', 'salt', 64, (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...08d59ae' + * }); + * // Using a custom N parameter. Must be a power of two. 
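+ * // (Illustrative note, not from the original text: the default `maxmem` is
+ * // 32 * 1024 * 1024 bytes, so much larger N values may also require raising
+ * // the `maxmem` option.)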
+ * scrypt('password', 'salt', 64, { N: 1024 }, (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...aa39b34' + * }); + * ``` + * @since v10.5.0 + */ + function scrypt( + password: BinaryLike, + salt: BinaryLike, + keylen: number, + callback: (err: Error | null, derivedKey: Buffer) => void, + ): void; + function scrypt( + password: BinaryLike, + salt: BinaryLike, + keylen: number, + options: ScryptOptions, + callback: (err: Error | null, derivedKey: Buffer) => void, + ): void; + /** + * Provides a synchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based + * key derivation function that is designed to be expensive computationally and + * memory-wise in order to make brute-force attacks unrewarding. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * An exception is thrown when key derivation fails, otherwise the derived key is + * returned as a `Buffer`. + * + * An exception is thrown when any of the input arguments specify invalid values + * or types. + * + * ```js + * const { + * scryptSync, + * } = await import('node:crypto'); + * // Using the factory defaults. + * + * const key1 = scryptSync('password', 'salt', 64); + * console.log(key1.toString('hex')); // '3745e48...08d59ae' + * // Using a custom N parameter. Must be a power of two. + * const key2 = scryptSync('password', 'salt', 64, { N: 1024 }); + * console.log(key2.toString('hex')); // '3745e48...aa39b34' + * ``` + * @since v10.5.0 + */ + function scryptSync(password: BinaryLike, salt: BinaryLike, keylen: number, options?: ScryptOptions): Buffer; + interface RsaPublicKey { + key: KeyLike; + padding?: number | undefined; + } + interface RsaPrivateKey { + key: KeyLike; + passphrase?: string | undefined; + /** + * @default 'sha1' + */ + oaepHash?: string | undefined; + oaepLabel?: NodeJS.TypedArray | undefined; + padding?: number | undefined; + } + /** + * Encrypts the content of `buffer` with `key` and returns a new `Buffer` with encrypted content. The returned data can be decrypted using + * the corresponding private key, for example using {@link privateDecrypt}. + * + * If `key` is not a `KeyObject`, this function behaves as if`key` had been passed to {@link createPublicKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_OAEP_PADDING`. + * + * Because RSA public keys can be derived from private keys, a private key may + * be passed instead of a public key. + * @since v0.11.14 + */ + function publicEncrypt(key: RsaPublicKey | RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Decrypts `buffer` with `key`.`buffer` was previously encrypted using + * the corresponding private key, for example using {@link privateEncrypt}. + * + * If `key` is not a `KeyObject`, this function behaves as if`key` had been passed to {@link createPublicKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_PADDING`. + * + * Because RSA public keys can be derived from private keys, a private key may + * be passed instead of a public key. 
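+ *
+ * A round-trip sketch for the encrypt/decrypt pair in this section (illustrative
+ * only; the 2048-bit modulus is just an example):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const {
+ *   generateKeyPairSync,
+ *   publicEncrypt,
+ *   privateDecrypt,
+ * } = await import('node:crypto');
+ *
+ * const { publicKey, privateKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });
+ * const ciphertext = publicEncrypt(publicKey, Buffer.from('secret message'));
+ * console.log(privateDecrypt(privateKey, ciphertext).toString()); // 'secret message'
+ * ```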
+ * @since v1.1.0 + */ + function publicDecrypt(key: RsaPublicKey | RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Decrypts `buffer` with `privateKey`. `buffer` was previously encrypted using + * the corresponding public key, for example using {@link publicEncrypt}. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_OAEP_PADDING`. + * @since v0.11.14 + */ + function privateDecrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Encrypts `buffer` with `privateKey`. The returned data can be decrypted using + * the corresponding public key, for example using {@link publicDecrypt}. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_PADDING`. + * @since v1.1.0 + */ + function privateEncrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * ```js + * const { + * getCiphers, + * } = await import('node:crypto'); + * + * console.log(getCiphers()); // ['aes-128-cbc', 'aes-128-ccm', ...] + * ``` + * @since v0.9.3 + * @return An array with the names of the supported cipher algorithms. + */ + function getCiphers(): string[]; + /** + * ```js + * const { + * getCurves, + * } = await import('node:crypto'); + * + * console.log(getCurves()); // ['Oakley-EC2N-3', 'Oakley-EC2N-4', ...] + * ``` + * @since v2.3.0 + * @return An array with the names of the supported elliptic curves. + */ + function getCurves(): string[]; + /** + * @since v10.0.0 + * @return `1` if and only if a FIPS compliant crypto provider is currently in use, `0` otherwise. A future semver-major release may change the return type of this API to a {boolean}. + */ + function getFips(): 1 | 0; + /** + * Enables the FIPS compliant crypto provider in a FIPS-enabled Node.js build. + * Throws an error if FIPS mode is not available. + * @since v10.0.0 + * @param bool `true` to enable FIPS mode. + */ + function setFips(bool: boolean): void; + /** + * ```js + * const { + * getHashes, + * } = await import('node:crypto'); + * + * console.log(getHashes()); // ['DSA', 'DSA-SHA', 'DSA-SHA1', ...] + * ``` + * @since v0.9.3 + * @return An array of the names of the supported hash algorithms, such as `'RSA-SHA256'`. Hash algorithms are also called "digest" algorithms. + */ + function getHashes(): string[]; + /** + * The `ECDH` class is a utility for creating Elliptic Curve Diffie-Hellman (ECDH) + * key exchanges. + * + * Instances of the `ECDH` class can be created using the {@link createECDH} function. + * + * ```js + * import assert from 'node:assert'; + * + * const { + * createECDH, + * } = await import('node:crypto'); + * + * // Generate Alice's keys... + * const alice = createECDH('secp521r1'); + * const aliceKey = alice.generateKeys(); + * + * // Generate Bob's keys... + * const bob = createECDH('secp521r1'); + * const bobKey = bob.generateKeys(); + * + * // Exchange and generate the secret... 
+ * const aliceSecret = alice.computeSecret(bobKey); + * const bobSecret = bob.computeSecret(aliceKey); + * + * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex')); + * // OK + * ``` + * @since v0.11.14 + */ + class ECDH { + private constructor(); + /** + * Converts the EC Diffie-Hellman public key specified by `key` and `curve` to the + * format specified by `format`. The `format` argument specifies point encoding + * and can be `'compressed'`, `'uncompressed'` or `'hybrid'`. The supplied key is + * interpreted using the specified `inputEncoding`, and the returned key is encoded + * using the specified `outputEncoding`. + * + * Use {@link getCurves} to obtain a list of available curve names. + * On recent OpenSSL releases, `openssl ecparam -list_curves` will also display + * the name and description of each available elliptic curve. + * + * If `format` is not specified the point will be returned in `'uncompressed'`format. + * + * If the `inputEncoding` is not provided, `key` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * Example (uncompressing a key): + * + * ```js + * const { + * createECDH, + * ECDH, + * } = await import('node:crypto'); + * + * const ecdh = createECDH('secp256k1'); + * ecdh.generateKeys(); + * + * const compressedKey = ecdh.getPublicKey('hex', 'compressed'); + * + * const uncompressedKey = ECDH.convertKey(compressedKey, + * 'secp256k1', + * 'hex', + * 'hex', + * 'uncompressed'); + * + * // The converted key and the uncompressed public key should be the same + * console.log(uncompressedKey === ecdh.getPublicKey('hex')); + * ``` + * @since v10.0.0 + * @param inputEncoding The `encoding` of the `key` string. + * @param outputEncoding The `encoding` of the return value. + * @param [format='uncompressed'] + */ + static convertKey( + key: BinaryLike, + curve: string, + inputEncoding?: BinaryToTextEncoding, + outputEncoding?: "latin1" | "hex" | "base64" | "base64url", + format?: "uncompressed" | "compressed" | "hybrid", + ): Buffer | string; + /** + * Generates private and public EC Diffie-Hellman key values, and returns + * the public key in the specified `format` and `encoding`. This key should be + * transferred to the other party. + * + * The `format` argument specifies point encoding and can be `'compressed'` or`'uncompressed'`. If `format` is not specified, the point will be returned in`'uncompressed'` format. + * + * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @param [format='uncompressed'] + */ + generateKeys(): Buffer; + generateKeys(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; + /** + * Computes the shared secret using `otherPublicKey` as the other + * party's public key and returns the computed shared secret. The supplied + * key is interpreted using specified `inputEncoding`, and the returned secret + * is encoded using the specified `outputEncoding`. + * If the `inputEncoding` is not + * provided, `otherPublicKey` is expected to be a `Buffer`, `TypedArray`, or`DataView`. + * + * If `outputEncoding` is given a string will be returned; otherwise a `Buffer` is returned. + * + * `ecdh.computeSecret` will throw an`ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY` error when `otherPublicKey`lies outside of the elliptic curve. Since `otherPublicKey` is + * usually supplied from a remote user over an insecure network, + * be sure to handle this exception accordingly. 
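+ *
+ * For example, the call might be guarded as sketched below (`untrustedPeerKey`
+ * stands for bytes received from the remote party and is not part of the
+ * original text):
+ *
+ * ```js
+ * const { createECDH } = await import('node:crypto');
+ *
+ * const ecdh = createECDH('secp256k1');
+ * ecdh.generateKeys();
+ *
+ * let secret;
+ * try {
+ *   secret = ecdh.computeSecret(untrustedPeerKey);
+ * } catch (err) {
+ *   // Invalid point for the curve: abort the handshake instead of crashing.
+ * }
+ * ```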
+ * @since v0.11.14 + * @param inputEncoding The `encoding` of the `otherPublicKey` string. + * @param outputEncoding The `encoding` of the return value. + */ + computeSecret(otherPublicKey: NodeJS.ArrayBufferView): Buffer; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding): Buffer; + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, outputEncoding: BinaryToTextEncoding): string; + computeSecret( + otherPublicKey: string, + inputEncoding: BinaryToTextEncoding, + outputEncoding: BinaryToTextEncoding, + ): string; + /** + * If `encoding` is specified, a string is returned; otherwise a `Buffer` is + * returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @return The EC Diffie-Hellman in the specified `encoding`. + */ + getPrivateKey(): Buffer; + getPrivateKey(encoding: BinaryToTextEncoding): string; + /** + * The `format` argument specifies point encoding and can be `'compressed'` or`'uncompressed'`. If `format` is not specified the point will be returned in`'uncompressed'` format. + * + * If `encoding` is specified, a string is returned; otherwise a `Buffer` is + * returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @param [format='uncompressed'] + * @return The EC Diffie-Hellman public key in the specified `encoding` and `format`. + */ + getPublicKey(encoding?: null, format?: ECDHKeyFormat): Buffer; + getPublicKey(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; + /** + * Sets the EC Diffie-Hellman private key. + * If `encoding` is provided, `privateKey` is expected + * to be a string; otherwise `privateKey` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * If `privateKey` is not valid for the curve specified when the `ECDH` object was + * created, an error is thrown. Upon setting the private key, the associated + * public point (key) is also generated and set in the `ECDH` object. + * @since v0.11.14 + * @param encoding The `encoding` of the `privateKey` string. + */ + setPrivateKey(privateKey: NodeJS.ArrayBufferView): void; + setPrivateKey(privateKey: string, encoding: BinaryToTextEncoding): void; + } + /** + * Creates an Elliptic Curve Diffie-Hellman (`ECDH`) key exchange object using a + * predefined curve specified by the `curveName` string. Use {@link getCurves} to obtain a list of available curve names. On recent + * OpenSSL releases, `openssl ecparam -list_curves` will also display the name + * and description of each available elliptic curve. + * @since v0.11.14 + */ + function createECDH(curveName: string): ECDH; + /** + * This function compares the underlying bytes that represent the given`ArrayBuffer`, `TypedArray`, or `DataView` instances using a constant-time + * algorithm. + * + * This function does not leak timing information that + * would allow an attacker to guess one of the values. This is suitable for + * comparing HMAC digests or secret values like authentication cookies or [capability urls](https://www.w3.org/TR/capability-urls/). + * + * `a` and `b` must both be `Buffer`s, `TypedArray`s, or `DataView`s, and they + * must have the same byte length. An error is thrown if `a` and `b` have + * different byte lengths. + * + * If at least one of `a` and `b` is a `TypedArray` with more than one byte per + * entry, such as `Uint16Array`, the result will be computed using the platform + * byte order. 
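+ *
+ * A typical use, sketched here (the `secretKey`, `payload`, and `signatureHex`
+ * names are illustrative and not from the original text), is comparing two MAC
+ * values of equal length:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { createHmac, timingSafeEqual } = await import('node:crypto');
+ *
+ * const expected = createHmac('sha256', secretKey).update(payload).digest();
+ * const received = Buffer.from(signatureHex, 'hex');
+ * // Lengths must match, otherwise timingSafeEqual() throws.
+ * const ok = received.length === expected.length && timingSafeEqual(received, expected);
+ * ```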
+ * + * **When both of the inputs are `Float32Array`s or`Float64Array`s, this function might return unexpected results due to IEEE 754** + * **encoding of floating-point numbers. In particular, neither `x === y` nor`Object.is(x, y)` implies that the byte representations of two floating-point** + * **numbers `x` and `y` are equal.** + * + * Use of `crypto.timingSafeEqual` does not guarantee that the _surrounding_ code + * is timing-safe. Care should be taken to ensure that the surrounding code does + * not introduce timing vulnerabilities. + * @since v6.6.0 + */ + function timingSafeEqual(a: NodeJS.ArrayBufferView, b: NodeJS.ArrayBufferView): boolean; + type KeyType = "rsa" | "rsa-pss" | "dsa" | "ec" | "ed25519" | "ed448" | "x25519" | "x448"; + type KeyFormat = "pem" | "der" | "jwk"; + interface BasePrivateKeyEncodingOptions { + format: T; + cipher?: string | undefined; + passphrase?: string | undefined; + } + interface KeyPairKeyObjectResult { + publicKey: KeyObject; + privateKey: KeyObject; + } + interface ED25519KeyPairKeyObjectOptions {} + interface ED448KeyPairKeyObjectOptions {} + interface X25519KeyPairKeyObjectOptions {} + interface X448KeyPairKeyObjectOptions {} + interface ECKeyPairKeyObjectOptions { + /** + * Name of the curve to use + */ + namedCurve: string; + /** + * Must be `'named'` or `'explicit'`. Default: `'named'`. + */ + paramEncoding?: "explicit" | "named" | undefined; + } + interface RSAKeyPairKeyObjectOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + } + interface RSAPSSKeyPairKeyObjectOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + /** + * Name of the message digest + */ + hashAlgorithm?: string; + /** + * Name of the message digest used by MGF1 + */ + mgf1HashAlgorithm?: string; + /** + * Minimal salt length in bytes + */ + saltLength?: string; + } + interface DSAKeyPairKeyObjectOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Size of q in bits + */ + divisorLength: number; + } + interface RSAKeyPairOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + publicKeyEncoding: { + type: "pkcs1" | "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "pkcs1" | "pkcs8"; + }; + } + interface RSAPSSKeyPairOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + /** + * Name of the message digest + */ + hashAlgorithm?: string; + /** + * Name of the message digest used by MGF1 + */ + mgf1HashAlgorithm?: string; + /** + * Minimal salt length in bytes + */ + saltLength?: string; + publicKeyEncoding: { + type: "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "pkcs8"; + }; + } + interface DSAKeyPairOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Size of q in bits + */ + divisorLength: number; + publicKeyEncoding: { + type: "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "pkcs8"; + }; + } + interface ECKeyPairOptions extends ECKeyPairKeyObjectOptions { + publicKeyEncoding: { + type: "pkcs1" | "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + 
type: "sec1" | "pkcs8"; + }; + } + interface ED25519KeyPairOptions { + publicKeyEncoding: { + type: "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "pkcs8"; + }; + } + interface ED448KeyPairOptions { + publicKeyEncoding: { + type: "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "pkcs8"; + }; + } + interface X25519KeyPairOptions { + publicKeyEncoding: { + type: "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "pkcs8"; + }; + } + interface X448KeyPairOptions { + publicKeyEncoding: { + type: "spki"; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: "pkcs8"; + }; + } + interface KeyPairSyncResult { + publicKey: T1; + privateKey: T2; + } + /** + * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC, + * Ed25519, Ed448, X25519, X448, and DH are currently supported. + * + * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function + * behaves as if `keyObject.export()` had been called on its result. Otherwise, + * the respective part of the key is returned as a `KeyObject`. + * + * When encoding public keys, it is recommended to use `'spki'`. When encoding + * private keys, it is recommended to use `'pkcs8'` with a strong passphrase, + * and to keep the passphrase confidential. + * + * ```js + * const { + * generateKeyPairSync, + * } = await import('node:crypto'); + * + * const { + * publicKey, + * privateKey, + * } = generateKeyPairSync('rsa', { + * modulusLength: 4096, + * publicKeyEncoding: { + * type: 'spki', + * format: 'pem', + * }, + * privateKeyEncoding: { + * type: 'pkcs8', + * format: 'pem', + * cipher: 'aes-256-cbc', + * passphrase: 'top secret', + * }, + * }); + * ``` + * + * The return value `{ publicKey, privateKey }` represents the generated key pair. + * When PEM encoding was selected, the respective key will be a string, otherwise + * it will be a buffer containing the data encoded as DER. + * @since v10.12.0 + * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`. 
+ */ + function generateKeyPairSync( + type: "rsa", + options: RSAKeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa", + options: RSAKeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa", + options: RSAKeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa", + options: RSAKeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "rsa", options: RSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "rsa-pss", options: RSAPSSKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "dsa", + options: DSAKeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "dsa", + options: DSAKeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "dsa", + options: DSAKeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "dsa", + options: DSAKeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "dsa", options: DSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "ec", + options: ECKeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ec", + options: ECKeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ec", + options: ECKeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ec", + options: ECKeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "ec", options: ECKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "ed25519", options?: ED25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "ed448", + options: ED448KeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed448", + options: ED448KeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed448", + options: ED448KeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed448", + options: ED448KeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "ed448", options?: ED448KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "x25519", + options: 
X25519KeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x25519", + options: X25519KeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x25519", + options: X25519KeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x25519", + options: X25519KeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "x25519", options?: X25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "x448", + options: X448KeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x448", + options: X448KeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x448", + options: X448KeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x448", + options: X448KeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "x448", options?: X448KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + /** + * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC, + * Ed25519, Ed448, X25519, X448, and DH are currently supported. + * + * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function + * behaves as if `keyObject.export()` had been called on its result. Otherwise, + * the respective part of the key is returned as a `KeyObject`. + * + * It is recommended to encode public keys as `'spki'` and private keys as`'pkcs8'` with encryption for long-term storage: + * + * ```js + * const { + * generateKeyPair, + * } = await import('node:crypto'); + * + * generateKeyPair('rsa', { + * modulusLength: 4096, + * publicKeyEncoding: { + * type: 'spki', + * format: 'pem', + * }, + * privateKeyEncoding: { + * type: 'pkcs8', + * format: 'pem', + * cipher: 'aes-256-cbc', + * passphrase: 'top secret', + * }, + * }, (err, publicKey, privateKey) => { + * // Handle errors and use the generated key pair. + * }); + * ``` + * + * On completion, `callback` will be called with `err` set to `undefined` and`publicKey` / `privateKey` representing the generated key pair. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `publicKey` and `privateKey` properties. + * @since v10.12.0 + * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`. 
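+ *
+ * A sketch of the promisified form mentioned above (assuming top-level `await`
+ * is available; the empty options object is only an example):
+ *
+ * ```js
+ * const { generateKeyPair } = await import('node:crypto');
+ * const { promisify } = await import('node:util');
+ *
+ * const { publicKey, privateKey } = await promisify(generateKeyPair)('ed25519', {});
+ * console.log(publicKey.asymmetricKeyType); // 'ed25519'
+ * ```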
+ */ + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, 
privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + 
): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + namespace generateKeyPair { + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "rsa", options: RSAKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairKeyObjectOptions, + ): Promise; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "dsa", options: DSAKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "ec", options: ECKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "der">, 
+ ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__( + type: "ed25519", + options?: ED25519KeyPairKeyObjectOptions, + ): Promise; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "ed448", options?: ED448KeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__( + type: "x25519", + options?: X25519KeyPairKeyObjectOptions, + ): Promise; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "x448", options?: X448KeyPairKeyObjectOptions): Promise; + } + /** + * Calculates and returns the signature for `data` using the given private key and + * algorithm. If `algorithm` is `null` or `undefined`, then the algorithm is + * dependent upon the key type (especially Ed25519 and Ed448). + * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been + * passed to {@link createPrivateKey}. If it is an object, the following + * additional properties can be passed: + * + * If the `callback` function is provided this function uses libuv's threadpool. + * @since v12.0.0 + */ + function sign( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput, + ): Buffer; + function sign( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput, + callback: (error: Error | null, data: Buffer) => void, + ): void; + /** + * Verifies the given signature for `data` using the given key and algorithm. If`algorithm` is `null` or `undefined`, then the algorithm is dependent upon the + * key type (especially Ed25519 and Ed448). + * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been + * passed to {@link createPublicKey}. 
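+ *
+ * A minimal end-to-end sketch for the one-shot `sign()`/`verify()` pair documented
+ * here (illustrative only; Ed25519 is used because `algorithm` may then be `null`):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { generateKeyPairSync, sign, verify } = await import('node:crypto');
+ *
+ * const { publicKey, privateKey } = generateKeyPairSync('ed25519');
+ * const data = Buffer.from('some data to sign');
+ *
+ * const signature = sign(null, data, privateKey);
+ * console.log(verify(null, data, publicKey, signature)); // true
+ * ```
+ *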
If it is an object, the following + * additional properties can be passed: + * + * The `signature` argument is the previously calculated signature for the `data`. + * + * Because public keys can be derived from private keys, a private key or a public + * key may be passed for `key`. + * + * If the `callback` function is provided this function uses libuv's threadpool. + * @since v12.0.0 + */ + function verify( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: NodeJS.ArrayBufferView, + ): boolean; + function verify( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: NodeJS.ArrayBufferView, + callback: (error: Error | null, result: boolean) => void, + ): void; + /** + * Computes the Diffie-Hellman secret based on a `privateKey` and a `publicKey`. + * Both keys must have the same `asymmetricKeyType`, which must be one of `'dh'`(for Diffie-Hellman), `'ec'` (for ECDH), `'x448'`, or `'x25519'` (for ECDH-ES). + * @since v13.9.0, v12.17.0 + */ + function diffieHellman(options: { privateKey: KeyObject; publicKey: KeyObject }): Buffer; + type CipherMode = "cbc" | "ccm" | "cfb" | "ctr" | "ecb" | "gcm" | "ocb" | "ofb" | "stream" | "wrap" | "xts"; + interface CipherInfoOptions { + /** + * A test key length. + */ + keyLength?: number | undefined; + /** + * A test IV length. + */ + ivLength?: number | undefined; + } + interface CipherInfo { + /** + * The name of the cipher. + */ + name: string; + /** + * The nid of the cipher. + */ + nid: number; + /** + * The block size of the cipher in bytes. + * This property is omitted when mode is 'stream'. + */ + blockSize?: number | undefined; + /** + * The expected or default initialization vector length in bytes. + * This property is omitted if the cipher does not use an initialization vector. + */ + ivLength?: number | undefined; + /** + * The expected or default key length in bytes. + */ + keyLength: number; + /** + * The cipher mode. + */ + mode: CipherMode; + } + /** + * Returns information about a given cipher. + * + * Some ciphers accept variable length keys and initialization vectors. By default, + * the `crypto.getCipherInfo()` method will return the default values for these + * ciphers. To test if a given key length or iv length is acceptable for given + * cipher, use the `keyLength` and `ivLength` options. If the given values are + * unacceptable, `undefined` will be returned. + * @since v15.0.0 + * @param nameOrNid The name or nid of the cipher to query. + */ + function getCipherInfo(nameOrNid: string | number, options?: CipherInfoOptions): CipherInfo | undefined; + /** + * HKDF is a simple key derivation function defined in RFC 5869\. The given `ikm`,`salt` and `info` are used with the `digest` to derive a key of `keylen` bytes. + * + * The supplied `callback` function is called with two arguments: `err` and`derivedKey`. If an errors occurs while deriving the key, `err` will be set; + * otherwise `err` will be `null`. The successfully generated `derivedKey` will + * be passed to the callback as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). An error will be thrown if any + * of the input arguments specify invalid values or types. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * hkdf, + * } = await import('node:crypto'); + * + * hkdf('sha512', 'key', 'salt', 'info', 64, (err, derivedKey) => { + * if (err) throw err; + * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' + * }); + * ``` + * @since v15.0.0 + * @param digest The digest algorithm to use. + * @param ikm The input keying material. Must be provided but can be zero-length. + * @param salt The salt value. Must be provided but can be zero-length. + * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes. + * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512` + * generates 64-byte hashes, making the maximum HKDF output 16320 bytes). + */ + function hkdf( + digest: string, + irm: BinaryLike | KeyObject, + salt: BinaryLike, + info: BinaryLike, + keylen: number, + callback: (err: Error | null, derivedKey: ArrayBuffer) => void, + ): void; + /** + * Provides a synchronous HKDF key derivation function as defined in RFC 5869\. The + * given `ikm`, `salt` and `info` are used with the `digest` to derive a key of`keylen` bytes. + * + * The successfully generated `derivedKey` will be returned as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). + * + * An error will be thrown if any of the input arguments specify invalid values or + * types, or if the derived key cannot be generated. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * hkdfSync, + * } = await import('node:crypto'); + * + * const derivedKey = hkdfSync('sha512', 'key', 'salt', 'info', 64); + * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' + * ``` + * @since v15.0.0 + * @param digest The digest algorithm to use. + * @param ikm The input keying material. Must be provided but can be zero-length. + * @param salt The salt value. Must be provided but can be zero-length. + * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes. + * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512` + * generates 64-byte hashes, making the maximum HKDF output 16320 bytes). + */ + function hkdfSync( + digest: string, + ikm: BinaryLike | KeyObject, + salt: BinaryLike, + info: BinaryLike, + keylen: number, + ): ArrayBuffer; + interface SecureHeapUsage { + /** + * The total allocated secure heap size as specified using the `--secure-heap=n` command-line flag. + */ + total: number; + /** + * The minimum allocation from the secure heap as specified using the `--secure-heap-min` command-line flag. + */ + min: number; + /** + * The total number of bytes currently allocated from the secure heap. + */ + used: number; + /** + * The calculated ratio of `used` to `total` allocated bytes. + */ + utilization: number; + } + /** + * @since v15.6.0 + */ + function secureHeapUsed(): SecureHeapUsage; + interface RandomUUIDOptions { + /** + * By default, to improve performance, + * Node.js will pre-emptively generate and persistently cache enough + * random data to generate up to 128 random UUIDs. To generate a UUID + * without using the cache, set `disableEntropyCache` to `true`. 
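+ *
+ * Illustrative sketch (not part of the original text):
+ *
+ * ```js
+ * const { randomUUID } = await import('node:crypto');
+ *
+ * randomUUID();                              // uses the entropy cache
+ * randomUUID({ disableEntropyCache: true }); // bypasses the cache
+ * ```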
+ * + * @default `false` + */ + disableEntropyCache?: boolean | undefined; + } + type UUID = `${string}-${string}-${string}-${string}-${string}`; + /** + * Generates a random [RFC 4122](https://www.rfc-editor.org/rfc/rfc4122.txt) version 4 UUID. The UUID is generated using a + * cryptographic pseudorandom number generator. + * @since v15.6.0, v14.17.0 + */ + function randomUUID(options?: RandomUUIDOptions): UUID; + interface X509CheckOptions { + /** + * @default 'always' + */ + subject?: "always" | "default" | "never"; + /** + * @default true + */ + wildcards?: boolean; + /** + * @default true + */ + partialWildcards?: boolean; + /** + * @default false + */ + multiLabelWildcards?: boolean; + /** + * @default false + */ + singleLabelSubdomains?: boolean; + } + /** + * Encapsulates an X509 certificate and provides read-only access to + * its information. + * + * ```js + * const { X509Certificate } = await import('node:crypto'); + * + * const x509 = new X509Certificate('{... pem encoded cert ...}'); + * + * console.log(x509.subject); + * ``` + * @since v15.6.0 + */ + class X509Certificate { + /** + * Will be \`true\` if this is a Certificate Authority (CA) certificate. + * @since v15.6.0 + */ + readonly ca: boolean; + /** + * The SHA-1 fingerprint of this certificate. + * + * Because SHA-1 is cryptographically broken and because the security of SHA-1 is + * significantly worse than that of algorithms that are commonly used to sign + * certificates, consider using `x509.fingerprint256` instead. + * @since v15.6.0 + */ + readonly fingerprint: string; + /** + * The SHA-256 fingerprint of this certificate. + * @since v15.6.0 + */ + readonly fingerprint256: string; + /** + * The SHA-512 fingerprint of this certificate. + * + * Because computing the SHA-256 fingerprint is usually faster and because it is + * only half the size of the SHA-512 fingerprint, `x509.fingerprint256` may be + * a better choice. While SHA-512 presumably provides a higher level of security in + * general, the security of SHA-256 matches that of most algorithms that are + * commonly used to sign certificates. + * @since v17.2.0, v16.14.0 + */ + readonly fingerprint512: string; + /** + * The complete subject of this certificate. + * @since v15.6.0 + */ + readonly subject: string; + /** + * The subject alternative name specified for this certificate. + * + * This is a comma-separated list of subject alternative names. Each entry begins + * with a string identifying the kind of the subject alternative name followed by + * a colon and the value associated with the entry. + * + * Earlier versions of Node.js incorrectly assumed that it is safe to split this + * property at the two-character sequence `', '` (see [CVE-2021-44532](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44532)). However, + * both malicious and legitimate certificates can contain subject alternative names + * that include this sequence when represented as a string. + * + * After the prefix denoting the type of the entry, the remainder of each entry + * might be enclosed in quotes to indicate that the value is a JSON string literal. + * For backward compatibility, Node.js only uses JSON string literals within this + * property when necessary to avoid ambiguity. Third-party code should be prepared + * to handle both possible entry formats. + * @since v15.6.0 + */ + readonly subjectAltName: string | undefined; + /** + * A textual representation of the certificate's authority information access + * extension. 
+ * + * This is a line feed separated list of access descriptions. Each line begins with + * the access method and the kind of the access location, followed by a colon and + * the value associated with the access location. + * + * After the prefix denoting the access method and the kind of the access location, + * the remainder of each line might be enclosed in quotes to indicate that the + * value is a JSON string literal. For backward compatibility, Node.js only uses + * JSON string literals within this property when necessary to avoid ambiguity. + * Third-party code should be prepared to handle both possible entry formats. + * @since v15.6.0 + */ + readonly infoAccess: string | undefined; + /** + * An array detailing the key usages for this certificate. + * @since v15.6.0 + */ + readonly keyUsage: string[]; + /** + * The issuer identification included in this certificate. + * @since v15.6.0 + */ + readonly issuer: string; + /** + * The issuer certificate or `undefined` if the issuer certificate is not + * available. + * @since v15.9.0 + */ + readonly issuerCertificate?: X509Certificate | undefined; + /** + * The public key `KeyObject` for this certificate. + * @since v15.6.0 + */ + readonly publicKey: KeyObject; + /** + * A `Buffer` containing the DER encoding of this certificate. + * @since v15.6.0 + */ + readonly raw: Buffer; + /** + * The serial number of this certificate. + * + * Serial numbers are assigned by certificate authorities and do not uniquely + * identify certificates. Consider using `x509.fingerprint256` as a unique + * identifier instead. + * @since v15.6.0 + */ + readonly serialNumber: string; + /** + * The date/time from which this certificate is considered valid. + * @since v15.6.0 + */ + readonly validFrom: string; + /** + * The date/time until which this certificate is considered valid. + * @since v15.6.0 + */ + readonly validTo: string; + constructor(buffer: BinaryLike); + /** + * Checks whether the certificate matches the given email address. + * + * If the `'subject'` option is undefined or set to `'default'`, the certificate + * subject is only considered if the subject alternative name extension either does + * not exist or does not contain any email addresses. + * + * If the `'subject'` option is set to `'always'` and if the subject alternative + * name extension either does not exist or does not contain a matching email + * address, the certificate subject is considered. + * + * If the `'subject'` option is set to `'never'`, the certificate subject is never + * considered, even if the certificate contains no subject alternative names. + * @since v15.6.0 + * @return Returns `email` if the certificate matches, `undefined` if it does not. + */ + checkEmail(email: string, options?: Pick): string | undefined; + /** + * Checks whether the certificate matches the given host name. + * + * If the certificate matches the given host name, the matching subject name is + * returned. The returned name might be an exact match (e.g., `foo.example.com`) + * or it might contain wildcards (e.g., `*.example.com`). Because host name + * comparisons are case-insensitive, the returned subject name might also differ + * from the given `name` in capitalization. + * + * If the `'subject'` option is undefined or set to `'default'`, the certificate + * subject is only considered if the subject alternative name extension either does + * not exist or does not contain any DNS names. This behavior is consistent with [RFC 2818](https://www.rfc-editor.org/rfc/rfc2818.txt) ("HTTP Over TLS"). 
+ * + * If the `'subject'` option is set to `'always'` and if the subject alternative + * name extension either does not exist or does not contain a matching DNS name, + * the certificate subject is considered. + * + * If the `'subject'` option is set to `'never'`, the certificate subject is never + * considered, even if the certificate contains no subject alternative names. + * @since v15.6.0 + * @return Returns a subject name that matches `name`, or `undefined` if no subject name matches `name`. + */ + checkHost(name: string, options?: X509CheckOptions): string | undefined; + /** + * Checks whether the certificate matches the given IP address (IPv4 or IPv6). + * + * Only [RFC 5280](https://www.rfc-editor.org/rfc/rfc5280.txt) `iPAddress` subject alternative names are considered, and they + * must match the given `ip` address exactly. Other subject alternative names as + * well as the subject field of the certificate are ignored. + * @since v15.6.0 + * @return Returns `ip` if the certificate matches, `undefined` if it does not. + */ + checkIP(ip: string): string | undefined; + /** + * Checks whether this certificate was issued by the given `otherCert`. + * @since v15.6.0 + */ + checkIssued(otherCert: X509Certificate): boolean; + /** + * Checks whether the public key for this certificate is consistent with + * the given private key. + * @since v15.6.0 + * @param privateKey A private key. + */ + checkPrivateKey(privateKey: KeyObject): boolean; + /** + * There is no standard JSON encoding for X509 certificates. The`toJSON()` method returns a string containing the PEM encoded + * certificate. + * @since v15.6.0 + */ + toJSON(): string; + /** + * Returns information about this certificate using the legacy `certificate object` encoding. + * @since v15.6.0 + */ + toLegacyObject(): PeerCertificate; + /** + * Returns the PEM-encoded certificate. + * @since v15.6.0 + */ + toString(): string; + /** + * Verifies that this certificate was signed by the given public key. + * Does not perform any other validation checks on the certificate. + * @since v15.6.0 + * @param publicKey A public key. + */ + verify(publicKey: KeyObject): boolean; + } + type LargeNumberLike = NodeJS.ArrayBufferView | SharedArrayBuffer | ArrayBuffer | bigint; + interface GeneratePrimeOptions { + add?: LargeNumberLike | undefined; + rem?: LargeNumberLike | undefined; + /** + * @default false + */ + safe?: boolean | undefined; + bigint?: boolean | undefined; + } + interface GeneratePrimeOptionsBigInt extends GeneratePrimeOptions { + bigint: true; + } + interface GeneratePrimeOptionsArrayBuffer extends GeneratePrimeOptions { + bigint?: false | undefined; + } + /** + * Generates a pseudorandom prime of `size` bits. + * + * If `options.safe` is `true`, the prime will be a safe prime -- that is,`(prime - 1) / 2` will also be a prime. + * + * The `options.add` and `options.rem` parameters can be used to enforce additional + * requirements, e.g., for Diffie-Hellman: + * + * * If `options.add` and `options.rem` are both set, the prime will satisfy the + * condition that `prime % add = rem`. + * * If only `options.add` is set and `options.safe` is not `true`, the prime will + * satisfy the condition that `prime % add = 1`. + * * If only `options.add` is set and `options.safe` is set to `true`, the prime + * will instead satisfy the condition that `prime % add = 3`. This is necessary + * because `prime % add = 1` for `options.add > 2` would contradict the condition + * enforced by `options.safe`. 
+ * * `options.rem` is ignored if `options.add` is not given. + * + * Both `options.add` and `options.rem` must be encoded as big-endian sequences + * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or`DataView`. + * + * By default, the prime is encoded as a big-endian sequence of octets + * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided. + * @since v15.8.0 + * @param size The size (in bits) of the prime to generate. + */ + function generatePrime(size: number, callback: (err: Error | null, prime: ArrayBuffer) => void): void; + function generatePrime( + size: number, + options: GeneratePrimeOptionsBigInt, + callback: (err: Error | null, prime: bigint) => void, + ): void; + function generatePrime( + size: number, + options: GeneratePrimeOptionsArrayBuffer, + callback: (err: Error | null, prime: ArrayBuffer) => void, + ): void; + function generatePrime( + size: number, + options: GeneratePrimeOptions, + callback: (err: Error | null, prime: ArrayBuffer | bigint) => void, + ): void; + /** + * Generates a pseudorandom prime of `size` bits. + * + * If `options.safe` is `true`, the prime will be a safe prime -- that is,`(prime - 1) / 2` will also be a prime. + * + * The `options.add` and `options.rem` parameters can be used to enforce additional + * requirements, e.g., for Diffie-Hellman: + * + * * If `options.add` and `options.rem` are both set, the prime will satisfy the + * condition that `prime % add = rem`. + * * If only `options.add` is set and `options.safe` is not `true`, the prime will + * satisfy the condition that `prime % add = 1`. + * * If only `options.add` is set and `options.safe` is set to `true`, the prime + * will instead satisfy the condition that `prime % add = 3`. This is necessary + * because `prime % add = 1` for `options.add > 2` would contradict the condition + * enforced by `options.safe`. + * * `options.rem` is ignored if `options.add` is not given. + * + * Both `options.add` and `options.rem` must be encoded as big-endian sequences + * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or`DataView`. + * + * By default, the prime is encoded as a big-endian sequence of octets + * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided. + * @since v15.8.0 + * @param size The size (in bits) of the prime to generate. + */ + function generatePrimeSync(size: number): ArrayBuffer; + function generatePrimeSync(size: number, options: GeneratePrimeOptionsBigInt): bigint; + function generatePrimeSync(size: number, options: GeneratePrimeOptionsArrayBuffer): ArrayBuffer; + function generatePrimeSync(size: number, options: GeneratePrimeOptions): ArrayBuffer | bigint; + interface CheckPrimeOptions { + /** + * The number of Miller-Rabin probabilistic primality iterations to perform. + * When the value is 0 (zero), a number of checks is used that yields a false positive rate of at most `2**-64` for random input. + * Care must be used when selecting a number of checks. + * Refer to the OpenSSL documentation for the BN_is_prime_ex function nchecks options for more details. 
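+ *
+ * A rough sketch of supplying `checks` explicitly (the value `10` below is purely illustrative):
+ *
+ * ```js
+ * const { generatePrime, checkPrime } = await import('node:crypto');
+ *
+ * generatePrime(512, (err, candidate) => {
+ *   if (err) throw err;
+ *   // Re-check the candidate with an explicit number of Miller-Rabin rounds.
+ *   checkPrime(candidate, { checks: 10 }, (err, isPrime) => {
+ *     if (err) throw err;
+ *     console.log(isPrime); // expected: true
+ *   });
+ * });
+ * ```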
+ * + * @default 0 + */ + checks?: number | undefined; + } + /** + * Checks the primality of the `candidate`. + * @since v15.8.0 + * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length. + */ + function checkPrime(value: LargeNumberLike, callback: (err: Error | null, result: boolean) => void): void; + function checkPrime( + value: LargeNumberLike, + options: CheckPrimeOptions, + callback: (err: Error | null, result: boolean) => void, + ): void; + /** + * Checks the primality of the `candidate`. + * @since v15.8.0 + * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length. + * @return `true` if the candidate is a prime with an error probability less than `0.25 ** options.checks`. + */ + function checkPrimeSync(candidate: LargeNumberLike, options?: CheckPrimeOptions): boolean; + /** + * Load and set the `engine` for some or all OpenSSL functions (selected by flags). + * + * `engine` could be either an id or a path to the engine's shared library. + * + * The optional `flags` argument uses `ENGINE_METHOD_ALL` by default. The `flags`is a bit field taking one of or a mix of the following flags (defined in`crypto.constants`): + * + * * `crypto.constants.ENGINE_METHOD_RSA` + * * `crypto.constants.ENGINE_METHOD_DSA` + * * `crypto.constants.ENGINE_METHOD_DH` + * * `crypto.constants.ENGINE_METHOD_RAND` + * * `crypto.constants.ENGINE_METHOD_EC` + * * `crypto.constants.ENGINE_METHOD_CIPHERS` + * * `crypto.constants.ENGINE_METHOD_DIGESTS` + * * `crypto.constants.ENGINE_METHOD_PKEY_METHS` + * * `crypto.constants.ENGINE_METHOD_PKEY_ASN1_METHS` + * * `crypto.constants.ENGINE_METHOD_ALL` + * * `crypto.constants.ENGINE_METHOD_NONE` + * @since v0.11.11 + * @param flags + */ + function setEngine(engine: string, flags?: number): void; + /** + * A convenient alias for {@link webcrypto.getRandomValues}. This + * implementation is not compliant with the Web Crypto spec, to write + * web-compatible code use {@link webcrypto.getRandomValues} instead. + * @since v17.4.0 + * @return Returns `typedArray`. + */ + function getRandomValues(typedArray: T): T; + /** + * A convenient alias for `crypto.webcrypto.subtle`. + * @since v17.4.0 + */ + const subtle: webcrypto.SubtleCrypto; + /** + * An implementation of the Web Crypto API standard. + * + * See the {@link https://nodejs.org/docs/latest/api/webcrypto.html Web Crypto API documentation} for details. 
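+ *
+ * A minimal, illustrative sketch of using this instance directly:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { webcrypto } = await import('node:crypto');
+ *
+ * // Fill a typed array with cryptographically strong random values.
+ * const nonce = webcrypto.getRandomValues(new Uint8Array(16));
+ * console.log(Buffer.from(nonce).toString('hex'));
+ * ```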
+ * @since v15.0.0 + */ + const webcrypto: webcrypto.Crypto; + namespace webcrypto { + type BufferSource = ArrayBufferView | ArrayBuffer; + type KeyFormat = "jwk" | "pkcs8" | "raw" | "spki"; + type KeyType = "private" | "public" | "secret"; + type KeyUsage = + | "decrypt" + | "deriveBits" + | "deriveKey" + | "encrypt" + | "sign" + | "unwrapKey" + | "verify" + | "wrapKey"; + type AlgorithmIdentifier = Algorithm | string; + type HashAlgorithmIdentifier = AlgorithmIdentifier; + type NamedCurve = string; + type BigInteger = Uint8Array; + interface AesCbcParams extends Algorithm { + iv: BufferSource; + } + interface AesCtrParams extends Algorithm { + counter: BufferSource; + length: number; + } + interface AesDerivedKeyParams extends Algorithm { + length: number; + } + interface AesGcmParams extends Algorithm { + additionalData?: BufferSource; + iv: BufferSource; + tagLength?: number; + } + interface AesKeyAlgorithm extends KeyAlgorithm { + length: number; + } + interface AesKeyGenParams extends Algorithm { + length: number; + } + interface Algorithm { + name: string; + } + interface EcKeyAlgorithm extends KeyAlgorithm { + namedCurve: NamedCurve; + } + interface EcKeyGenParams extends Algorithm { + namedCurve: NamedCurve; + } + interface EcKeyImportParams extends Algorithm { + namedCurve: NamedCurve; + } + interface EcdhKeyDeriveParams extends Algorithm { + public: CryptoKey; + } + interface EcdsaParams extends Algorithm { + hash: HashAlgorithmIdentifier; + } + interface Ed448Params extends Algorithm { + context?: BufferSource; + } + interface HkdfParams extends Algorithm { + hash: HashAlgorithmIdentifier; + info: BufferSource; + salt: BufferSource; + } + interface HmacImportParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; + } + interface HmacKeyAlgorithm extends KeyAlgorithm { + hash: KeyAlgorithm; + length: number; + } + interface HmacKeyGenParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; + } + interface JsonWebKey { + alg?: string; + crv?: string; + d?: string; + dp?: string; + dq?: string; + e?: string; + ext?: boolean; + k?: string; + key_ops?: string[]; + kty?: string; + n?: string; + oth?: RsaOtherPrimesInfo[]; + p?: string; + q?: string; + qi?: string; + use?: string; + x?: string; + y?: string; + } + interface KeyAlgorithm { + name: string; + } + interface Pbkdf2Params extends Algorithm { + hash: HashAlgorithmIdentifier; + iterations: number; + salt: BufferSource; + } + interface RsaHashedImportParams extends Algorithm { + hash: HashAlgorithmIdentifier; + } + interface RsaHashedKeyAlgorithm extends RsaKeyAlgorithm { + hash: KeyAlgorithm; + } + interface RsaHashedKeyGenParams extends RsaKeyGenParams { + hash: HashAlgorithmIdentifier; + } + interface RsaKeyAlgorithm extends KeyAlgorithm { + modulusLength: number; + publicExponent: BigInteger; + } + interface RsaKeyGenParams extends Algorithm { + modulusLength: number; + publicExponent: BigInteger; + } + interface RsaOaepParams extends Algorithm { + label?: BufferSource; + } + interface RsaOtherPrimesInfo { + d?: string; + r?: string; + t?: string; + } + interface RsaPssParams extends Algorithm { + saltLength: number; + } + /** + * Calling `require('node:crypto').webcrypto` returns an instance of the `Crypto` class. + * `Crypto` is a singleton that provides access to the remainder of the crypto API. + * @since v15.0.0 + */ + interface Crypto { + /** + * Provides access to the `SubtleCrypto` API. 
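+ *
+ * For instance, a small sketch computing a SHA-256 digest (the input string is arbitrary):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ * const { webcrypto } = await import('node:crypto');
+ *
+ * const data = new TextEncoder().encode('hello');
+ * const hash = await webcrypto.subtle.digest('SHA-256', data);
+ * console.log(Buffer.from(hash).toString('hex'));
+ * ```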
+ * @since v15.0.0 + */ + readonly subtle: SubtleCrypto; + /** + * Generates cryptographically strong random values. + * The given `typedArray` is filled with random values, and a reference to `typedArray` is returned. + * + * The given `typedArray` must be an integer-based instance of {@link NodeJS.TypedArray}, i.e. `Float32Array` and `Float64Array` are not accepted. + * + * An error will be thrown if the given `typedArray` is larger than 65,536 bytes. + * @since v15.0.0 + */ + getRandomValues>(typedArray: T): T; + /** + * Generates a random {@link https://www.rfc-editor.org/rfc/rfc4122.txt RFC 4122} version 4 UUID. + * The UUID is generated using a cryptographic pseudorandom number generator. + * @since v16.7.0 + */ + randomUUID(): UUID; + CryptoKey: CryptoKeyConstructor; + } + // This constructor throws ILLEGAL_CONSTRUCTOR so it should not be newable. + interface CryptoKeyConstructor { + /** Illegal constructor */ + (_: { readonly _: unique symbol }): never; // Allows instanceof to work but not be callable by the user. + readonly length: 0; + readonly name: "CryptoKey"; + readonly prototype: CryptoKey; + } + /** + * @since v15.0.0 + */ + interface CryptoKey { + /** + * An object detailing the algorithm for which the key can be used along with additional algorithm-specific parameters. + * @since v15.0.0 + */ + readonly algorithm: KeyAlgorithm; + /** + * When `true`, the {@link CryptoKey} can be extracted using either `subtleCrypto.exportKey()` or `subtleCrypto.wrapKey()`. + * @since v15.0.0 + */ + readonly extractable: boolean; + /** + * A string identifying whether the key is a symmetric (`'secret'`) or asymmetric (`'private'` or `'public'`) key. + * @since v15.0.0 + */ + readonly type: KeyType; + /** + * An array of strings identifying the operations for which the key may be used. + * + * The possible usages are: + * - `'encrypt'` - The key may be used to encrypt data. + * - `'decrypt'` - The key may be used to decrypt data. + * - `'sign'` - The key may be used to generate digital signatures. + * - `'verify'` - The key may be used to verify digital signatures. + * - `'deriveKey'` - The key may be used to derive a new key. + * - `'deriveBits'` - The key may be used to derive bits. + * - `'wrapKey'` - The key may be used to wrap another key. + * - `'unwrapKey'` - The key may be used to unwrap another key. + * + * Valid key usages depend on the key algorithm (identified by `cryptokey.algorithm.name`). + * @since v15.0.0 + */ + readonly usages: KeyUsage[]; + } + /** + * The `CryptoKeyPair` is a simple dictionary object with `publicKey` and `privateKey` properties, representing an asymmetric key pair. + * @since v15.0.0 + */ + interface CryptoKeyPair { + /** + * A {@link CryptoKey} whose type will be `'private'`. + * @since v15.0.0 + */ + privateKey: CryptoKey; + /** + * A {@link CryptoKey} whose type will be `'public'`. + * @since v15.0.0 + */ + publicKey: CryptoKey; + } + /** + * @since v15.0.0 + */ + interface SubtleCrypto { + /** + * Using the method and parameters specified in `algorithm` and the keying material provided by `key`, + * `subtle.decrypt()` attempts to decipher the provided `data`. If successful, + * the returned promise will be resolved with an `` containing the plaintext result. 
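+ *
+ * As an illustrative sketch (the key, IV, and plaintext below are throwaway values),
+ * encrypting and then decrypting with AES-GCM:
+ *
+ * ```js
+ * const { webcrypto } = await import('node:crypto');
+ * const { subtle } = webcrypto;
+ *
+ * const key = await subtle.generateKey({ name: 'AES-GCM', length: 256 }, true, ['encrypt', 'decrypt']);
+ * const iv = webcrypto.getRandomValues(new Uint8Array(12));
+ * const ciphertext = await subtle.encrypt({ name: 'AES-GCM', iv }, key, new TextEncoder().encode('secret'));
+ * const plaintext = await subtle.decrypt({ name: 'AES-GCM', iv }, key, ciphertext);
+ * console.log(new TextDecoder().decode(plaintext)); // 'secret'
+ * ```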
+ * + * The algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * @since v15.0.0 + */ + decrypt( + algorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + key: CryptoKey, + data: BufferSource, + ): Promise; + /** + * Using the method and parameters specified in `algorithm` and the keying material provided by `baseKey`, + * `subtle.deriveBits()` attempts to generate `length` bits. + * The Node.js implementation requires that when `length` is a number it must be multiple of `8`. + * When `length` is `null` the maximum number of bits for a given algorithm is generated. This is allowed + * for the `'ECDH'`, `'X25519'`, and `'X448'` algorithms. + * If successful, the returned promise will be resolved with an `` containing the generated data. + * + * The algorithms currently supported include: + * + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HKDF'` + * - `'PBKDF2'` + * @since v15.0.0 + */ + deriveBits(algorithm: EcdhKeyDeriveParams, baseKey: CryptoKey, length: number | null): Promise; + deriveBits( + algorithm: AlgorithmIdentifier | HkdfParams | Pbkdf2Params, + baseKey: CryptoKey, + length: number, + ): Promise; + /** + * Using the method and parameters specified in `algorithm`, and the keying material provided by `baseKey`, + * `subtle.deriveKey()` attempts to generate a new ` based on the method and parameters in `derivedKeyAlgorithm`. + * + * Calling `subtle.deriveKey()` is equivalent to calling `subtle.deriveBits()` to generate raw keying material, + * then passing the result into the `subtle.importKey()` method using the `deriveKeyAlgorithm`, `extractable`, and `keyUsages` parameters as input. + * + * The algorithms currently supported include: + * + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HKDF'` + * - `'PBKDF2'` + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + deriveKey( + algorithm: AlgorithmIdentifier | EcdhKeyDeriveParams | HkdfParams | Pbkdf2Params, + baseKey: CryptoKey, + derivedKeyAlgorithm: + | AlgorithmIdentifier + | AesDerivedKeyParams + | HmacImportParams + | HkdfParams + | Pbkdf2Params, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + /** + * Using the method identified by `algorithm`, `subtle.digest()` attempts to generate a digest of `data`. + * If successful, the returned promise is resolved with an `` containing the computed digest. + * + * If `algorithm` is provided as a ``, it must be one of: + * + * - `'SHA-1'` + * - `'SHA-256'` + * - `'SHA-384'` + * - `'SHA-512'` + * + * If `algorithm` is provided as an ``, it must have a `name` property whose value is one of the above. + * @since v15.0.0 + */ + digest(algorithm: AlgorithmIdentifier, data: BufferSource): Promise; + /** + * Using the method and parameters specified by `algorithm` and the keying material provided by `key`, + * `subtle.encrypt()` attempts to encipher `data`. If successful, + * the returned promise is resolved with an `` containing the encrypted result. + * + * The algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * @since v15.0.0 + */ + encrypt( + algorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + key: CryptoKey, + data: BufferSource, + ): Promise; + /** + * Exports the given key into the specified format, if supported. 
+ * + * If the `` is not extractable, the returned promise will reject. + * + * When `format` is either `'pkcs8'` or `'spki'` and the export is successful, + * the returned promise will be resolved with an `` containing the exported key data. + * + * When `format` is `'jwk'` and the export is successful, the returned promise will be resolved with a + * JavaScript object conforming to the {@link https://tools.ietf.org/html/rfc7517 JSON Web Key} specification. + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @returns `` containing ``. + * @since v15.0.0 + */ + exportKey(format: "jwk", key: CryptoKey): Promise; + exportKey(format: Exclude, key: CryptoKey): Promise; + /** + * Using the method and parameters provided in `algorithm`, + * `subtle.generateKey()` attempts to generate new keying material. + * Depending the method used, the method may generate either a single `` or a ``. + * + * The `` (public and private key) generating algorithms supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'RSA-OAEP'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * The `` (secret key) generating algorithms supported include: + * + * - `'HMAC'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + generateKey( + algorithm: RsaHashedKeyGenParams | EcKeyGenParams, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + generateKey( + algorithm: AesKeyGenParams | HmacKeyGenParams | Pbkdf2Params, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + generateKey( + algorithm: AlgorithmIdentifier, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + /** + * The `subtle.importKey()` method attempts to interpret the provided `keyData` as the given `format` + * to create a `` instance using the provided `algorithm`, `extractable`, and `keyUsages` arguments. + * If the import is successful, the returned promise will be resolved with the created ``. + * + * If importing a `'PBKDF2'` key, `extractable` must be `false`. + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + importKey( + format: "jwk", + keyData: JsonWebKey, + algorithm: + | AlgorithmIdentifier + | RsaHashedImportParams + | EcKeyImportParams + | HmacImportParams + | AesKeyAlgorithm, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + importKey( + format: Exclude, + keyData: BufferSource, + algorithm: + | AlgorithmIdentifier + | RsaHashedImportParams + | EcKeyImportParams + | HmacImportParams + | AesKeyAlgorithm, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + /** + * Using the method and parameters given by `algorithm` and the keying material provided by `key`, + * `subtle.sign()` attempts to generate a cryptographic signature of `data`. If successful, + * the returned promise is resolved with an `` containing the generated signature. 
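+ *
+ * A brief sketch (the HMAC key and message are placeholders) pairing `subtle.sign()`
+ * with `subtle.verify()`:
+ *
+ * ```js
+ * const { webcrypto } = await import('node:crypto');
+ * const { subtle } = webcrypto;
+ *
+ * const key = await subtle.generateKey({ name: 'HMAC', hash: 'SHA-256' }, false, ['sign', 'verify']);
+ * const data = new TextEncoder().encode('message to authenticate');
+ * const signature = await subtle.sign('HMAC', key, data);
+ * console.log(await subtle.verify('HMAC', key, signature, data)); // true
+ * ```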
+ * + * The algorithms currently supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'HMAC'` + * @since v15.0.0 + */ + sign( + algorithm: AlgorithmIdentifier | RsaPssParams | EcdsaParams | Ed448Params, + key: CryptoKey, + data: BufferSource, + ): Promise; + /** + * In cryptography, "wrapping a key" refers to exporting and then encrypting the keying material. + * The `subtle.unwrapKey()` method attempts to decrypt a wrapped key and create a `` instance. + * It is equivalent to calling `subtle.decrypt()` first on the encrypted key data (using the `wrappedKey`, `unwrapAlgo`, and `unwrappingKey` arguments as input) + * then passing the results in to the `subtle.importKey()` method using the `unwrappedKeyAlgo`, `extractable`, and `keyUsages` arguments as inputs. + * If successful, the returned promise is resolved with a `` object. + * + * The wrapping algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * + * The unwrapped key algorithms supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'RSA-OAEP'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HMAC'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + unwrapKey( + format: KeyFormat, + wrappedKey: BufferSource, + unwrappingKey: CryptoKey, + unwrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + unwrappedKeyAlgorithm: + | AlgorithmIdentifier + | RsaHashedImportParams + | EcKeyImportParams + | HmacImportParams + | AesKeyAlgorithm, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + /** + * Using the method and parameters given in `algorithm` and the keying material provided by `key`, + * `subtle.verify()` attempts to verify that `signature` is a valid cryptographic signature of `data`. + * The returned promise is resolved with either `true` or `false`. + * + * The algorithms currently supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'HMAC'` + * @since v15.0.0 + */ + verify( + algorithm: AlgorithmIdentifier | RsaPssParams | EcdsaParams | Ed448Params, + key: CryptoKey, + signature: BufferSource, + data: BufferSource, + ): Promise; + /** + * In cryptography, "wrapping a key" refers to exporting and then encrypting the keying material. + * The `subtle.wrapKey()` method exports the keying material into the format identified by `format`, + * then encrypts it using the method and parameters specified by `wrapAlgo` and the keying material provided by `wrappingKey`. + * It is the equivalent to calling `subtle.exportKey()` using `format` and `key` as the arguments, + * then passing the result to the `subtle.encrypt()` method using `wrappingKey` and `wrapAlgo` as inputs. + * If successful, the returned promise will be resolved with an `` containing the encrypted key data. + * + * The wrapping algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. 
+ * @since v15.0.0 + */ + wrapKey( + format: KeyFormat, + key: CryptoKey, + wrappingKey: CryptoKey, + wrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + ): Promise; + } + } + + global { + var crypto: typeof globalThis extends { + crypto: infer T; + onmessage: any; + } ? T + : webcrypto.Crypto; + } +} +declare module "node:crypto" { + export * from "crypto"; +} diff --git a/dist/node_modules/@types/node/dgram.d.ts b/dist/node_modules/@types/node/dgram.d.ts new file mode 100644 index 0000000..0b86ae7 --- /dev/null +++ b/dist/node_modules/@types/node/dgram.d.ts @@ -0,0 +1,596 @@ +/** + * The `node:dgram` module provides an implementation of UDP datagram sockets. + * + * ```js + * import dgram from 'node:dgram'; + * + * const server = dgram.createSocket('udp4'); + * + * server.on('error', (err) => { + * console.error(`server error:\n${err.stack}`); + * server.close(); + * }); + * + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * + * server.on('listening', () => { + * const address = server.address(); + * console.log(`server listening ${address.address}:${address.port}`); + * }); + * + * server.bind(41234); + * // Prints: server listening 0.0.0.0:41234 + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/dgram.js) + */ +declare module "dgram" { + import { AddressInfo } from "node:net"; + import * as dns from "node:dns"; + import { Abortable, EventEmitter } from "node:events"; + interface RemoteInfo { + address: string; + family: "IPv4" | "IPv6"; + port: number; + size: number; + } + interface BindOptions { + port?: number | undefined; + address?: string | undefined; + exclusive?: boolean | undefined; + fd?: number | undefined; + } + type SocketType = "udp4" | "udp6"; + interface SocketOptions extends Abortable { + type: SocketType; + reuseAddr?: boolean | undefined; + /** + * @default false + */ + ipv6Only?: boolean | undefined; + recvBufferSize?: number | undefined; + sendBufferSize?: number | undefined; + lookup?: + | (( + hostname: string, + options: dns.LookupOneOptions, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ) => void) + | undefined; + } + /** + * Creates a `dgram.Socket` object. Once the socket is created, calling `socket.bind()` will instruct the socket to begin listening for datagram + * messages. When `address` and `port` are not passed to `socket.bind()` the + * method will bind the socket to the "all interfaces" address on a random port + * (it does the right thing for both `udp4` and `udp6` sockets). The bound address + * and port can be retrieved using `socket.address().address` and `socket.address().port`. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.close()` on the socket: + * + * ```js + * const controller = new AbortController(); + * const { signal } = controller; + * const server = dgram.createSocket({ type: 'udp4', signal }); + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * // Later, when you want to close the server. + * controller.abort(); + * ``` + * @since v0.11.13 + * @param options Available options are: + * @param callback Attached as a listener for `'message'` events. Optional. 
+ */ + function createSocket(type: SocketType, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; + function createSocket(options: SocketOptions, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; + /** + * Encapsulates the datagram functionality. + * + * New instances of `dgram.Socket` are created using {@link createSocket}. + * The `new` keyword is not to be used to create `dgram.Socket` instances. + * @since v0.1.99 + */ + class Socket extends EventEmitter { + /** + * Tells the kernel to join a multicast group at the given `multicastAddress` and`multicastInterface` using the `IP_ADD_MEMBERSHIP` socket option. If the`multicastInterface` argument is not + * specified, the operating system will choose + * one interface and will add membership to it. To add membership to every + * available interface, call `addMembership` multiple times, once per interface. + * + * When called on an unbound socket, this method will implicitly bind to a random + * port, listening on all interfaces. + * + * When sharing a UDP socket across multiple `cluster` workers, the`socket.addMembership()` function must be called only once or an`EADDRINUSE` error will occur: + * + * ```js + * import cluster from 'node:cluster'; + * import dgram from 'node:dgram'; + * + * if (cluster.isPrimary) { + * cluster.fork(); // Works ok. + * cluster.fork(); // Fails with EADDRINUSE. + * } else { + * const s = dgram.createSocket('udp4'); + * s.bind(1234, () => { + * s.addMembership('224.0.0.114'); + * }); + * } + * ``` + * @since v0.6.9 + */ + addMembership(multicastAddress: string, multicastInterface?: string): void; + /** + * Returns an object containing the address information for a socket. + * For UDP sockets, this object will contain `address`, `family`, and `port`properties. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.1.99 + */ + address(): AddressInfo; + /** + * For UDP sockets, causes the `dgram.Socket` to listen for datagram + * messages on a named `port` and optional `address`. If `port` is not + * specified or is `0`, the operating system will attempt to bind to a + * random port. If `address` is not specified, the operating system will + * attempt to listen on all addresses. Once binding is complete, a`'listening'` event is emitted and the optional `callback` function is + * called. + * + * Specifying both a `'listening'` event listener and passing a`callback` to the `socket.bind()` method is not harmful but not very + * useful. + * + * A bound datagram socket keeps the Node.js process running to receive + * datagram messages. + * + * If binding fails, an `'error'` event is generated. In rare case (e.g. + * attempting to bind with a closed socket), an `Error` may be thrown. + * + * Example of a UDP server listening on port 41234: + * + * ```js + * import dgram from 'node:dgram'; + * + * const server = dgram.createSocket('udp4'); + * + * server.on('error', (err) => { + * console.error(`server error:\n${err.stack}`); + * server.close(); + * }); + * + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * + * server.on('listening', () => { + * const address = server.address(); + * console.log(`server listening ${address.address}:${address.port}`); + * }); + * + * server.bind(41234); + * // Prints: server listening 0.0.0.0:41234 + * ``` + * @since v0.1.99 + * @param callback with no parameters. Called when binding is complete. 
+ */ + bind(port?: number, address?: string, callback?: () => void): this; + bind(port?: number, callback?: () => void): this; + bind(callback?: () => void): this; + bind(options: BindOptions, callback?: () => void): this; + /** + * Close the underlying socket and stop listening for data on it. If a callback is + * provided, it is added as a listener for the `'close'` event. + * @since v0.1.99 + * @param callback Called when the socket has been closed. + */ + close(callback?: () => void): this; + /** + * Associates the `dgram.Socket` to a remote address and port. Every + * message sent by this handle is automatically sent to that destination. Also, + * the socket will only receive messages from that remote peer. + * Trying to call `connect()` on an already connected socket will result + * in an `ERR_SOCKET_DGRAM_IS_CONNECTED` exception. If `address` is not + * provided, `'127.0.0.1'` (for `udp4` sockets) or `'::1'` (for `udp6` sockets) + * will be used by default. Once the connection is complete, a `'connect'` event + * is emitted and the optional `callback` function is called. In case of failure, + * the `callback` is called or, failing this, an `'error'` event is emitted. + * @since v12.0.0 + * @param callback Called when the connection is completed or on error. + */ + connect(port: number, address?: string, callback?: () => void): void; + connect(port: number, callback: () => void): void; + /** + * A synchronous function that disassociates a connected `dgram.Socket` from + * its remote address. Trying to call `disconnect()` on an unbound or already + * disconnected socket will result in an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception. + * @since v12.0.0 + */ + disconnect(): void; + /** + * Instructs the kernel to leave a multicast group at `multicastAddress` using the`IP_DROP_MEMBERSHIP` socket option. This method is automatically called by the + * kernel when the socket is closed or the process terminates, so most apps will + * never have reason to call this. + * + * If `multicastInterface` is not specified, the operating system will attempt to + * drop membership on all valid interfaces. + * @since v0.6.9 + */ + dropMembership(multicastAddress: string, multicastInterface?: string): void; + /** + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + * @return the `SO_RCVBUF` socket receive buffer size in bytes. + */ + getRecvBufferSize(): number; + /** + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + * @return the `SO_SNDBUF` socket send buffer size in bytes. + */ + getSendBufferSize(): number; + /** + * @since v18.8.0, v16.19.0 + * @return Number of bytes queued for sending. + */ + getSendQueueSize(): number; + /** + * @since v18.8.0, v16.19.0 + * @return Number of send requests currently in the queue awaiting to be processed. + */ + getSendQueueCount(): number; + /** + * By default, binding a socket will cause it to block the Node.js process from + * exiting as long as the socket is open. The `socket.unref()` method can be used + * to exclude the socket from the reference counting that keeps the Node.js + * process active. The `socket.ref()` method adds the socket back to the reference + * counting and restores the default behavior. + * + * Calling `socket.ref()` multiples times will have no additional effect. + * + * The `socket.ref()` method returns a reference to the socket so calls can be + * chained. 
+ * @since v0.9.1 + */ + ref(): this; + /** + * Returns an object containing the `address`, `family`, and `port` of the remote + * endpoint. This method throws an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception + * if the socket is not connected. + * @since v12.0.0 + */ + remoteAddress(): AddressInfo; + /** + * Broadcasts a datagram on the socket. + * For connectionless sockets, the destination `port` and `address` must be + * specified. Connected sockets, on the other hand, will use their associated + * remote endpoint, so the `port` and `address` arguments must not be set. + * + * The `msg` argument contains the message to be sent. + * Depending on its type, different behavior can apply. If `msg` is a `Buffer`, + * any `TypedArray` or a `DataView`, + * the `offset` and `length` specify the offset within the `Buffer` where the + * message begins and the number of bytes in the message, respectively. + * If `msg` is a `String`, then it is automatically converted to a `Buffer`with `'utf8'` encoding. With messages that + * contain multi-byte characters, `offset` and `length` will be calculated with + * respect to `byte length` and not the character position. + * If `msg` is an array, `offset` and `length` must not be specified. + * + * The `address` argument is a string. If the value of `address` is a host name, + * DNS will be used to resolve the address of the host. If `address` is not + * provided or otherwise nullish, `'127.0.0.1'` (for `udp4` sockets) or `'::1'`(for `udp6` sockets) will be used by default. + * + * If the socket has not been previously bound with a call to `bind`, the socket + * is assigned a random port number and is bound to the "all interfaces" address + * (`'0.0.0.0'` for `udp4` sockets, `'::0'` for `udp6` sockets.) + * + * An optional `callback` function may be specified to as a way of reporting + * DNS errors or for determining when it is safe to reuse the `buf` object. + * DNS lookups delay the time to send for at least one tick of the + * Node.js event loop. + * + * The only way to know for sure that the datagram has been sent is by using a`callback`. If an error occurs and a `callback` is given, the error will be + * passed as the first argument to the `callback`. If a `callback` is not given, + * the error is emitted as an `'error'` event on the `socket` object. + * + * Offset and length are optional but both _must_ be set if either are used. + * They are supported only when the first argument is a `Buffer`, a `TypedArray`, + * or a `DataView`. + * + * This method throws `ERR_SOCKET_BAD_PORT` if called on an unbound socket. + * + * Example of sending a UDP packet to a port on `localhost`; + * + * ```js + * import dgram from 'node:dgram'; + * import { Buffer } from 'node:buffer'; + * + * const message = Buffer.from('Some bytes'); + * const client = dgram.createSocket('udp4'); + * client.send(message, 41234, 'localhost', (err) => { + * client.close(); + * }); + * ``` + * + * Example of sending a UDP packet composed of multiple buffers to a port on`127.0.0.1`; + * + * ```js + * import dgram from 'node:dgram'; + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('Some '); + * const buf2 = Buffer.from('bytes'); + * const client = dgram.createSocket('udp4'); + * client.send([buf1, buf2], 41234, (err) => { + * client.close(); + * }); + * ``` + * + * Sending multiple buffers might be faster or slower depending on the + * application and operating system. Run benchmarks to + * determine the optimal strategy on a case-by-case basis. 
Generally speaking, + * however, sending multiple buffers is faster. + * + * Example of sending a UDP packet using a socket connected to a port on`localhost`: + * + * ```js + * import dgram from 'node:dgram'; + * import { Buffer } from 'node:buffer'; + * + * const message = Buffer.from('Some bytes'); + * const client = dgram.createSocket('udp4'); + * client.connect(41234, 'localhost', (err) => { + * client.send(message, (err) => { + * client.close(); + * }); + * }); + * ``` + * @since v0.1.99 + * @param msg Message to be sent. + * @param offset Offset in the buffer where the message starts. + * @param length Number of bytes in the message. + * @param port Destination port. + * @param address Destination host name or IP address. + * @param callback Called when the message has been sent. + */ + send( + msg: string | Uint8Array | readonly any[], + port?: number, + address?: string, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array | readonly any[], + port?: number, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array | readonly any[], + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array, + offset: number, + length: number, + port?: number, + address?: string, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array, + offset: number, + length: number, + port?: number, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array, + offset: number, + length: number, + callback?: (error: Error | null, bytes: number) => void, + ): void; + /** + * Sets or clears the `SO_BROADCAST` socket option. When set to `true`, UDP + * packets may be sent to a local interface's broadcast address. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.6.9 + */ + setBroadcast(flag: boolean): void; + /** + * _All references to scope in this section are referring to [IPv6 Zone Indices](https://en.wikipedia.org/wiki/IPv6_address#Scoped_literal_IPv6_addresses), which are defined by [RFC + * 4007](https://tools.ietf.org/html/rfc4007). In string form, an IP_ + * _with a scope index is written as `'IP%scope'` where scope is an interface name_ + * _or interface number._ + * + * Sets the default outgoing multicast interface of the socket to a chosen + * interface or back to system interface selection. The `multicastInterface` must + * be a valid string representation of an IP from the socket's family. + * + * For IPv4 sockets, this should be the IP configured for the desired physical + * interface. All packets sent to multicast on the socket will be sent on the + * interface determined by the most recent successful use of this call. + * + * For IPv6 sockets, `multicastInterface` should include a scope to indicate the + * interface as in the examples that follow. In IPv6, individual `send` calls can + * also use explicit scope in addresses, so only packets sent to a multicast + * address without specifying an explicit scope are affected by the most recent + * successful use of this call. + * + * This method throws `EBADF` if called on an unbound socket. 
+ * + * #### Example: IPv6 outgoing multicast interface + * + * On most systems, where scope format uses the interface name: + * + * ```js + * const socket = dgram.createSocket('udp6'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('::%eth1'); + * }); + * ``` + * + * On Windows, where scope format uses an interface number: + * + * ```js + * const socket = dgram.createSocket('udp6'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('::%2'); + * }); + * ``` + * + * #### Example: IPv4 outgoing multicast interface + * + * All systems use an IP of the host on the desired physical interface: + * + * ```js + * const socket = dgram.createSocket('udp4'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('10.0.0.2'); + * }); + * ``` + * @since v8.6.0 + */ + setMulticastInterface(multicastInterface: string): void; + /** + * Sets or clears the `IP_MULTICAST_LOOP` socket option. When set to `true`, + * multicast packets will also be received on the local interface. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.3.8 + */ + setMulticastLoopback(flag: boolean): boolean; + /** + * Sets the `IP_MULTICAST_TTL` socket option. While TTL generally stands for + * "Time to Live", in this context it specifies the number of IP hops that a + * packet is allowed to travel through, specifically for multicast traffic. Each + * router or gateway that forwards a packet decrements the TTL. If the TTL is + * decremented to 0 by a router, it will not be forwarded. + * + * The `ttl` argument may be between 0 and 255\. The default on most systems is `1`. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.3.8 + */ + setMulticastTTL(ttl: number): number; + /** + * Sets the `SO_RCVBUF` socket option. Sets the maximum socket receive buffer + * in bytes. + * + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + */ + setRecvBufferSize(size: number): void; + /** + * Sets the `SO_SNDBUF` socket option. Sets the maximum socket send buffer + * in bytes. + * + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + */ + setSendBufferSize(size: number): void; + /** + * Sets the `IP_TTL` socket option. While TTL generally stands for "Time to Live", + * in this context it specifies the number of IP hops that a packet is allowed to + * travel through. Each router or gateway that forwards a packet decrements the + * TTL. If the TTL is decremented to 0 by a router, it will not be forwarded. + * Changing TTL values is typically done for network probes or when multicasting. + * + * The `ttl` argument may be between 1 and 255\. The default on most systems + * is 64. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.1.101 + */ + setTTL(ttl: number): number; + /** + * By default, binding a socket will cause it to block the Node.js process from + * exiting as long as the socket is open. The `socket.unref()` method can be used + * to exclude the socket from the reference counting that keeps the Node.js + * process active, allowing the process to exit even if the socket is still + * listening. + * + * Calling `socket.unref()` multiple times will have no additional effect. + * + * The `socket.unref()` method returns a reference to the socket so calls can be + * chained. 
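+ *
+ * A small illustrative sketch (the port number is arbitrary):
+ *
+ * ```js
+ * import dgram from 'node:dgram';
+ *
+ * const socket = dgram.createSocket('udp4');
+ * socket.bind(41234);
+ * // Allow the process to exit even though the socket is still listening.
+ * socket.unref();
+ * ```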
+ * @since v0.9.1 + */ + unref(): this; + /** + * Tells the kernel to join a source-specific multicast channel at the given`sourceAddress` and `groupAddress`, using the `multicastInterface` with the`IP_ADD_SOURCE_MEMBERSHIP` socket + * option. If the `multicastInterface` argument + * is not specified, the operating system will choose one interface and will add + * membership to it. To add membership to every available interface, call`socket.addSourceSpecificMembership()` multiple times, once per interface. + * + * When called on an unbound socket, this method will implicitly bind to a random + * port, listening on all interfaces. + * @since v13.1.0, v12.16.0 + */ + addSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void; + /** + * Instructs the kernel to leave a source-specific multicast channel at the given`sourceAddress` and `groupAddress` using the `IP_DROP_SOURCE_MEMBERSHIP`socket option. This method is + * automatically called by the kernel when the + * socket is closed or the process terminates, so most apps will never have + * reason to call this. + * + * If `multicastInterface` is not specified, the operating system will attempt to + * drop membership on all valid interfaces. + * @since v13.1.0, v12.16.0 + */ + dropSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void; + /** + * events.EventEmitter + * 1. close + * 2. connect + * 3. error + * 4. listening + * 5. message + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connect", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "connect"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit(event: "message", msg: Buffer, rinfo: RemoteInfo): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connect", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connect", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connect", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: 
"connect", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + /** + * Calls `socket.close()` and returns a promise that fulfills when the socket has closed. + * @since v20.5.0 + */ + [Symbol.asyncDispose](): Promise; + } +} +declare module "node:dgram" { + export * from "dgram"; +} diff --git a/dist/node_modules/@types/node/diagnostics_channel.d.ts b/dist/node_modules/@types/node/diagnostics_channel.d.ts new file mode 100644 index 0000000..cd4bd71 --- /dev/null +++ b/dist/node_modules/@types/node/diagnostics_channel.d.ts @@ -0,0 +1,545 @@ +/** + * The `node:diagnostics_channel` module provides an API to create named channels + * to report arbitrary message data for diagnostics purposes. + * + * It can be accessed using: + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * ``` + * + * It is intended that a module writer wanting to report diagnostics messages + * will create one or many top-level channels to report messages through. + * Channels may also be acquired at runtime but it is not encouraged + * due to the additional overhead of doing so. Channels may be exported for + * convenience, but as long as the name is known it can be acquired anywhere. + * + * If you intend for your module to produce diagnostics data for others to + * consume it is recommended that you include documentation of what named + * channels are used along with the shape of the message data. Channel names + * should generally include the module name to avoid collisions with data from + * other modules. + * @since v15.1.0, v14.17.0 + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/diagnostics_channel.js) + */ +declare module "diagnostics_channel" { + import { AsyncLocalStorage } from "node:async_hooks"; + /** + * Check if there are active subscribers to the named channel. This is helpful if + * the message you want to send might be expensive to prepare. + * + * This API is optional but helpful when trying to publish messages from very + * performance-sensitive code. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * if (diagnostics_channel.hasSubscribers('my-channel')) { + * // There are subscribers, prepare and publish message + * } + * ``` + * @since v15.1.0, v14.17.0 + * @param name The channel name + * @return If there are active subscribers + */ + function hasSubscribers(name: string | symbol): boolean; + /** + * This is the primary entry-point for anyone wanting to publish to a named + * channel. It produces a channel object which is optimized to reduce overhead at + * publish time as much as possible. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * ``` + * @since v15.1.0, v14.17.0 + * @param name The channel name + * @return The named channel object + */ + function channel(name: string | symbol): Channel; + type ChannelListener = (message: unknown, name: string | symbol) => void; + /** + * Register a message handler to subscribe to this channel. This message handler + * will be run synchronously whenever a message is published to the channel. Any + * errors thrown in the message handler will trigger an `'uncaughtException'`. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * diagnostics_channel.subscribe('my-channel', (message, name) => { + * // Received data + * }); + * ``` + * @since v18.7.0, v16.17.0 + * @param name The channel name + * @param onMessage The handler to receive channel messages + */ + function subscribe(name: string | symbol, onMessage: ChannelListener): void; + /** + * Remove a message handler previously registered to this channel with {@link subscribe}. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * function onMessage(message, name) { + * // Received data + * } + * + * diagnostics_channel.subscribe('my-channel', onMessage); + * + * diagnostics_channel.unsubscribe('my-channel', onMessage); + * ``` + * @since v18.7.0, v16.17.0 + * @param name The channel name + * @param onMessage The previous subscribed handler to remove + * @return `true` if the handler was found, `false` otherwise. + */ + function unsubscribe(name: string | symbol, onMessage: ChannelListener): boolean; + /** + * Creates a `TracingChannel` wrapper for the given `TracingChannel Channels`. If a name is given, the corresponding tracing + * channels will be created in the form of `tracing:${name}:${eventType}` where`eventType` corresponds to the types of `TracingChannel Channels`. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channelsByName = diagnostics_channel.tracingChannel('my-channel'); + * + * // or... + * + * const channelsByCollection = diagnostics_channel.tracingChannel({ + * start: diagnostics_channel.channel('tracing:my-channel:start'), + * end: diagnostics_channel.channel('tracing:my-channel:end'), + * asyncStart: diagnostics_channel.channel('tracing:my-channel:asyncStart'), + * asyncEnd: diagnostics_channel.channel('tracing:my-channel:asyncEnd'), + * error: diagnostics_channel.channel('tracing:my-channel:error'), + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param nameOrChannels Channel name or object containing all the `TracingChannel Channels` + * @return Collection of channels to trace with + */ + function tracingChannel< + StoreType = unknown, + ContextType extends object = StoreType extends object ? StoreType : object, + >( + nameOrChannels: string | TracingChannelCollection, + ): TracingChannel; + /** + * The class `Channel` represents an individual named channel within the data + * pipeline. It is used to track subscribers and to publish messages when there + * are subscribers present. It exists as a separate object to avoid channel + * lookups at publish time, enabling very fast publish speeds and allowing + * for heavy use while incurring very minimal cost. Channels are created with {@link channel}, constructing a channel directly + * with `new Channel(name)` is not supported. + * @since v15.1.0, v14.17.0 + */ + class Channel { + readonly name: string | symbol; + /** + * Check if there are active subscribers to this channel. This is helpful if + * the message you want to send might be expensive to prepare. + * + * This API is optional but helpful when trying to publish messages from very + * performance-sensitive code. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * if (channel.hasSubscribers) { + * // There are subscribers, prepare and publish message + * } + * ``` + * @since v15.1.0, v14.17.0 + */ + readonly hasSubscribers: boolean; + private constructor(name: string | symbol); + /** + * Publish a message to any subscribers to the channel. This will trigger + * message handlers synchronously so they will execute within the same context. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.publish({ + * some: 'message', + * }); + * ``` + * @since v15.1.0, v14.17.0 + * @param message The message to send to the channel subscribers + */ + publish(message: unknown): void; + /** + * Register a message handler to subscribe to this channel. This message handler + * will be run synchronously whenever a message is published to the channel. Any + * errors thrown in the message handler will trigger an `'uncaughtException'`. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.subscribe((message, name) => { + * // Received data + * }); + * ``` + * @since v15.1.0, v14.17.0 + * @deprecated Since v18.7.0,v16.17.0 - Use {@link subscribe(name, onMessage)} + * @param onMessage The handler to receive channel messages + */ + subscribe(onMessage: ChannelListener): void; + /** + * Remove a message handler previously registered to this channel with `channel.subscribe(onMessage)`. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * function onMessage(message, name) { + * // Received data + * } + * + * channel.subscribe(onMessage); + * + * channel.unsubscribe(onMessage); + * ``` + * @since v15.1.0, v14.17.0 + * @deprecated Since v18.7.0,v16.17.0 - Use {@link unsubscribe(name, onMessage)} + * @param onMessage The previous subscribed handler to remove + * @return `true` if the handler was found, `false` otherwise. + */ + unsubscribe(onMessage: ChannelListener): void; + /** + * When `channel.runStores(context, ...)` is called, the given context data + * will be applied to any store bound to the channel. If the store has already been + * bound the previous `transform` function will be replaced with the new one. + * The `transform` function may be omitted to set the given context data as the + * context directly. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const store = new AsyncLocalStorage(); + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.bindStore(store, (data) => { + * return { data }; + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param store The store to which to bind the context data + * @param transform Transform context data before setting the store context + */ + bindStore(store: AsyncLocalStorage, transform?: (context: ContextType) => StoreType): void; + /** + * Remove a message handler previously registered to this channel with `channel.bindStore(store)`. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const store = new AsyncLocalStorage(); + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.bindStore(store); + * channel.unbindStore(store); + * ``` + * @since v19.9.0 + * @experimental + * @param store The store to unbind from the channel. + * @return `true` if the store was found, `false` otherwise. + */ + unbindStore(store: any): void; + /** + * Applies the given data to any AsyncLocalStorage instances bound to the channel + * for the duration of the given function, then publishes to the channel within + * the scope of that data is applied to the stores. + * + * If a transform function was given to `channel.bindStore(store)` it will be + * applied to transform the message data before it becomes the context value for + * the store. The prior storage context is accessible from within the transform + * function in cases where context linking is required. + * + * The context applied to the store should be accessible in any async code which + * continues from execution which began during the given function, however + * there are some situations in which `context loss` may occur. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const store = new AsyncLocalStorage(); + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.bindStore(store, (message) => { + * const parent = store.getStore(); + * return new Span(message, parent); + * }); + * channel.runStores({ some: 'message' }, () => { + * store.getStore(); // Span({ some: 'message' }) + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param context Message to send to subscribers and bind to stores + * @param fn Handler to run within the entered storage context + * @param thisArg The receiver to be used for the function call. + * @param args Optional arguments to pass to the function. + */ + runStores(): void; + } + interface TracingChannelSubscribers { + start: (message: ContextType) => void; + end: ( + message: ContextType & { + error?: unknown; + result?: unknown; + }, + ) => void; + asyncStart: ( + message: ContextType & { + error?: unknown; + result?: unknown; + }, + ) => void; + asyncEnd: ( + message: ContextType & { + error?: unknown; + result?: unknown; + }, + ) => void; + error: ( + message: ContextType & { + error: unknown; + }, + ) => void; + } + interface TracingChannelCollection { + start: Channel; + end: Channel; + asyncStart: Channel; + asyncEnd: Channel; + error: Channel; + } + /** + * The class `TracingChannel` is a collection of `TracingChannel Channels` which + * together express a single traceable action. It is used to formalize and + * simplify the process of producing events for tracing application flow.{@link tracingChannel} is used to construct a`TracingChannel`. As with `Channel` it is recommended to create and reuse a + * single `TracingChannel` at the top-level of the file rather than creating them + * dynamically. + * @since v19.9.0 + * @experimental + */ + class TracingChannel implements TracingChannelCollection { + start: Channel; + end: Channel; + asyncStart: Channel; + asyncEnd: Channel; + error: Channel; + /** + * Helper to subscribe a collection of functions to the corresponding channels. + * This is the same as calling `channel.subscribe(onMessage)` on each channel + * individually. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.subscribe({ + * start(message) { + * // Handle start message + * }, + * end(message) { + * // Handle end message + * }, + * asyncStart(message) { + * // Handle asyncStart message + * }, + * asyncEnd(message) { + * // Handle asyncEnd message + * }, + * error(message) { + * // Handle error message + * }, + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param subscribers Set of `TracingChannel Channels` subscribers + */ + subscribe(subscribers: TracingChannelSubscribers): void; + /** + * Helper to unsubscribe a collection of functions from the corresponding channels. + * This is the same as calling `channel.unsubscribe(onMessage)` on each channel + * individually. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.unsubscribe({ + * start(message) { + * // Handle start message + * }, + * end(message) { + * // Handle end message + * }, + * asyncStart(message) { + * // Handle asyncStart message + * }, + * asyncEnd(message) { + * // Handle asyncEnd message + * }, + * error(message) { + * // Handle error message + * }, + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param subscribers Set of `TracingChannel Channels` subscribers + * @return `true` if all handlers were successfully unsubscribed, and `false` otherwise. + */ + unsubscribe(subscribers: TracingChannelSubscribers): void; + /** + * Trace a synchronous function call. This will always produce a `start event` and `end event` around the execution and may produce an `error event` if the given function throws an error. + * This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all + * events should have any bound stores set to match this trace context. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.traceSync(() => { + * // Do something + * }, { + * some: 'thing', + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param fn Function to wrap a trace around + * @param context Shared object to correlate events through + * @param thisArg The receiver to be used for the function call + * @param args Optional arguments to pass to the function + * @return The return value of the given function + */ + traceSync( + fn: (this: ThisArg, ...args: Args) => any, + context?: ContextType, + thisArg?: ThisArg, + ...args: Args + ): void; + /** + * Trace a promise-returning function call. This will always produce a `start event` and `end event` around the synchronous portion of the + * function execution, and will produce an `asyncStart event` and `asyncEnd event` when a promise continuation is reached. It may also + * produce an `error event` if the given function throws an error or the + * returned promise rejects. This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all + * events should have any bound stores set to match this trace context. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.tracePromise(async () => { + * // Do something + * }, { + * some: 'thing', + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param fn Promise-returning function to wrap a trace around + * @param context Shared object to correlate trace events through + * @param thisArg The receiver to be used for the function call + * @param args Optional arguments to pass to the function + * @return Chained from promise returned by the given function + */ + tracePromise( + fn: (this: ThisArg, ...args: Args) => Promise, + context?: ContextType, + thisArg?: ThisArg, + ...args: Args + ): void; + /** + * Trace a callback-receiving function call. This will always produce a `start event` and `end event` around the synchronous portion of the + * function execution, and will produce a `asyncStart event` and `asyncEnd event` around the callback execution. It may also produce an `error event` if the given function throws an error or + * the returned + * promise rejects. This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all + * events should have any bound stores set to match this trace context. + * + * The `position` will be -1 by default to indicate the final argument should + * be used as the callback. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.traceCallback((arg1, callback) => { + * // Do something + * callback(null, 'result'); + * }, 1, { + * some: 'thing', + * }, thisArg, arg1, callback); + * ``` + * + * The callback will also be run with `channel.runStores(context, ...)` which + * enables context loss recovery in some cases. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * const myStore = new AsyncLocalStorage(); + * + * // The start channel sets the initial store data to something + * // and stores that store data value on the trace context object + * channels.start.bindStore(myStore, (data) => { + * const span = new Span(data); + * data.span = span; + * return span; + * }); + * + * // Then asyncStart can restore from that data it stored previously + * channels.asyncStart.bindStore(myStore, (data) => { + * return data.span; + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param fn callback using function to wrap a trace around + * @param position Zero-indexed argument position of expected callback + * @param context Shared object to correlate trace events through + * @param thisArg The receiver to be used for the function call + * @param args Optional arguments to pass to the function + * @return The return value of the given function + */ + traceCallback any>( + fn: Fn, + position: number | undefined, + context: ContextType | undefined, + thisArg: any, + ...args: Parameters + ): void; + } +} +declare module "node:diagnostics_channel" { + export * from "diagnostics_channel"; +} diff --git a/dist/node_modules/@types/node/dns.d.ts b/dist/node_modules/@types/node/dns.d.ts new file mode 100644 index 0000000..380cf7d --- /dev/null +++ b/dist/node_modules/@types/node/dns.d.ts @@ -0,0 +1,809 @@ +/** + * The `node:dns` module enables name resolution. 
For example, use it to look up IP + * addresses of host names. + * + * Although named for the [Domain Name System (DNS)](https://en.wikipedia.org/wiki/Domain_Name_System), it does not always use the + * DNS protocol for lookups. {@link lookup} uses the operating system + * facilities to perform name resolution. It may not need to perform any network + * communication. To perform name resolution the way other applications on the same + * system do, use {@link lookup}. + * + * ```js + * const dns = require('node:dns'); + * + * dns.lookup('example.org', (err, address, family) => { + * console.log('address: %j family: IPv%s', address, family); + * }); + * // address: "93.184.216.34" family: IPv4 + * ``` + * + * All other functions in the `node:dns` module connect to an actual DNS server to + * perform name resolution. They will always use the network to perform DNS + * queries. These functions do not use the same set of configuration files used by {@link lookup} (e.g. `/etc/hosts`). Use these functions to always perform + * DNS queries, bypassing other name-resolution facilities. + * + * ```js + * const dns = require('node:dns'); + * + * dns.resolve4('archive.org', (err, addresses) => { + * if (err) throw err; + * + * console.log(`addresses: ${JSON.stringify(addresses)}`); + * + * addresses.forEach((a) => { + * dns.reverse(a, (err, hostnames) => { + * if (err) { + * throw err; + * } + * console.log(`reverse for ${a}: ${JSON.stringify(hostnames)}`); + * }); + * }); + * }); + * ``` + * + * See the `Implementation considerations section` for more information. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/dns.js) + */ +declare module "dns" { + import * as dnsPromises from "node:dns/promises"; + // Supported getaddrinfo flags. + export const ADDRCONFIG: number; + export const V4MAPPED: number; + /** + * If `dns.V4MAPPED` is specified, return resolved IPv6 addresses as + * well as IPv4 mapped IPv6 addresses. + */ + export const ALL: number; + export interface LookupOptions { + family?: number | undefined; + hints?: number | undefined; + all?: boolean | undefined; + /** + * @default true + */ + verbatim?: boolean | undefined; + } + export interface LookupOneOptions extends LookupOptions { + all?: false | undefined; + } + export interface LookupAllOptions extends LookupOptions { + all: true; + } + export interface LookupAddress { + address: string; + family: number; + } + /** + * Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or + * AAAA (IPv6) record. All `option` properties are optional. If `options` is an + * integer, then it must be `4` or `6` – if `options` is `0` or not provided, then + * IPv4 and IPv6 addresses are both returned if found. + * + * With the `all` option set to `true`, the arguments for `callback` change to`(err, addresses)`, with `addresses` being an array of objects with the + * properties `address` and `family`. + * + * On error, `err` is an `Error` object, where `err.code` is the error code. + * Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when + * the host name does not exist but also when the lookup fails in other ways + * such as no available file descriptors. + * + * `dns.lookup()` does not necessarily have anything to do with the DNS protocol. + * The implementation uses an operating system facility that can associate names + * with addresses and vice versa. This implementation can have subtle but + * important consequences on the behavior of any Node.js program. 
Please take some + * time to consult the `Implementation considerations section` before using`dns.lookup()`. + * + * Example usage: + * + * ```js + * const dns = require('node:dns'); + * const options = { + * family: 6, + * hints: dns.ADDRCONFIG | dns.V4MAPPED, + * }; + * dns.lookup('example.com', options, (err, address, family) => + * console.log('address: %j family: IPv%s', address, family)); + * // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6 + * + * // When options.all is true, the result will be an Array. + * options.all = true; + * dns.lookup('example.com', options, (err, addresses) => + * console.log('addresses: %j', addresses)); + * // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}] + * ``` + * + * If this method is invoked as its `util.promisify()` ed version, and `all`is not set to `true`, it returns a `Promise` for an `Object` with `address` and`family` properties. + * @since v0.1.90 + */ + export function lookup( + hostname: string, + family: number, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ): void; + export function lookup( + hostname: string, + options: LookupOneOptions, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ): void; + export function lookup( + hostname: string, + options: LookupAllOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: LookupAddress[]) => void, + ): void; + export function lookup( + hostname: string, + options: LookupOptions, + callback: (err: NodeJS.ErrnoException | null, address: string | LookupAddress[], family: number) => void, + ): void; + export function lookup( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ): void; + export namespace lookup { + function __promisify__(hostname: string, options: LookupAllOptions): Promise; + function __promisify__(hostname: string, options?: LookupOneOptions | number): Promise; + function __promisify__(hostname: string, options: LookupOptions): Promise; + } + /** + * Resolves the given `address` and `port` into a host name and service using + * the operating system's underlying `getnameinfo` implementation. + * + * If `address` is not a valid IP address, a `TypeError` will be thrown. + * The `port` will be coerced to a number. If it is not a legal port, a `TypeError`will be thrown. + * + * On an error, `err` is an `Error` object, where `err.code` is the error code. + * + * ```js + * const dns = require('node:dns'); + * dns.lookupService('127.0.0.1', 22, (err, hostname, service) => { + * console.log(hostname, service); + * // Prints: localhost ssh + * }); + * ``` + * + * If this method is invoked as its `util.promisify()` ed version, it returns a`Promise` for an `Object` with `hostname` and `service` properties. + * @since v0.11.14 + */ + export function lookupService( + address: string, + port: number, + callback: (err: NodeJS.ErrnoException | null, hostname: string, service: string) => void, + ): void; + export namespace lookupService { + function __promisify__( + address: string, + port: number, + ): Promise<{ + hostname: string; + service: string; + }>; + } + export interface ResolveOptions { + ttl: boolean; + } + export interface ResolveWithTtlOptions extends ResolveOptions { + ttl: true; + } + export interface RecordWithTtl { + address: string; + ttl: number; + } + /** @deprecated Use `AnyARecord` or `AnyAaaaRecord` instead. 
*/ + export type AnyRecordWithTtl = AnyARecord | AnyAaaaRecord; + export interface AnyARecord extends RecordWithTtl { + type: "A"; + } + export interface AnyAaaaRecord extends RecordWithTtl { + type: "AAAA"; + } + export interface CaaRecord { + critical: number; + issue?: string | undefined; + issuewild?: string | undefined; + iodef?: string | undefined; + contactemail?: string | undefined; + contactphone?: string | undefined; + } + export interface MxRecord { + priority: number; + exchange: string; + } + export interface AnyMxRecord extends MxRecord { + type: "MX"; + } + export interface NaptrRecord { + flags: string; + service: string; + regexp: string; + replacement: string; + order: number; + preference: number; + } + export interface AnyNaptrRecord extends NaptrRecord { + type: "NAPTR"; + } + export interface SoaRecord { + nsname: string; + hostmaster: string; + serial: number; + refresh: number; + retry: number; + expire: number; + minttl: number; + } + export interface AnySoaRecord extends SoaRecord { + type: "SOA"; + } + export interface SrvRecord { + priority: number; + weight: number; + port: number; + name: string; + } + export interface AnySrvRecord extends SrvRecord { + type: "SRV"; + } + export interface AnyTxtRecord { + type: "TXT"; + entries: string[]; + } + export interface AnyNsRecord { + type: "NS"; + value: string; + } + export interface AnyPtrRecord { + type: "PTR"; + value: string; + } + export interface AnyCnameRecord { + type: "CNAME"; + value: string; + } + export type AnyRecord = + | AnyARecord + | AnyAaaaRecord + | AnyCnameRecord + | AnyMxRecord + | AnyNaptrRecord + | AnyNsRecord + | AnyPtrRecord + | AnySoaRecord + | AnySrvRecord + | AnyTxtRecord; + /** + * Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array + * of the resource records. The `callback` function has arguments`(err, records)`. When successful, `records` will be an array of resource + * records. The type and structure of individual results varies based on `rrtype`: + * + * + * + * On error, `err` is an `Error` object, where `err.code` is one of the `DNS error codes`. + * @since v0.1.27 + * @param hostname Host name to resolve. + * @param [rrtype='A'] Resource record type. 
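+ *
+ * A brief usage sketch (the host name and record type are illustrative):
+ *
+ * ```js
+ * const dns = require('node:dns');
+ *
+ * // Resolve MX records; `addresses` is an array of objects with
+ * // `priority` and `exchange` properties.
+ * dns.resolve('example.org', 'MX', (err, addresses) => {
+ *   if (err) throw err;
+ *   console.log(addresses);
+ * });
+ * ```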
+ */ + export function resolve( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "A", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "AAAA", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "ANY", + callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "CNAME", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "MX", + callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "NAPTR", + callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "NS", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "PTR", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "SOA", + callback: (err: NodeJS.ErrnoException | null, addresses: SoaRecord) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "SRV", + callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "TXT", + callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: string, + callback: ( + err: NodeJS.ErrnoException | null, + addresses: string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][] | AnyRecord[], + ) => void, + ): void; + export namespace resolve { + function __promisify__(hostname: string, rrtype?: "A" | "AAAA" | "CNAME" | "NS" | "PTR"): Promise; + function __promisify__(hostname: string, rrtype: "ANY"): Promise; + function __promisify__(hostname: string, rrtype: "MX"): Promise; + function __promisify__(hostname: string, rrtype: "NAPTR"): Promise; + function __promisify__(hostname: string, rrtype: "SOA"): Promise; + function __promisify__(hostname: string, rrtype: "SRV"): Promise; + function __promisify__(hostname: string, rrtype: "TXT"): Promise; + function __promisify__( + hostname: string, + rrtype: string, + ): Promise; + } + /** + * Uses the DNS protocol to resolve a IPv4 addresses (`A` records) for the`hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of IPv4 addresses (e.g.`['74.125.79.104', '74.125.79.105', '74.125.79.106']`). + * @since v0.1.16 + * @param hostname Host name to resolve. 
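+ *
+ * A brief usage sketch (the host name is illustrative; passing `{ ttl: true }`
+ * requests TTL values alongside the addresses):
+ *
+ * ```js
+ * const dns = require('node:dns');
+ *
+ * dns.resolve4('example.org', { ttl: true }, (err, addresses) => {
+ *   if (err) throw err;
+ *   // Each entry has the shape { address, ttl }.
+ *   console.log(addresses);
+ * });
+ * ```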
+ */ + export function resolve4( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve4( + hostname: string, + options: ResolveWithTtlOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void, + ): void; + export function resolve4( + hostname: string, + options: ResolveOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void, + ): void; + export namespace resolve4 { + function __promisify__(hostname: string): Promise; + function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise; + function __promisify__(hostname: string, options?: ResolveOptions): Promise; + } + /** + * Uses the DNS protocol to resolve IPv6 addresses (`AAAA` records) for the`hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of IPv6 addresses. + * @since v0.1.16 + * @param hostname Host name to resolve. + */ + export function resolve6( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve6( + hostname: string, + options: ResolveWithTtlOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void, + ): void; + export function resolve6( + hostname: string, + options: ResolveOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void, + ): void; + export namespace resolve6 { + function __promisify__(hostname: string): Promise; + function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise; + function __promisify__(hostname: string, options?: ResolveOptions): Promise; + } + /** + * Uses the DNS protocol to resolve `CNAME` records for the `hostname`. The`addresses` argument passed to the `callback` function + * will contain an array of canonical name records available for the `hostname`(e.g. `['bar.example.com']`). + * @since v0.3.2 + */ + export function resolveCname( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export namespace resolveCname { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve `CAA` records for the `hostname`. The`addresses` argument passed to the `callback` function + * will contain an array of certification authority authorization records + * available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'}, {critical: 128, issue: 'pki.example.com'}]`). + * @since v15.0.0, v14.17.0 + */ + export function resolveCaa( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, records: CaaRecord[]) => void, + ): void; + export namespace resolveCaa { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve mail exchange records (`MX` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * contain an array of objects containing both a `priority` and `exchange`property (e.g. `[{priority: 10, exchange: 'mx.example.com'}, ...]`). + * @since v0.1.27 + */ + export function resolveMx( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void, + ): void; + export namespace resolveMx { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve regular expression-based records (`NAPTR`records) for the `hostname`. 
The `addresses` argument passed to the `callback`function will contain an array of + * objects with the following properties: + * + * * `flags` + * * `service` + * * `regexp` + * * `replacement` + * * `order` + * * `preference` + * + * ```js + * { + * flags: 's', + * service: 'SIP+D2U', + * regexp: '', + * replacement: '_sip._udp.example.com', + * order: 30, + * preference: 100 + * } + * ``` + * @since v0.9.12 + */ + export function resolveNaptr( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void, + ): void; + export namespace resolveNaptr { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve name server records (`NS` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * contain an array of name server records available for `hostname`(e.g. `['ns1.example.com', 'ns2.example.com']`). + * @since v0.1.90 + */ + export function resolveNs( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export namespace resolveNs { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve pointer records (`PTR` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * be an array of strings containing the reply records. + * @since v6.0.0 + */ + export function resolvePtr( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export namespace resolvePtr { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve a start of authority record (`SOA` record) for + * the `hostname`. The `address` argument passed to the `callback` function will + * be an object with the following properties: + * + * * `nsname` + * * `hostmaster` + * * `serial` + * * `refresh` + * * `retry` + * * `expire` + * * `minttl` + * + * ```js + * { + * nsname: 'ns.example.com', + * hostmaster: 'root.example.com', + * serial: 2013101809, + * refresh: 10000, + * retry: 2400, + * expire: 604800, + * minttl: 3600 + * } + * ``` + * @since v0.11.10 + */ + export function resolveSoa( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, address: SoaRecord) => void, + ): void; + export namespace resolveSoa { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve service records (`SRV` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * be an array of objects with the following properties: + * + * * `priority` + * * `weight` + * * `port` + * * `name` + * + * ```js + * { + * priority: 10, + * weight: 5, + * port: 21223, + * name: 'service.example.com' + * } + * ``` + * @since v0.1.27 + */ + export function resolveSrv( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void, + ): void; + export namespace resolveSrv { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve text queries (`TXT` records) for the`hostname`. The `records` argument passed to the `callback` function is a + * two-dimensional array of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of + * one record. Depending on the use case, these could be either joined together or + * treated separately. 
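+ *
+ * A brief usage sketch (the host name is illustrative):
+ *
+ * ```js
+ * const dns = require('node:dns');
+ *
+ * dns.resolveTxt('example.org', (err, records) => {
+ *   if (err) throw err;
+ *   // Each record is an array of chunks; join them to recover the full string.
+ *   records.forEach((chunks) => console.log(chunks.join('')));
+ * });
+ * ```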
+ * @since v0.1.27 + */ + export function resolveTxt( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void, + ): void; + export namespace resolveTxt { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query). + * The `ret` argument passed to the `callback` function will be an array containing + * various types of records. Each object has a property `type` that indicates the + * type of the current record. And depending on the `type`, additional properties + * will be present on the object: + * + * + * + * Here is an example of the `ret` object passed to the callback: + * + * ```js + * [ { type: 'A', address: '127.0.0.1', ttl: 299 }, + * { type: 'CNAME', value: 'example.com' }, + * { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 }, + * { type: 'NS', value: 'ns1.example.com' }, + * { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] }, + * { type: 'SOA', + * nsname: 'ns1.example.com', + * hostmaster: 'admin.example.com', + * serial: 156696742, + * refresh: 900, + * retry: 900, + * expire: 1800, + * minttl: 60 } ] + * ``` + * + * DNS server operators may choose not to respond to `ANY`queries. It may be better to call individual methods like {@link resolve4},{@link resolveMx}, and so on. For more details, see [RFC + * 8482](https://tools.ietf.org/html/rfc8482). + */ + export function resolveAny( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void, + ): void; + export namespace resolveAny { + function __promisify__(hostname: string): Promise; + } + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an + * array of host names. + * + * On error, `err` is an `Error` object, where `err.code` is + * one of the `DNS error codes`. + * @since v0.1.16 + */ + export function reverse( + ip: string, + callback: (err: NodeJS.ErrnoException | null, hostnames: string[]) => void, + ): void; + /** + * Get the default value for `verbatim` in {@link lookup} and `dnsPromises.lookup()`. The value could be: + * + * * `ipv4first`: for `verbatim` defaulting to `false`. + * * `verbatim`: for `verbatim` defaulting to `true`. + * @since v20.1.0 + */ + export function getDefaultResultOrder(): "ipv4first" | "verbatim"; + /** + * Sets the IP address and port of servers to be used when performing DNS + * resolution. The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted + * addresses. If the port is the IANA default DNS port (53) it can be omitted. + * + * ```js + * dns.setServers([ + * '4.4.4.4', + * '[2001:4860:4860::8888]', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ]); + * ``` + * + * An error will be thrown if an invalid address is provided. + * + * The `dns.setServers()` method must not be called while a DNS query is in + * progress. + * + * The {@link setServers} method affects only {@link resolve},`dns.resolve*()` and {@link reverse} (and specifically _not_ {@link lookup}). + * + * This method works much like [resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html). + * That is, if attempting to resolve with the first server provided results in a`NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with + * subsequent servers provided. Fallback DNS servers will only be used if the + * earlier ones time out or result in some other error. 
+ * @since v0.11.3 + * @param servers array of `RFC 5952` formatted addresses + */ + export function setServers(servers: readonly string[]): void; + /** + * Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6), + * that are currently configured for DNS resolution. A string will include a port + * section if a custom port is used. + * + * ```js + * [ + * '4.4.4.4', + * '2001:4860:4860::8888', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ] + * ``` + * @since v0.11.3 + */ + export function getServers(): string[]; + /** + * Set the default value of `verbatim` in {@link lookup} and `dnsPromises.lookup()`. The value could be: + * + * * `ipv4first`: sets default `verbatim` `false`. + * * `verbatim`: sets default `verbatim` `true`. + * + * The default is `verbatim` and {@link setDefaultResultOrder} have higher + * priority than `--dns-result-order`. When using `worker threads`,{@link setDefaultResultOrder} from the main thread won't affect the default + * dns orders in workers. + * @since v16.4.0, v14.18.0 + * @param order must be `'ipv4first'` or `'verbatim'`. + */ + export function setDefaultResultOrder(order: "ipv4first" | "verbatim"): void; + // Error codes + export const NODATA: string; + export const FORMERR: string; + export const SERVFAIL: string; + export const NOTFOUND: string; + export const NOTIMP: string; + export const REFUSED: string; + export const BADQUERY: string; + export const BADNAME: string; + export const BADFAMILY: string; + export const BADRESP: string; + export const CONNREFUSED: string; + export const TIMEOUT: string; + export const EOF: string; + export const FILE: string; + export const NOMEM: string; + export const DESTRUCTION: string; + export const BADSTR: string; + export const BADFLAGS: string; + export const NONAME: string; + export const BADHINTS: string; + export const NOTINITIALIZED: string; + export const LOADIPHLPAPI: string; + export const ADDRGETNETWORKPARAMS: string; + export const CANCELLED: string; + export interface ResolverOptions { + timeout?: number | undefined; + /** + * @default 4 + */ + tries?: number; + } + /** + * An independent resolver for DNS requests. + * + * Creating a new resolver uses the default server settings. Setting + * the servers used for a resolver using `resolver.setServers()` does not affect + * other resolvers: + * + * ```js + * const { Resolver } = require('node:dns'); + * const resolver = new Resolver(); + * resolver.setServers(['4.4.4.4']); + * + * // This request will use the server at 4.4.4.4, independent of global settings. + * resolver.resolve4('example.org', (err, addresses) => { + * // ... + * }); + * ``` + * + * The following methods from the `node:dns` module are available: + * + * * `resolver.getServers()` + * * `resolver.resolve()` + * * `resolver.resolve4()` + * * `resolver.resolve6()` + * * `resolver.resolveAny()` + * * `resolver.resolveCaa()` + * * `resolver.resolveCname()` + * * `resolver.resolveMx()` + * * `resolver.resolveNaptr()` + * * `resolver.resolveNs()` + * * `resolver.resolvePtr()` + * * `resolver.resolveSoa()` + * * `resolver.resolveSrv()` + * * `resolver.resolveTxt()` + * * `resolver.reverse()` + * * `resolver.setServers()` + * @since v8.3.0 + */ + export class Resolver { + constructor(options?: ResolverOptions); + /** + * Cancel all outstanding DNS queries made by this resolver. The corresponding + * callbacks will be called with an error with code `ECANCELLED`. 
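+ *
+ * A brief sketch of the behavior described above (the host name is illustrative):
+ *
+ * ```js
+ * const { Resolver } = require('node:dns');
+ * const resolver = new Resolver();
+ *
+ * resolver.resolve4('example.org', (err, addresses) => {
+ *   // Because the query is cancelled below before it completes,
+ *   // `err.code` is expected to be 'ECANCELLED'.
+ *   if (err) console.error(err.code);
+ * });
+ *
+ * resolver.cancel();
+ * ```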
+ * @since v8.3.0 + */ + cancel(): void; + getServers: typeof getServers; + resolve: typeof resolve; + resolve4: typeof resolve4; + resolve6: typeof resolve6; + resolveAny: typeof resolveAny; + resolveCaa: typeof resolveCaa; + resolveCname: typeof resolveCname; + resolveMx: typeof resolveMx; + resolveNaptr: typeof resolveNaptr; + resolveNs: typeof resolveNs; + resolvePtr: typeof resolvePtr; + resolveSoa: typeof resolveSoa; + resolveSrv: typeof resolveSrv; + resolveTxt: typeof resolveTxt; + reverse: typeof reverse; + /** + * The resolver instance will send its requests from the specified IP address. + * This allows programs to specify outbound interfaces when used on multi-homed + * systems. + * + * If a v4 or v6 address is not specified, it is set to the default and the + * operating system will choose a local address automatically. + * + * The resolver will use the v4 local address when making requests to IPv4 DNS + * servers, and the v6 local address when making requests to IPv6 DNS servers. + * The `rrtype` of resolution requests has no impact on the local address used. + * @since v15.1.0, v14.17.0 + * @param [ipv4='0.0.0.0'] A string representation of an IPv4 address. + * @param [ipv6='::0'] A string representation of an IPv6 address. + */ + setLocalAddress(ipv4?: string, ipv6?: string): void; + setServers: typeof setServers; + } + export { dnsPromises as promises }; +} +declare module "node:dns" { + export * from "dns"; +} diff --git a/dist/node_modules/@types/node/dns/promises.d.ts b/dist/node_modules/@types/node/dns/promises.d.ts new file mode 100644 index 0000000..ef9b222 --- /dev/null +++ b/dist/node_modules/@types/node/dns/promises.d.ts @@ -0,0 +1,425 @@ +/** + * The `dns.promises` API provides an alternative set of asynchronous DNS methods + * that return `Promise` objects rather than using callbacks. The API is accessible + * via `require('node:dns').promises` or `require('node:dns/promises')`. + * @since v10.6.0 + */ +declare module "dns/promises" { + import { + AnyRecord, + CaaRecord, + LookupAddress, + LookupAllOptions, + LookupOneOptions, + LookupOptions, + MxRecord, + NaptrRecord, + RecordWithTtl, + ResolveOptions, + ResolverOptions, + ResolveWithTtlOptions, + SoaRecord, + SrvRecord, + } from "node:dns"; + /** + * Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6), + * that are currently configured for DNS resolution. A string will include a port + * section if a custom port is used. + * + * ```js + * [ + * '4.4.4.4', + * '2001:4860:4860::8888', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ] + * ``` + * @since v10.6.0 + */ + function getServers(): string[]; + /** + * Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or + * AAAA (IPv6) record. All `option` properties are optional. If `options` is an + * integer, then it must be `4` or `6` – if `options` is not provided, then IPv4 + * and IPv6 addresses are both returned if found. + * + * With the `all` option set to `true`, the `Promise` is resolved with `addresses`being an array of objects with the properties `address` and `family`. + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is the error code. + * Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when + * the host name does not exist but also when the lookup fails in other ways + * such as no available file descriptors. 
+ * + * `dnsPromises.lookup()` does not necessarily have anything to do with the DNS + * protocol. The implementation uses an operating system facility that can + * associate names with addresses and vice versa. This implementation can have + * subtle but important consequences on the behavior of any Node.js program. Please + * take some time to consult the `Implementation considerations section` before + * using `dnsPromises.lookup()`. + * + * Example usage: + * + * ```js + * const dns = require('node:dns'); + * const dnsPromises = dns.promises; + * const options = { + * family: 6, + * hints: dns.ADDRCONFIG | dns.V4MAPPED, + * }; + * + * dnsPromises.lookup('example.com', options).then((result) => { + * console.log('address: %j family: IPv%s', result.address, result.family); + * // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6 + * }); + * + * // When options.all is true, the result will be an Array. + * options.all = true; + * dnsPromises.lookup('example.com', options).then((result) => { + * console.log('addresses: %j', result); + * // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}] + * }); + * ``` + * @since v10.6.0 + */ + function lookup(hostname: string, family: number): Promise; + function lookup(hostname: string, options: LookupOneOptions): Promise; + function lookup(hostname: string, options: LookupAllOptions): Promise; + function lookup(hostname: string, options: LookupOptions): Promise; + function lookup(hostname: string): Promise; + /** + * Resolves the given `address` and `port` into a host name and service using + * the operating system's underlying `getnameinfo` implementation. + * + * If `address` is not a valid IP address, a `TypeError` will be thrown. + * The `port` will be coerced to a number. If it is not a legal port, a `TypeError`will be thrown. + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is the error code. + * + * ```js + * const dnsPromises = require('node:dns').promises; + * dnsPromises.lookupService('127.0.0.1', 22).then((result) => { + * console.log(result.hostname, result.service); + * // Prints: localhost ssh + * }); + * ``` + * @since v10.6.0 + */ + function lookupService( + address: string, + port: number, + ): Promise<{ + hostname: string; + service: string; + }>; + /** + * Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array + * of the resource records. When successful, the `Promise` is resolved with an + * array of resource records. The type and structure of individual results vary + * based on `rrtype`: + * + * + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is one of the `DNS error codes`. + * @since v10.6.0 + * @param hostname Host name to resolve. + * @param [rrtype='A'] Resource record type. 
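+ *
+ * A brief usage sketch (the host name and record type are illustrative):
+ *
+ * ```js
+ * const dnsPromises = require('node:dns').promises;
+ *
+ * // Resolve MX records; the promise fulfills with an array of objects
+ * // containing `priority` and `exchange` properties.
+ * dnsPromises.resolve('example.org', 'MX').then((addresses) => {
+ *   console.log(addresses);
+ * });
+ * ```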
+ */ + function resolve(hostname: string): Promise; + function resolve(hostname: string, rrtype: "A"): Promise; + function resolve(hostname: string, rrtype: "AAAA"): Promise; + function resolve(hostname: string, rrtype: "ANY"): Promise; + function resolve(hostname: string, rrtype: "CAA"): Promise; + function resolve(hostname: string, rrtype: "CNAME"): Promise; + function resolve(hostname: string, rrtype: "MX"): Promise; + function resolve(hostname: string, rrtype: "NAPTR"): Promise; + function resolve(hostname: string, rrtype: "NS"): Promise; + function resolve(hostname: string, rrtype: "PTR"): Promise; + function resolve(hostname: string, rrtype: "SOA"): Promise; + function resolve(hostname: string, rrtype: "SRV"): Promise; + function resolve(hostname: string, rrtype: "TXT"): Promise; + function resolve( + hostname: string, + rrtype: string, + ): Promise; + /** + * Uses the DNS protocol to resolve IPv4 addresses (`A` records) for the`hostname`. On success, the `Promise` is resolved with an array of IPv4 + * addresses (e.g. `['74.125.79.104', '74.125.79.105', '74.125.79.106']`). + * @since v10.6.0 + * @param hostname Host name to resolve. + */ + function resolve4(hostname: string): Promise; + function resolve4(hostname: string, options: ResolveWithTtlOptions): Promise; + function resolve4(hostname: string, options: ResolveOptions): Promise; + /** + * Uses the DNS protocol to resolve IPv6 addresses (`AAAA` records) for the`hostname`. On success, the `Promise` is resolved with an array of IPv6 + * addresses. + * @since v10.6.0 + * @param hostname Host name to resolve. + */ + function resolve6(hostname: string): Promise; + function resolve6(hostname: string, options: ResolveWithTtlOptions): Promise; + function resolve6(hostname: string, options: ResolveOptions): Promise; + /** + * Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query). + * On success, the `Promise` is resolved with an array containing various types of + * records. Each object has a property `type` that indicates the type of the + * current record. And depending on the `type`, additional properties will be + * present on the object: + * + * + * + * Here is an example of the result object: + * + * ```js + * [ { type: 'A', address: '127.0.0.1', ttl: 299 }, + * { type: 'CNAME', value: 'example.com' }, + * { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 }, + * { type: 'NS', value: 'ns1.example.com' }, + * { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] }, + * { type: 'SOA', + * nsname: 'ns1.example.com', + * hostmaster: 'admin.example.com', + * serial: 156696742, + * refresh: 900, + * retry: 900, + * expire: 1800, + * minttl: 60 } ] + * ``` + * @since v10.6.0 + */ + function resolveAny(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve `CAA` records for the `hostname`. On success, + * the `Promise` is resolved with an array of objects containing available + * certification authority authorization records available for the `hostname`(e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'},{critical: 128, issue: 'pki.example.com'}]`). + * @since v15.0.0, v14.17.0 + */ + function resolveCaa(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve `CNAME` records for the `hostname`. On success, + * the `Promise` is resolved with an array of canonical name records available for + * the `hostname` (e.g. `['bar.example.com']`). 
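+ *
+ * A brief usage sketch (the host name is illustrative):
+ *
+ * ```js
+ * const dnsPromises = require('node:dns').promises;
+ *
+ * dnsPromises.resolveCname('bar.example.com').then((addresses) => {
+ *   // `addresses` is an array of the canonical names for 'bar.example.com'.
+ *   console.log(addresses);
+ * });
+ * ```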
+ * @since v10.6.0 + */ + function resolveCname(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve mail exchange records (`MX` records) for the`hostname`. On success, the `Promise` is resolved with an array of objects + * containing both a `priority` and `exchange` property (e.g.`[{priority: 10, exchange: 'mx.example.com'}, ...]`). + * @since v10.6.0 + */ + function resolveMx(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve regular expression-based records (`NAPTR`records) for the `hostname`. On success, the `Promise` is resolved with an array + * of objects with the following properties: + * + * * `flags` + * * `service` + * * `regexp` + * * `replacement` + * * `order` + * * `preference` + * + * ```js + * { + * flags: 's', + * service: 'SIP+D2U', + * regexp: '', + * replacement: '_sip._udp.example.com', + * order: 30, + * preference: 100 + * } + * ``` + * @since v10.6.0 + */ + function resolveNaptr(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve name server records (`NS` records) for the`hostname`. On success, the `Promise` is resolved with an array of name server + * records available for `hostname` (e.g.`['ns1.example.com', 'ns2.example.com']`). + * @since v10.6.0 + */ + function resolveNs(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve pointer records (`PTR` records) for the`hostname`. On success, the `Promise` is resolved with an array of strings + * containing the reply records. + * @since v10.6.0 + */ + function resolvePtr(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve a start of authority record (`SOA` record) for + * the `hostname`. On success, the `Promise` is resolved with an object with the + * following properties: + * + * * `nsname` + * * `hostmaster` + * * `serial` + * * `refresh` + * * `retry` + * * `expire` + * * `minttl` + * + * ```js + * { + * nsname: 'ns.example.com', + * hostmaster: 'root.example.com', + * serial: 2013101809, + * refresh: 10000, + * retry: 2400, + * expire: 604800, + * minttl: 3600 + * } + * ``` + * @since v10.6.0 + */ + function resolveSoa(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve service records (`SRV` records) for the`hostname`. On success, the `Promise` is resolved with an array of objects with + * the following properties: + * + * * `priority` + * * `weight` + * * `port` + * * `name` + * + * ```js + * { + * priority: 10, + * weight: 5, + * port: 21223, + * name: 'service.example.com' + * } + * ``` + * @since v10.6.0 + */ + function resolveSrv(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve text queries (`TXT` records) for the`hostname`. On success, the `Promise` is resolved with a two-dimensional array + * of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of + * one record. Depending on the use case, these could be either joined together or + * treated separately. + * @since v10.6.0 + */ + function resolveTxt(hostname: string): Promise; + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an + * array of host names. + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is one of the `DNS error codes`. + * @since v10.6.0 + */ + function reverse(ip: string): Promise; + /** + * Get the default value for `verbatim` in {@link lookup} and `dnsPromises.lookup()`. The value could be: + * + * * `ipv4first`: for `verbatim` defaulting to `false`. 
+ * * `verbatim`: for `verbatim` defaulting to `true`. + * @since v20.1.0 + */ + function getDefaultResultOrder(): "ipv4first" | "verbatim"; + /** + * Sets the IP address and port of servers to be used when performing DNS + * resolution. The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted + * addresses. If the port is the IANA default DNS port (53) it can be omitted. + * + * ```js + * dnsPromises.setServers([ + * '4.4.4.4', + * '[2001:4860:4860::8888]', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ]); + * ``` + * + * An error will be thrown if an invalid address is provided. + * + * The `dnsPromises.setServers()` method must not be called while a DNS query is in + * progress. + * + * This method works much like [resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html). + * That is, if attempting to resolve with the first server provided results in a`NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with + * subsequent servers provided. Fallback DNS servers will only be used if the + * earlier ones time out or result in some other error. + * @since v10.6.0 + * @param servers array of `RFC 5952` formatted addresses + */ + function setServers(servers: readonly string[]): void; + /** + * Set the default value of `verbatim` in `dns.lookup()` and `dnsPromises.lookup()`. The value could be: + * + * * `ipv4first`: sets default `verbatim` `false`. + * * `verbatim`: sets default `verbatim` `true`. + * + * The default is `verbatim` and `dnsPromises.setDefaultResultOrder()` have + * higher priority than `--dns-result-order`. When using `worker threads`,`dnsPromises.setDefaultResultOrder()` from the main thread won't affect the + * default dns orders in workers. + * @since v16.4.0, v14.18.0 + * @param order must be `'ipv4first'` or `'verbatim'`. + */ + function setDefaultResultOrder(order: "ipv4first" | "verbatim"): void; + /** + * An independent resolver for DNS requests. + * + * Creating a new resolver uses the default server settings. Setting + * the servers used for a resolver using `resolver.setServers()` does not affect + * other resolvers: + * + * ```js + * const { Resolver } = require('node:dns').promises; + * const resolver = new Resolver(); + * resolver.setServers(['4.4.4.4']); + * + * // This request will use the server at 4.4.4.4, independent of global settings. + * resolver.resolve4('example.org').then((addresses) => { + * // ... + * }); + * + * // Alternatively, the same code can be written using async-await style. 
+ * (async function() { + * const addresses = await resolver.resolve4('example.org'); + * })(); + * ``` + * + * The following methods from the `dnsPromises` API are available: + * + * * `resolver.getServers()` + * * `resolver.resolve()` + * * `resolver.resolve4()` + * * `resolver.resolve6()` + * * `resolver.resolveAny()` + * * `resolver.resolveCaa()` + * * `resolver.resolveCname()` + * * `resolver.resolveMx()` + * * `resolver.resolveNaptr()` + * * `resolver.resolveNs()` + * * `resolver.resolvePtr()` + * * `resolver.resolveSoa()` + * * `resolver.resolveSrv()` + * * `resolver.resolveTxt()` + * * `resolver.reverse()` + * * `resolver.setServers()` + * @since v10.6.0 + */ + class Resolver { + constructor(options?: ResolverOptions); + cancel(): void; + getServers: typeof getServers; + resolve: typeof resolve; + resolve4: typeof resolve4; + resolve6: typeof resolve6; + resolveAny: typeof resolveAny; + resolveCaa: typeof resolveCaa; + resolveCname: typeof resolveCname; + resolveMx: typeof resolveMx; + resolveNaptr: typeof resolveNaptr; + resolveNs: typeof resolveNs; + resolvePtr: typeof resolvePtr; + resolveSoa: typeof resolveSoa; + resolveSrv: typeof resolveSrv; + resolveTxt: typeof resolveTxt; + reverse: typeof reverse; + setLocalAddress(ipv4?: string, ipv6?: string): void; + setServers: typeof setServers; + } +} +declare module "node:dns/promises" { + export * from "dns/promises"; +} diff --git a/dist/node_modules/@types/node/dom-events.d.ts b/dist/node_modules/@types/node/dom-events.d.ts new file mode 100644 index 0000000..147a7b6 --- /dev/null +++ b/dist/node_modules/@types/node/dom-events.d.ts @@ -0,0 +1,122 @@ +export {}; // Don't export anything! + +//// DOM-like Events +// NB: The Event / EventTarget / EventListener implementations below were copied +// from lib.dom.d.ts, then edited to reflect Node's documentation at +// https://nodejs.org/api/events.html#class-eventtarget. +// Please read that link to understand important implementation differences. + +// This conditional type will be the existing global Event in a browser, or +// the copy below in a Node environment. +type __Event = typeof globalThis extends { onmessage: any; Event: any } ? {} + : { + /** This is not used in Node.js and is provided purely for completeness. */ + readonly bubbles: boolean; + /** Alias for event.stopPropagation(). This is not used in Node.js and is provided purely for completeness. */ + cancelBubble: () => void; + /** True if the event was created with the cancelable option */ + readonly cancelable: boolean; + /** This is not used in Node.js and is provided purely for completeness. */ + readonly composed: boolean; + /** Returns an array containing the current EventTarget as the only entry or empty if the event is not being dispatched. This is not used in Node.js and is provided purely for completeness. */ + composedPath(): [EventTarget?]; + /** Alias for event.target. */ + readonly currentTarget: EventTarget | null; + /** Is true if cancelable is true and event.preventDefault() has been called. */ + readonly defaultPrevented: boolean; + /** This is not used in Node.js and is provided purely for completeness. */ + readonly eventPhase: 0 | 2; + /** The `AbortSignal` "abort" event is emitted with `isTrusted` set to `true`. The value is `false` in all other cases. */ + readonly isTrusted: boolean; + /** Sets the `defaultPrevented` property to `true` if `cancelable` is `true`. */ + preventDefault(): void; + /** This is not used in Node.js and is provided purely for completeness. 
*/ + returnValue: boolean; + /** Alias for event.target. */ + readonly srcElement: EventTarget | null; + /** Stops the invocation of event listeners after the current one completes. */ + stopImmediatePropagation(): void; + /** This is not used in Node.js and is provided purely for completeness. */ + stopPropagation(): void; + /** The `EventTarget` dispatching the event */ + readonly target: EventTarget | null; + /** The millisecond timestamp when the Event object was created. */ + readonly timeStamp: number; + /** Returns the type of event, e.g. "click", "hashchange", or "submit". */ + readonly type: string; + }; + +// See comment above explaining conditional type +type __EventTarget = typeof globalThis extends { onmessage: any; EventTarget: any } ? {} + : { + /** + * Adds a new handler for the `type` event. Any given `listener` is added only once per `type` and per `capture` option value. + * + * If the `once` option is true, the `listener` is removed after the next time a `type` event is dispatched. + * + * The `capture` option is not used by Node.js in any functional way other than tracking registered event listeners per the `EventTarget` specification. + * Specifically, the `capture` option is used as part of the key when registering a `listener`. + * Any individual `listener` may be added once with `capture = false`, and once with `capture = true`. + */ + addEventListener( + type: string, + listener: EventListener | EventListenerObject, + options?: AddEventListenerOptions | boolean, + ): void; + /** Dispatches a synthetic event event to target and returns true if either event's cancelable attribute value is false or its preventDefault() method was not invoked, and false otherwise. */ + dispatchEvent(event: Event): boolean; + /** Removes the event listener in target's event listener list with the same type, callback, and options. */ + removeEventListener( + type: string, + listener: EventListener | EventListenerObject, + options?: EventListenerOptions | boolean, + ): void; + }; + +interface EventInit { + bubbles?: boolean; + cancelable?: boolean; + composed?: boolean; +} + +interface EventListenerOptions { + /** Not directly used by Node.js. Added for API completeness. Default: `false`. */ + capture?: boolean; +} + +interface AddEventListenerOptions extends EventListenerOptions { + /** When `true`, the listener is automatically removed when it is first invoked. Default: `false`. */ + once?: boolean; + /** When `true`, serves as a hint that the listener will not call the `Event` object's `preventDefault()` method. Default: false. */ + passive?: boolean; +} + +interface EventListener { + (evt: Event): void; +} + +interface EventListenerObject { + handleEvent(object: Event): void; +} + +import {} from "events"; // Make this an ambient declaration +declare global { + /** An event which takes place in the DOM. */ + interface Event extends __Event {} + var Event: typeof globalThis extends { onmessage: any; Event: infer T } ? T + : { + prototype: __Event; + new(type: string, eventInitDict?: EventInit): __Event; + }; + + /** + * EventTarget is a DOM interface implemented by objects that can + * receive events and may have listeners for them. + */ + interface EventTarget extends __EventTarget {} + var EventTarget: typeof globalThis extends { onmessage: any; EventTarget: infer T } ? 
T + : { + prototype: __EventTarget; + new(): __EventTarget; + }; +} diff --git a/dist/node_modules/@types/node/domain.d.ts b/dist/node_modules/@types/node/domain.d.ts new file mode 100644 index 0000000..72f17bd --- /dev/null +++ b/dist/node_modules/@types/node/domain.d.ts @@ -0,0 +1,170 @@ +/** + * **This module is pending deprecation.** Once a replacement API has been + * finalized, this module will be fully deprecated. Most developers should + * **not** have cause to use this module. Users who absolutely must have + * the functionality that domains provide may rely on it for the time being + * but should expect to have to migrate to a different solution + * in the future. + * + * Domains provide a way to handle multiple different IO operations as a + * single group. If any of the event emitters or callbacks registered to a + * domain emit an `'error'` event, or throw an error, then the domain object + * will be notified, rather than losing the context of the error in the`process.on('uncaughtException')` handler, or causing the program to + * exit immediately with an error code. + * @deprecated Since v1.4.2 - Deprecated + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/domain.js) + */ +declare module "domain" { + import EventEmitter = require("node:events"); + /** + * The `Domain` class encapsulates the functionality of routing errors and + * uncaught exceptions to the active `Domain` object. + * + * To handle the errors that it catches, listen to its `'error'` event. + */ + class Domain extends EventEmitter { + /** + * An array of timers and event emitters that have been explicitly added + * to the domain. + */ + members: Array; + /** + * The `enter()` method is plumbing used by the `run()`, `bind()`, and`intercept()` methods to set the active domain. It sets `domain.active` and`process.domain` to the domain, and implicitly + * pushes the domain onto the domain + * stack managed by the domain module (see {@link exit} for details on the + * domain stack). The call to `enter()` delimits the beginning of a chain of + * asynchronous calls and I/O operations bound to a domain. + * + * Calling `enter()` changes only the active domain, and does not alter the domain + * itself. `enter()` and `exit()` can be called an arbitrary number of times on a + * single domain. + */ + enter(): void; + /** + * The `exit()` method exits the current domain, popping it off the domain stack. + * Any time execution is going to switch to the context of a different chain of + * asynchronous calls, it's important to ensure that the current domain is exited. + * The call to `exit()` delimits either the end of or an interruption to the chain + * of asynchronous calls and I/O operations bound to a domain. + * + * If there are multiple, nested domains bound to the current execution context,`exit()` will exit any domains nested within this domain. + * + * Calling `exit()` changes only the active domain, and does not alter the domain + * itself. `enter()` and `exit()` can be called an arbitrary number of times on a + * single domain. + */ + exit(): void; + /** + * Run the supplied function in the context of the domain, implicitly + * binding all event emitters, timers, and low-level requests that are + * created in that context. Optionally, arguments can be passed to + * the function. + * + * This is the most basic way to use a domain. 
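The `Event`/`EventTarget` globals declared in dom-events.d.ts above can be exercised without any import on modern Node; a small sketch (the event name is arbitrary):

```ts
const target = new EventTarget();

target.addEventListener(
  "tick",
  (event) => {
    console.log("received", event.type, "cancelable:", event.cancelable);
  },
  { once: true }, // AddEventListenerOptions: listener is removed after the first call
);

target.dispatchEvent(new Event("tick", { cancelable: true }));
target.dispatchEvent(new Event("tick")); // no output: the once-only listener is gone
```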
+ * + * ```js + * const domain = require('node:domain'); + * const fs = require('node:fs'); + * const d = domain.create(); + * d.on('error', (er) => { + * console.error('Caught error!', er); + * }); + * d.run(() => { + * process.nextTick(() => { + * setTimeout(() => { // Simulating some various async stuff + * fs.open('non-existent file', 'r', (er, fd) => { + * if (er) throw er; + * // proceed... + * }); + * }, 100); + * }); + * }); + * ``` + * + * In this example, the `d.on('error')` handler will be triggered, rather + * than crashing the program. + */ + run(fn: (...args: any[]) => T, ...args: any[]): T; + /** + * Explicitly adds an emitter to the domain. If any event handlers called by + * the emitter throw an error, or if the emitter emits an `'error'` event, it + * will be routed to the domain's `'error'` event, just like with implicit + * binding. + * + * This also works with timers that are returned from `setInterval()` and `setTimeout()`. If their callback function throws, it will be caught by + * the domain `'error'` handler. + * + * If the Timer or `EventEmitter` was already bound to a domain, it is removed + * from that one, and bound to this one instead. + * @param emitter emitter or timer to be added to the domain + */ + add(emitter: EventEmitter | NodeJS.Timer): void; + /** + * The opposite of {@link add}. Removes domain handling from the + * specified emitter. + * @param emitter emitter or timer to be removed from the domain + */ + remove(emitter: EventEmitter | NodeJS.Timer): void; + /** + * The returned function will be a wrapper around the supplied callback + * function. When the returned function is called, any errors that are + * thrown will be routed to the domain's `'error'` event. + * + * ```js + * const d = domain.create(); + * + * function readSomeFile(filename, cb) { + * fs.readFile(filename, 'utf8', d.bind((er, data) => { + * // If this throws, it will also be passed to the domain. + * return cb(er, data ? JSON.parse(data) : null); + * })); + * } + * + * d.on('error', (er) => { + * // An error occurred somewhere. If we throw it now, it will crash the program + * // with the normal line number and stack message. + * }); + * ``` + * @param callback The callback function + * @return The bound function + */ + bind(callback: T): T; + /** + * This method is almost identical to {@link bind}. However, in + * addition to catching thrown errors, it will also intercept `Error` objects sent as the first argument to the function. + * + * In this way, the common `if (err) return callback(err);` pattern can be replaced + * with a single error handler in a single place. + * + * ```js + * const d = domain.create(); + * + * function readSomeFile(filename, cb) { + * fs.readFile(filename, 'utf8', d.intercept((data) => { + * // Note, the first argument is never passed to the + * // callback since it is assumed to be the 'Error' argument + * // and thus intercepted by the domain. + * + * // If this throws, it will also be passed to the domain + * // so the error-handling logic can be moved to the 'error' + * // event on the domain instead of being repeated throughout + * // the program. + * return cb(null, JSON.parse(data)); + * })); + * } + * + * d.on('error', (er) => { + * // An error occurred somewhere. If we throw it now, it will crash the program + * // with the normal line number and stack message. 
+ * }); + * ``` + * @param callback The callback function + * @return The intercepted function + */ + intercept(callback: T): T; + } + function create(): Domain; +} +declare module "node:domain" { + export * from "domain"; +} diff --git a/dist/node_modules/@types/node/events.d.ts b/dist/node_modules/@types/node/events.d.ts new file mode 100644 index 0000000..87b3ba0 --- /dev/null +++ b/dist/node_modules/@types/node/events.d.ts @@ -0,0 +1,896 @@ +/** + * Much of the Node.js core API is built around an idiomatic asynchronous + * event-driven architecture in which certain kinds of objects (called "emitters") + * emit named events that cause `Function` objects ("listeners") to be called. + * + * For instance: a `net.Server` object emits an event each time a peer + * connects to it; a `fs.ReadStream` emits an event when the file is opened; + * a `stream` emits an event whenever data is available to be read. + * + * All objects that emit events are instances of the `EventEmitter` class. These + * objects expose an `eventEmitter.on()` function that allows one or more + * functions to be attached to named events emitted by the object. Typically, + * event names are camel-cased strings but any valid JavaScript property key + * can be used. + * + * When the `EventEmitter` object emits an event, all of the functions attached + * to that specific event are called _synchronously_. Any values returned by the + * called listeners are _ignored_ and discarded. + * + * The following example shows a simple `EventEmitter` instance with a single + * listener. The `eventEmitter.on()` method is used to register listeners, while + * the `eventEmitter.emit()` method is used to trigger the event. + * + * ```js + * import { EventEmitter } from 'node:events'; + * + * class MyEmitter extends EventEmitter {} + * + * const myEmitter = new MyEmitter(); + * myEmitter.on('event', () => { + * console.log('an event occurred!'); + * }); + * myEmitter.emit('event'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/events.js) + */ +declare module "events" { + import { AsyncResource, AsyncResourceOptions } from "node:async_hooks"; + // NOTE: This class is in the docs but is **not actually exported** by Node. + // If https://github.com/nodejs/node/issues/39903 gets resolved and Node + // actually starts exporting the class, uncomment below. + // import { EventListener, EventListenerObject } from '__dom-events'; + // /** The NodeEventTarget is a Node.js-specific extension to EventTarget that emulates a subset of the EventEmitter API. */ + // interface NodeEventTarget extends EventTarget { + // /** + // * Node.js-specific extension to the `EventTarget` class that emulates the equivalent `EventEmitter` API. + // * The only difference between `addListener()` and `addEventListener()` is that addListener() will return a reference to the EventTarget. + // */ + // addListener(type: string, listener: EventListener | EventListenerObject, options?: { once: boolean }): this; + // /** Node.js-specific extension to the `EventTarget` class that returns an array of event `type` names for which event listeners are registered. */ + // eventNames(): string[]; + // /** Node.js-specific extension to the `EventTarget` class that returns the number of event listeners registered for the `type`. */ + // listenerCount(type: string): number; + // /** Node.js-specific alias for `eventTarget.removeListener()`. 
*/ + // off(type: string, listener: EventListener | EventListenerObject): this; + // /** Node.js-specific alias for `eventTarget.addListener()`. */ + // on(type: string, listener: EventListener | EventListenerObject, options?: { once: boolean }): this; + // /** Node.js-specific extension to the `EventTarget` class that adds a `once` listener for the given event `type`. This is equivalent to calling `on` with the `once` option set to `true`. */ + // once(type: string, listener: EventListener | EventListenerObject): this; + // /** + // * Node.js-specific extension to the `EventTarget` class. + // * If `type` is specified, removes all registered listeners for `type`, + // * otherwise removes all registered listeners. + // */ + // removeAllListeners(type: string): this; + // /** + // * Node.js-specific extension to the `EventTarget` class that removes the listener for the given `type`. + // * The only difference between `removeListener()` and `removeEventListener()` is that `removeListener()` will return a reference to the `EventTarget`. + // */ + // removeListener(type: string, listener: EventListener | EventListenerObject): this; + // } + interface EventEmitterOptions { + /** + * Enables automatic capturing of promise rejection. + */ + captureRejections?: boolean | undefined; + } + // Any EventTarget with a Node-style `once` function + interface _NodeEventTarget { + once(eventName: string | symbol, listener: (...args: any[]) => void): this; + } + // Any EventTarget with a DOM-style `addEventListener` + interface _DOMEventTarget { + addEventListener( + eventName: string, + listener: (...args: any[]) => void, + opts?: { + once: boolean; + }, + ): any; + } + interface StaticEventEmitterOptions { + signal?: AbortSignal | undefined; + } + interface EventEmitter = DefaultEventMap> extends NodeJS.EventEmitter {} + type EventMap = Record | DefaultEventMap; + type DefaultEventMap = [never]; + type AnyRest = [...args: any[]]; + type Args = T extends DefaultEventMap ? AnyRest : ( + K extends keyof T ? T[K] : never + ); + type Key = T extends DefaultEventMap ? string | symbol : K | keyof T; + type Key2 = T extends DefaultEventMap ? string | symbol : K & keyof T; + type Listener = T extends DefaultEventMap ? F : ( + K extends keyof T ? ( + T[K] extends unknown[] ? (...args: T[K]) => void : never + ) + : never + ); + type Listener1 = Listener void>; + type Listener2 = Listener; + + /** + * The `EventEmitter` class is defined and exposed by the `node:events` module: + * + * ```js + * import { EventEmitter } from 'node:events'; + * ``` + * + * All `EventEmitter`s emit the event `'newListener'` when new listeners are + * added and `'removeListener'` when existing listeners are removed. + * + * It supports the following option: + * @since v0.1.26 + */ + class EventEmitter = DefaultEventMap> { + constructor(options?: EventEmitterOptions); + + [EventEmitter.captureRejectionSymbol]?(error: Error, event: Key, ...args: Args): void; + + /** + * Creates a `Promise` that is fulfilled when the `EventEmitter` emits the given + * event or that is rejected if the `EventEmitter` emits `'error'` while waiting. + * The `Promise` will resolve with an array of all the arguments emitted to the + * given event. + * + * This method is intentionally generic and works with the web platform [EventTarget](https://dom.spec.whatwg.org/#interface-eventtarget) interface, which has no special`'error'` event + * semantics and does not listen to the `'error'` event. 
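The event-map helper types above (whose generic parameters are garbled in this dump) let an `EventEmitter` be typed per event name; a sketch assuming the upstream `EventEmitter<T>` signature:

```ts
import { EventEmitter } from "node:events";

// Each key names an event; each value is the tuple of arguments it is emitted with.
interface JobEvents {
  progress: [percent: number];
  done: [result: string];
}

const jobs = new EventEmitter<JobEvents>();

jobs.on("progress", (percent) => console.log(`${percent}% complete`));
jobs.on("done", (result) => console.log("finished:", result));

jobs.emit("progress", 40);
jobs.emit("done", "ok");
// jobs.emit("progress", "40"); // rejected by the compiler: expects a number
```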
+ * + * ```js + * import { once, EventEmitter } from 'node:events'; + * import process from 'node:process'; + * + * const ee = new EventEmitter(); + * + * process.nextTick(() => { + * ee.emit('myevent', 42); + * }); + * + * const [value] = await once(ee, 'myevent'); + * console.log(value); + * + * const err = new Error('kaboom'); + * process.nextTick(() => { + * ee.emit('error', err); + * }); + * + * try { + * await once(ee, 'myevent'); + * } catch (err) { + * console.error('error happened', err); + * } + * ``` + * + * The special handling of the `'error'` event is only used when `events.once()`is used to wait for another event. If `events.once()` is used to wait for the + * '`error'` event itself, then it is treated as any other kind of event without + * special handling: + * + * ```js + * import { EventEmitter, once } from 'node:events'; + * + * const ee = new EventEmitter(); + * + * once(ee, 'error') + * .then(([err]) => console.log('ok', err.message)) + * .catch((err) => console.error('error', err.message)); + * + * ee.emit('error', new Error('boom')); + * + * // Prints: ok boom + * ``` + * + * An `AbortSignal` can be used to cancel waiting for the event: + * + * ```js + * import { EventEmitter, once } from 'node:events'; + * + * const ee = new EventEmitter(); + * const ac = new AbortController(); + * + * async function foo(emitter, event, signal) { + * try { + * await once(emitter, event, { signal }); + * console.log('event emitted!'); + * } catch (error) { + * if (error.name === 'AbortError') { + * console.error('Waiting for the event was canceled!'); + * } else { + * console.error('There was an error', error.message); + * } + * } + * } + * + * foo(ee, 'foo', ac.signal); + * ac.abort(); // Abort waiting for the event + * ee.emit('foo'); // Prints: Waiting for the event was canceled! + * ``` + * @since v11.13.0, v10.16.0 + */ + static once( + emitter: _NodeEventTarget, + eventName: string | symbol, + options?: StaticEventEmitterOptions, + ): Promise; + static once(emitter: _DOMEventTarget, eventName: string, options?: StaticEventEmitterOptions): Promise; + /** + * ```js + * import { on, EventEmitter } from 'node:events'; + * import process from 'node:process'; + * + * const ee = new EventEmitter(); + * + * // Emit later on + * process.nextTick(() => { + * ee.emit('foo', 'bar'); + * ee.emit('foo', 42); + * }); + * + * for await (const event of on(ee, 'foo')) { + * // The execution of this inner block is synchronous and it + * // processes one event at a time (even with await). Do not use + * // if concurrent execution is required. + * console.log(event); // prints ['bar'] [42] + * } + * // Unreachable here + * ``` + * + * Returns an `AsyncIterator` that iterates `eventName` events. It will throw + * if the `EventEmitter` emits `'error'`. It removes all listeners when + * exiting the loop. The `value` returned by each iteration is an array + * composed of the emitted event arguments. + * + * An `AbortSignal` can be used to cancel waiting on events: + * + * ```js + * import { on, EventEmitter } from 'node:events'; + * import process from 'node:process'; + * + * const ac = new AbortController(); + * + * (async () => { + * const ee = new EventEmitter(); + * + * // Emit later on + * process.nextTick(() => { + * ee.emit('foo', 'bar'); + * ee.emit('foo', 42); + * }); + * + * for await (const event of on(ee, 'foo', { signal: ac.signal })) { + * // The execution of this inner block is synchronous and it + * // processes one event at a time (even with await). 
Do not use + * // if concurrent execution is required. + * console.log(event); // prints ['bar'] [42] + * } + * // Unreachable here + * })(); + * + * process.nextTick(() => ac.abort()); + * ``` + * @since v13.6.0, v12.16.0 + * @param eventName The name of the event being listened for + * @return that iterates `eventName` events emitted by the `emitter` + */ + static on( + emitter: NodeJS.EventEmitter, + eventName: string, + options?: StaticEventEmitterOptions, + ): AsyncIterableIterator; + /** + * A class method that returns the number of listeners for the given `eventName`registered on the given `emitter`. + * + * ```js + * import { EventEmitter, listenerCount } from 'node:events'; + * + * const myEmitter = new EventEmitter(); + * myEmitter.on('event', () => {}); + * myEmitter.on('event', () => {}); + * console.log(listenerCount(myEmitter, 'event')); + * // Prints: 2 + * ``` + * @since v0.9.12 + * @deprecated Since v3.2.0 - Use `listenerCount` instead. + * @param emitter The emitter to query + * @param eventName The event name + */ + static listenerCount(emitter: NodeJS.EventEmitter, eventName: string | symbol): number; + /** + * Returns a copy of the array of listeners for the event named `eventName`. + * + * For `EventEmitter`s this behaves exactly the same as calling `.listeners` on + * the emitter. + * + * For `EventTarget`s this is the only way to get the event listeners for the + * event target. This is useful for debugging and diagnostic purposes. + * + * ```js + * import { getEventListeners, EventEmitter } from 'node:events'; + * + * { + * const ee = new EventEmitter(); + * const listener = () => console.log('Events are fun'); + * ee.on('foo', listener); + * console.log(getEventListeners(ee, 'foo')); // [ [Function: listener] ] + * } + * { + * const et = new EventTarget(); + * const listener = () => console.log('Events are fun'); + * et.addEventListener('foo', listener); + * console.log(getEventListeners(et, 'foo')); // [ [Function: listener] ] + * } + * ``` + * @since v15.2.0, v14.17.0 + */ + static getEventListeners(emitter: _DOMEventTarget | NodeJS.EventEmitter, name: string | symbol): Function[]; + /** + * Returns the currently set max amount of listeners. + * + * For `EventEmitter`s this behaves exactly the same as calling `.getMaxListeners` on + * the emitter. + * + * For `EventTarget`s this is the only way to get the max event listeners for the + * event target. If the number of event handlers on a single EventTarget exceeds + * the max set, the EventTarget will print a warning. + * + * ```js + * import { getMaxListeners, setMaxListeners, EventEmitter } from 'node:events'; + * + * { + * const ee = new EventEmitter(); + * console.log(getMaxListeners(ee)); // 10 + * setMaxListeners(11, ee); + * console.log(getMaxListeners(ee)); // 11 + * } + * { + * const et = new EventTarget(); + * console.log(getMaxListeners(et)); // 10 + * setMaxListeners(11, et); + * console.log(getMaxListeners(et)); // 11 + * } + * ``` + * @since v19.9.0 + */ + static getMaxListeners(emitter: _DOMEventTarget | NodeJS.EventEmitter): number; + /** + * ```js + * import { setMaxListeners, EventEmitter } from 'node:events'; + * + * const target = new EventTarget(); + * const emitter = new EventEmitter(); + * + * setMaxListeners(5, target, emitter); + * ``` + * @since v15.4.0 + * @param n A non-negative number. The maximum number of listeners per `EventTarget` event. + * @param eventsTargets Zero or more {EventTarget} or {EventEmitter} instances. 
If none are specified, `n` is set as the default max for all newly created {EventTarget} and {EventEmitter} + * objects. + */ + static setMaxListeners(n?: number, ...eventTargets: Array<_DOMEventTarget | NodeJS.EventEmitter>): void; + /** + * Listens once to the `abort` event on the provided `signal`. + * + * Listening to the `abort` event on abort signals is unsafe and may + * lead to resource leaks since another third party with the signal can + * call `e.stopImmediatePropagation()`. Unfortunately Node.js cannot change + * this since it would violate the web standard. Additionally, the original + * API makes it easy to forget to remove listeners. + * + * This API allows safely using `AbortSignal`s in Node.js APIs by solving these + * two issues by listening to the event such that `stopImmediatePropagation` does + * not prevent the listener from running. + * + * Returns a disposable so that it may be unsubscribed from more easily. + * + * ```js + * import { addAbortListener } from 'node:events'; + * + * function example(signal) { + * let disposable; + * try { + * signal.addEventListener('abort', (e) => e.stopImmediatePropagation()); + * disposable = addAbortListener(signal, (e) => { + * // Do something when signal is aborted. + * }); + * } finally { + * disposable?.[Symbol.dispose](); + * } + * } + * ``` + * @since v20.5.0 + * @experimental + * @return Disposable that removes the `abort` listener. + */ + static addAbortListener(signal: AbortSignal, resource: (event: Event) => void): Disposable; + /** + * This symbol shall be used to install a listener for only monitoring `'error'`events. Listeners installed using this symbol are called before the regular`'error'` listeners are called. + * + * Installing a listener using this symbol does not change the behavior once an`'error'` event is emitted. Therefore, the process will still crash if no + * regular `'error'` listener is installed. + * @since v13.6.0, v12.17.0 + */ + static readonly errorMonitor: unique symbol; + /** + * Value: `Symbol.for('nodejs.rejection')` + * + * See how to write a custom `rejection handler`. + * @since v13.4.0, v12.16.0 + */ + static readonly captureRejectionSymbol: unique symbol; + /** + * Value: [boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) + * + * Change the default `captureRejections` option on all new `EventEmitter` objects. + * @since v13.4.0, v12.16.0 + */ + static captureRejections: boolean; + /** + * By default, a maximum of `10` listeners can be registered for any single + * event. This limit can be changed for individual `EventEmitter` instances + * using the `emitter.setMaxListeners(n)` method. To change the default + * for _all_`EventEmitter` instances, the `events.defaultMaxListeners`property can be used. If this value is not a positive number, a `RangeError`is thrown. + * + * Take caution when setting the `events.defaultMaxListeners` because the + * change affects _all_`EventEmitter` instances, including those created before + * the change is made. However, calling `emitter.setMaxListeners(n)` still has + * precedence over `events.defaultMaxListeners`. + * + * This is not a hard limit. The `EventEmitter` instance will allow + * more listeners to be added but will output a trace warning to stderr indicating + * that a "possible EventEmitter memory leak" has been detected. 
For any single`EventEmitter`, the `emitter.getMaxListeners()` and `emitter.setMaxListeners()`methods can be used to + * temporarily avoid this warning: + * + * ```js + * import { EventEmitter } from 'node:events'; + * const emitter = new EventEmitter(); + * emitter.setMaxListeners(emitter.getMaxListeners() + 1); + * emitter.once('event', () => { + * // do stuff + * emitter.setMaxListeners(Math.max(emitter.getMaxListeners() - 1, 0)); + * }); + * ``` + * + * The `--trace-warnings` command-line flag can be used to display the + * stack trace for such warnings. + * + * The emitted warning can be inspected with `process.on('warning')` and will + * have the additional `emitter`, `type`, and `count` properties, referring to + * the event emitter instance, the event's name and the number of attached + * listeners, respectively. + * Its `name` property is set to `'MaxListenersExceededWarning'`. + * @since v0.11.2 + */ + static defaultMaxListeners: number; + } + import internal = require("node:events"); + namespace EventEmitter { + // Should just be `export { EventEmitter }`, but that doesn't work in TypeScript 3.4 + export { internal as EventEmitter }; + export interface Abortable { + /** + * When provided the corresponding `AbortController` can be used to cancel an asynchronous action. + */ + signal?: AbortSignal | undefined; + } + + export interface EventEmitterReferencingAsyncResource extends AsyncResource { + readonly eventEmitter: EventEmitterAsyncResource; + } + + export interface EventEmitterAsyncResourceOptions extends AsyncResourceOptions, EventEmitterOptions { + /** + * The type of async event, this is required when instantiating `EventEmitterAsyncResource` + * directly rather than as a child class. + * @default new.target.name if instantiated as a child class. + */ + name?: string; + } + + /** + * Integrates `EventEmitter` with `AsyncResource` for `EventEmitter`s that + * require manual async tracking. Specifically, all events emitted by instances + * of `events.EventEmitterAsyncResource` will run within its `async context`. + * + * ```js + * import { EventEmitterAsyncResource, EventEmitter } from 'node:events'; + * import { notStrictEqual, strictEqual } from 'node:assert'; + * import { executionAsyncId, triggerAsyncId } from 'node:async_hooks'; + * + * // Async tracking tooling will identify this as 'Q'. + * const ee1 = new EventEmitterAsyncResource({ name: 'Q' }); + * + * // 'foo' listeners will run in the EventEmitters async context. + * ee1.on('foo', () => { + * strictEqual(executionAsyncId(), ee1.asyncId); + * strictEqual(triggerAsyncId(), ee1.triggerAsyncId); + * }); + * + * const ee2 = new EventEmitter(); + * + * // 'foo' listeners on ordinary EventEmitters that do not track async + * // context, however, run in the same async context as the emit(). + * ee2.on('foo', () => { + * notStrictEqual(executionAsyncId(), ee2.asyncId); + * notStrictEqual(triggerAsyncId(), ee2.triggerAsyncId); + * }); + * + * Promise.resolve().then(() => { + * ee1.emit('foo'); + * ee2.emit('foo'); + * }); + * ``` + * + * The `EventEmitterAsyncResource` class has the same methods and takes the + * same options as `EventEmitter` and `AsyncResource` themselves. + * @since v17.4.0, v16.14.0 + */ + export class EventEmitterAsyncResource extends EventEmitter { + /** + * @param options Only optional in child class. + */ + constructor(options?: EventEmitterAsyncResourceOptions); + /** + * Call all `destroy` hooks. This should only ever be called once. An error will + * be thrown if it is called more than once. 
This **must** be manually called. If + * the resource is left to be collected by the GC then the `destroy` hooks will + * never be called. + */ + emitDestroy(): void; + /** + * The unique `asyncId` assigned to the resource. + */ + readonly asyncId: number; + /** + * The same triggerAsyncId that is passed to the AsyncResource constructor. + */ + readonly triggerAsyncId: number; + /** + * The returned `AsyncResource` object has an additional `eventEmitter` property + * that provides a reference to this `EventEmitterAsyncResource`. + */ + readonly asyncResource: EventEmitterReferencingAsyncResource; + } + } + global { + namespace NodeJS { + interface EventEmitter = DefaultEventMap> { + [EventEmitter.captureRejectionSymbol]?(error: Error, event: Key, ...args: Args): void; + /** + * Alias for `emitter.on(eventName, listener)`. + * @since v0.1.26 + */ + addListener(eventName: Key, listener: Listener1): this; + /** + * Adds the `listener` function to the end of the listeners array for the + * event named `eventName`. No checks are made to see if the `listener` has + * already been added. Multiple calls passing the same combination of `eventName`and `listener` will result in the `listener` being added, and called, multiple + * times. + * + * ```js + * server.on('connection', (stream) => { + * console.log('someone connected!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * + * By default, event listeners are invoked in the order they are added. The`emitter.prependListener()` method can be used as an alternative to add the + * event listener to the beginning of the listeners array. + * + * ```js + * import { EventEmitter } from 'node:events'; + * const myEE = new EventEmitter(); + * myEE.on('foo', () => console.log('a')); + * myEE.prependListener('foo', () => console.log('b')); + * myEE.emit('foo'); + * // Prints: + * // b + * // a + * ``` + * @since v0.1.101 + * @param eventName The name of the event. + * @param listener The callback function + */ + on(eventName: Key, listener: Listener1): this; + /** + * Adds a **one-time**`listener` function for the event named `eventName`. The + * next time `eventName` is triggered, this listener is removed and then invoked. + * + * ```js + * server.once('connection', (stream) => { + * console.log('Ah, we have our first user!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * + * By default, event listeners are invoked in the order they are added. The`emitter.prependOnceListener()` method can be used as an alternative to add the + * event listener to the beginning of the listeners array. + * + * ```js + * import { EventEmitter } from 'node:events'; + * const myEE = new EventEmitter(); + * myEE.once('foo', () => console.log('a')); + * myEE.prependOnceListener('foo', () => console.log('b')); + * myEE.emit('foo'); + * // Prints: + * // b + * // a + * ``` + * @since v0.3.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + once(eventName: Key, listener: Listener1): this; + /** + * Removes the specified `listener` from the listener array for the event named`eventName`. + * + * ```js + * const callback = (stream) => { + * console.log('someone connected!'); + * }; + * server.on('connection', callback); + * // ... + * server.removeListener('connection', callback); + * ``` + * + * `removeListener()` will remove, at most, one instance of a listener from the + * listener array. 
If any single listener has been added multiple times to the + * listener array for the specified `eventName`, then `removeListener()` must be + * called multiple times to remove each instance. + * + * Once an event is emitted, all listeners attached to it at the + * time of emitting are called in order. This implies that any`removeListener()` or `removeAllListeners()` calls _after_ emitting and _before_ the last listener finishes execution + * will not remove them from`emit()` in progress. Subsequent events behave as expected. + * + * ```js + * import { EventEmitter } from 'node:events'; + * class MyEmitter extends EventEmitter {} + * const myEmitter = new MyEmitter(); + * + * const callbackA = () => { + * console.log('A'); + * myEmitter.removeListener('event', callbackB); + * }; + * + * const callbackB = () => { + * console.log('B'); + * }; + * + * myEmitter.on('event', callbackA); + * + * myEmitter.on('event', callbackB); + * + * // callbackA removes listener callbackB but it will still be called. + * // Internal listener array at time of emit [callbackA, callbackB] + * myEmitter.emit('event'); + * // Prints: + * // A + * // B + * + * // callbackB is now removed. + * // Internal listener array [callbackA] + * myEmitter.emit('event'); + * // Prints: + * // A + * ``` + * + * Because listeners are managed using an internal array, calling this will + * change the position indices of any listener registered _after_ the listener + * being removed. This will not impact the order in which listeners are called, + * but it means that any copies of the listener array as returned by + * the `emitter.listeners()` method will need to be recreated. + * + * When a single function has been added as a handler multiple times for a single + * event (as in the example below), `removeListener()` will remove the most + * recently added instance. In the example the `once('ping')`listener is removed: + * + * ```js + * import { EventEmitter } from 'node:events'; + * const ee = new EventEmitter(); + * + * function pong() { + * console.log('pong'); + * } + * + * ee.on('ping', pong); + * ee.once('ping', pong); + * ee.removeListener('ping', pong); + * + * ee.emit('ping'); + * ee.emit('ping'); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.1.26 + */ + removeListener(eventName: Key, listener: Listener1): this; + /** + * Alias for `emitter.removeListener()`. + * @since v10.0.0 + */ + off(eventName: Key, listener: Listener1): this; + /** + * Removes all listeners, or those of the specified `eventName`. + * + * It is bad practice to remove listeners added elsewhere in the code, + * particularly when the `EventEmitter` instance was created by some other + * component or module (e.g. sockets or file streams). + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.1.26 + */ + removeAllListeners(event?: Key): this; + /** + * By default `EventEmitter`s will print a warning if more than `10` listeners are + * added for a particular event. This is a useful default that helps finding + * memory leaks. The `emitter.setMaxListeners()` method allows the limit to be + * modified for this specific `EventEmitter` instance. The value can be set to`Infinity` (or `0`) to indicate an unlimited number of listeners. + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. 
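A short sketch of the per-instance listener-limit and bulk-removal methods documented above (the event name is arbitrary):

```ts
import { EventEmitter } from "node:events";

const bus = new EventEmitter();

// Raise the warning threshold for this emitter only; 0 or Infinity disables it.
bus.setMaxListeners(20);
console.log(bus.getMaxListeners()); // 20

// Drop every 'data' listener; calling with no argument clears all events.
bus.removeAllListeners("data");
```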
+ * @since v0.3.5 + */ + setMaxListeners(n: number): this; + /** + * Returns the current max listener value for the `EventEmitter` which is either + * set by `emitter.setMaxListeners(n)` or defaults to {@link defaultMaxListeners}. + * @since v1.0.0 + */ + getMaxListeners(): number; + /** + * Returns a copy of the array of listeners for the event named `eventName`. + * + * ```js + * server.on('connection', (stream) => { + * console.log('someone connected!'); + * }); + * console.log(util.inspect(server.listeners('connection'))); + * // Prints: [ [Function] ] + * ``` + * @since v0.1.26 + */ + listeners(eventName: Key): Array>; + /** + * Returns a copy of the array of listeners for the event named `eventName`, + * including any wrappers (such as those created by `.once()`). + * + * ```js + * import { EventEmitter } from 'node:events'; + * const emitter = new EventEmitter(); + * emitter.once('log', () => console.log('log once')); + * + * // Returns a new Array with a function `onceWrapper` which has a property + * // `listener` which contains the original listener bound above + * const listeners = emitter.rawListeners('log'); + * const logFnWrapper = listeners[0]; + * + * // Logs "log once" to the console and does not unbind the `once` event + * logFnWrapper.listener(); + * + * // Logs "log once" to the console and removes the listener + * logFnWrapper(); + * + * emitter.on('log', () => console.log('log persistently')); + * // Will return a new Array with a single function bound by `.on()` above + * const newListeners = emitter.rawListeners('log'); + * + * // Logs "log persistently" twice + * newListeners[0](); + * emitter.emit('log'); + * ``` + * @since v9.4.0 + */ + rawListeners(eventName: Key): Array>; + /** + * Synchronously calls each of the listeners registered for the event named`eventName`, in the order they were registered, passing the supplied arguments + * to each. + * + * Returns `true` if the event had listeners, `false` otherwise. + * + * ```js + * import { EventEmitter } from 'node:events'; + * const myEmitter = new EventEmitter(); + * + * // First listener + * myEmitter.on('event', function firstListener() { + * console.log('Helloooo! first listener'); + * }); + * // Second listener + * myEmitter.on('event', function secondListener(arg1, arg2) { + * console.log(`event with parameters ${arg1}, ${arg2} in second listener`); + * }); + * // Third listener + * myEmitter.on('event', function thirdListener(...args) { + * const parameters = args.join(', '); + * console.log(`event with parameters ${parameters} in third listener`); + * }); + * + * console.log(myEmitter.listeners('event')); + * + * myEmitter.emit('event', 1, 2, 3, 4, 5); + * + * // Prints: + * // [ + * // [Function: firstListener], + * // [Function: secondListener], + * // [Function: thirdListener] + * // ] + * // Helloooo! first listener + * // event with parameters 1, 2 in second listener + * // event with parameters 1, 2, 3, 4, 5 in third listener + * ``` + * @since v0.1.26 + */ + emit(eventName: Key, ...args: Args): boolean; + /** + * Returns the number of listeners listening for the event named `eventName`. + * If `listener` is provided, it will return how many times the listener is found + * in the list of the listeners of the event. 
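A sketch of `listenerCount()`, including the optional `listener` argument described above (the two-argument form requires a recent Node release):

```ts
import { EventEmitter } from "node:events";

const ee = new EventEmitter();
const onPing = () => console.log("pong");

ee.on("ping", onPing);
ee.on("ping", onPing);
ee.on("ping", () => {});

console.log(ee.listenerCount("ping"));         // 3
console.log(ee.listenerCount("ping", onPing)); // 2: how often onPing appears in the list
```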
+ * @since v3.2.0 + * @param eventName The name of the event being listened for + * @param listener The event handler function + */ + listenerCount(eventName: Key, listener?: Listener2): number; + /** + * Adds the `listener` function to the _beginning_ of the listeners array for the + * event named `eventName`. No checks are made to see if the `listener` has + * already been added. Multiple calls passing the same combination of `eventName`and `listener` will result in the `listener` being added, and called, multiple + * times. + * + * ```js + * server.prependListener('connection', (stream) => { + * console.log('someone connected!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v6.0.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + prependListener(eventName: Key, listener: Listener1): this; + /** + * Adds a **one-time**`listener` function for the event named `eventName` to the _beginning_ of the listeners array. The next time `eventName` is triggered, this + * listener is removed, and then invoked. + * + * ```js + * server.prependOnceListener('connection', (stream) => { + * console.log('Ah, we have our first user!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v6.0.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + prependOnceListener(eventName: Key, listener: Listener1): this; + /** + * Returns an array listing the events for which the emitter has registered + * listeners. The values in the array are strings or `Symbol`s. + * + * ```js + * import { EventEmitter } from 'node:events'; + * + * const myEE = new EventEmitter(); + * myEE.on('foo', () => {}); + * myEE.on('bar', () => {}); + * + * const sym = Symbol('symbol'); + * myEE.on(sym, () => {}); + * + * console.log(myEE.eventNames()); + * // Prints: [ 'foo', 'bar', Symbol(symbol) ] + * ``` + * @since v6.0.0 + */ + eventNames(): Array<(string | symbol) & Key2>; + } + } + } + export = EventEmitter; +} +declare module "node:events" { + import events = require("events"); + export = events; +} diff --git a/dist/node_modules/@types/node/fs.d.ts b/dist/node_modules/@types/node/fs.d.ts new file mode 100644 index 0000000..5cbca12 --- /dev/null +++ b/dist/node_modules/@types/node/fs.d.ts @@ -0,0 +1,4311 @@ +/** + * The `node:fs` module enables interacting with the file system in a + * way modeled on standard POSIX functions. + * + * To use the promise-based APIs: + * + * ```js + * import * as fs from 'node:fs/promises'; + * ``` + * + * To use the callback and sync APIs: + * + * ```js + * import * as fs from 'node:fs'; + * ``` + * + * All file system operations have synchronous, callback, and promise-based + * forms, and are accessible using both CommonJS syntax and ES6 Modules (ESM). + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/fs.js) + */ +declare module "fs" { + import * as stream from "node:stream"; + import { Abortable, EventEmitter } from "node:events"; + import { URL } from "node:url"; + import * as promises from "node:fs/promises"; + export { promises }; + /** + * Valid types for path values in "fs". 
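Since `PathLike` is a union of `string`, `Buffer`, and `URL`, most `fs` APIs accept any of the three; a sketch with an illustrative helper:

```ts
import { readFileSync, type PathLike } from "node:fs";
import { pathToFileURL } from "node:url";

// Accepts any PathLike value.
function readText(path: PathLike): string {
  return readFileSync(path, "utf8");
}

// All three forms refer to the same file:
// readText("./package.json");
// readText(Buffer.from("./package.json"));
// readText(pathToFileURL("./package.json"));
```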
+ */ + export type PathLike = string | Buffer | URL; + export type PathOrFileDescriptor = PathLike | number; + export type TimeLike = string | number | Date; + export type NoParamCallback = (err: NodeJS.ErrnoException | null) => void; + export type BufferEncodingOption = + | "buffer" + | { + encoding: "buffer"; + }; + export interface ObjectEncodingOptions { + encoding?: BufferEncoding | null | undefined; + } + export type EncodingOption = ObjectEncodingOptions | BufferEncoding | undefined | null; + export type OpenMode = number | string; + export type Mode = number | string; + export interface StatsBase { + isFile(): boolean; + isDirectory(): boolean; + isBlockDevice(): boolean; + isCharacterDevice(): boolean; + isSymbolicLink(): boolean; + isFIFO(): boolean; + isSocket(): boolean; + dev: T; + ino: T; + mode: T; + nlink: T; + uid: T; + gid: T; + rdev: T; + size: T; + blksize: T; + blocks: T; + atimeMs: T; + mtimeMs: T; + ctimeMs: T; + birthtimeMs: T; + atime: Date; + mtime: Date; + ctime: Date; + birthtime: Date; + } + export interface Stats extends StatsBase {} + /** + * A `fs.Stats` object provides information about a file. + * + * Objects returned from {@link stat}, {@link lstat}, {@link fstat}, and + * their synchronous counterparts are of this type. + * If `bigint` in the `options` passed to those methods is true, the numeric values + * will be `bigint` instead of `number`, and the object will contain additional + * nanosecond-precision properties suffixed with `Ns`. + * + * ```console + * Stats { + * dev: 2114, + * ino: 48064969, + * mode: 33188, + * nlink: 1, + * uid: 85, + * gid: 100, + * rdev: 0, + * size: 527, + * blksize: 4096, + * blocks: 8, + * atimeMs: 1318289051000.1, + * mtimeMs: 1318289051000.1, + * ctimeMs: 1318289051000.1, + * birthtimeMs: 1318289051000.1, + * atime: Mon, 10 Oct 2011 23:24:11 GMT, + * mtime: Mon, 10 Oct 2011 23:24:11 GMT, + * ctime: Mon, 10 Oct 2011 23:24:11 GMT, + * birthtime: Mon, 10 Oct 2011 23:24:11 GMT } + * ``` + * + * `bigint` version: + * + * ```console + * BigIntStats { + * dev: 2114n, + * ino: 48064969n, + * mode: 33188n, + * nlink: 1n, + * uid: 85n, + * gid: 100n, + * rdev: 0n, + * size: 527n, + * blksize: 4096n, + * blocks: 8n, + * atimeMs: 1318289051000n, + * mtimeMs: 1318289051000n, + * ctimeMs: 1318289051000n, + * birthtimeMs: 1318289051000n, + * atimeNs: 1318289051000000000n, + * mtimeNs: 1318289051000000000n, + * ctimeNs: 1318289051000000000n, + * birthtimeNs: 1318289051000000000n, + * atime: Mon, 10 Oct 2011 23:24:11 GMT, + * mtime: Mon, 10 Oct 2011 23:24:11 GMT, + * ctime: Mon, 10 Oct 2011 23:24:11 GMT, + * birthtime: Mon, 10 Oct 2011 23:24:11 GMT } + * ``` + * @since v0.1.21 + */ + export class Stats {} + export interface StatsFsBase { + /** Type of file system. */ + type: T; + /** Optimal transfer block size. */ + bsize: T; + /** Total data blocks in file system. */ + blocks: T; + /** Free blocks in file system. */ + bfree: T; + /** Available blocks for unprivileged users */ + bavail: T; + /** Total file nodes in file system. */ + files: T; + /** Free file nodes in file system. */ + ffree: T; + } + export interface StatsFs extends StatsFsBase {} + /** + * Provides information about a mounted file system. + * + * Objects returned from {@link statfs} and its synchronous counterpart are of + * this type. If `bigint` in the `options` passed to those methods is `true`, the + * numeric values will be `bigint` instead of `number`. 
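A sketch of how the `bigint` option selects between the two stat result shapes described above (the path is illustrative and assumed to exist):

```ts
import { statSync } from "node:fs";

const stats = statSync("package.json");
console.log(stats.isFile(), stats.size); // number-valued fields

const big = statSync("package.json", { bigint: true });
console.log(big.size, big.mtimeNs);      // bigint fields plus *Ns nanosecond properties
```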
+ * + * ```console + * StatFs { + * type: 1397114950, + * bsize: 4096, + * blocks: 121938943, + * bfree: 61058895, + * bavail: 61058895, + * files: 999, + * ffree: 1000000 + * } + * ``` + * + * `bigint` version: + * + * ```console + * StatFs { + * type: 1397114950n, + * bsize: 4096n, + * blocks: 121938943n, + * bfree: 61058895n, + * bavail: 61058895n, + * files: 999n, + * ffree: 1000000n + * } + * ``` + * @since v19.6.0, v18.15.0 + */ + export class StatsFs {} + export interface BigIntStatsFs extends StatsFsBase {} + export interface StatFsOptions { + bigint?: boolean | undefined; + } + /** + * A representation of a directory entry, which can be a file or a subdirectory + * within the directory, as returned by reading from an `fs.Dir`. The + * directory entry is a combination of the file name and file type pairs. + * + * Additionally, when {@link readdir} or {@link readdirSync} is called with + * the `withFileTypes` option set to `true`, the resulting array is filled with `fs.Dirent` objects, rather than strings or `Buffer` s. + * @since v10.10.0 + */ + export class Dirent { + /** + * Returns `true` if the `fs.Dirent` object describes a regular file. + * @since v10.10.0 + */ + isFile(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a file system + * directory. + * @since v10.10.0 + */ + isDirectory(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a block device. + * @since v10.10.0 + */ + isBlockDevice(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a character device. + * @since v10.10.0 + */ + isCharacterDevice(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a symbolic link. + * @since v10.10.0 + */ + isSymbolicLink(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a first-in-first-out + * (FIFO) pipe. + * @since v10.10.0 + */ + isFIFO(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a socket. + * @since v10.10.0 + */ + isSocket(): boolean; + /** + * The file name that this `fs.Dirent` object refers to. The type of this + * value is determined by the `options.encoding` passed to {@link readdir} or {@link readdirSync}. + * @since v10.10.0 + */ + name: string; + /** + * The base path that this `fs.Dirent` object refers to. + * @since v20.1.0 + */ + path: string; + } + /** + * A class representing a directory stream. + * + * Created by {@link opendir}, {@link opendirSync}, or `fsPromises.opendir()`. + * + * ```js + * import { opendir } from 'node:fs/promises'; + * + * try { + * const dir = await opendir('./'); + * for await (const dirent of dir) + * console.log(dirent.name); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * When using the async iterator, the `fs.Dir` object will be automatically + * closed after the iterator exits. + * @since v12.12.0 + */ + export class Dir implements AsyncIterable { + /** + * The read-only path of this directory as was provided to {@link opendir},{@link opendirSync}, or `fsPromises.opendir()`. + * @since v12.12.0 + */ + readonly path: string; + /** + * Asynchronously iterates over the directory via `readdir(3)` until all entries have been read. + */ + [Symbol.asyncIterator](): AsyncIterableIterator; + /** + * Asynchronously close the directory's underlying resource handle. + * Subsequent reads will result in errors. + * + * A promise is returned that will be fulfilled after the resource has been + * closed. 
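The `Dirent` entries described above are what `readdir`/`readdirSync` return when `withFileTypes: true` is set; a short sketch:

```ts
import { readdirSync } from "node:fs";

// Dirent objects expose the entry type directly, avoiding an extra stat call.
for (const entry of readdirSync(".", { withFileTypes: true })) {
  const kind = entry.isDirectory() ? "dir " : entry.isSymbolicLink() ? "link" : "file";
  console.log(kind, entry.name);
}
```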
+ * @since v12.12.0 + */ + close(): Promise; + close(cb: NoParamCallback): void; + /** + * Synchronously close the directory's underlying resource handle. + * Subsequent reads will result in errors. + * @since v12.12.0 + */ + closeSync(): void; + /** + * Asynchronously read the next directory entry via [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) as an `fs.Dirent`. + * + * A promise is returned that will be fulfilled with an `fs.Dirent`, or `null`if there are no more directory entries to read. + * + * Directory entries returned by this function are in no particular order as + * provided by the operating system's underlying directory mechanisms. + * Entries added or removed while iterating over the directory might not be + * included in the iteration results. + * @since v12.12.0 + * @return containing {fs.Dirent|null} + */ + read(): Promise; + read(cb: (err: NodeJS.ErrnoException | null, dirEnt: Dirent | null) => void): void; + /** + * Synchronously read the next directory entry as an `fs.Dirent`. See the + * POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more detail. + * + * If there are no more directory entries to read, `null` will be returned. + * + * Directory entries returned by this function are in no particular order as + * provided by the operating system's underlying directory mechanisms. + * Entries added or removed while iterating over the directory might not be + * included in the iteration results. + * @since v12.12.0 + */ + readSync(): Dirent | null; + } + /** + * Class: fs.StatWatcher + * @since v14.3.0, v12.20.0 + * Extends `EventEmitter` + * A successful call to {@link watchFile} method will return a new fs.StatWatcher object. + */ + export interface StatWatcher extends EventEmitter { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the `fs.StatWatcher` is active. Calling `watcher.ref()` multiple times will have + * no effect. + * + * By default, all `fs.StatWatcher` objects are "ref'ed", making it normally + * unnecessary to call `watcher.ref()` unless `watcher.unref()` had been + * called previously. + * @since v14.3.0, v12.20.0 + */ + ref(): this; + /** + * When called, the active `fs.StatWatcher` object will not require the Node.js + * event loop to remain active. If there is no other activity keeping the + * event loop running, the process may exit before the `fs.StatWatcher` object's + * callback is invoked. Calling `watcher.unref()` multiple times will have + * no effect. + * @since v14.3.0, v12.20.0 + */ + unref(): this; + } + export interface FSWatcher extends EventEmitter { + /** + * Stop watching for changes on the given `fs.FSWatcher`. Once stopped, the `fs.FSWatcher` object is no longer usable. + * @since v0.5.8 + */ + close(): void; + /** + * When called, requests that the Node.js event loop _not_ exit so long as the `fs.FSWatcher` is active. Calling `watcher.ref()` multiple times will have + * no effect. + * + * By default, all `fs.FSWatcher` objects are "ref'ed", making it normally + * unnecessary to call `watcher.ref()` unless `watcher.unref()` had been + * called previously. + * @since v14.3.0, v12.20.0 + */ + ref(): this; + /** + * When called, the active `fs.FSWatcher` object will not require the Node.js + * event loop to remain active. If there is no other activity keeping the + * event loop running, the process may exit before the `fs.FSWatcher` object's + * callback is invoked. Calling `watcher.unref()` multiple times will have + * no effect. 
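The `fs.Dir` declarations above support both async iteration and explicit `read()`/`readSync()` calls. As a rough illustrative sketch (with `./some-dir` as a placeholder path), the synchronous variant can be driven by hand like this:

```ts
// Illustrative sketch of the fs.Dir API declared above; "./some-dir" is a placeholder.
import { opendirSync } from "node:fs";

const dir = opendirSync("./some-dir");
try {
    // readSync() yields the next fs.Dirent, or null once the directory is exhausted.
    let entry = dir.readSync();
    while (entry !== null) {
        console.log(`${entry.name}${entry.isDirectory() ? "/" : ""}`);
        entry = dir.readSync();
    }
} finally {
    // Unlike the async-iterator form, manual reads require an explicit close.
    dir.closeSync();
}
```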
+ * @since v14.3.0, v12.20.0 + */ + unref(): this; + /** + * events.EventEmitter + * 1. change + * 2. close + * 3. error + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "error", listener: (error: Error) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + on(event: "close", listener: () => void): this; + on(event: "error", listener: (error: Error) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + once(event: "close", listener: () => void): this; + once(event: "error", listener: (error: Error) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "error", listener: (error: Error) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "error", listener: (error: Error) => void): this; + } + /** + * Instances of `fs.ReadStream` are created and returned using the {@link createReadStream} function. + * @since v0.1.93 + */ + export class ReadStream extends stream.Readable { + close(callback?: (err?: NodeJS.ErrnoException | null) => void): void; + /** + * The number of bytes that have been read so far. + * @since v6.4.0 + */ + bytesRead: number; + /** + * The path to the file the stream is reading from as specified in the first + * argument to `fs.createReadStream()`. If `path` is passed as a string, then`readStream.path` will be a string. If `path` is passed as a `Buffer`, then`readStream.path` will be a + * `Buffer`. If `fd` is specified, then`readStream.path` will be `undefined`. + * @since v0.1.93 + */ + path: string | Buffer; + /** + * This property is `true` if the underlying file has not been opened yet, + * i.e. before the `'ready'` event is emitted. + * @since v11.2.0, v10.16.0 + */ + pending: boolean; + /** + * events.EventEmitter + * 1. open + * 2. close + * 3. 
ready + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "open", listener: (fd: number) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "ready", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "open", listener: (fd: number) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "ready", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "open", listener: (fd: number) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "ready", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "open", listener: (fd: number) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "ready", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "open", listener: (fd: number) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "ready", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * * Extends `stream.Writable` + * + * Instances of `fs.WriteStream` are created and returned using the {@link createWriteStream} function. + * @since v0.1.93 + */ + export class WriteStream extends stream.Writable { + /** + * Closes `writeStream`. 
Optionally accepts a + * callback that will be executed once the `writeStream`is closed. + * @since v0.9.4 + */ + close(callback?: (err?: NodeJS.ErrnoException | null) => void): void; + /** + * The number of bytes written so far. Does not include data that is still queued + * for writing. + * @since v0.4.7 + */ + bytesWritten: number; + /** + * The path to the file the stream is writing to as specified in the first + * argument to {@link createWriteStream}. If `path` is passed as a string, then`writeStream.path` will be a string. If `path` is passed as a `Buffer`, then`writeStream.path` will be a + * `Buffer`. + * @since v0.1.93 + */ + path: string | Buffer; + /** + * This property is `true` if the underlying file has not been opened yet, + * i.e. before the `'ready'` event is emitted. + * @since v11.2.0 + */ + pending: boolean; + /** + * events.EventEmitter + * 1. open + * 2. close + * 3. ready + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "open", listener: (fd: number) => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "ready", listener: () => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "open", listener: (fd: number) => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "ready", listener: () => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "open", listener: (fd: number) => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "ready", listener: () => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "open", listener: (fd: number) => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "ready", listener: () => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + 
prependOnceListener(event: "open", listener: (fd: number) => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "ready", listener: () => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * Asynchronously rename file at `oldPath` to the pathname provided + * as `newPath`. In the case that `newPath` already exists, it will + * be overwritten. If there is a directory at `newPath`, an error will + * be raised instead. No arguments other than a possible exception are + * given to the completion callback. + * + * See also: [`rename(2)`](http://man7.org/linux/man-pages/man2/rename.2.html). + * + * ```js + * import { rename } from 'node:fs'; + * + * rename('oldFile.txt', 'newFile.txt', (err) => { + * if (err) throw err; + * console.log('Rename complete!'); + * }); + * ``` + * @since v0.0.2 + */ + export function rename(oldPath: PathLike, newPath: PathLike, callback: NoParamCallback): void; + export namespace rename { + /** + * Asynchronous rename(2) - Change the name or location of a file or directory. + * @param oldPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(oldPath: PathLike, newPath: PathLike): Promise; + } + /** + * Renames the file from `oldPath` to `newPath`. Returns `undefined`. + * + * See the POSIX [`rename(2)`](http://man7.org/linux/man-pages/man2/rename.2.html) documentation for more details. + * @since v0.1.21 + */ + export function renameSync(oldPath: PathLike, newPath: PathLike): void; + /** + * Truncates the file. No arguments other than a possible exception are + * given to the completion callback. A file descriptor can also be passed as the + * first argument. In this case, `fs.ftruncate()` is called. + * + * ```js + * import { truncate } from 'node:fs'; + * // Assuming that 'path/file.txt' is a regular file. + * truncate('path/file.txt', (err) => { + * if (err) throw err; + * console.log('path/file.txt was truncated'); + * }); + * ``` + * + * Passing a file descriptor is deprecated and may result in an error being thrown + * in the future. + * + * See the POSIX [`truncate(2)`](http://man7.org/linux/man-pages/man2/truncate.2.html) documentation for more details. + * @since v0.8.6 + * @param [len=0] + */ + export function truncate(path: PathLike, len: number | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous truncate(2) - Truncate a file to a specified length. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function truncate(path: PathLike, callback: NoParamCallback): void; + export namespace truncate { + /** + * Asynchronous truncate(2) - Truncate a file to a specified length. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param len If not specified, defaults to `0`. + */ + function __promisify__(path: PathLike, len?: number | null): Promise; + } + /** + * Truncates the file. Returns `undefined`. A file descriptor can also be + * passed as the first argument. In this case, `fs.ftruncateSync()` is called. 
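The `__promisify__` namespace members declared below appear to exist so that `util.promisify()` resolves to properly typed promise-returning variants of the callback APIs. A minimal sketch, assuming placeholder file names:

```ts
// Sketch only: promisifying the callback-style rename declared below.
import { rename } from "node:fs";
import { promisify } from "node:util";

// Typed as (oldPath: PathLike, newPath: PathLike) => Promise<void>.
const renameAsync = promisify(rename);

async function main(): Promise<void> {
    await renameAsync("oldFile.txt", "newFile.txt"); // placeholder paths
    console.log("Rename complete!");
}

main().catch(console.error);
```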
+ * + * Passing a file descriptor is deprecated and may result in an error being thrown + * in the future. + * @since v0.8.6 + * @param [len=0] + */ + export function truncateSync(path: PathLike, len?: number | null): void; + /** + * Truncates the file descriptor. No arguments other than a possible exception are + * given to the completion callback. + * + * See the POSIX [`ftruncate(2)`](http://man7.org/linux/man-pages/man2/ftruncate.2.html) documentation for more detail. + * + * If the file referred to by the file descriptor was larger than `len` bytes, only + * the first `len` bytes will be retained in the file. + * + * For example, the following program retains only the first four bytes of the + * file: + * + * ```js + * import { open, close, ftruncate } from 'node:fs'; + * + * function closeFd(fd) { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * + * open('temp.txt', 'r+', (err, fd) => { + * if (err) throw err; + * + * try { + * ftruncate(fd, 4, (err) => { + * closeFd(fd); + * if (err) throw err; + * }); + * } catch (err) { + * closeFd(fd); + * if (err) throw err; + * } + * }); + * ``` + * + * If the file previously was shorter than `len` bytes, it is extended, and the + * extended part is filled with null bytes (`'\0'`): + * + * If `len` is negative then `0` will be used. + * @since v0.8.6 + * @param [len=0] + */ + export function ftruncate(fd: number, len: number | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous ftruncate(2) - Truncate a file to a specified length. + * @param fd A file descriptor. + */ + export function ftruncate(fd: number, callback: NoParamCallback): void; + export namespace ftruncate { + /** + * Asynchronous ftruncate(2) - Truncate a file to a specified length. + * @param fd A file descriptor. + * @param len If not specified, defaults to `0`. + */ + function __promisify__(fd: number, len?: number | null): Promise; + } + /** + * Truncates the file descriptor. Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link ftruncate}. + * @since v0.8.6 + * @param [len=0] + */ + export function ftruncateSync(fd: number, len?: number | null): void; + /** + * Asynchronously changes owner and group of a file. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html) documentation for more detail. + * @since v0.1.97 + */ + export function chown(path: PathLike, uid: number, gid: number, callback: NoParamCallback): void; + export namespace chown { + /** + * Asynchronous chown(2) - Change ownership of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, uid: number, gid: number): Promise; + } + /** + * Synchronously changes owner and group of a file. Returns `undefined`. + * This is the synchronous version of {@link chown}. + * + * See the POSIX [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html) documentation for more detail. + * @since v0.1.97 + */ + export function chownSync(path: PathLike, uid: number, gid: number): void; + /** + * Sets the owner of the file. No arguments other than a possible exception are + * given to the completion callback. + * + * See the POSIX [`fchown(2)`](http://man7.org/linux/man-pages/man2/fchown.2.html) documentation for more detail. 
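As an illustrative aside, the `chown`/`chownSync` signatures documented above can be exercised as follows; the path and the uid/gid values are placeholders:

```ts
// Sketch only: changing ownership with the callback and sync forms declared above.
import { chown, chownSync } from "node:fs";

const uid = 1000; // placeholder user id
const gid = 1000; // placeholder group id

chown("upload.bin", uid, gid, (err) => {
    if (err) throw err;
    console.log("ownership changed");
});

// The synchronous form returns undefined and throws on failure.
chownSync("archive.bin", uid, gid);
```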
+ * @since v0.4.7 + */ + export function fchown(fd: number, uid: number, gid: number, callback: NoParamCallback): void; + export namespace fchown { + /** + * Asynchronous fchown(2) - Change ownership of a file. + * @param fd A file descriptor. + */ + function __promisify__(fd: number, uid: number, gid: number): Promise; + } + /** + * Sets the owner of the file. Returns `undefined`. + * + * See the POSIX [`fchown(2)`](http://man7.org/linux/man-pages/man2/fchown.2.html) documentation for more detail. + * @since v0.4.7 + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + */ + export function fchownSync(fd: number, uid: number, gid: number): void; + /** + * Set the owner of the symbolic link. No arguments other than a possible + * exception are given to the completion callback. + * + * See the POSIX [`lchown(2)`](http://man7.org/linux/man-pages/man2/lchown.2.html) documentation for more detail. + */ + export function lchown(path: PathLike, uid: number, gid: number, callback: NoParamCallback): void; + export namespace lchown { + /** + * Asynchronous lchown(2) - Change ownership of a file. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, uid: number, gid: number): Promise; + } + /** + * Set the owner for the path. Returns `undefined`. + * + * See the POSIX [`lchown(2)`](http://man7.org/linux/man-pages/man2/lchown.2.html) documentation for more details. + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + */ + export function lchownSync(path: PathLike, uid: number, gid: number): void; + /** + * Changes the access and modification times of a file in the same way as {@link utimes}, with the difference that if the path refers to a symbolic + * link, then the link is not dereferenced: instead, the timestamps of the + * symbolic link itself are changed. + * + * No arguments other than a possible exception are given to the completion + * callback. + * @since v14.5.0, v12.19.0 + */ + export function lutimes(path: PathLike, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace lutimes { + /** + * Changes the access and modification times of a file in the same way as `fsPromises.utimes()`, + * with the difference that if the path refers to a symbolic link, then the link is not + * dereferenced: instead, the timestamps of the symbolic link itself are changed. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Change the file system timestamps of the symbolic link referenced by `path`. + * Returns `undefined`, or throws an exception when parameters are incorrect or + * the operation fails. This is the synchronous version of {@link lutimes}. + * @since v14.5.0, v12.19.0 + */ + export function lutimesSync(path: PathLike, atime: TimeLike, mtime: TimeLike): void; + /** + * Asynchronously changes the permissions of a file. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html) documentation for more detail. 
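A short sketch of `lutimes` as declared above, which updates the timestamps of the symbolic link itself rather than of its target; `link-to-data` is a placeholder symlink:

```ts
// Sketch only: updating a symbolic link's own timestamps via lutimes.
import { lutimes } from "node:fs";

const now = new Date();
lutimes("link-to-data", now, now, (err) => {
    if (err) throw err;
    console.log("symlink timestamps updated");
});
```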
+ * + * ```js + * import { chmod } from 'node:fs'; + * + * chmod('my_file.txt', 0o775, (err) => { + * if (err) throw err; + * console.log('The permissions for file "my_file.txt" have been changed!'); + * }); + * ``` + * @since v0.1.30 + */ + export function chmod(path: PathLike, mode: Mode, callback: NoParamCallback): void; + export namespace chmod { + /** + * Asynchronous chmod(2) - Change permissions of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(path: PathLike, mode: Mode): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link chmod}. + * + * See the POSIX [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html) documentation for more detail. + * @since v0.6.7 + */ + export function chmodSync(path: PathLike, mode: Mode): void; + /** + * Sets the permissions on the file. No arguments other than a possible exception + * are given to the completion callback. + * + * See the POSIX [`fchmod(2)`](http://man7.org/linux/man-pages/man2/fchmod.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchmod(fd: number, mode: Mode, callback: NoParamCallback): void; + export namespace fchmod { + /** + * Asynchronous fchmod(2) - Change permissions of a file. + * @param fd A file descriptor. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(fd: number, mode: Mode): Promise; + } + /** + * Sets the permissions on the file. Returns `undefined`. + * + * See the POSIX [`fchmod(2)`](http://man7.org/linux/man-pages/man2/fchmod.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchmodSync(fd: number, mode: Mode): void; + /** + * Changes the permissions on a symbolic link. No arguments other than a possible + * exception are given to the completion callback. + * + * This method is only implemented on macOS. + * + * See the POSIX [`lchmod(2)`](https://www.freebsd.org/cgi/man.cgi?query=lchmod&sektion=2) documentation for more detail. + * @deprecated Since v0.4.7 + */ + export function lchmod(path: PathLike, mode: Mode, callback: NoParamCallback): void; + /** @deprecated */ + export namespace lchmod { + /** + * Asynchronous lchmod(2) - Change permissions of a file. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(path: PathLike, mode: Mode): Promise; + } + /** + * Changes the permissions on a symbolic link. Returns `undefined`. + * + * This method is only implemented on macOS. + * + * See the POSIX [`lchmod(2)`](https://www.freebsd.org/cgi/man.cgi?query=lchmod&sektion=2) documentation for more detail. + * @deprecated Since v0.4.7 + */ + export function lchmodSync(path: PathLike, mode: Mode): void; + /** + * Asynchronous [`stat(2)`](http://man7.org/linux/man-pages/man2/stat.2.html). The callback gets two arguments `(err, stats)` where`stats` is an `fs.Stats` object. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. + * + * {@link stat} follows symbolic links. Use {@link lstat} to look at the + * links themselves. 
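To make the stat-versus-lstat distinction above concrete, here is an illustrative sketch, assuming `link-to-data` is a symbolic link:

```ts
// Sketch only: stat() follows the link, lstat() inspects the link itself.
import { stat, lstat } from "node:fs";

stat("link-to-data", (err, stats) => {
    if (err) throw err;
    console.log(stats.isSymbolicLink()); // false: describes the link target
});

lstat("link-to-data", (err, stats) => {
    if (err) throw err;
    console.log(stats.isSymbolicLink()); // true: describes the link itself
});
```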
+ * + * Using `fs.stat()` to check for the existence of a file before calling`fs.open()`, `fs.readFile()`, or `fs.writeFile()` is not recommended. + * Instead, user code should open/read/write the file directly and handle the + * error raised if the file is not available. + * + * To check if a file exists without manipulating it afterwards, {@link access} is recommended. + * + * For example, given the following directory structure: + * + * ```text + * - txtDir + * -- file.txt + * - app.js + * ``` + * + * The next program will check for the stats of the given paths: + * + * ```js + * import { stat } from 'node:fs'; + * + * const pathsToCheck = ['./txtDir', './txtDir/file.txt']; + * + * for (let i = 0; i < pathsToCheck.length; i++) { + * stat(pathsToCheck[i], (err, stats) => { + * console.log(stats.isDirectory()); + * console.log(stats); + * }); + * } + * ``` + * + * The resulting output will resemble: + * + * ```console + * true + * Stats { + * dev: 16777220, + * mode: 16877, + * nlink: 3, + * uid: 501, + * gid: 20, + * rdev: 0, + * blksize: 4096, + * ino: 14214262, + * size: 96, + * blocks: 0, + * atimeMs: 1561174653071.963, + * mtimeMs: 1561174614583.3518, + * ctimeMs: 1561174626623.5366, + * birthtimeMs: 1561174126937.2893, + * atime: 2019-06-22T03:37:33.072Z, + * mtime: 2019-06-22T03:36:54.583Z, + * ctime: 2019-06-22T03:37:06.624Z, + * birthtime: 2019-06-22T03:28:46.937Z + * } + * false + * Stats { + * dev: 16777220, + * mode: 33188, + * nlink: 1, + * uid: 501, + * gid: 20, + * rdev: 0, + * blksize: 4096, + * ino: 14214074, + * size: 8, + * blocks: 8, + * atimeMs: 1561174616618.8555, + * mtimeMs: 1561174614584, + * ctimeMs: 1561174614583.8145, + * birthtimeMs: 1561174007710.7478, + * atime: 2019-06-22T03:36:56.619Z, + * mtime: 2019-06-22T03:36:54.584Z, + * ctime: 2019-06-22T03:36:54.584Z, + * birthtime: 2019-06-22T03:26:47.711Z + * } + * ``` + * @since v0.0.2 + */ + export function stat(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function stat( + path: PathLike, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void, + ): void; + export function stat( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void, + ): void; + export function stat( + path: PathLike, + options: StatOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void, + ): void; + export namespace stat { + /** + * Asynchronous stat(2) - Get file status. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ */ + function __promisify__( + path: PathLike, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(path: PathLike, options?: StatOptions): Promise; + } + export interface StatSyncFn extends Function { + (path: PathLike, options?: undefined): Stats; + ( + path: PathLike, + options?: StatSyncOptions & { + bigint?: false | undefined; + throwIfNoEntry: false; + }, + ): Stats | undefined; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: true; + throwIfNoEntry: false; + }, + ): BigIntStats | undefined; + ( + path: PathLike, + options?: StatSyncOptions & { + bigint?: false | undefined; + }, + ): Stats; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: true; + }, + ): BigIntStats; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: boolean; + throwIfNoEntry?: false | undefined; + }, + ): Stats | BigIntStats; + (path: PathLike, options?: StatSyncOptions): Stats | BigIntStats | undefined; + } + /** + * Synchronous stat(2) - Get file status. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export const statSync: StatSyncFn; + /** + * Invokes the callback with the `fs.Stats` for the file descriptor. + * + * See the POSIX [`fstat(2)`](http://man7.org/linux/man-pages/man2/fstat.2.html) documentation for more detail. + * @since v0.1.95 + */ + export function fstat(fd: number, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function fstat( + fd: number, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void, + ): void; + export function fstat( + fd: number, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void, + ): void; + export function fstat( + fd: number, + options: StatOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void, + ): void; + export namespace fstat { + /** + * Asynchronous fstat(2) - Get file status. + * @param fd A file descriptor. + */ + function __promisify__( + fd: number, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + fd: number, + options: StatOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(fd: number, options?: StatOptions): Promise; + } + /** + * Retrieves the `fs.Stats` for the file descriptor. + * + * See the POSIX [`fstat(2)`](http://man7.org/linux/man-pages/man2/fstat.2.html) documentation for more detail. + * @since v0.1.95 + */ + export function fstatSync( + fd: number, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Stats; + export function fstatSync( + fd: number, + options: StatOptions & { + bigint: true; + }, + ): BigIntStats; + export function fstatSync(fd: number, options?: StatOptions): Stats | BigIntStats; + /** + * Retrieves the `fs.Stats` for the symbolic link referred to by the path. + * The callback gets two arguments `(err, stats)` where `stats` is a `fs.Stats` object. `lstat()` is identical to `stat()`, except that if `path` is a symbolic + * link, then the link itself is stat-ed, not the file that it refers to. + * + * See the POSIX [`lstat(2)`](http://man7.org/linux/man-pages/man2/lstat.2.html) documentation for more details. 
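The `StatSyncFn` overloads above select different return types depending on `bigint` and `throwIfNoEntry`. A rough sketch of how they resolve, with placeholder paths:

```ts
// Sketch only: how the statSync overloads declared above narrow the result type.
import { statSync } from "node:fs";

// Default: fs.Stats, throws if the path does not exist.
const plain = statSync("app.js");
console.log(plain.size); // number

// bigint: true selects the BigIntStats overload.
const big = statSync("app.js", { bigint: true });
console.log(big.size); // bigint

// throwIfNoEntry: false makes the result possibly undefined instead of throwing.
const maybe = statSync("maybe-missing.txt", { throwIfNoEntry: false });
console.log(maybe?.size);
```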
+ * @since v0.1.30 + */ + export function lstat(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function lstat( + path: PathLike, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void, + ): void; + export function lstat( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void, + ): void; + export function lstat( + path: PathLike, + options: StatOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void, + ): void; + export namespace lstat { + /** + * Asynchronous lstat(2) - Get file status. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__( + path: PathLike, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(path: PathLike, options?: StatOptions): Promise; + } + /** + * Asynchronous [`statfs(2)`](http://man7.org/linux/man-pages/man2/statfs.2.html). Returns information about the mounted file system which + * contains `path`. The callback gets two arguments `(err, stats)` where `stats`is an `fs.StatFs` object. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. + * @since v19.6.0, v18.15.0 + * @param path A path to an existing file or directory on the file system to be queried. + */ + export function statfs(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: StatsFs) => void): void; + export function statfs( + path: PathLike, + options: + | (StatFsOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: StatsFs) => void, + ): void; + export function statfs( + path: PathLike, + options: StatFsOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStatsFs) => void, + ): void; + export function statfs( + path: PathLike, + options: StatFsOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: StatsFs | BigIntStatsFs) => void, + ): void; + export namespace statfs { + /** + * Asynchronous statfs(2) - Returns information about the mounted file system which contains path. The callback gets two arguments (err, stats) where stats is an object. + * @param path A path to an existing file or directory on the file system to be queried. + */ + function __promisify__( + path: PathLike, + options?: StatFsOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + path: PathLike, + options: StatFsOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(path: PathLike, options?: StatFsOptions): Promise; + } + /** + * Synchronous [`statfs(2)`](http://man7.org/linux/man-pages/man2/statfs.2.html). Returns information about the mounted file system which + * contains `path`. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. + * @since v19.6.0, v18.15.0 + * @param path A path to an existing file or directory on the file system to be queried. 
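An illustrative use of the `statfs` callback form documented above, querying the root file system as a placeholder:

```ts
// Sketch only: reading free/total block counts from fs.statfs.
import { statfs } from "node:fs";

statfs("/", (err, stats) => {
    if (err) throw err;
    console.log(`${stats.bfree} of ${stats.blocks} blocks free (bsize ${stats.bsize})`);
});
```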
+ */ + export function statfsSync( + path: PathLike, + options?: StatFsOptions & { + bigint?: false | undefined; + }, + ): StatsFs; + export function statfsSync( + path: PathLike, + options: StatFsOptions & { + bigint: true; + }, + ): BigIntStatsFs; + export function statfsSync(path: PathLike, options?: StatFsOptions): StatsFs | BigIntStatsFs; + /** + * Synchronous lstat(2) - Get file status. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export const lstatSync: StatSyncFn; + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. No arguments other than + * a possible + * exception are given to the completion callback. + * @since v0.1.31 + */ + export function link(existingPath: PathLike, newPath: PathLike, callback: NoParamCallback): void; + export namespace link { + /** + * Asynchronous link(2) - Create a new link (also known as a hard link) to an existing file. + * @param existingPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(existingPath: PathLike, newPath: PathLike): Promise; + } + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. Returns `undefined`. + * @since v0.1.31 + */ + export function linkSync(existingPath: PathLike, newPath: PathLike): void; + /** + * Creates the link called `path` pointing to `target`. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`symlink(2)`](http://man7.org/linux/man-pages/man2/symlink.2.html) documentation for more details. + * + * The `type` argument is only available on Windows and ignored on other platforms. + * It can be set to `'dir'`, `'file'`, or `'junction'`. If the `type` argument is + * not a string, Node.js will autodetect `target` type and use `'file'` or `'dir'`. + * If the `target` does not exist, `'file'` will be used. Windows junction points + * require the destination path to be absolute. When using `'junction'`, the`target` argument will automatically be normalized to absolute path. Junction + * points on NTFS volumes can only point to directories. + * + * Relative targets are relative to the link's parent directory. + * + * ```js + * import { symlink } from 'node:fs'; + * + * symlink('./mew', './mewtwo', callback); + * ``` + * + * The above example creates a symbolic link `mewtwo` which points to `mew` in the + * same directory: + * + * ```bash + * $ tree . + * . + * ├── mew + * └── mewtwo -> ./mew + * ``` + * @since v0.1.31 + * @param [type='null'] + */ + export function symlink( + target: PathLike, + path: PathLike, + type: symlink.Type | undefined | null, + callback: NoParamCallback, + ): void; + /** + * Asynchronous symlink(2) - Create a new symbolic link to an existing file. + * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. + * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. + */ + export function symlink(target: PathLike, path: PathLike, callback: NoParamCallback): void; + export namespace symlink { + /** + * Asynchronous symlink(2) - Create a new symbolic link to an existing file. 
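A small sketch of `link`/`linkSync` as declared above; after the calls, both names refer to the same underlying file. File names are placeholders:

```ts
// Sketch only: creating hard links with the callback and sync forms declared above.
import { link, linkSync } from "node:fs";

link("data.txt", "data-hardlink.txt", (err) => {
    if (err) throw err;
    console.log("hard link created");
});

// Synchronous variant: returns undefined, throws on error.
linkSync("data.txt", "data-second-link.txt");
```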
+ * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. + * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. + * @param type May be set to `'dir'`, `'file'`, or `'junction'` (default is `'file'`) and is only available on Windows (ignored on other platforms). + * When using `'junction'`, the `target` argument will automatically be normalized to an absolute path. + */ + function __promisify__(target: PathLike, path: PathLike, type?: string | null): Promise; + type Type = "dir" | "file" | "junction"; + } + /** + * Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link symlink}. + * @since v0.1.31 + * @param [type='null'] + */ + export function symlinkSync(target: PathLike, path: PathLike, type?: symlink.Type | null): void; + /** + * Reads the contents of the symbolic link referred to by `path`. The callback gets + * two arguments `(err, linkString)`. + * + * See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path passed to the callback. If the `encoding` is set to `'buffer'`, + * the link path returned will be passed as a `Buffer` object. + * @since v0.1.31 + */ + export function readlink( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, linkString: string) => void, + ): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlink( + path: PathLike, + options: BufferEncodingOption, + callback: (err: NodeJS.ErrnoException | null, linkString: Buffer) => void, + ): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlink( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, linkString: string | Buffer) => void, + ): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function readlink( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, linkString: string) => void, + ): void; + export namespace readlink { + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + } + /** + * Returns the symbolic link's string value. + * + * See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path returned. If the `encoding` is set to `'buffer'`, + * the link path returned will be passed as a `Buffer` object. + * @since v0.1.31 + */ + export function readlinkSync(path: PathLike, options?: EncodingOption): string; + /** + * Synchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlinkSync(path: PathLike, options: BufferEncodingOption): Buffer; + /** + * Synchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlinkSync(path: PathLike, options?: EncodingOption): string | Buffer; + /** + * Asynchronously computes the canonical pathname by resolving `.`, `..`, and + * symbolic links. + * + * A canonical pathname is not necessarily unique. Hard links and bind mounts can + * expose a file system entity through many pathnames. + * + * This function behaves like [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html), with some exceptions: + * + * 1. No case conversion is performed on case-insensitive file systems. + * 2. The maximum number of symbolic links is platform-independent and generally + * (much) higher than what the native [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html) implementation supports. + * + * The `callback` gets two arguments `(err, resolvedPath)`. May use `process.cwd`to resolve relative paths. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path passed to the callback. If the `encoding` is set to `'buffer'`, + * the path returned will be passed as a `Buffer` object. + * + * If `path` resolves to a socket or a pipe, the function will return a system + * dependent name for that object. + * @since v0.1.31 + */ + export function realpath( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. 
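The `readlink` overloads above switch between `string` and `Buffer` results based on the encoding. An illustrative sketch, assuming `link-to-data` is a symlink:

```ts
// Sketch only: how the encoding argument picks the readlink result type.
import { readlink, readlinkSync } from "node:fs";

readlink("link-to-data", (err, target) => {
    if (err) throw err;
    console.log(typeof target); // "string" (utf8 by default)
});

readlink("link-to-data", "buffer", (err, target) => {
    if (err) throw err;
    console.log(Buffer.isBuffer(target)); // true
});

// Synchronous variant with an options object returns a Buffer.
const rawTarget = readlinkSync("link-to-data", { encoding: "buffer" });
console.log(rawTarget.length);
```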
+ * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpath( + path: PathLike, + options: BufferEncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: Buffer) => void, + ): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpath( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string | Buffer) => void, + ): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function realpath( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + export namespace realpath { + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html). + * + * The `callback` gets two arguments `(err, resolvedPath)`. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path passed to the callback. If the `encoding` is set to `'buffer'`, + * the path returned will be passed as a `Buffer` object. + * + * On Linux, when Node.js is linked against musl libc, the procfs file system must + * be mounted on `/proc` in order for this function to work. Glibc does not have + * this restriction. 
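To contrast `realpath` with the `realpath.native` binding documented above, a brief sketch with a placeholder path:

```ts
// Sketch only: JS-implemented realpath vs the libuv-backed native variant.
import { realpath, realpathSync } from "node:fs";

realpath("./some/link", (err, resolved) => {
    if (err) throw err;
    console.log("realpath:", resolved);
});

realpath.native("./some/link", (err, resolved) => {
    if (err) throw err;
    console.log("realpath.native:", resolved);
});

// Synchronous counterparts exist for both.
console.log(realpathSync.native("./some/link"));
```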
+ * @since v9.2.0 + */ + function native( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + function native( + path: PathLike, + options: BufferEncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: Buffer) => void, + ): void; + function native( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string | Buffer) => void, + ): void; + function native( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + } + /** + * Returns the resolved pathname. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link realpath}. + * @since v0.1.31 + */ + export function realpathSync(path: PathLike, options?: EncodingOption): string; + /** + * Synchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpathSync(path: PathLike, options: BufferEncodingOption): Buffer; + /** + * Synchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpathSync(path: PathLike, options?: EncodingOption): string | Buffer; + export namespace realpathSync { + function native(path: PathLike, options?: EncodingOption): string; + function native(path: PathLike, options: BufferEncodingOption): Buffer; + function native(path: PathLike, options?: EncodingOption): string | Buffer; + } + /** + * Asynchronously removes a file or symbolic link. No arguments other than a + * possible exception are given to the completion callback. + * + * ```js + * import { unlink } from 'node:fs'; + * // Assuming that 'path/file.txt' is a regular file. + * unlink('path/file.txt', (err) => { + * if (err) throw err; + * console.log('path/file.txt was deleted'); + * }); + * ``` + * + * `fs.unlink()` will not work on a directory, empty or otherwise. To remove a + * directory, use {@link rmdir}. + * + * See the POSIX [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html) documentation for more details. + * @since v0.0.2 + */ + export function unlink(path: PathLike, callback: NoParamCallback): void; + export namespace unlink { + /** + * Asynchronous unlink(2) - delete a name and possibly the file it refers to. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike): Promise; + } + /** + * Synchronous [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html). Returns `undefined`. + * @since v0.1.21 + */ + export function unlinkSync(path: PathLike): void; + export interface RmDirOptions { + /** + * If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or + * `EPERM` error is encountered, Node.js will retry the operation with a linear + * backoff wait of `retryDelay` ms longer on each try. This option represents the + * number of retries. This option is ignored if the `recursive` option is not + * `true`. 
+ * @default 0 + */ + maxRetries?: number | undefined; + /** + * @deprecated since v14.14.0 In future versions of Node.js and will trigger a warning + * `fs.rmdir(path, { recursive: true })` will throw if `path` does not exist or is a file. + * Use `fs.rm(path, { recursive: true, force: true })` instead. + * + * If `true`, perform a recursive directory removal. In + * recursive mode, operations are retried on failure. + * @default false + */ + recursive?: boolean | undefined; + /** + * The amount of time in milliseconds to wait between retries. + * This option is ignored if the `recursive` option is not `true`. + * @default 100 + */ + retryDelay?: number | undefined; + } + /** + * Asynchronous [`rmdir(2)`](http://man7.org/linux/man-pages/man2/rmdir.2.html). No arguments other than a possible exception are given + * to the completion callback. + * + * Using `fs.rmdir()` on a file (not a directory) results in an `ENOENT` error on + * Windows and an `ENOTDIR` error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use {@link rm} with options `{ recursive: true, force: true }`. + * @since v0.0.2 + */ + export function rmdir(path: PathLike, callback: NoParamCallback): void; + export function rmdir(path: PathLike, options: RmDirOptions, callback: NoParamCallback): void; + export namespace rmdir { + /** + * Asynchronous rmdir(2) - delete a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, options?: RmDirOptions): Promise; + } + /** + * Synchronous [`rmdir(2)`](http://man7.org/linux/man-pages/man2/rmdir.2.html). Returns `undefined`. + * + * Using `fs.rmdirSync()` on a file (not a directory) results in an `ENOENT` error + * on Windows and an `ENOTDIR` error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use {@link rmSync} with options `{ recursive: true, force: true }`. + * @since v0.1.21 + */ + export function rmdirSync(path: PathLike, options?: RmDirOptions): void; + export interface RmOptions { + /** + * When `true`, exceptions will be ignored if `path` does not exist. + * @default false + */ + force?: boolean | undefined; + /** + * If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or + * `EPERM` error is encountered, Node.js will retry the operation with a linear + * backoff wait of `retryDelay` ms longer on each try. This option represents the + * number of retries. This option is ignored if the `recursive` option is not + * `true`. + * @default 0 + */ + maxRetries?: number | undefined; + /** + * If `true`, perform a recursive directory removal. In + * recursive mode, operations are retried on failure. + * @default false + */ + recursive?: boolean | undefined; + /** + * The amount of time in milliseconds to wait between retries. + * This option is ignored if the `recursive` option is not `true`. + * @default 100 + */ + retryDelay?: number | undefined; + } + /** + * Asynchronously removes files and directories (modeled on the standard POSIX `rm`utility). No arguments other than a possible exception are given to the + * completion callback. + * @since v14.14.0 + */ + export function rm(path: PathLike, callback: NoParamCallback): void; + export function rm(path: PathLike, options: RmOptions, callback: NoParamCallback): void; + export namespace rm { + /** + * Asynchronously removes files and directories (modeled on the standard POSIX `rm` utility). 
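A minimal sketch of `rm` with the `RmOptions` declared above, approximating `rm -rf` with bounded retries; `./build` is a placeholder directory:

```ts
// Sketch only: recursive, forced removal with retry options from RmOptions.
import { rm } from "node:fs";

rm("./build", { recursive: true, force: true, maxRetries: 3, retryDelay: 100 }, (err) => {
    if (err) throw err;
    console.log("./build removed (or never existed, thanks to force)");
});
```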
+ */ + function __promisify__(path: PathLike, options?: RmOptions): Promise; + } + /** + * Synchronously removes files and directories (modeled on the standard POSIX `rm`utility). Returns `undefined`. + * @since v14.14.0 + */ + export function rmSync(path: PathLike, options?: RmOptions): void; + export interface MakeDirectoryOptions { + /** + * Indicates whether parent folders should be created. + * If a folder was created, the path to the first created folder will be returned. + * @default false + */ + recursive?: boolean | undefined; + /** + * A file mode. If a string is passed, it is parsed as an octal integer. If not specified + * @default 0o777 + */ + mode?: Mode | undefined; + } + /** + * Asynchronously creates a directory. + * + * The callback is given a possible exception and, if `recursive` is `true`, the + * first directory path created, `(err[, path])`.`path` can still be `undefined` when `recursive` is `true`, if no directory was + * created (for instance, if it was previously created). + * + * The optional `options` argument can be an integer specifying `mode` (permission + * and sticky bits), or an object with a `mode` property and a `recursive`property indicating whether parent directories should be created. Calling`fs.mkdir()` when `path` is a directory that + * exists results in an error only + * when `recursive` is false. If `recursive` is false and the directory exists, + * an `EEXIST` error occurs. + * + * ```js + * import { mkdir } from 'node:fs'; + * + * // Create ./tmp/a/apple, regardless of whether ./tmp and ./tmp/a exist. + * mkdir('./tmp/a/apple', { recursive: true }, (err) => { + * if (err) throw err; + * }); + * ``` + * + * On Windows, using `fs.mkdir()` on the root directory even with recursion will + * result in an error: + * + * ```js + * import { mkdir } from 'node:fs'; + * + * mkdir('/', { recursive: true }, (err) => { + * // => [Error: EPERM: operation not permitted, mkdir 'C:\'] + * }); + * ``` + * + * See the POSIX [`mkdir(2)`](http://man7.org/linux/man-pages/man2/mkdir.2.html) documentation for more details. + * @since v0.1.8 + */ + export function mkdir( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + callback: (err: NodeJS.ErrnoException | null, path?: string) => void, + ): void; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdir( + path: PathLike, + options: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null + | undefined, + callback: NoParamCallback, + ): void; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdir( + path: PathLike, + options: Mode | MakeDirectoryOptions | null | undefined, + callback: (err: NodeJS.ErrnoException | null, path?: string) => void, + ): void; + /** + * Asynchronous mkdir(2) - create a directory with a mode of `0o777`. + * @param path A path to a file. 
If a URL is provided, it must use the `file:` protocol. + */ + export function mkdir(path: PathLike, callback: NoParamCallback): void; + export namespace mkdir { + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__( + path: PathLike, + options?: Mode | MakeDirectoryOptions | null, + ): Promise; + } + /** + * Synchronously creates a directory. Returns `undefined`, or if `recursive` is`true`, the first directory path created. + * This is the synchronous version of {@link mkdir}. + * + * See the POSIX [`mkdir(2)`](http://man7.org/linux/man-pages/man2/mkdir.2.html) documentation for more details. + * @since v0.1.21 + */ + export function mkdirSync( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + ): string | undefined; + /** + * Synchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdirSync( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null, + ): void; + /** + * Synchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdirSync(path: PathLike, options?: Mode | MakeDirectoryOptions | null): string | undefined; + /** + * Creates a unique temporary directory. + * + * Generates six random characters to be appended behind a required`prefix` to create a unique temporary directory. Due to platform + * inconsistencies, avoid trailing `X` characters in `prefix`. 
Some platforms, + * notably the BSDs, can return more than six random characters, and replace + * trailing `X` characters in `prefix` with random characters. + * + * The created directory path is passed as a string to the callback's second + * parameter. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * + * ```js + * import { mkdtemp } from 'node:fs'; + * import { join } from 'node:path'; + * import { tmpdir } from 'node:os'; + * + * mkdtemp(join(tmpdir(), 'foo-'), (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Prints: /tmp/foo-itXde2 or C:\Users\...\AppData\Local\Temp\foo-itXde2 + * }); + * ``` + * + * The `fs.mkdtemp()` method will append the six randomly selected characters + * directly to the `prefix` string. For instance, given a directory `/tmp`, if the + * intention is to create a temporary directory _within_`/tmp`, the `prefix`must end with a trailing platform-specific path separator + * (`require('node:path').sep`). + * + * ```js + * import { tmpdir } from 'node:os'; + * import { mkdtemp } from 'node:fs'; + * + * // The parent directory for the new temporary directory + * const tmpDir = tmpdir(); + * + * // This method is *INCORRECT*: + * mkdtemp(tmpDir, (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Will print something similar to `/tmpabc123`. + * // A new temporary directory is created at the file system root + * // rather than *within* the /tmp directory. + * }); + * + * // This method is *CORRECT*: + * import { sep } from 'node:path'; + * mkdtemp(`${tmpDir}${sep}`, (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Will print something similar to `/tmp/abc123`. + * // A new temporary directory is created within + * // the /tmp directory. + * }); + * ``` + * @since v5.10.0 + */ + export function mkdtemp( + prefix: string, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, folder: string) => void, + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtemp( + prefix: string, + options: + | "buffer" + | { + encoding: "buffer"; + }, + callback: (err: NodeJS.ErrnoException | null, folder: Buffer) => void, + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtemp( + prefix: string, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, folder: string | Buffer) => void, + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + */ + export function mkdtemp( + prefix: string, + callback: (err: NodeJS.ErrnoException | null, folder: string) => void, + ): void; + export namespace mkdtemp { + /** + * Asynchronously creates a unique temporary directory. 
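+ *
+ * These overloads give `fs.mkdtemp` its typed shape under `util.promisify()`; a small sketch
+ * for illustration (the 'demo-' prefix is a placeholder):
+ *
+ * ```js
+ * import { mkdtemp } from 'node:fs';
+ * import { promisify } from 'node:util';
+ * import { join } from 'node:path';
+ * import { tmpdir } from 'node:os';
+ *
+ * const mkdtempAsync = promisify(mkdtemp);
+ *
+ * mkdtempAsync(join(tmpdir(), 'demo-')).then((dir) => {
+ *   console.log(dir); // e.g. /tmp/demo-Xxxxxx
+ * });
+ * ```
+ *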
+ * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options?: EncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options: BufferEncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options?: EncodingOption): Promise; + } + /** + * Returns the created directory path. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link mkdtemp}. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * @since v5.10.0 + */ + export function mkdtempSync(prefix: string, options?: EncodingOption): string; + /** + * Synchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtempSync(prefix: string, options: BufferEncodingOption): Buffer; + /** + * Synchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtempSync(prefix: string, options?: EncodingOption): string | Buffer; + /** + * Reads the contents of a directory. The callback gets two arguments `(err, files)`where `files` is an array of the names of the files in the directory excluding`'.'` and `'..'`. + * + * See the POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames passed to the callback. If the `encoding` is set to `'buffer'`, + * the filenames returned will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the `files` array will contain `fs.Dirent` objects. + * @since v0.1.8 + */ + export function readdir( + path: PathLike, + options: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, files: string[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. 
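+ *
+ * A sketch for illustration (the '.' path is a placeholder), listing entries and marking
+ * subdirectories via `withFileTypes`:
+ *
+ * ```js
+ * import { readdir } from 'node:fs';
+ *
+ * readdir('.', { withFileTypes: true }, (err, entries) => {
+ *   if (err) throw err;
+ *   for (const entry of entries) {
+ *     console.log(`${entry.name}${entry.isDirectory() ? '/' : ''}`);
+ *   }
+ * });
+ * ```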
+ * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdir( + path: PathLike, + options: + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | "buffer", + callback: (err: NodeJS.ErrnoException | null, files: Buffer[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdir( + path: PathLike, + options: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, files: string[] | Buffer[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function readdir( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, files: string[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + export function readdir( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + callback: (err: NodeJS.ErrnoException | null, files: Dirent[]) => void, + ): void; + export namespace readdir { + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options?: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options: + | "buffer" + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ * @param options If called with `withFileTypes: true` the result data will be an array of Dirent + */ + function __promisify__( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + ): Promise; + } + /** + * Reads the contents of the directory. + * + * See the POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames returned. If the `encoding` is set to `'buffer'`, + * the filenames returned will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the result will contain `fs.Dirent` objects. + * @since v0.1.21 + */ + export function readdirSync( + path: PathLike, + options?: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | BufferEncoding + | null, + ): string[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdirSync( + path: PathLike, + options: + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | "buffer", + ): Buffer[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdirSync( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): string[] | Buffer[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + export function readdirSync( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + ): Dirent[]; + /** + * Closes the file descriptor. No arguments other than a possible exception are + * given to the completion callback. + * + * Calling `fs.close()` on any file descriptor (`fd`) that is currently in use + * through any other `fs` operation may lead to undefined behavior. + * + * See the POSIX [`close(2)`](http://man7.org/linux/man-pages/man2/close.2.html) documentation for more detail. + * @since v0.0.2 + */ + export function close(fd: number, callback?: NoParamCallback): void; + export namespace close { + /** + * Asynchronous close(2) - close a file descriptor. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Closes the file descriptor. Returns `undefined`. + * + * Calling `fs.closeSync()` on any file descriptor (`fd`) that is currently in use + * through any other `fs` operation may lead to undefined behavior. + * + * See the POSIX [`close(2)`](http://man7.org/linux/man-pages/man2/close.2.html) documentation for more detail. 
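+ *
+ * A sketch of the usual open/use/close discipline for illustration (the file name is a placeholder):
+ *
+ * ```js
+ * import { openSync, writeSync, closeSync } from 'node:fs';
+ *
+ * const fd = openSync('notes.txt', 'w');
+ * try {
+ *   const written = writeSync(fd, 'hello\n');
+ *   console.log(`wrote ${written} bytes`);
+ * } finally {
+ *   closeSync(fd); // always release the descriptor, even if writing threw
+ * }
+ * ```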
+ * @since v0.1.21 + */ + export function closeSync(fd: number): void; + /** + * Asynchronous file open. See the POSIX [`open(2)`](http://man7.org/linux/man-pages/man2/open.2.html) documentation for more details. + * + * `mode` sets the file mode (permission and sticky bits), but only if the file was + * created. On Windows, only the write permission can be manipulated; see {@link chmod}. + * + * The callback gets two arguments `(err, fd)`. + * + * Some characters (`< > : " / \ | ? *`) are reserved under Windows as documented + * by [Naming Files, Paths, and Namespaces](https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file). Under NTFS, if the filename contains + * a colon, Node.js will open a file system stream, as described by [this MSDN page](https://docs.microsoft.com/en-us/windows/desktop/FileIO/using-streams). + * + * Functions based on `fs.open()` exhibit this behavior as well:`fs.writeFile()`, `fs.readFile()`, etc. + * @since v0.0.2 + * @param [flags='r'] See `support of file system `flags``. + * @param [mode=0o666] + */ + export function open( + path: PathLike, + flags: OpenMode | undefined, + mode: Mode | undefined | null, + callback: (err: NodeJS.ErrnoException | null, fd: number) => void, + ): void; + /** + * Asynchronous open(2) - open and possibly create a file. If the file is created, its mode will be `0o666`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param [flags='r'] See `support of file system `flags``. + */ + export function open( + path: PathLike, + flags: OpenMode | undefined, + callback: (err: NodeJS.ErrnoException | null, fd: number) => void, + ): void; + /** + * Asynchronous open(2) - open and possibly create a file. If the file is created, its mode will be `0o666`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function open(path: PathLike, callback: (err: NodeJS.ErrnoException | null, fd: number) => void): void; + export namespace open { + /** + * Asynchronous open(2) - open and possibly create a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not supplied, defaults to `0o666`. + */ + function __promisify__(path: PathLike, flags: OpenMode, mode?: Mode | null): Promise; + } + /** + * Returns an integer representing the file descriptor. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link open}. + * @since v0.1.21 + * @param [flags='r'] + * @param [mode=0o666] + */ + export function openSync(path: PathLike, flags: OpenMode, mode?: Mode | null): number; + /** + * Change the file system timestamps of the object referenced by `path`. + * + * The `atime` and `mtime` arguments follow these rules: + * + * * Values can be either numbers representing Unix epoch time in seconds,`Date`s, or a numeric string like `'123456789.0'`. + * * If the value can not be converted to a number, or is `NaN`, `Infinity`, or`-Infinity`, an `Error` will be thrown. + * @since v0.4.2 + */ + export function utimes(path: PathLike, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace utimes { + /** + * Asynchronously change file timestamps of the file referenced by the supplied path. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param atime The last access time. 
If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link utimes}. + * @since v0.4.2 + */ + export function utimesSync(path: PathLike, atime: TimeLike, mtime: TimeLike): void; + /** + * Change the file system timestamps of the object referenced by the supplied file + * descriptor. See {@link utimes}. + * @since v0.4.2 + */ + export function futimes(fd: number, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace futimes { + /** + * Asynchronously change file timestamps of the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(fd: number, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Synchronous version of {@link futimes}. Returns `undefined`. + * @since v0.4.2 + */ + export function futimesSync(fd: number, atime: TimeLike, mtime: TimeLike): void; + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. No arguments other + * than a possible exception are given to the completion callback. + * @since v0.1.96 + */ + export function fsync(fd: number, callback: NoParamCallback): void; + export namespace fsync { + /** + * Asynchronous fsync(2) - synchronize a file's in-core state with the underlying storage device. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. Returns `undefined`. + * @since v0.1.96 + */ + export function fsyncSync(fd: number): void; + /** + * Write `buffer` to the file specified by `fd`. + * + * `offset` determines the part of the buffer to be written, and `length` is + * an integer specifying the number of bytes to write. + * + * `position` refers to the offset from the beginning of the file where this data + * should be written. If `typeof position !== 'number'`, the data will be written + * at the current position. See [`pwrite(2)`](http://man7.org/linux/man-pages/man2/pwrite.2.html). + * + * The callback will be given three arguments `(err, bytesWritten, buffer)` where`bytesWritten` specifies how many _bytes_ were written from `buffer`. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesWritten` and `buffer` properties. + * + * It is unsafe to use `fs.write()` multiple times on the same file without waiting + * for the callback. For this scenario, {@link createWriteStream} is + * recommended. + * + * On Linux, positional writes don't work when the file is opened in append mode. 
+ * The kernel ignores the position argument and always appends the data to
+ * the end of the file.
+ * @since v0.0.2
+ * @param [offset=0]
+ * @param [length=buffer.byteLength - offset]
+ * @param [position='null']
+ */
+ export function write<TBuffer extends NodeJS.ArrayBufferView>(
+ fd: number,
+ buffer: TBuffer,
+ offset: number | undefined | null,
+ length: number | undefined | null,
+ position: number | undefined | null,
+ callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void,
+ ): void;
+ /**
+ * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor.
+ * @param fd A file descriptor.
+ * @param offset The part of the buffer to be written. If not supplied, defaults to `0`.
+ * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`.
+ */
+ export function write<TBuffer extends NodeJS.ArrayBufferView>(
+ fd: number,
+ buffer: TBuffer,
+ offset: number | undefined | null,
+ length: number | undefined | null,
+ callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void,
+ ): void;
+ /**
+ * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor.
+ * @param fd A file descriptor.
+ * @param offset The part of the buffer to be written. If not supplied, defaults to `0`.
+ */
+ export function write<TBuffer extends NodeJS.ArrayBufferView>(
+ fd: number,
+ buffer: TBuffer,
+ offset: number | undefined | null,
+ callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void,
+ ): void;
+ /**
+ * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor.
+ * @param fd A file descriptor.
+ */
+ export function write<TBuffer extends NodeJS.ArrayBufferView>(
+ fd: number,
+ buffer: TBuffer,
+ callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void,
+ ): void;
+ /**
+ * Asynchronously writes `string` to the file referenced by the supplied file descriptor.
+ * @param fd A file descriptor.
+ * @param string A string to write.
+ * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position.
+ * @param encoding The expected string encoding.
+ */
+ export function write(
+ fd: number,
+ string: string,
+ position: number | undefined | null,
+ encoding: BufferEncoding | undefined | null,
+ callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void,
+ ): void;
+ /**
+ * Asynchronously writes `string` to the file referenced by the supplied file descriptor.
+ * @param fd A file descriptor.
+ * @param string A string to write.
+ * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position.
+ */
+ export function write(
+ fd: number,
+ string: string,
+ position: number | undefined | null,
+ callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void,
+ ): void;
+ /**
+ * Asynchronously writes `string` to the file referenced by the supplied file descriptor.
+ * @param fd A file descriptor.
+ * @param string A string to write.
+ */
+ export function write(
+ fd: number,
+ string: string,
+ callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void,
+ ): void;
+ export namespace write {
+ /**
+ * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor.
+ * @param fd A file descriptor.
+ * @param offset The part of the buffer to be written. If not supplied, defaults to `0`.
+ * @param length The number of bytes to write.
If not supplied, defaults to `buffer.length - offset`. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + */ + function __promisify__( + fd: number, + buffer?: TBuffer, + offset?: number, + length?: number, + position?: number | null, + ): Promise<{ + bytesWritten: number; + buffer: TBuffer; + }>; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + function __promisify__( + fd: number, + string: string, + position?: number | null, + encoding?: BufferEncoding | null, + ): Promise<{ + bytesWritten: number; + buffer: string; + }>; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link write}. + * @since v0.1.21 + * @param [offset=0] + * @param [length=buffer.byteLength - offset] + * @param [position='null'] + * @return The number of bytes written. + */ + export function writeSync( + fd: number, + buffer: NodeJS.ArrayBufferView, + offset?: number | null, + length?: number | null, + position?: number | null, + ): number; + /** + * Synchronously writes `string` to the file referenced by the supplied file descriptor, returning the number of bytes written. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + export function writeSync( + fd: number, + string: string, + position?: number | null, + encoding?: BufferEncoding | null, + ): number; + export type ReadPosition = number | bigint; + export interface ReadSyncOptions { + /** + * @default 0 + */ + offset?: number | undefined; + /** + * @default `length of buffer` + */ + length?: number | undefined; + /** + * @default null + */ + position?: ReadPosition | null | undefined; + } + export interface ReadAsyncOptions extends ReadSyncOptions { + buffer?: TBuffer; + } + /** + * Read data from the file specified by `fd`. + * + * The callback is given the three arguments, `(err, bytesRead, buffer)`. + * + * If the file is not modified concurrently, the end-of-file is reached when the + * number of bytes read is zero. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesRead` and `buffer` properties. + * @since v0.0.2 + * @param buffer The buffer that the data will be written to. + * @param offset The position in `buffer` to write the data to. + * @param length The number of bytes to read. + * @param position Specifies where to begin reading from in the file. If `position` is `null` or `-1 `, data will be read from the current file position, and the file position will be updated. If + * `position` is an integer, the file position will be unchanged. + */ + export function read( + fd: number, + buffer: TBuffer, + offset: number, + length: number, + position: ReadPosition | null, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void, + ): void; + /** + * Similar to the above `fs.read` function, this version takes an optional `options` object. 
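+ *
+ * A sketch for illustration (the 'data.bin' path and the 64-byte length are placeholders):
+ *
+ * ```js
+ * import { open, read, close } from 'node:fs';
+ * import { Buffer } from 'node:buffer';
+ *
+ * open('data.bin', 'r', (err, fd) => {
+ *   if (err) throw err;
+ *   // Read the first 64 bytes into a caller-supplied buffer.
+ *   read(fd, { buffer: Buffer.alloc(64), offset: 0, length: 64, position: 0 }, (err, bytesRead, buffer) => {
+ *     if (err) throw err;
+ *     console.log(`read ${bytesRead} bytes`);
+ *     close(fd, (err) => {
+ *       if (err) throw err;
+ *     });
+ *   });
+ * });
+ * ```
+ *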
+ * If not otherwise specified in an `options` object, + * `buffer` defaults to `Buffer.alloc(16384)`, + * `offset` defaults to `0`, + * `length` defaults to `buffer.byteLength`, `- offset` as of Node 17.6.0 + * `position` defaults to `null` + * @since v12.17.0, 13.11.0 + */ + export function read( + fd: number, + options: ReadAsyncOptions, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void, + ): void; + export function read( + fd: number, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: NodeJS.ArrayBufferView) => void, + ): void; + export namespace read { + /** + * @param fd A file descriptor. + * @param buffer The buffer that the data will be written to. + * @param offset The offset in the buffer at which to start writing. + * @param length The number of bytes to read. + * @param position The offset from the beginning of the file from which data should be read. If `null`, data will be read from the current position. + */ + function __promisify__( + fd: number, + buffer: TBuffer, + offset: number, + length: number, + position: number | null, + ): Promise<{ + bytesRead: number; + buffer: TBuffer; + }>; + function __promisify__( + fd: number, + options: ReadAsyncOptions, + ): Promise<{ + bytesRead: number; + buffer: TBuffer; + }>; + function __promisify__(fd: number): Promise<{ + bytesRead: number; + buffer: NodeJS.ArrayBufferView; + }>; + } + /** + * Returns the number of `bytesRead`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link read}. + * @since v0.1.21 + * @param [position='null'] + */ + export function readSync( + fd: number, + buffer: NodeJS.ArrayBufferView, + offset: number, + length: number, + position: ReadPosition | null, + ): number; + /** + * Similar to the above `fs.readSync` function, this version takes an optional `options` object. + * If no `options` object is specified, it will default with the above values. + */ + export function readSync(fd: number, buffer: NodeJS.ArrayBufferView, opts?: ReadSyncOptions): number; + /** + * Asynchronously reads the entire contents of a file. + * + * ```js + * import { readFile } from 'node:fs'; + * + * readFile('/etc/passwd', (err, data) => { + * if (err) throw err; + * console.log(data); + * }); + * ``` + * + * The callback is passed two arguments `(err, data)`, where `data` is the + * contents of the file. + * + * If no encoding is specified, then the raw buffer is returned. + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { readFile } from 'node:fs'; + * + * readFile('/etc/passwd', 'utf8', callback); + * ``` + * + * When the path is a directory, the behavior of `fs.readFile()` and {@link readFileSync} is platform-specific. On macOS, Linux, and Windows, an + * error will be returned. On FreeBSD, a representation of the directory's contents + * will be returned. + * + * ```js + * import { readFile } from 'node:fs'; + * + * // macOS, Linux, and Windows + * readFile('', (err, data) => { + * // => [Error: EISDIR: illegal operation on a directory, read ] + * }); + * + * // FreeBSD + * readFile('', (err, data) => { + * // => null, + * }); + * ``` + * + * It is possible to abort an ongoing request using an `AbortSignal`. 
If a + * request is aborted the callback is called with an `AbortError`: + * + * ```js + * import { readFile } from 'node:fs'; + * + * const controller = new AbortController(); + * const signal = controller.signal; + * readFile(fileInfo[0].name, { signal }, (err, buf) => { + * // ... + * }); + * // When you want to abort the request + * controller.abort(); + * ``` + * + * The `fs.readFile()` function buffers the entire file. To minimize memory costs, + * when possible prefer streaming via `fs.createReadStream()`. + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.readFile` performs. + * @since v0.1.29 + * @param path filename or file descriptor + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | ({ + encoding?: null | undefined; + flag?: string | undefined; + } & Abortable) + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, data: Buffer) => void, + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | ({ + encoding: BufferEncoding; + flag?: string | undefined; + } & Abortable) + | BufferEncoding, + callback: (err: NodeJS.ErrnoException | null, data: string) => void, + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | (ObjectEncodingOptions & { + flag?: string | undefined; + } & Abortable) + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, data: string | Buffer) => void, + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + */ + export function readFile( + path: PathOrFileDescriptor, + callback: (err: NodeJS.ErrnoException | null, data: Buffer) => void, + ): void; + export namespace readFile { + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options?: { + encoding?: null | undefined; + flag?: string | undefined; + } | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. 
+ * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options: + | { + encoding: BufferEncoding; + flag?: string | undefined; + } + | BufferEncoding, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options?: + | (ObjectEncodingOptions & { + flag?: string | undefined; + }) + | BufferEncoding + | null, + ): Promise; + } + /** + * Returns the contents of the `path`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link readFile}. + * + * If the `encoding` option is specified then this function returns a + * string. Otherwise it returns a buffer. + * + * Similar to {@link readFile}, when the path is a directory, the behavior of`fs.readFileSync()` is platform-specific. + * + * ```js + * import { readFileSync } from 'node:fs'; + * + * // macOS, Linux, and Windows + * readFileSync(''); + * // => [Error: EISDIR: illegal operation on a directory, read ] + * + * // FreeBSD + * readFileSync(''); // => + * ``` + * @since v0.1.8 + * @param path filename or file descriptor + */ + export function readFileSync( + path: PathOrFileDescriptor, + options?: { + encoding?: null | undefined; + flag?: string | undefined; + } | null, + ): Buffer; + /** + * Synchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFileSync( + path: PathOrFileDescriptor, + options: + | { + encoding: BufferEncoding; + flag?: string | undefined; + } + | BufferEncoding, + ): string; + /** + * Synchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFileSync( + path: PathOrFileDescriptor, + options?: + | (ObjectEncodingOptions & { + flag?: string | undefined; + }) + | BufferEncoding + | null, + ): string | Buffer; + export type WriteFileOptions = + | ( + & ObjectEncodingOptions + & Abortable + & { + mode?: Mode | undefined; + flag?: string | undefined; + flush?: boolean | undefined; + } + ) + | BufferEncoding + | null; + /** + * When `file` is a filename, asynchronously writes data to the file, replacing the + * file if it already exists. `data` can be a string or a buffer. 
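+ *
+ * A sketch for illustration (the file name and option values are placeholders), passing an
+ * explicit options object instead of a bare encoding string:
+ *
+ * ```js
+ * import { writeFile } from 'node:fs';
+ *
+ * writeFile('config.json', JSON.stringify({ debug: true }, null, 2), {
+ *   encoding: 'utf8',
+ *   mode: 0o600, // only the owner may read or write the newly created file
+ *   flag: 'w',   // truncate or create
+ * }, (err) => {
+ *   if (err) throw err;
+ * });
+ * ```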
+ * + * When `file` is a file descriptor, the behavior is similar to calling`fs.write()` directly (which is recommended). See the notes below on using + * a file descriptor. + * + * The `encoding` option is ignored if `data` is a buffer. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { writeFile } from 'node:fs'; + * import { Buffer } from 'node:buffer'; + * + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * writeFile('message.txt', data, (err) => { + * if (err) throw err; + * console.log('The file has been saved!'); + * }); + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { writeFile } from 'node:fs'; + * + * writeFile('message.txt', 'Hello Node.js', 'utf8', callback); + * ``` + * + * It is unsafe to use `fs.writeFile()` multiple times on the same file without + * waiting for the callback. For this scenario, {@link createWriteStream} is + * recommended. + * + * Similarly to `fs.readFile` \- `fs.writeFile` is a convenience method that + * performs multiple `write` calls internally to write the buffer passed to it. + * For performance sensitive code consider using {@link createWriteStream}. + * + * It is possible to use an `AbortSignal` to cancel an `fs.writeFile()`. + * Cancelation is "best effort", and some amount of data is likely still + * to be written. + * + * ```js + * import { writeFile } from 'node:fs'; + * import { Buffer } from 'node:buffer'; + * + * const controller = new AbortController(); + * const { signal } = controller; + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * writeFile('message.txt', data, { signal }, (err) => { + * // When a request is aborted - the callback is called with an AbortError + * }); + * // When the request should be aborted + * controller.abort(); + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.writeFile` performs. + * @since v0.1.29 + * @param file filename or file descriptor + */ + export function writeFile( + file: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + options: WriteFileOptions, + callback: NoParamCallback, + ): void; + /** + * Asynchronously writes data to a file, replacing the file if it already exists. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + */ + export function writeFile( + path: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + callback: NoParamCallback, + ): void; + export namespace writeFile { + /** + * Asynchronously writes data to a file, replacing the file if it already exists. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `mode` is not supplied, the default of `0o666` is used. 
+ * If `mode` is a string, it is parsed as an octal integer. + * If `flag` is not supplied, the default of `'w'` is used. + */ + function __promisify__( + path: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + options?: WriteFileOptions, + ): Promise; + } + /** + * Returns `undefined`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link writeFile}. + * @since v0.1.29 + * @param file filename or file descriptor + */ + export function writeFileSync( + file: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + options?: WriteFileOptions, + ): void; + /** + * Asynchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { appendFile } from 'node:fs'; + * + * appendFile('message.txt', 'data to append', (err) => { + * if (err) throw err; + * console.log('The "data to append" was appended to file!'); + * }); + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { appendFile } from 'node:fs'; + * + * appendFile('message.txt', 'data to append', 'utf8', callback); + * ``` + * + * The `path` may be specified as a numeric file descriptor that has been opened + * for appending (using `fs.open()` or `fs.openSync()`). The file descriptor will + * not be closed automatically. + * + * ```js + * import { open, close, appendFile } from 'node:fs'; + * + * function closeFd(fd) { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * + * open('message.txt', 'a', (err, fd) => { + * if (err) throw err; + * + * try { + * appendFile(fd, 'data to append', 'utf8', (err) => { + * closeFd(fd); + * if (err) throw err; + * }); + * } catch (err) { + * closeFd(fd); + * throw err; + * } + * }); + * ``` + * @since v0.6.7 + * @param path filename or file descriptor + */ + export function appendFile( + path: PathOrFileDescriptor, + data: string | Uint8Array, + options: WriteFileOptions, + callback: NoParamCallback, + ): void; + /** + * Asynchronously append data to a file, creating the file if it does not exist. + * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + */ + export function appendFile(file: PathOrFileDescriptor, data: string | Uint8Array, callback: NoParamCallback): void; + export namespace appendFile { + /** + * Asynchronously append data to a file, creating the file if it does not exist. + * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `mode` is not supplied, the default of `0o666` is used. + * If `mode` is a string, it is parsed as an octal integer. 
+ * If `flag` is not supplied, the default of `'a'` is used. + */ + function __promisify__( + file: PathOrFileDescriptor, + data: string | Uint8Array, + options?: WriteFileOptions, + ): Promise; + } + /** + * Synchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { appendFileSync } from 'node:fs'; + * + * try { + * appendFileSync('message.txt', 'data to append'); + * console.log('The "data to append" was appended to file!'); + * } catch (err) { + * // Handle the error + * } + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { appendFileSync } from 'node:fs'; + * + * appendFileSync('message.txt', 'data to append', 'utf8'); + * ``` + * + * The `path` may be specified as a numeric file descriptor that has been opened + * for appending (using `fs.open()` or `fs.openSync()`). The file descriptor will + * not be closed automatically. + * + * ```js + * import { openSync, closeSync, appendFileSync } from 'node:fs'; + * + * let fd; + * + * try { + * fd = openSync('message.txt', 'a'); + * appendFileSync(fd, 'data to append', 'utf8'); + * } catch (err) { + * // Handle the error + * } finally { + * if (fd !== undefined) + * closeSync(fd); + * } + * ``` + * @since v0.6.7 + * @param path filename or file descriptor + */ + export function appendFileSync( + path: PathOrFileDescriptor, + data: string | Uint8Array, + options?: WriteFileOptions, + ): void; + /** + * Watch for changes on `filename`. The callback `listener` will be called each + * time the file is accessed. + * + * The `options` argument may be omitted. If provided, it should be an object. The`options` object may contain a boolean named `persistent` that indicates + * whether the process should continue to run as long as files are being watched. + * The `options` object may specify an `interval` property indicating how often the + * target should be polled in milliseconds. + * + * The `listener` gets two arguments the current stat object and the previous + * stat object: + * + * ```js + * import { watchFile } from 'fs'; + * + * watchFile('message.text', (curr, prev) => { + * console.log(`the current mtime is: ${curr.mtime}`); + * console.log(`the previous mtime was: ${prev.mtime}`); + * }); + * ``` + * + * These stat objects are instances of `fs.Stat`. If the `bigint` option is `true`, + * the numeric values in these objects are specified as `BigInt`s. + * + * To be notified when the file was modified, not just accessed, it is necessary + * to compare `curr.mtimeMs` and `prev.mtimeMs`. + * + * When an `fs.watchFile` operation results in an `ENOENT` error, it + * will invoke the listener once, with all the fields zeroed (or, for dates, the + * Unix Epoch). If the file is created later on, the listener will be called + * again, with the latest stat objects. This is a change in functionality since + * v0.10. + * + * Using {@link watch} is more efficient than `fs.watchFile` and`fs.unwatchFile`. `fs.watch` should be used instead of `fs.watchFile` and`fs.unwatchFile` when possible. + * + * When a file being watched by `fs.watchFile()` disappears and reappears, + * then the contents of `previous` in the second callback event (the file's + * reappearance) will be the same as the contents of `previous` in the first + * callback event (its disappearance). 
+ * + * This happens when: + * + * * the file is deleted, followed by a restore + * * the file is renamed and then renamed a second time back to its original name + * @since v0.1.31 + */ + export interface WatchFileOptions { + bigint?: boolean | undefined; + persistent?: boolean | undefined; + interval?: number | undefined; + } + /** + * Watch for changes on `filename`. The callback `listener` will be called each + * time the file is accessed. + * + * The `options` argument may be omitted. If provided, it should be an object. The`options` object may contain a boolean named `persistent` that indicates + * whether the process should continue to run as long as files are being watched. + * The `options` object may specify an `interval` property indicating how often the + * target should be polled in milliseconds. + * + * The `listener` gets two arguments the current stat object and the previous + * stat object: + * + * ```js + * import { watchFile } from 'node:fs'; + * + * watchFile('message.text', (curr, prev) => { + * console.log(`the current mtime is: ${curr.mtime}`); + * console.log(`the previous mtime was: ${prev.mtime}`); + * }); + * ``` + * + * These stat objects are instances of `fs.Stat`. If the `bigint` option is `true`, + * the numeric values in these objects are specified as `BigInt`s. + * + * To be notified when the file was modified, not just accessed, it is necessary + * to compare `curr.mtimeMs` and `prev.mtimeMs`. + * + * When an `fs.watchFile` operation results in an `ENOENT` error, it + * will invoke the listener once, with all the fields zeroed (or, for dates, the + * Unix Epoch). If the file is created later on, the listener will be called + * again, with the latest stat objects. This is a change in functionality since + * v0.10. + * + * Using {@link watch} is more efficient than `fs.watchFile` and`fs.unwatchFile`. `fs.watch` should be used instead of `fs.watchFile` and`fs.unwatchFile` when possible. + * + * When a file being watched by `fs.watchFile()` disappears and reappears, + * then the contents of `previous` in the second callback event (the file's + * reappearance) will be the same as the contents of `previous` in the first + * callback event (its disappearance). + * + * This happens when: + * + * * the file is deleted, followed by a restore + * * the file is renamed and then renamed a second time back to its original name + * @since v0.1.31 + */ + export function watchFile( + filename: PathLike, + options: + | (WatchFileOptions & { + bigint?: false | undefined; + }) + | undefined, + listener: StatsListener, + ): StatWatcher; + export function watchFile( + filename: PathLike, + options: + | (WatchFileOptions & { + bigint: true; + }) + | undefined, + listener: BigIntStatsListener, + ): StatWatcher; + /** + * Watch for changes on `filename`. The callback `listener` will be called each time the file is accessed. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function watchFile(filename: PathLike, listener: StatsListener): StatWatcher; + /** + * Stop watching for changes on `filename`. If `listener` is specified, only that + * particular listener is removed. Otherwise, _all_ listeners are removed, + * effectively stopping watching of `filename`. + * + * Calling `fs.unwatchFile()` with a filename that is not being watched is a + * no-op, not an error. + * + * Using {@link watch} is more efficient than `fs.watchFile()` and`fs.unwatchFile()`. 
+     * `fs.watch()` should be used instead of `fs.watchFile()` and `fs.unwatchFile()` when possible.
+     * @since v0.1.31
+     * @param listener Optional, a listener previously attached using `fs.watchFile()`
+     */
+    export function unwatchFile(filename: PathLike, listener?: StatsListener): void;
+    export function unwatchFile(filename: PathLike, listener?: BigIntStatsListener): void;
+    export interface WatchOptions extends Abortable {
+        encoding?: BufferEncoding | "buffer" | undefined;
+        persistent?: boolean | undefined;
+        recursive?: boolean | undefined;
+    }
+    export type WatchEventType = "rename" | "change";
+    export type WatchListener<T> = (event: WatchEventType, filename: T | null) => void;
+    export type StatsListener = (curr: Stats, prev: Stats) => void;
+    export type BigIntStatsListener = (curr: BigIntStats, prev: BigIntStats) => void;
+    /**
+     * Watch for changes on `filename`, where `filename` is either a file or a
+     * directory.
+     *
+     * The second argument is optional. If `options` is provided as a string, it
+     * specifies the `encoding`. Otherwise `options` should be passed as an object.
+     *
+     * The listener callback gets two arguments `(eventType, filename)`. `eventType` is either `'rename'` or `'change'`, and `filename` is the name of the file
+     * which triggered the event.
+     *
+     * On most platforms, `'rename'` is emitted whenever a filename appears or
+     * disappears in the directory.
+     *
+     * The listener callback is attached to the `'change'` event fired by `fs.FSWatcher`, but it is not the same thing as the `'change'` value of `eventType`.
+     *
+     * If a `signal` is passed, aborting the corresponding AbortController will close
+     * the returned `fs.FSWatcher`.
+     * @since v0.5.10
+     * @param listener
+     */
+    export function watch(
+        filename: PathLike,
+        options:
+            | (WatchOptions & {
+                encoding: "buffer";
+            })
+            | "buffer",
+        listener?: WatchListener<Buffer>,
+    ): FSWatcher;
+    /**
+     * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
+     * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
+     * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options.
+     * If `encoding` is not supplied, the default of `'utf8'` is used.
+     * If `persistent` is not supplied, the default of `true` is used.
+     * If `recursive` is not supplied, the default of `false` is used.
+     */
+    export function watch(
+        filename: PathLike,
+        options?: WatchOptions | BufferEncoding | null,
+        listener?: WatchListener<string>,
+    ): FSWatcher;
+    /**
+     * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
+     * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
+     * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options.
+     * If `encoding` is not supplied, the default of `'utf8'` is used.
+     * If `persistent` is not supplied, the default of `true` is used.
+     * If `recursive` is not supplied, the default of `false` is used.
+     */
+    export function watch(
+        filename: PathLike,
+        options: WatchOptions | string,
+        listener?: WatchListener<string | Buffer>,
+    ): FSWatcher;
+    /**
+     * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
+     * @param filename A path to a file or directory.
If a URL is provided, it must use the `file:` protocol. + */ + export function watch(filename: PathLike, listener?: WatchListener): FSWatcher; + /** + * Test whether or not the given path exists by checking with the file system. + * Then call the `callback` argument with either true or false: + * + * ```js + * import { exists } from 'node:fs'; + * + * exists('/etc/passwd', (e) => { + * console.log(e ? 'it exists' : 'no passwd!'); + * }); + * ``` + * + * **The parameters for this callback are not consistent with other Node.js** + * **callbacks.** Normally, the first parameter to a Node.js callback is an `err`parameter, optionally followed by other parameters. The `fs.exists()` callback + * has only one boolean parameter. This is one reason `fs.access()` is recommended + * instead of `fs.exists()`. + * + * Using `fs.exists()` to check for the existence of a file before calling`fs.open()`, `fs.readFile()`, or `fs.writeFile()` is not recommended. Doing + * so introduces a race condition, since other processes may change the file's + * state between the two calls. Instead, user code should open/read/write the + * file directly and handle the error raised if the file does not exist. + * + * **write (NOT RECOMMENDED)** + * + * ```js + * import { exists, open, close } from 'node:fs'; + * + * exists('myfile', (e) => { + * if (e) { + * console.error('myfile already exists'); + * } else { + * open('myfile', 'wx', (err, fd) => { + * if (err) throw err; + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * } + * }); + * ``` + * + * **write (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * open('myfile', 'wx', (err, fd) => { + * if (err) { + * if (err.code === 'EEXIST') { + * console.error('myfile already exists'); + * return; + * } + * + * throw err; + * } + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * **read (NOT RECOMMENDED)** + * + * ```js + * import { open, close, exists } from 'node:fs'; + * + * exists('myfile', (e) => { + * if (e) { + * open('myfile', 'r', (err, fd) => { + * if (err) throw err; + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * } else { + * console.error('myfile does not exist'); + * } + * }); + * ``` + * + * **read (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * + * open('myfile', 'r', (err, fd) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * The "not recommended" examples above check for existence and then use the + * file; the "recommended" examples are better because they use the file directly + * and handle the error, if any. + * + * In general, check for the existence of a file only if the file won't be + * used directly, for example when its existence is a signal from another + * process. + * @since v0.0.2 + * @deprecated Since v1.0.0 - Use {@link stat} or {@link access} instead. + */ + export function exists(path: PathLike, callback: (exists: boolean) => void): void; + /** @deprecated */ + export namespace exists { + /** + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. 
+ */ + function __promisify__(path: PathLike): Promise; + } + /** + * Returns `true` if the path exists, `false` otherwise. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link exists}. + * + * `fs.exists()` is deprecated, but `fs.existsSync()` is not. The `callback`parameter to `fs.exists()` accepts parameters that are inconsistent with other + * Node.js callbacks. `fs.existsSync()` does not use a callback. + * + * ```js + * import { existsSync } from 'node:fs'; + * + * if (existsSync('/etc/passwd')) + * console.log('The path exists.'); + * ``` + * @since v0.1.21 + */ + export function existsSync(path: PathLike): boolean; + export namespace constants { + // File Access Constants + /** Constant for fs.access(). File is visible to the calling process. */ + const F_OK: number; + /** Constant for fs.access(). File can be read by the calling process. */ + const R_OK: number; + /** Constant for fs.access(). File can be written by the calling process. */ + const W_OK: number; + /** Constant for fs.access(). File can be executed by the calling process. */ + const X_OK: number; + // File Copy Constants + /** Constant for fs.copyFile. Flag indicating the destination file should not be overwritten if it already exists. */ + const COPYFILE_EXCL: number; + /** + * Constant for fs.copyFile. copy operation will attempt to create a copy-on-write reflink. + * If the underlying platform does not support copy-on-write, then a fallback copy mechanism is used. + */ + const COPYFILE_FICLONE: number; + /** + * Constant for fs.copyFile. Copy operation will attempt to create a copy-on-write reflink. + * If the underlying platform does not support copy-on-write, then the operation will fail with an error. + */ + const COPYFILE_FICLONE_FORCE: number; + // File Open Constants + /** Constant for fs.open(). Flag indicating to open a file for read-only access. */ + const O_RDONLY: number; + /** Constant for fs.open(). Flag indicating to open a file for write-only access. */ + const O_WRONLY: number; + /** Constant for fs.open(). Flag indicating to open a file for read-write access. */ + const O_RDWR: number; + /** Constant for fs.open(). Flag indicating to create the file if it does not already exist. */ + const O_CREAT: number; + /** Constant for fs.open(). Flag indicating that opening a file should fail if the O_CREAT flag is set and the file already exists. */ + const O_EXCL: number; + /** + * Constant for fs.open(). Flag indicating that if path identifies a terminal device, + * opening the path shall not cause that terminal to become the controlling terminal for the process + * (if the process does not already have one). + */ + const O_NOCTTY: number; + /** Constant for fs.open(). Flag indicating that if the file exists and is a regular file, and the file is opened successfully for write access, its length shall be truncated to zero. */ + const O_TRUNC: number; + /** Constant for fs.open(). Flag indicating that data will be appended to the end of the file. */ + const O_APPEND: number; + /** Constant for fs.open(). Flag indicating that the open should fail if the path is not a directory. */ + const O_DIRECTORY: number; + /** + * constant for fs.open(). + * Flag indicating reading accesses to the file system will no longer result in + * an update to the atime information associated with the file. + * This flag is available on Linux operating systems only. + */ + const O_NOATIME: number; + /** Constant for fs.open(). 
Flag indicating that the open should fail if the path is a symbolic link. */ + const O_NOFOLLOW: number; + /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O. */ + const O_SYNC: number; + /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O with write operations waiting for data integrity. */ + const O_DSYNC: number; + /** Constant for fs.open(). Flag indicating to open the symbolic link itself rather than the resource it is pointing to. */ + const O_SYMLINK: number; + /** Constant for fs.open(). When set, an attempt will be made to minimize caching effects of file I/O. */ + const O_DIRECT: number; + /** Constant for fs.open(). Flag indicating to open the file in nonblocking mode when possible. */ + const O_NONBLOCK: number; + // File Type Constants + /** Constant for fs.Stats mode property for determining a file's type. Bit mask used to extract the file type code. */ + const S_IFMT: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a regular file. */ + const S_IFREG: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a directory. */ + const S_IFDIR: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a character-oriented device file. */ + const S_IFCHR: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a block-oriented device file. */ + const S_IFBLK: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a FIFO/pipe. */ + const S_IFIFO: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a symbolic link. */ + const S_IFLNK: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a socket. */ + const S_IFSOCK: number; + // File Mode Constants + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by owner. */ + const S_IRWXU: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by owner. */ + const S_IRUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by owner. */ + const S_IWUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by owner. */ + const S_IXUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by group. */ + const S_IRWXG: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by group. */ + const S_IRGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by group. */ + const S_IWGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by group. */ + const S_IXGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by others. 
*/ + const S_IRWXO: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by others. */ + const S_IROTH: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by others. */ + const S_IWOTH: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by others. */ + const S_IXOTH: number; + /** + * When set, a memory file mapping is used to access the file. This flag + * is available on Windows operating systems only. On other operating systems, + * this flag is ignored. + */ + const UV_FS_O_FILEMAP: number; + } + /** + * Tests a user's permissions for the file or directory specified by `path`. + * The `mode` argument is an optional integer that specifies the accessibility + * checks to be performed. `mode` should be either the value `fs.constants.F_OK`or a mask consisting of the bitwise OR of any of `fs.constants.R_OK`,`fs.constants.W_OK`, and `fs.constants.X_OK` + * (e.g.`fs.constants.W_OK | fs.constants.R_OK`). Check `File access constants` for + * possible values of `mode`. + * + * The final argument, `callback`, is a callback function that is invoked with + * a possible error argument. If any of the accessibility checks fail, the error + * argument will be an `Error` object. The following examples check if`package.json` exists, and if it is readable or writable. + * + * ```js + * import { access, constants } from 'node:fs'; + * + * const file = 'package.json'; + * + * // Check if the file exists in the current directory. + * access(file, constants.F_OK, (err) => { + * console.log(`${file} ${err ? 'does not exist' : 'exists'}`); + * }); + * + * // Check if the file is readable. + * access(file, constants.R_OK, (err) => { + * console.log(`${file} ${err ? 'is not readable' : 'is readable'}`); + * }); + * + * // Check if the file is writable. + * access(file, constants.W_OK, (err) => { + * console.log(`${file} ${err ? 'is not writable' : 'is writable'}`); + * }); + * + * // Check if the file is readable and writable. + * access(file, constants.R_OK | constants.W_OK, (err) => { + * console.log(`${file} ${err ? 'is not' : 'is'} readable and writable`); + * }); + * ``` + * + * Do not use `fs.access()` to check for the accessibility of a file before calling`fs.open()`, `fs.readFile()`, or `fs.writeFile()`. Doing + * so introduces a race condition, since other processes may change the file's + * state between the two calls. Instead, user code should open/read/write the + * file directly and handle the error raised if the file is not accessible. 
+ * + * **write (NOT RECOMMENDED)** + * + * ```js + * import { access, open, close } from 'node:fs'; + * + * access('myfile', (err) => { + * if (!err) { + * console.error('myfile already exists'); + * return; + * } + * + * open('myfile', 'wx', (err, fd) => { + * if (err) throw err; + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * }); + * ``` + * + * **write (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * + * open('myfile', 'wx', (err, fd) => { + * if (err) { + * if (err.code === 'EEXIST') { + * console.error('myfile already exists'); + * return; + * } + * + * throw err; + * } + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * **read (NOT RECOMMENDED)** + * + * ```js + * import { access, open, close } from 'node:fs'; + * access('myfile', (err) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * open('myfile', 'r', (err, fd) => { + * if (err) throw err; + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * }); + * ``` + * + * **read (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * + * open('myfile', 'r', (err, fd) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * The "not recommended" examples above check for accessibility and then use the + * file; the "recommended" examples are better because they use the file directly + * and handle the error, if any. + * + * In general, check for the accessibility of a file only if the file will not be + * used directly, for example when its accessibility is a signal from another + * process. + * + * On Windows, access-control policies (ACLs) on a directory may limit access to + * a file or directory. The `fs.access()` function, however, does not check the + * ACL and therefore may report that a path is accessible even if the ACL restricts + * the user from reading or writing to it. + * @since v0.11.15 + * @param [mode=fs.constants.F_OK] + */ + export function access(path: PathLike, mode: number | undefined, callback: NoParamCallback): void; + /** + * Asynchronously tests a user's permissions for the file specified by path. + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function access(path: PathLike, callback: NoParamCallback): void; + export namespace access { + /** + * Asynchronously tests a user's permissions for the file specified by path. + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(path: PathLike, mode?: number): Promise; + } + /** + * Synchronously tests a user's permissions for the file or directory specified + * by `path`. The `mode` argument is an optional integer that specifies the + * accessibility checks to be performed. `mode` should be either the value`fs.constants.F_OK` or a mask consisting of the bitwise OR of any of`fs.constants.R_OK`, `fs.constants.W_OK`, and + * `fs.constants.X_OK` (e.g.`fs.constants.W_OK | fs.constants.R_OK`). 
Check `File access constants` for + * possible values of `mode`. + * + * If any of the accessibility checks fail, an `Error` will be thrown. Otherwise, + * the method will return `undefined`. + * + * ```js + * import { accessSync, constants } from 'node:fs'; + * + * try { + * accessSync('etc/passwd', constants.R_OK | constants.W_OK); + * console.log('can read/write'); + * } catch (err) { + * console.error('no access!'); + * } + * ``` + * @since v0.11.15 + * @param [mode=fs.constants.F_OK] + */ + export function accessSync(path: PathLike, mode?: number): void; + interface StreamOptions { + flags?: string | undefined; + encoding?: BufferEncoding | undefined; + fd?: number | promises.FileHandle | undefined; + mode?: number | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + signal?: AbortSignal | null | undefined; + highWaterMark?: number | undefined; + } + interface FSImplementation { + open?: (...args: any[]) => any; + close?: (...args: any[]) => any; + } + interface CreateReadStreamFSImplementation extends FSImplementation { + read: (...args: any[]) => any; + } + interface CreateWriteStreamFSImplementation extends FSImplementation { + write: (...args: any[]) => any; + writev?: (...args: any[]) => any; + } + interface ReadStreamOptions extends StreamOptions { + fs?: CreateReadStreamFSImplementation | null | undefined; + end?: number | undefined; + } + interface WriteStreamOptions extends StreamOptions { + fs?: CreateWriteStreamFSImplementation | null | undefined; + flush?: boolean | undefined; + } + /** + * Unlike the 16 KiB default `highWaterMark` for a `stream.Readable`, the stream + * returned by this method has a default `highWaterMark` of 64 KiB. + * + * `options` can include `start` and `end` values to read a range of bytes from + * the file instead of the entire file. Both `start` and `end` are inclusive and + * start counting at 0, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. If `fd` is specified and `start` is + * omitted or `undefined`, `fs.createReadStream()` reads sequentially from the + * current file position. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `fd` is specified, `ReadStream` will ignore the `path` argument and will use + * the specified file descriptor. This means that no `'open'` event will be + * emitted. `fd` should be blocking; non-blocking `fd`s should be passed to `net.Socket`. + * + * If `fd` points to a character device that only supports blocking reads + * (such as keyboard or sound card), read operations do not finish until data is + * available. This can prevent the process from exiting and the stream from + * closing naturally. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * By providing the `fs` option, it is possible to override the corresponding `fs`implementations for `open`, `read`, and `close`. When providing the `fs` option, + * an override for `read` is required. If no `fd` is provided, an override for`open` is also required. If `autoClose` is `true`, an override for `close` is + * also required. + * + * ```js + * import { createReadStream } from 'node:fs'; + * + * // Create a stream from some character device. 
+ * const stream = createReadStream('/dev/input/event0'); + * setTimeout(() => { + * stream.close(); // This may not close the stream. + * // Artificially marking end-of-stream, as if the underlying resource had + * // indicated end-of-file by itself, allows the stream to close. + * // This does not cancel pending read operations, and if there is such an + * // operation, the process may still not be able to exit successfully + * // until it finishes. + * stream.push(null); + * stream.read(0); + * }, 100); + * ``` + * + * If `autoClose` is false, then the file descriptor won't be closed, even if + * there's an error. It is the application's responsibility to close it and make + * sure there's no file descriptor leak. If `autoClose` is set to true (default + * behavior), on `'error'` or `'end'` the file descriptor will be closed + * automatically. + * + * `mode` sets the file mode (permission and sticky bits), but only if the + * file was created. + * + * An example to read the last 10 bytes of a file which is 100 bytes long: + * + * ```js + * import { createReadStream } from 'node:fs'; + * + * createReadStream('sample.txt', { start: 90, end: 99 }); + * ``` + * + * If `options` is a string, then it specifies the encoding. + * @since v0.1.31 + */ + export function createReadStream(path: PathLike, options?: BufferEncoding | ReadStreamOptions): ReadStream; + /** + * `options` may also include a `start` option to allow writing data at some + * position past the beginning of the file, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. Modifying a file rather than + * replacing it may require the `flags` option to be set to `r+` rather than the + * default `w`. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `autoClose` is set to true (default behavior) on `'error'` or `'finish'`the file descriptor will be closed automatically. If `autoClose` is false, + * then the file descriptor won't be closed, even if there's an error. + * It is the application's responsibility to close it and make sure there's no + * file descriptor leak. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * By providing the `fs` option it is possible to override the corresponding `fs`implementations for `open`, `write`, `writev`, and `close`. Overriding `write()`without `writev()` can reduce + * performance as some optimizations (`_writev()`) + * will be disabled. When providing the `fs` option, overrides for at least one of`write` and `writev` are required. If no `fd` option is supplied, an override + * for `open` is also required. If `autoClose` is `true`, an override for `close`is also required. + * + * Like `fs.ReadStream`, if `fd` is specified, `fs.WriteStream` will ignore the`path` argument and will use the specified file descriptor. This means that no`'open'` event will be + * emitted. `fd` should be blocking; non-blocking `fd`s + * should be passed to `net.Socket`. + * + * If `options` is a string, then it specifies the encoding. + * @since v0.1.31 + */ + export function createWriteStream(path: PathLike, options?: BufferEncoding | WriteStreamOptions): WriteStream; + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. 
Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. No arguments other + * than a possible + * exception are given to the completion callback. + * @since v0.1.96 + */ + export function fdatasync(fd: number, callback: NoParamCallback): void; + export namespace fdatasync { + /** + * Asynchronous fdatasync(2) - synchronize a file's in-core state with storage device. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. Returns `undefined`. + * @since v0.1.96 + */ + export function fdatasyncSync(fd: number): void; + /** + * Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. No arguments other than a possible exception are given to the + * callback function. Node.js makes no guarantees about the atomicity of the copy + * operation. If an error occurs after the destination file has been opened for + * writing, Node.js will attempt to remove the destination. + * + * `mode` is an optional integer that specifies the behavior + * of the copy operation. It is possible to create a mask consisting of the bitwise + * OR of two or more values (e.g.`fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`). + * + * * `fs.constants.COPYFILE_EXCL`: The copy operation will fail if `dest` already + * exists. + * * `fs.constants.COPYFILE_FICLONE`: The copy operation will attempt to create a + * copy-on-write reflink. If the platform does not support copy-on-write, then a + * fallback copy mechanism is used. + * * `fs.constants.COPYFILE_FICLONE_FORCE`: The copy operation will attempt to + * create a copy-on-write reflink. If the platform does not support + * copy-on-write, then the operation will fail. + * + * ```js + * import { copyFile, constants } from 'node:fs'; + * + * function callback(err) { + * if (err) throw err; + * console.log('source.txt was copied to destination.txt'); + * } + * + * // destination.txt will be created or overwritten by default. + * copyFile('source.txt', 'destination.txt', callback); + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * copyFile('source.txt', 'destination.txt', constants.COPYFILE_EXCL, callback); + * ``` + * @since v8.5.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] modifiers for copy operation. + */ + export function copyFile(src: PathLike, dest: PathLike, callback: NoParamCallback): void; + export function copyFile(src: PathLike, dest: PathLike, mode: number, callback: NoParamCallback): void; + export namespace copyFile { + function __promisify__(src: PathLike, dst: PathLike, mode?: number): Promise; + } + /** + * Synchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. Returns `undefined`. Node.js makes no guarantees about the + * atomicity of the copy operation. If an error occurs after the destination file + * has been opened for writing, Node.js will attempt to remove the destination. + * + * `mode` is an optional integer that specifies the behavior + * of the copy operation. 
It is possible to create a mask consisting of the bitwise + * OR of two or more values (e.g.`fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`). + * + * * `fs.constants.COPYFILE_EXCL`: The copy operation will fail if `dest` already + * exists. + * * `fs.constants.COPYFILE_FICLONE`: The copy operation will attempt to create a + * copy-on-write reflink. If the platform does not support copy-on-write, then a + * fallback copy mechanism is used. + * * `fs.constants.COPYFILE_FICLONE_FORCE`: The copy operation will attempt to + * create a copy-on-write reflink. If the platform does not support + * copy-on-write, then the operation will fail. + * + * ```js + * import { copyFileSync, constants } from 'node:fs'; + * + * // destination.txt will be created or overwritten by default. + * copyFileSync('source.txt', 'destination.txt'); + * console.log('source.txt was copied to destination.txt'); + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * copyFileSync('source.txt', 'destination.txt', constants.COPYFILE_EXCL); + * ``` + * @since v8.5.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] modifiers for copy operation. + */ + export function copyFileSync(src: PathLike, dest: PathLike, mode?: number): void; + /** + * Write an array of `ArrayBufferView`s to the file specified by `fd` using`writev()`. + * + * `position` is the offset from the beginning of the file where this data + * should be written. If `typeof position !== 'number'`, the data will be written + * at the current position. + * + * The callback will be given three arguments: `err`, `bytesWritten`, and`buffers`. `bytesWritten` is how many bytes were written from `buffers`. + * + * If this method is `util.promisify()` ed, it returns a promise for an`Object` with `bytesWritten` and `buffers` properties. + * + * It is unsafe to use `fs.writev()` multiple times on the same file without + * waiting for the callback. For this scenario, use {@link createWriteStream}. + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v12.9.0 + * @param [position='null'] + */ + export function writev( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + cb: (err: NodeJS.ErrnoException | null, bytesWritten: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export function writev( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position: number, + cb: (err: NodeJS.ErrnoException | null, bytesWritten: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export interface WriteVResult { + bytesWritten: number; + buffers: NodeJS.ArrayBufferView[]; + } + export namespace writev { + function __promisify__( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position?: number, + ): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link writev}. + * @since v12.9.0 + * @param [position='null'] + * @return The number of bytes written. + */ + export function writevSync(fd: number, buffers: readonly NodeJS.ArrayBufferView[], position?: number): number; + /** + * Read from a file specified by `fd` and write to an array of `ArrayBufferView`s + * using `readv()`. + * + * `position` is the offset from the beginning of the file from where data + * should be read. 
If `typeof position !== 'number'`, the data will be read + * from the current position. + * + * The callback will be given three arguments: `err`, `bytesRead`, and`buffers`. `bytesRead` is how many bytes were read from the file. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesRead` and `buffers` properties. + * @since v13.13.0, v12.17.0 + * @param [position='null'] + */ + export function readv( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + cb: (err: NodeJS.ErrnoException | null, bytesRead: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export function readv( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position: number, + cb: (err: NodeJS.ErrnoException | null, bytesRead: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export interface ReadVResult { + bytesRead: number; + buffers: NodeJS.ArrayBufferView[]; + } + export namespace readv { + function __promisify__( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position?: number, + ): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link readv}. + * @since v13.13.0, v12.17.0 + * @param [position='null'] + * @return The number of bytes read. + */ + export function readvSync(fd: number, buffers: readonly NodeJS.ArrayBufferView[], position?: number): number; + + export interface OpenAsBlobOptions { + /** + * An optional mime type for the blob. + * + * @default 'undefined' + */ + type?: string | undefined; + } + + /** + * Returns a `Blob` whose data is backed by the given file. + * + * The file must not be modified after the `Blob` is created. Any modifications + * will cause reading the `Blob` data to fail with a `DOMException` error. + * Synchronous stat operations on the file when the `Blob` is created, and before + * each read in order to detect whether the file data has been modified on disk. + * + * ```js + * import { openAsBlob } from 'node:fs'; + * + * const blob = await openAsBlob('the.file.txt'); + * const ab = await blob.arrayBuffer(); + * blob.stream(); + * ``` + * @since v19.8.0 + * @experimental + */ + export function openAsBlob(path: PathLike, options?: OpenAsBlobOptions): Promise; + + export interface OpenDirOptions { + /** + * @default 'utf8' + */ + encoding?: BufferEncoding | undefined; + /** + * Number of directory entries that are buffered + * internally when reading from the directory. Higher values lead to better + * performance but higher memory usage. + * @default 32 + */ + bufferSize?: number | undefined; + /** + * @default false + */ + recursive?: boolean; + } + /** + * Synchronously open a directory. See [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html). + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * @since v12.12.0 + */ + export function opendirSync(path: PathLike, options?: OpenDirOptions): Dir; + /** + * Asynchronously open a directory. See the POSIX [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html) documentation for + * more details. + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. 
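+     *
+     * A minimal sketch (the directory path is illustrative); iterating the `Dir`
+     * with `for await...of` also closes it once iteration completes:
+     *
+     * ```js
+     * import { opendir } from 'node:fs';
+     *
+     * opendir('./some/dir', async (err, dir) => {
+     *   if (err) throw err;
+     *   for await (const dirent of dir) {
+     *     console.log(dirent.name);
+     *   }
+     * });
+     * ```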
+ * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * @since v12.12.0 + */ + export function opendir(path: PathLike, cb: (err: NodeJS.ErrnoException | null, dir: Dir) => void): void; + export function opendir( + path: PathLike, + options: OpenDirOptions, + cb: (err: NodeJS.ErrnoException | null, dir: Dir) => void, + ): void; + export namespace opendir { + function __promisify__(path: PathLike, options?: OpenDirOptions): Promise; + } + export interface BigIntStats extends StatsBase { + atimeNs: bigint; + mtimeNs: bigint; + ctimeNs: bigint; + birthtimeNs: bigint; + } + export interface BigIntOptions { + bigint: true; + } + export interface StatOptions { + bigint?: boolean | undefined; + } + export interface StatSyncOptions extends StatOptions { + throwIfNoEntry?: boolean | undefined; + } + interface CopyOptionsBase { + /** + * Dereference symlinks + * @default false + */ + dereference?: boolean; + /** + * When `force` is `false`, and the destination + * exists, throw an error. + * @default false + */ + errorOnExist?: boolean; + /** + * Overwrite existing file or directory. _The copy + * operation will ignore errors if you set this to false and the destination + * exists. Use the `errorOnExist` option to change this behavior. + * @default true + */ + force?: boolean; + /** + * Modifiers for copy operation. See `mode` flag of {@link copyFileSync()} + */ + mode?: number; + /** + * When `true` timestamps from `src` will + * be preserved. + * @default false + */ + preserveTimestamps?: boolean; + /** + * Copy directories recursively. + * @default false + */ + recursive?: boolean; + /** + * When true, path resolution for symlinks will be skipped + * @default false + */ + verbatimSymlinks?: boolean; + } + export interface CopyOptions extends CopyOptionsBase { + /** + * Function to filter copied files/directories. Return + * `true` to copy the item, `false` to ignore it. + */ + filter?(source: string, destination: string): boolean | Promise; + } + export interface CopySyncOptions extends CopyOptionsBase { + /** + * Function to filter copied files/directories. Return + * `true` to copy the item, `false` to ignore it. + */ + filter?(source: string, destination: string): boolean; + } + /** + * Asynchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + */ + export function cp( + source: string | URL, + destination: string | URL, + callback: (err: NodeJS.ErrnoException | null) => void, + ): void; + export function cp( + source: string | URL, + destination: string | URL, + opts: CopyOptions, + callback: (err: NodeJS.ErrnoException | null) => void, + ): void; + /** + * Synchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. 
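+     *
+     * A minimal sketch (paths are illustrative), copying a tree while skipping
+     * anything under `node_modules`:
+     *
+     * ```js
+     * import { cpSync } from 'node:fs';
+     *
+     * cpSync('./src', './backup', {
+     *   recursive: true,
+     *   filter: (source) => !source.includes('node_modules'),
+     * });
+     * ```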
+     */
+    export function cpSync(source: string | URL, destination: string | URL, opts?: CopySyncOptions): void;
+}
+declare module "node:fs" {
+    export * from "fs";
+}
diff --git a/dist/node_modules/@types/node/fs/promises.d.ts b/dist/node_modules/@types/node/fs/promises.d.ts
new file mode 100644
index 0000000..88a9fc3
--- /dev/null
+++ b/dist/node_modules/@types/node/fs/promises.d.ts
@@ -0,0 +1,1239 @@
+/**
+ * The `fs/promises` API provides asynchronous file system methods that return
+ * promises.
+ *
+ * The promise APIs use the underlying Node.js threadpool to perform file
+ * system operations off the event loop thread. These operations are not
+ * synchronized or threadsafe. Care must be taken when performing multiple
+ * concurrent modifications on the same file or data corruption may occur.
+ * @since v10.0.0
+ */
+declare module "fs/promises" {
+    import { Abortable } from "node:events";
+    import { Stream } from "node:stream";
+    import { ReadableStream } from "node:stream/web";
+    import {
+        BigIntStats,
+        BigIntStatsFs,
+        BufferEncodingOption,
+        constants as fsConstants,
+        CopyOptions,
+        Dir,
+        Dirent,
+        MakeDirectoryOptions,
+        Mode,
+        ObjectEncodingOptions,
+        OpenDirOptions,
+        OpenMode,
+        PathLike,
+        ReadStream,
+        ReadVResult,
+        RmDirOptions,
+        RmOptions,
+        StatFsOptions,
+        StatOptions,
+        Stats,
+        StatsFs,
+        TimeLike,
+        WatchEventType,
+        WatchOptions,
+        WriteStream,
+        WriteVResult,
+    } from "node:fs";
+    import { Interface as ReadlineInterface } from "node:readline";
+    interface FileChangeInfo<T extends string | Buffer> {
+        eventType: WatchEventType;
+        filename: T | null;
+    }
+    interface FlagAndOpenMode {
+        mode?: Mode | undefined;
+        flag?: OpenMode | undefined;
+    }
+    interface FileReadResult<T extends NodeJS.ArrayBufferView> {
+        bytesRead: number;
+        buffer: T;
+    }
+    interface FileReadOptions<T extends NodeJS.ArrayBufferView = Buffer> {
+        /**
+         * @default `Buffer.alloc(0xffff)`
+         */
+        buffer?: T;
+        /**
+         * @default 0
+         */
+        offset?: number | null;
+        /**
+         * @default `buffer.byteLength`
+         */
+        length?: number | null;
+        position?: number | null;
+    }
+    interface CreateReadStreamOptions {
+        encoding?: BufferEncoding | null | undefined;
+        autoClose?: boolean | undefined;
+        emitClose?: boolean | undefined;
+        start?: number | undefined;
+        end?: number | undefined;
+        highWaterMark?: number | undefined;
+    }
+    interface CreateWriteStreamOptions {
+        encoding?: BufferEncoding | null | undefined;
+        autoClose?: boolean | undefined;
+        emitClose?: boolean | undefined;
+        start?: number | undefined;
+        highWaterMark?: number | undefined;
+        flush?: boolean | undefined;
+    }
+    interface ReadableWebStreamOptions {
+        /**
+         * Whether to open a normal or a `'bytes'` stream.
+         * @since v20.0.0
+         */
+        type?: "bytes" | undefined;
+    }
+    // TODO: Add `EventEmitter` close
+    interface FileHandle {
+        /**
+         * The numeric file descriptor managed by the {FileHandle} object.
+         * @since v10.0.0
+         */
+        readonly fd: number;
+        /**
+         * Alias of `filehandle.writeFile()`.
+         *
+         * When operating on file handles, the mode cannot be changed from what it was set
+         * to with `fsPromises.open()`. Therefore, this is equivalent to `filehandle.writeFile()`.
+         * @since v10.0.0
+         * @return Fulfills with `undefined` upon success.
+         */
+        appendFile(
+            data: string | Uint8Array,
+            options?:
+                | (ObjectEncodingOptions & FlagAndOpenMode & { flush?: boolean | undefined })
+                | BufferEncoding
+                | null,
+        ): Promise<void>;
+        /**
+         * Changes the ownership of the file. A wrapper for [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html).
+         * @since v10.0.0
+         * @param uid The file's new owner's user id.
+ * @param gid The file's new group's group id. + * @return Fulfills with `undefined` upon success. + */ + chown(uid: number, gid: number): Promise; + /** + * Modifies the permissions on the file. See [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html). + * @since v10.0.0 + * @param mode the file mode bit mask. + * @return Fulfills with `undefined` upon success. + */ + chmod(mode: Mode): Promise; + /** + * Unlike the 16 KiB default `highWaterMark` for a `stream.Readable`, the stream + * returned by this method has a default `highWaterMark` of 64 KiB. + * + * `options` can include `start` and `end` values to read a range of bytes from + * the file instead of the entire file. Both `start` and `end` are inclusive and + * start counting at 0, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. If `start` is + * omitted or `undefined`, `filehandle.createReadStream()` reads sequentially from + * the current file position. The `encoding` can be any one of those accepted by `Buffer`. + * + * If the `FileHandle` points to a character device that only supports blocking + * reads (such as keyboard or sound card), read operations do not finish until data + * is available. This can prevent the process from exiting and the stream from + * closing naturally. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const fd = await open('/dev/input/event0'); + * // Create a stream from some character device. + * const stream = fd.createReadStream(); + * setTimeout(() => { + * stream.close(); // This may not close the stream. + * // Artificially marking end-of-stream, as if the underlying resource had + * // indicated end-of-file by itself, allows the stream to close. + * // This does not cancel pending read operations, and if there is such an + * // operation, the process may still not be able to exit successfully + * // until it finishes. + * stream.push(null); + * stream.read(0); + * }, 100); + * ``` + * + * If `autoClose` is false, then the file descriptor won't be closed, even if + * there's an error. It is the application's responsibility to close it and make + * sure there's no file descriptor leak. If `autoClose` is set to true (default + * behavior), on `'error'` or `'end'` the file descriptor will be closed + * automatically. + * + * An example to read the last 10 bytes of a file which is 100 bytes long: + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const fd = await open('sample.txt'); + * fd.createReadStream({ start: 90, end: 99 }); + * ``` + * @since v16.11.0 + */ + createReadStream(options?: CreateReadStreamOptions): ReadStream; + /** + * `options` may also include a `start` option to allow writing data at some + * position past the beginning of the file, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. Modifying a file rather than + * replacing it may require the `flags` `open` option to be set to `r+` rather than + * the default `r`. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `autoClose` is set to true (default behavior) on `'error'` or `'finish'`the file descriptor will be closed automatically. 
If `autoClose` is false, + * then the file descriptor won't be closed, even if there's an error. + * It is the application's responsibility to close it and make sure there's no + * file descriptor leak. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * @since v16.11.0 + */ + createWriteStream(options?: CreateWriteStreamOptions): WriteStream; + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. + * + * Unlike `filehandle.sync` this method does not flush modified metadata. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + datasync(): Promise; + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + sync(): Promise; + /** + * Reads data from the file and stores that in the given buffer. + * + * If the file is not modified concurrently, the end-of-file is reached when the + * number of bytes read is zero. + * @since v10.0.0 + * @param buffer A buffer that will be filled with the file data read. + * @param offset The location in the buffer at which to start filling. + * @param length The number of bytes to read. + * @param position The location where to begin reading data from the file. If `null`, data will be read from the current file position, and the position will be updated. If `position` is an + * integer, the current file position will remain unchanged. + * @return Fulfills upon success with an object with two properties: + */ + read( + buffer: T, + offset?: number | null, + length?: number | null, + position?: number | null, + ): Promise>; + read(options?: FileReadOptions): Promise>; + /** + * Returns a `ReadableStream` that may be used to read the files data. + * + * An error will be thrown if this method is called more than once or is called + * after the `FileHandle` is closed or closing. + * + * ```js + * import { + * open, + * } from 'node:fs/promises'; + * + * const file = await open('./some/file/to/read'); + * + * for await (const chunk of file.readableWebStream()) + * console.log(chunk); + * + * await file.close(); + * ``` + * + * While the `ReadableStream` will read the file to completion, it will not + * close the `FileHandle` automatically. User code must still call the`fileHandle.close()` method. + * @since v17.0.0 + * @experimental + */ + readableWebStream(options?: ReadableWebStreamOptions): ReadableStream; + /** + * Asynchronously reads the entire contents of a file. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `FileHandle` has to support reading. + * + * If one or more `filehandle.read()` calls are made on a file handle and then a`filehandle.readFile()` call is made, the data will be read from the current + * position till the end of the file. It doesn't always read from the beginning + * of the file. + * @since v10.0.0 + * @return Fulfills upon a successful read with the contents of the file. If no encoding is specified (using `options.encoding`), the data is returned as a {Buffer} object. 
Otherwise, the + * data will be a string. + */ + readFile( + options?: { + encoding?: null | undefined; + flag?: OpenMode | undefined; + } | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically. + * The `FileHandle` must have been opened for reading. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + readFile( + options: + | { + encoding: BufferEncoding; + flag?: OpenMode | undefined; + } + | BufferEncoding, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically. + * The `FileHandle` must have been opened for reading. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + readFile( + options?: + | (ObjectEncodingOptions & { + flag?: OpenMode | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Convenience method to create a `readline` interface and stream over the file. + * See `filehandle.createReadStream()` for the options. + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const file = await open('./some/file/to/read'); + * + * for await (const line of file.readLines()) { + * console.log(line); + * } + * ``` + * @since v18.11.0 + */ + readLines(options?: CreateReadStreamOptions): ReadlineInterface; + /** + * @since v10.0.0 + * @return Fulfills with an {fs.Stats} for the file. + */ + stat( + opts?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + stat( + opts: StatOptions & { + bigint: true; + }, + ): Promise; + stat(opts?: StatOptions): Promise; + /** + * Truncates the file. + * + * If the file was larger than `len` bytes, only the first `len` bytes will be + * retained in the file. + * + * The following example retains only the first four bytes of the file: + * + * ```js + * import { open } from 'node:fs/promises'; + * + * let filehandle = null; + * try { + * filehandle = await open('temp.txt', 'r+'); + * await filehandle.truncate(4); + * } finally { + * await filehandle?.close(); + * } + * ``` + * + * If the file previously was shorter than `len` bytes, it is extended, and the + * extended part is filled with null bytes (`'\0'`): + * + * If `len` is negative then `0` will be used. + * @since v10.0.0 + * @param [len=0] + * @return Fulfills with `undefined` upon success. + */ + truncate(len?: number): Promise; + /** + * Change the file system timestamps of the object referenced by the `FileHandle` then fulfills the promise with no arguments upon success. + * @since v10.0.0 + */ + utimes(atime: TimeLike, mtime: TimeLike): Promise; + /** + * Asynchronously writes data to a file, replacing the file if it already exists.`data` can be a string, a buffer, an + * [AsyncIterable](https://tc39.github.io/ecma262/#sec-asynciterable-interface), or an + * [Iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol) object. + * The promise is fulfilled with no arguments upon success. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `FileHandle` has to support writing. + * + * It is unsafe to use `filehandle.writeFile()` multiple times on the same file + * without waiting for the promise to be fulfilled (or rejected). 
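+         *
+         * A minimal sketch (the `message.txt` path is illustrative):
+         *
+         * ```js
+         * import { open } from 'node:fs/promises';
+         *
+         * let filehandle;
+         * try {
+         *   filehandle = await open('message.txt', 'w');
+         *   await filehandle.writeFile('Hello Node.js', 'utf8');
+         * } finally {
+         *   await filehandle?.close();
+         * }
+         * ```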
+ * + * If one or more `filehandle.write()` calls are made on a file handle and then a`filehandle.writeFile()` call is made, the data will be written from the + * current position till the end of the file. It doesn't always write from the + * beginning of the file. + * @since v10.0.0 + */ + writeFile( + data: string | Uint8Array, + options?: + | (ObjectEncodingOptions & FlagAndOpenMode & Abortable & { flush?: boolean | undefined }) + | BufferEncoding + | null, + ): Promise; + /** + * Write `buffer` to the file. + * + * The promise is fulfilled with an object containing two properties: + * + * It is unsafe to use `filehandle.write()` multiple times on the same file + * without waiting for the promise to be fulfilled (or rejected). For this + * scenario, use `filehandle.createWriteStream()`. + * + * On Linux, positional writes do not work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v10.0.0 + * @param offset The start position from within `buffer` where the data to write begins. + * @param [length=buffer.byteLength - offset] The number of bytes from `buffer` to write. + * @param [position='null'] The offset from the beginning of the file where the data from `buffer` should be written. If `position` is not a `number`, the data will be written at the current + * position. See the POSIX pwrite(2) documentation for more detail. + */ + write( + buffer: TBuffer, + offset?: number | null, + length?: number | null, + position?: number | null, + ): Promise<{ + bytesWritten: number; + buffer: TBuffer; + }>; + write( + data: string, + position?: number | null, + encoding?: BufferEncoding | null, + ): Promise<{ + bytesWritten: number; + buffer: string; + }>; + /** + * Write an array of [ArrayBufferView](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView) s to the file. + * + * The promise is fulfilled with an object containing a two properties: + * + * It is unsafe to call `writev()` multiple times on the same file without waiting + * for the promise to be fulfilled (or rejected). + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v12.9.0 + * @param [position='null'] The offset from the beginning of the file where the data from `buffers` should be written. If `position` is not a `number`, the data will be written at the current + * position. + */ + writev(buffers: readonly NodeJS.ArrayBufferView[], position?: number): Promise; + /** + * Read from a file and write to an array of [ArrayBufferView](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView) s + * @since v13.13.0, v12.17.0 + * @param [position='null'] The offset from the beginning of the file where the data should be read from. If `position` is not a `number`, the data will be read from the current position. + * @return Fulfills upon success an object containing two properties: + */ + readv(buffers: readonly NodeJS.ArrayBufferView[], position?: number): Promise; + /** + * Closes the file handle after waiting for any pending operation on the handle to + * complete. + * + * ```js + * import { open } from 'node:fs/promises'; + * + * let filehandle; + * try { + * filehandle = await open('thefile.txt', 'r'); + * } finally { + * await filehandle?.close(); + * } + * ``` + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. 
+ */ + close(): Promise; + /** + * An alias for {@link FileHandle.close()}. + * @since v20.4.0 + */ + [Symbol.asyncDispose](): Promise; + } + const constants: typeof fsConstants; + /** + * Tests a user's permissions for the file or directory specified by `path`. + * The `mode` argument is an optional integer that specifies the accessibility + * checks to be performed. `mode` should be either the value `fs.constants.F_OK`or a mask consisting of the bitwise OR of any of `fs.constants.R_OK`,`fs.constants.W_OK`, and `fs.constants.X_OK` + * (e.g.`fs.constants.W_OK | fs.constants.R_OK`). Check `File access constants` for + * possible values of `mode`. + * + * If the accessibility check is successful, the promise is fulfilled with no + * value. If any of the accessibility checks fail, the promise is rejected + * with an [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error) object. The following example checks if the file`/etc/passwd` can be read and + * written by the current process. + * + * ```js + * import { access, constants } from 'node:fs/promises'; + * + * try { + * await access('/etc/passwd', constants.R_OK | constants.W_OK); + * console.log('can access'); + * } catch { + * console.error('cannot access'); + * } + * ``` + * + * Using `fsPromises.access()` to check for the accessibility of a file before + * calling `fsPromises.open()` is not recommended. Doing so introduces a race + * condition, since other processes may change the file's state between the two + * calls. Instead, user code should open/read/write the file directly and handle + * the error raised if the file is not accessible. + * @since v10.0.0 + * @param [mode=fs.constants.F_OK] + * @return Fulfills with `undefined` upon success. + */ + function access(path: PathLike, mode?: number): Promise; + /** + * Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. + * + * No guarantees are made about the atomicity of the copy operation. If an + * error occurs after the destination file has been opened for writing, an attempt + * will be made to remove the destination. + * + * ```js + * import { copyFile, constants } from 'node:fs/promises'; + * + * try { + * await copyFile('source.txt', 'destination.txt'); + * console.log('source.txt was copied to destination.txt'); + * } catch { + * console.error('The file could not be copied'); + * } + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * try { + * await copyFile('source.txt', 'destination.txt', constants.COPYFILE_EXCL); + * console.log('source.txt was copied to destination.txt'); + * } catch { + * console.error('The file could not be copied'); + * } + * ``` + * @since v10.0.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] Optional modifiers that specify the behavior of the copy operation. It is possible to create a mask consisting of the bitwise OR of two or more values (e.g. + * `fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`) + * @return Fulfills with `undefined` upon success. + */ + function copyFile(src: PathLike, dest: PathLike, mode?: number): Promise; + /** + * Opens a `FileHandle`. + * + * Refer to the POSIX [`open(2)`](http://man7.org/linux/man-pages/man2/open.2.html) documentation for more detail. + * + * Some characters (`< > : " / \ | ? 
*`) are reserved under Windows as documented + * by [Naming Files, Paths, and Namespaces](https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file). Under NTFS, if the filename contains + * a colon, Node.js will open a file system stream, as described by [this MSDN page](https://docs.microsoft.com/en-us/windows/desktop/FileIO/using-streams). + * @since v10.0.0 + * @param [flags='r'] See `support of file system `flags``. + * @param [mode=0o666] Sets the file mode (permission and sticky bits) if the file is created. + * @return Fulfills with a {FileHandle} object. + */ + function open(path: PathLike, flags?: string | number, mode?: Mode): Promise; + /** + * Renames `oldPath` to `newPath`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function rename(oldPath: PathLike, newPath: PathLike): Promise; + /** + * Truncates (shortens or extends the length) of the content at `path` to `len`bytes. + * @since v10.0.0 + * @param [len=0] + * @return Fulfills with `undefined` upon success. + */ + function truncate(path: PathLike, len?: number): Promise; + /** + * Removes the directory identified by `path`. + * + * Using `fsPromises.rmdir()` on a file (not a directory) results in the + * promise being rejected with an `ENOENT` error on Windows and an `ENOTDIR`error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use `fsPromises.rm()` with options `{ recursive: true, force: true }`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function rmdir(path: PathLike, options?: RmDirOptions): Promise; + /** + * Removes files and directories (modeled on the standard POSIX `rm` utility). + * @since v14.14.0 + * @return Fulfills with `undefined` upon success. + */ + function rm(path: PathLike, options?: RmOptions): Promise; + /** + * Asynchronously creates a directory. + * + * The optional `options` argument can be an integer specifying `mode` (permission + * and sticky bits), or an object with a `mode` property and a `recursive`property indicating whether parent directories should be created. Calling`fsPromises.mkdir()` when `path` is a directory + * that exists results in a + * rejection only when `recursive` is false. + * + * ```js + * import { mkdir } from 'node:fs/promises'; + * + * try { + * const projectFolder = new URL('./test/project/', import.meta.url); + * const createDir = await mkdir(projectFolder, { recursive: true }); + * + * console.log(`created ${createDir}`); + * } catch (err) { + * console.error(err.message); + * } + * ``` + * @since v10.0.0 + * @return Upon success, fulfills with `undefined` if `recursive` is `false`, or the first directory path created if `recursive` is `true`. + */ + function mkdir( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function mkdir( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function mkdir(path: PathLike, options?: Mode | MakeDirectoryOptions | null): Promise; + /** + * Reads the contents of a directory. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames. If the `encoding` is set to `'buffer'`, the filenames returned + * will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the returned array will contain `fs.Dirent` objects. + * + * ```js + * import { readdir } from 'node:fs/promises'; + * + * try { + * const files = await readdir(path); + * for (const file of files) + * console.log(file); + * } catch (err) { + * console.error(err); + * } + * ``` + * @since v10.0.0 + * @return Fulfills with an array of the names of the files in the directory excluding `'.'` and `'..'`. + */ + function readdir( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readdir( + path: PathLike, + options: + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | "buffer", + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readdir( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + function readdir( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + ): Promise; + /** + * Reads the contents of the symbolic link referred to by `path`. See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more detail. The promise is + * fulfilled with the`linkString` upon success. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path returned. If the `encoding` is set to `'buffer'`, the link path + * returned will be passed as a `Buffer` object. + * @since v10.0.0 + * @return Fulfills with the `linkString` upon success. + */ + function readlink(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. 
If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readlink(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readlink(path: PathLike, options?: ObjectEncodingOptions | string | null): Promise; + /** + * Creates a symbolic link. + * + * The `type` argument is only used on Windows platforms and can be one of `'dir'`,`'file'`, or `'junction'`. If the `type` argument is not a string, Node.js will + * autodetect `target` type and use `'file'` or `'dir'`. If the `target` does not + * exist, `'file'` will be used. Windows junction points require the destination + * path to be absolute. When using `'junction'`, the `target` argument will + * automatically be normalized to absolute path. Junction points on NTFS volumes + * can only point to directories. + * @since v10.0.0 + * @param [type='null'] + * @return Fulfills with `undefined` upon success. + */ + function symlink(target: PathLike, path: PathLike, type?: string | null): Promise; + /** + * Equivalent to `fsPromises.stat()` unless `path` refers to a symbolic link, + * in which case the link itself is stat-ed, not the file that it refers to. + * Refer to the POSIX [`lstat(2)`](http://man7.org/linux/man-pages/man2/lstat.2.html) document for more detail. + * @since v10.0.0 + * @return Fulfills with the {fs.Stats} object for the given symbolic link `path`. + */ + function lstat( + path: PathLike, + opts?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function lstat( + path: PathLike, + opts: StatOptions & { + bigint: true; + }, + ): Promise; + function lstat(path: PathLike, opts?: StatOptions): Promise; + /** + * @since v10.0.0 + * @return Fulfills with the {fs.Stats} object for the given `path`. + */ + function stat( + path: PathLike, + opts?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function stat( + path: PathLike, + opts: StatOptions & { + bigint: true; + }, + ): Promise; + function stat(path: PathLike, opts?: StatOptions): Promise; + /** + * @since v19.6.0, v18.15.0 + * @return Fulfills with the {fs.StatFs} object for the given `path`. + */ + function statfs( + path: PathLike, + opts?: StatFsOptions & { + bigint?: false | undefined; + }, + ): Promise; + function statfs( + path: PathLike, + opts: StatFsOptions & { + bigint: true; + }, + ): Promise; + function statfs(path: PathLike, opts?: StatFsOptions): Promise; + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function link(existingPath: PathLike, newPath: PathLike): Promise; + /** + * If `path` refers to a symbolic link, then the link is removed without affecting + * the file or directory to which that link refers. If the `path` refers to a file + * path that is not a symbolic link, the file is deleted. See the POSIX [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html) documentation for more detail. 
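+     *
+     * A small sketch pairing `symlink()` (documented above) with `unlink()`; both
+     * path names are placeholders:
+     *
+     * ```js
+     * import { symlink, unlink } from 'node:fs/promises';
+     *
+     * await symlink('target.txt', 'link-to-target.txt');
+     * // Removes the link itself, not 'target.txt'.
+     * await unlink('link-to-target.txt');
+     * ```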
+ * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function unlink(path: PathLike): Promise; + /** + * Changes the permissions of a file. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function chmod(path: PathLike, mode: Mode): Promise; + /** + * Changes the permissions on a symbolic link. + * + * This method is only implemented on macOS. + * @deprecated Since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function lchmod(path: PathLike, mode: Mode): Promise; + /** + * Changes the ownership on a symbolic link. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function lchown(path: PathLike, uid: number, gid: number): Promise; + /** + * Changes the access and modification times of a file in the same way as `fsPromises.utimes()`, with the difference that if the path refers to a + * symbolic link, then the link is not dereferenced: instead, the timestamps of + * the symbolic link itself are changed. + * @since v14.5.0, v12.19.0 + * @return Fulfills with `undefined` upon success. + */ + function lutimes(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + /** + * Changes the ownership of a file. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function chown(path: PathLike, uid: number, gid: number): Promise; + /** + * Change the file system timestamps of the object referenced by `path`. + * + * The `atime` and `mtime` arguments follow these rules: + * + * * Values can be either numbers representing Unix epoch time, `Date`s, or a + * numeric string like `'123456789.0'`. + * * If the value can not be converted to a number, or is `NaN`, `Infinity`, or`-Infinity`, an `Error` will be thrown. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function utimes(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + /** + * Determines the actual location of `path` using the same semantics as the`fs.realpath.native()` function. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path. If the `encoding` is set to `'buffer'`, the path returned will be + * passed as a `Buffer` object. + * + * On Linux, when Node.js is linked against musl libc, the procfs file system must + * be mounted on `/proc` in order for this function to work. Glibc does not have + * this restriction. + * @since v10.0.0 + * @return Fulfills with the resolved path upon success. + */ + function realpath(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function realpath(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. 
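+     *
+     * A one-line sketch; `'.'` simply resolves the current working directory to its
+     * canonical absolute path:
+     *
+     * ```js
+     * import { realpath } from 'node:fs/promises';
+     *
+     * console.log(await realpath('.'));
+     * ```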
+ */ + function realpath( + path: PathLike, + options?: ObjectEncodingOptions | BufferEncoding | null, + ): Promise; + /** + * Creates a unique temporary directory. A unique directory name is generated by + * appending six random characters to the end of the provided `prefix`. Due to + * platform inconsistencies, avoid trailing `X` characters in `prefix`. Some + * platforms, notably the BSDs, can return more than six random characters, and + * replace trailing `X` characters in `prefix` with random characters. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * + * ```js + * import { mkdtemp } from 'node:fs/promises'; + * import { join } from 'node:path'; + * import { tmpdir } from 'node:os'; + * + * try { + * await mkdtemp(join(tmpdir(), 'foo-')); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * The `fsPromises.mkdtemp()` method will append the six randomly selected + * characters directly to the `prefix` string. For instance, given a directory`/tmp`, if the intention is to create a temporary directory _within_`/tmp`, the`prefix` must end with a trailing + * platform-specific path separator + * (`require('node:path').sep`). + * @since v10.0.0 + * @return Fulfills with a string containing the file system path of the newly created temporary directory. + */ + function mkdtemp(prefix: string, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function mkdtemp(prefix: string, options: BufferEncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function mkdtemp(prefix: string, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronously writes data to a file, replacing the file if it already exists.`data` can be a string, a buffer, an + * [AsyncIterable](https://tc39.github.io/ecma262/#sec-asynciterable-interface), or an + * [Iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol) object. + * + * The `encoding` option is ignored if `data` is a buffer. + * + * If `options` is a string, then it specifies the encoding. + * + * The `mode` option only affects the newly created file. See `fs.open()` for more details. + * + * Any specified `FileHandle` has to support writing. + * + * It is unsafe to use `fsPromises.writeFile()` multiple times on the same file + * without waiting for the promise to be settled. + * + * Similarly to `fsPromises.readFile` \- `fsPromises.writeFile` is a convenience + * method that performs multiple `write` calls internally to write the buffer + * passed to it. For performance sensitive code consider using `fs.createWriteStream()` or `filehandle.createWriteStream()`. + * + * It is possible to use an `AbortSignal` to cancel an `fsPromises.writeFile()`. 
+ * Cancelation is "best effort", and some amount of data is likely still + * to be written. + * + * ```js + * import { writeFile } from 'node:fs/promises'; + * import { Buffer } from 'node:buffer'; + * + * try { + * const controller = new AbortController(); + * const { signal } = controller; + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * const promise = writeFile('message.txt', data, { signal }); + * + * // Abort the request before the promise settles. + * controller.abort(); + * + * await promise; + * } catch (err) { + * // When a request is aborted - err is an AbortError + * console.error(err); + * } + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.writeFile` performs. + * @since v10.0.0 + * @param file filename or `FileHandle` + * @return Fulfills with `undefined` upon success. + */ + function writeFile( + file: PathLike | FileHandle, + data: + | string + | NodeJS.ArrayBufferView + | Iterable + | AsyncIterable + | Stream, + options?: + | (ObjectEncodingOptions & { + mode?: Mode | undefined; + flag?: OpenMode | undefined; + } & Abortable) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `mode` option only affects the newly created file. See `fs.open()` for more details. + * + * The `path` may be specified as a `FileHandle` that has been opened + * for appending (using `fsPromises.open()`). + * @since v10.0.0 + * @param path filename or {FileHandle} + * @return Fulfills with `undefined` upon success. + */ + function appendFile( + path: PathLike | FileHandle, + data: string | Uint8Array, + options?: (ObjectEncodingOptions & FlagAndOpenMode & { flush?: boolean | undefined }) | BufferEncoding | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * + * If no encoding is specified (using `options.encoding`), the data is returned + * as a `Buffer` object. Otherwise, the data will be a string. + * + * If `options` is a string, then it specifies the encoding. + * + * When the `path` is a directory, the behavior of `fsPromises.readFile()` is + * platform-specific. On macOS, Linux, and Windows, the promise will be rejected + * with an error. On FreeBSD, a representation of the directory's contents will be + * returned. + * + * An example of reading a `package.json` file located in the same directory of the + * running code: + * + * ```js + * import { readFile } from 'node:fs/promises'; + * try { + * const filePath = new URL('./package.json', import.meta.url); + * const contents = await readFile(filePath, { encoding: 'utf8' }); + * console.log(contents); + * } catch (err) { + * console.error(err.message); + * } + * ``` + * + * It is possible to abort an ongoing `readFile` using an `AbortSignal`. If a + * request is aborted the promise returned is rejected with an `AbortError`: + * + * ```js + * import { readFile } from 'node:fs/promises'; + * + * try { + * const controller = new AbortController(); + * const { signal } = controller; + * const promise = readFile(fileName, { signal }); + * + * // Abort the request before the promise settles. 
+ * controller.abort(); + * + * await promise; + * } catch (err) { + * // When a request is aborted - err is an AbortError + * console.error(err); + * } + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.readFile` performs. + * + * Any specified `FileHandle` has to support reading. + * @since v10.0.0 + * @param path filename or `FileHandle` + * @return Fulfills with the contents of the file. + */ + function readFile( + path: PathLike | FileHandle, + options?: + | ({ + encoding?: null | undefined; + flag?: OpenMode | undefined; + } & Abortable) + | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function readFile( + path: PathLike | FileHandle, + options: + | ({ + encoding: BufferEncoding; + flag?: OpenMode | undefined; + } & Abortable) + | BufferEncoding, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function readFile( + path: PathLike | FileHandle, + options?: + | ( + & ObjectEncodingOptions + & Abortable + & { + flag?: OpenMode | undefined; + } + ) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronously open a directory for iterative scanning. See the POSIX [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html) documentation for more detail. + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * + * Example using async iteration: + * + * ```js + * import { opendir } from 'node:fs/promises'; + * + * try { + * const dir = await opendir('./'); + * for await (const dirent of dir) + * console.log(dirent.name); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * When using the async iterator, the `fs.Dir` object will be automatically + * closed after the iterator exits. + * @since v12.12.0 + * @return Fulfills with an {fs.Dir}. + */ + function opendir(path: PathLike, options?: OpenDirOptions): Promise; + /** + * Returns an async iterator that watches for changes on `filename`, where `filename`is either a file or a directory. + * + * ```js + * const { watch } = require('node:fs/promises'); + * + * const ac = new AbortController(); + * const { signal } = ac; + * setTimeout(() => ac.abort(), 10000); + * + * (async () => { + * try { + * const watcher = watch(__filename, { signal }); + * for await (const event of watcher) + * console.log(event); + * } catch (err) { + * if (err.name === 'AbortError') + * return; + * throw err; + * } + * })(); + * ``` + * + * On most platforms, `'rename'` is emitted whenever a filename appears or + * disappears in the directory. + * + * All the `caveats` for `fs.watch()` also apply to `fsPromises.watch()`. 
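+     *
+     * A second sketch using ESM imports and a directory; `'./data'` and the
+     * 5-second timeout are placeholders:
+     *
+     * ```js
+     * import { watch } from 'node:fs/promises';
+     *
+     * const ac = new AbortController();
+     * setTimeout(() => ac.abort(), 5000);
+     *
+     * try {
+     *   for await (const event of watch('./data', { signal: ac.signal })) {
+     *     console.log(event.eventType, event.filename);
+     *   }
+     * } catch (err) {
+     *   if (err.name !== 'AbortError') throw err;
+     * }
+     * ```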
+ * @since v15.9.0, v14.18.0 + * @return of objects with the properties: + */ + function watch( + filename: PathLike, + options: + | (WatchOptions & { + encoding: "buffer"; + }) + | "buffer", + ): AsyncIterable>; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + function watch(filename: PathLike, options?: WatchOptions | BufferEncoding): AsyncIterable>; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + function watch( + filename: PathLike, + options: WatchOptions | string, + ): AsyncIterable> | AsyncIterable>; + /** + * Asynchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + * @return Fulfills with `undefined` upon success. + */ + function cp(source: string | URL, destination: string | URL, opts?: CopyOptions): Promise; +} +declare module "node:fs/promises" { + export * from "fs/promises"; +} diff --git a/dist/node_modules/@types/node/globals.d.ts b/dist/node_modules/@types/node/globals.d.ts new file mode 100644 index 0000000..5f25006 --- /dev/null +++ b/dist/node_modules/@types/node/globals.d.ts @@ -0,0 +1,411 @@ +export {}; // Make this a module + +// #region Fetch and friends +// Conditional type aliases, used at the end of this file. +// Will either be empty if lib-dom is included, or the undici version otherwise. +type _Request = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").Request; +type _Response = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").Response; +type _FormData = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").FormData; +type _Headers = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").Headers; +type _RequestInit = typeof globalThis extends { onmessage: any } ? {} + : import("undici-types").RequestInit; +type _ResponseInit = typeof globalThis extends { onmessage: any } ? {} + : import("undici-types").ResponseInit; +type _File = typeof globalThis extends { onmessage: any } ? 
{} : import("node:buffer").File; +// #endregion Fetch and friends + +declare global { + // Declare "static" methods in Error + interface ErrorConstructor { + /** Create .stack property on a target object */ + captureStackTrace(targetObject: object, constructorOpt?: Function): void; + + /** + * Optional override for formatting stack traces + * + * @see https://v8.dev/docs/stack-trace-api#customizing-stack-traces + */ + prepareStackTrace?: ((err: Error, stackTraces: NodeJS.CallSite[]) => any) | undefined; + + stackTraceLimit: number; + } + + /*-----------------------------------------------* + * * + * GLOBAL * + * * + ------------------------------------------------*/ + + // For backwards compability + interface NodeRequire extends NodeJS.Require {} + interface RequireResolve extends NodeJS.RequireResolve {} + interface NodeModule extends NodeJS.Module {} + + var process: NodeJS.Process; + var console: Console; + + var __filename: string; + var __dirname: string; + + var require: NodeRequire; + var module: NodeModule; + + // Same as module.exports + var exports: any; + + /** + * Only available if `--expose-gc` is passed to the process. + */ + var gc: undefined | (() => void); + + // #region borrowed + // from https://github.com/microsoft/TypeScript/blob/38da7c600c83e7b31193a62495239a0fe478cb67/lib/lib.webworker.d.ts#L633 until moved to separate lib + /** A controller object that allows you to abort one or more DOM requests as and when desired. */ + interface AbortController { + /** + * Returns the AbortSignal object associated with this object. + */ + + readonly signal: AbortSignal; + /** + * Invoking this method will set this object's AbortSignal's aborted flag and signal to any observers that the associated activity is to be aborted. + */ + abort(reason?: any): void; + } + + /** A signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. */ + interface AbortSignal extends EventTarget { + /** + * Returns true if this AbortSignal's AbortController has signaled to abort, and false otherwise. + */ + readonly aborted: boolean; + readonly reason: any; + onabort: null | ((this: AbortSignal, event: Event) => any); + throwIfAborted(): void; + } + + var AbortController: typeof globalThis extends { onmessage: any; AbortController: infer T } ? T + : { + prototype: AbortController; + new(): AbortController; + }; + + var AbortSignal: typeof globalThis extends { onmessage: any; AbortSignal: infer T } ? T + : { + prototype: AbortSignal; + new(): AbortSignal; + abort(reason?: any): AbortSignal; + timeout(milliseconds: number): AbortSignal; + }; + // #endregion borrowed + + // #region Disposable + interface SymbolConstructor { + /** + * A method that is used to release resources held by an object. Called by the semantics of the `using` statement. + */ + readonly dispose: unique symbol; + + /** + * A method that is used to asynchronously release resources held by an object. Called by the semantics of the `await using` statement. + */ + readonly asyncDispose: unique symbol; + } + + interface Disposable { + [Symbol.dispose](): void; + } + + interface AsyncDisposable { + [Symbol.asyncDispose](): PromiseLike; + } + // #endregion Disposable + + // #region ArrayLike.at() + interface RelativeIndexable { + /** + * Takes an integer value and returns the item at that index, + * allowing for positive and negative integers. + * Negative integers count back from the last item in the array. 
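+     *
+     * A tiny sketch of negative indexing:
+     *
+     * ```js
+     * [10, 20, 30].at(-1); // 30
+     * ```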
+ */ + at(index: number): T | undefined; + } + interface String extends RelativeIndexable {} + interface Array extends RelativeIndexable {} + interface ReadonlyArray extends RelativeIndexable {} + interface Int8Array extends RelativeIndexable {} + interface Uint8Array extends RelativeIndexable {} + interface Uint8ClampedArray extends RelativeIndexable {} + interface Int16Array extends RelativeIndexable {} + interface Uint16Array extends RelativeIndexable {} + interface Int32Array extends RelativeIndexable {} + interface Uint32Array extends RelativeIndexable {} + interface Float32Array extends RelativeIndexable {} + interface Float64Array extends RelativeIndexable {} + interface BigInt64Array extends RelativeIndexable {} + interface BigUint64Array extends RelativeIndexable {} + // #endregion ArrayLike.at() end + + /** + * @since v17.0.0 + * + * Creates a deep clone of an object. + */ + function structuredClone( + value: T, + transfer?: { transfer: ReadonlyArray }, + ): T; + + /*----------------------------------------------* + * * + * GLOBAL INTERFACES * + * * + *-----------------------------------------------*/ + namespace NodeJS { + interface CallSite { + /** + * Value of "this" + */ + getThis(): unknown; + + /** + * Type of "this" as a string. + * This is the name of the function stored in the constructor field of + * "this", if available. Otherwise the object's [[Class]] internal + * property. + */ + getTypeName(): string | null; + + /** + * Current function + */ + getFunction(): Function | undefined; + + /** + * Name of the current function, typically its name property. + * If a name property is not available an attempt will be made to try + * to infer a name from the function's context. + */ + getFunctionName(): string | null; + + /** + * Name of the property [of "this" or one of its prototypes] that holds + * the current function + */ + getMethodName(): string | null; + + /** + * Name of the script [if this function was defined in a script] + */ + getFileName(): string | undefined; + + /** + * Current line number [if this function was defined in a script] + */ + getLineNumber(): number | null; + + /** + * Current column number [if this function was defined in a script] + */ + getColumnNumber(): number | null; + + /** + * A call site object representing the location where eval was called + * [if this function was created using a call to eval] + */ + getEvalOrigin(): string | undefined; + + /** + * Is this a toplevel invocation, that is, is "this" the global object? + */ + isToplevel(): boolean; + + /** + * Does this call take place in code defined by a call to eval? + */ + isEval(): boolean; + + /** + * Is this call in native V8 code? + */ + isNative(): boolean; + + /** + * Is this a constructor call? + */ + isConstructor(): boolean; + + /** + * is this an async call (i.e. await, Promise.all(), or Promise.any())? + */ + isAsync(): boolean; + + /** + * is this an async call to Promise.all()? 
+ */ + isPromiseAll(): boolean; + + /** + * returns the index of the promise element that was followed in + * Promise.all() or Promise.any() for async stack traces, or null + * if the CallSite is not an async + */ + getPromiseIndex(): number | null; + + getScriptNameOrSourceURL(): string; + getScriptHash(): string; + + getEnclosingColumnNumber(): number; + getEnclosingLineNumber(): number; + getPosition(): number; + + toString(): string; + } + + interface ErrnoException extends Error { + errno?: number | undefined; + code?: string | undefined; + path?: string | undefined; + syscall?: string | undefined; + } + + interface ReadableStream extends EventEmitter { + readable: boolean; + read(size?: number): string | Buffer; + setEncoding(encoding: BufferEncoding): this; + pause(): this; + resume(): this; + isPaused(): boolean; + pipe(destination: T, options?: { end?: boolean | undefined }): T; + unpipe(destination?: WritableStream): this; + unshift(chunk: string | Uint8Array, encoding?: BufferEncoding): void; + wrap(oldStream: ReadableStream): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + + interface WritableStream extends EventEmitter { + writable: boolean; + write(buffer: Uint8Array | string, cb?: (err?: Error | null) => void): boolean; + write(str: string, encoding?: BufferEncoding, cb?: (err?: Error | null) => void): boolean; + end(cb?: () => void): this; + end(data: string | Uint8Array, cb?: () => void): this; + end(str: string, encoding?: BufferEncoding, cb?: () => void): this; + } + + interface ReadWriteStream extends ReadableStream, WritableStream {} + + interface RefCounted { + ref(): this; + unref(): this; + } + + type TypedArray = + | Uint8Array + | Uint8ClampedArray + | Uint16Array + | Uint32Array + | Int8Array + | Int16Array + | Int32Array + | BigUint64Array + | BigInt64Array + | Float32Array + | Float64Array; + type ArrayBufferView = TypedArray | DataView; + + interface Require { + (id: string): any; + resolve: RequireResolve; + cache: Dict; + /** + * @deprecated + */ + extensions: RequireExtensions; + main: Module | undefined; + } + + interface RequireResolve { + (id: string, options?: { paths?: string[] | undefined }): string; + paths(request: string): string[] | null; + } + + interface RequireExtensions extends Dict<(m: Module, filename: string) => any> { + ".js": (m: Module, filename: string) => any; + ".json": (m: Module, filename: string) => any; + ".node": (m: Module, filename: string) => any; + } + interface Module { + /** + * `true` if the module is running during the Node.js preload + */ + isPreloading: boolean; + exports: any; + require: Require; + id: string; + filename: string; + loaded: boolean; + /** @deprecated since v14.6.0 Please use `require.main` and `module.children` instead. */ + parent: Module | null | undefined; + children: Module[]; + /** + * @since v11.14.0 + * + * The directory name of the module. This is usually the same as the path.dirname() of the module.id. + */ + path: string; + paths: string[]; + } + + interface Dict { + [key: string]: T | undefined; + } + + interface ReadOnlyDict { + readonly [key: string]: T | undefined; + } + } + + interface RequestInit extends _RequestInit {} + + function fetch( + input: string | URL | globalThis.Request, + init?: RequestInit, + ): Promise; + + interface Request extends _Request {} + var Request: typeof globalThis extends { + onmessage: any; + Request: infer T; + } ? 
T + : typeof import("undici-types").Request; + + interface ResponseInit extends _ResponseInit {} + + interface Response extends _Response {} + var Response: typeof globalThis extends { + onmessage: any; + Response: infer T; + } ? T + : typeof import("undici-types").Response; + + interface FormData extends _FormData {} + var FormData: typeof globalThis extends { + onmessage: any; + FormData: infer T; + } ? T + : typeof import("undici-types").FormData; + + interface Headers extends _Headers {} + var Headers: typeof globalThis extends { + onmessage: any; + Headers: infer T; + } ? T + : typeof import("undici-types").Headers; + + interface File extends _File {} + var File: typeof globalThis extends { + onmessage: any; + File: infer T; + } ? T + : typeof import("node:buffer").File; +} diff --git a/dist/node_modules/@types/node/globals.global.d.ts b/dist/node_modules/@types/node/globals.global.d.ts new file mode 100644 index 0000000..ef1198c --- /dev/null +++ b/dist/node_modules/@types/node/globals.global.d.ts @@ -0,0 +1 @@ +declare var global: typeof globalThis; diff --git a/dist/node_modules/@types/node/http.d.ts b/dist/node_modules/@types/node/http.d.ts new file mode 100644 index 0000000..98479fc --- /dev/null +++ b/dist/node_modules/@types/node/http.d.ts @@ -0,0 +1,1889 @@ +/** + * To use the HTTP server and client one must `require('node:http')`. + * + * The HTTP interfaces in Node.js are designed to support many features + * of the protocol which have been traditionally difficult to use. + * In particular, large, possibly chunk-encoded, messages. The interface is + * careful to never buffer entire requests or responses, so the + * user is able to stream data. + * + * HTTP message headers are represented by an object like this: + * + * ```json + * { "content-length": "123", + * "content-type": "text/plain", + * "connection": "keep-alive", + * "host": "example.com", + * "accept": "*" } + * ``` + * + * Keys are lowercased. Values are not modified. + * + * In order to support the full spectrum of possible HTTP applications, the Node.js + * HTTP API is very low-level. It deals with stream handling and message + * parsing only. It parses a message into headers and body but it does not + * parse the actual headers or the body. + * + * See `message.headers` for details on how duplicate headers are handled. + * + * The raw headers as they were received are retained in the `rawHeaders`property, which is an array of `[key, value, key2, value2, ...]`. 
For + * example, the previous message header object might have a `rawHeaders`list like the following: + * + * ```js + * [ 'ConTent-Length', '123456', + * 'content-LENGTH', '123', + * 'content-type', 'text/plain', + * 'CONNECTION', 'keep-alive', + * 'Host', 'example.com', + * 'accepT', '*' ] + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/http.js) + */ +declare module "http" { + import * as stream from "node:stream"; + import { URL } from "node:url"; + import { LookupOptions } from "node:dns"; + import { EventEmitter } from "node:events"; + import { LookupFunction, Server as NetServer, Socket, TcpSocketConnectOpts } from "node:net"; + // incoming headers will never contain number + interface IncomingHttpHeaders extends NodeJS.Dict { + accept?: string | undefined; + "accept-language"?: string | undefined; + "accept-patch"?: string | undefined; + "accept-ranges"?: string | undefined; + "access-control-allow-credentials"?: string | undefined; + "access-control-allow-headers"?: string | undefined; + "access-control-allow-methods"?: string | undefined; + "access-control-allow-origin"?: string | undefined; + "access-control-expose-headers"?: string | undefined; + "access-control-max-age"?: string | undefined; + "access-control-request-headers"?: string | undefined; + "access-control-request-method"?: string | undefined; + age?: string | undefined; + allow?: string | undefined; + "alt-svc"?: string | undefined; + authorization?: string | undefined; + "cache-control"?: string | undefined; + connection?: string | undefined; + "content-disposition"?: string | undefined; + "content-encoding"?: string | undefined; + "content-language"?: string | undefined; + "content-length"?: string | undefined; + "content-location"?: string | undefined; + "content-range"?: string | undefined; + "content-type"?: string | undefined; + cookie?: string | undefined; + date?: string | undefined; + etag?: string | undefined; + expect?: string | undefined; + expires?: string | undefined; + forwarded?: string | undefined; + from?: string | undefined; + host?: string | undefined; + "if-match"?: string | undefined; + "if-modified-since"?: string | undefined; + "if-none-match"?: string | undefined; + "if-unmodified-since"?: string | undefined; + "last-modified"?: string | undefined; + location?: string | undefined; + origin?: string | undefined; + pragma?: string | undefined; + "proxy-authenticate"?: string | undefined; + "proxy-authorization"?: string | undefined; + "public-key-pins"?: string | undefined; + range?: string | undefined; + referer?: string | undefined; + "retry-after"?: string | undefined; + "sec-websocket-accept"?: string | undefined; + "sec-websocket-extensions"?: string | undefined; + "sec-websocket-key"?: string | undefined; + "sec-websocket-protocol"?: string | undefined; + "sec-websocket-version"?: string | undefined; + "set-cookie"?: string[] | undefined; + "strict-transport-security"?: string | undefined; + tk?: string | undefined; + trailer?: string | undefined; + "transfer-encoding"?: string | undefined; + upgrade?: string | undefined; + "user-agent"?: string | undefined; + vary?: string | undefined; + via?: string | undefined; + warning?: string | undefined; + "www-authenticate"?: string | undefined; + } + // outgoing headers allows numbers (as they are converted internally to strings) + type OutgoingHttpHeader = number | string | string[]; + interface OutgoingHttpHeaders extends NodeJS.Dict { + accept?: string | string[] | undefined; + "accept-charset"?: string | string[] | 
undefined; + "accept-encoding"?: string | string[] | undefined; + "accept-language"?: string | string[] | undefined; + "accept-ranges"?: string | undefined; + "access-control-allow-credentials"?: string | undefined; + "access-control-allow-headers"?: string | undefined; + "access-control-allow-methods"?: string | undefined; + "access-control-allow-origin"?: string | undefined; + "access-control-expose-headers"?: string | undefined; + "access-control-max-age"?: string | undefined; + "access-control-request-headers"?: string | undefined; + "access-control-request-method"?: string | undefined; + age?: string | undefined; + allow?: string | undefined; + authorization?: string | undefined; + "cache-control"?: string | undefined; + "cdn-cache-control"?: string | undefined; + connection?: string | string[] | undefined; + "content-disposition"?: string | undefined; + "content-encoding"?: string | undefined; + "content-language"?: string | undefined; + "content-length"?: string | number | undefined; + "content-location"?: string | undefined; + "content-range"?: string | undefined; + "content-security-policy"?: string | undefined; + "content-security-policy-report-only"?: string | undefined; + cookie?: string | string[] | undefined; + dav?: string | string[] | undefined; + dnt?: string | undefined; + date?: string | undefined; + etag?: string | undefined; + expect?: string | undefined; + expires?: string | undefined; + forwarded?: string | undefined; + from?: string | undefined; + host?: string | undefined; + "if-match"?: string | undefined; + "if-modified-since"?: string | undefined; + "if-none-match"?: string | undefined; + "if-range"?: string | undefined; + "if-unmodified-since"?: string | undefined; + "last-modified"?: string | undefined; + link?: string | string[] | undefined; + location?: string | undefined; + "max-forwards"?: string | undefined; + origin?: string | undefined; + prgama?: string | string[] | undefined; + "proxy-authenticate"?: string | string[] | undefined; + "proxy-authorization"?: string | undefined; + "public-key-pins"?: string | undefined; + "public-key-pins-report-only"?: string | undefined; + range?: string | undefined; + referer?: string | undefined; + "referrer-policy"?: string | undefined; + refresh?: string | undefined; + "retry-after"?: string | undefined; + "sec-websocket-accept"?: string | undefined; + "sec-websocket-extensions"?: string | string[] | undefined; + "sec-websocket-key"?: string | undefined; + "sec-websocket-protocol"?: string | string[] | undefined; + "sec-websocket-version"?: string | undefined; + server?: string | undefined; + "set-cookie"?: string | string[] | undefined; + "strict-transport-security"?: string | undefined; + te?: string | undefined; + trailer?: string | undefined; + "transfer-encoding"?: string | undefined; + "user-agent"?: string | undefined; + upgrade?: string | undefined; + "upgrade-insecure-requests"?: string | undefined; + vary?: string | undefined; + via?: string | string[] | undefined; + warning?: string | undefined; + "www-authenticate"?: string | string[] | undefined; + "x-content-type-options"?: string | undefined; + "x-dns-prefetch-control"?: string | undefined; + "x-frame-options"?: string | undefined; + "x-xss-protection"?: string | undefined; + } + interface ClientRequestArgs { + _defaultAgent?: Agent | undefined; + agent?: Agent | boolean | undefined; + auth?: string | null | undefined; + createConnection?: + | (( + options: ClientRequestArgs, + oncreate: (err: Error | null, socket: stream.Duplex) => void, + ) => 
stream.Duplex | null | undefined) + | undefined; + defaultPort?: number | string | undefined; + family?: number | undefined; + headers?: OutgoingHttpHeaders | undefined; + hints?: LookupOptions["hints"]; + host?: string | null | undefined; + hostname?: string | null | undefined; + insecureHTTPParser?: boolean | undefined; + localAddress?: string | undefined; + localPort?: number | undefined; + lookup?: LookupFunction | undefined; + /** + * @default 16384 + */ + maxHeaderSize?: number | undefined; + method?: string | undefined; + path?: string | null | undefined; + port?: number | string | null | undefined; + protocol?: string | null | undefined; + setHost?: boolean | undefined; + signal?: AbortSignal | undefined; + socketPath?: string | undefined; + timeout?: number | undefined; + uniqueHeaders?: Array | undefined; + joinDuplicateHeaders?: boolean; + } + interface ServerOptions< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > { + /** + * Specifies the `IncomingMessage` class to be used. Useful for extending the original `IncomingMessage`. + */ + IncomingMessage?: Request | undefined; + /** + * Specifies the `ServerResponse` class to be used. Useful for extending the original `ServerResponse`. + */ + ServerResponse?: Response | undefined; + /** + * Sets the timeout value in milliseconds for receiving the entire request from the client. + * @see Server.requestTimeout for more information. + * @default 300000 + * @since v18.0.0 + */ + requestTimeout?: number | undefined; + /** + * It joins the field line values of multiple headers in a request with `, ` instead of discarding the duplicates. + * @default false + * @since v18.14.0 + */ + joinDuplicateHeaders?: boolean; + /** + * The number of milliseconds of inactivity a server needs to wait for additional incoming data, + * after it has finished writing the last response, before a socket will be destroyed. + * @see Server.keepAliveTimeout for more information. + * @default 5000 + * @since v18.0.0 + */ + keepAliveTimeout?: number | undefined; + /** + * Sets the interval value in milliseconds to check for request and headers timeout in incomplete requests. + * @default 30000 + */ + connectionsCheckingInterval?: number | undefined; + /** + * Optionally overrides all `socket`s' `readableHighWaterMark` and `writableHighWaterMark`. + * This affects `highWaterMark` property of both `IncomingMessage` and `ServerResponse`. + * Default: @see stream.getDefaultHighWaterMark(). + * @since v20.1.0 + */ + highWaterMark?: number | undefined; + /** + * Use an insecure HTTP parser that accepts invalid HTTP headers when `true`. + * Using the insecure parser should be avoided. + * See --insecure-http-parser for more information. + * @default false + */ + insecureHTTPParser?: boolean | undefined; + /** + * Optionally overrides the value of + * `--max-http-header-size` for requests received by this server, i.e. + * the maximum length of request headers in bytes. + * @default 16384 + * @since v13.3.0 + */ + maxHeaderSize?: number | undefined; + /** + * If set to `true`, it disables the use of Nagle's algorithm immediately after a new incoming connection is received. + * @default true + * @since v16.5.0 + */ + noDelay?: boolean | undefined; + /** + * If set to `true`, it enables keep-alive functionality on the socket immediately after a new incoming connection is received, + * similarly on what is done in `socket.setKeepAlive([enable][, initialDelay])`. 
+ * @default false + * @since v16.5.0 + */ + keepAlive?: boolean | undefined; + /** + * If set to a positive number, it sets the initial delay before the first keepalive probe is sent on an idle socket. + * @default 0 + * @since v16.5.0 + */ + keepAliveInitialDelay?: number | undefined; + /** + * A list of response headers that should be sent only once. + * If the header's value is an array, the items will be joined using `; `. + */ + uniqueHeaders?: Array | undefined; + } + type RequestListener< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > = (req: InstanceType, res: InstanceType & { req: InstanceType }) => void; + /** + * @since v0.1.17 + */ + class Server< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > extends NetServer { + constructor(requestListener?: RequestListener); + constructor(options: ServerOptions, requestListener?: RequestListener); + /** + * Sets the timeout value for sockets, and emits a `'timeout'` event on + * the Server object, passing the socket as an argument, if a timeout + * occurs. + * + * If there is a `'timeout'` event listener on the Server object, then it + * will be called with the timed-out socket as an argument. + * + * By default, the Server does not timeout sockets. However, if a callback + * is assigned to the Server's `'timeout'` event, timeouts must be handled + * explicitly. + * @since v0.9.12 + * @param [msecs=0 (no timeout)] + */ + setTimeout(msecs?: number, callback?: () => void): this; + setTimeout(callback: () => void): this; + /** + * Limits maximum incoming headers count. If set to 0, no limit will be applied. + * @since v0.7.0 + */ + maxHeadersCount: number | null; + /** + * The maximum number of requests socket can handle + * before closing keep alive connection. + * + * A value of `0` will disable the limit. + * + * When the limit is reached it will set the `Connection` header value to `close`, + * but will not actually close the connection, subsequent requests sent + * after the limit is reached will get `503 Service Unavailable` as a response. + * @since v16.10.0 + */ + maxRequestsPerSocket: number | null; + /** + * The number of milliseconds of inactivity before a socket is presumed + * to have timed out. + * + * A value of `0` will disable the timeout behavior on incoming connections. + * + * The socket timeout logic is set up on connection, so changing this + * value only affects new connections to the server, not any existing connections. + * @since v0.9.12 + */ + timeout: number; + /** + * Limit the amount of time the parser will wait to receive the complete HTTP + * headers. + * + * If the timeout expires, the server responds with status 408 without + * forwarding the request to the request listener and then closes the connection. + * + * It must be set to a non-zero value (e.g. 120 seconds) to protect against + * potential Denial-of-Service attacks in case the server is deployed without a + * reverse proxy in front. + * @since v11.3.0, v10.14.0 + */ + headersTimeout: number; + /** + * The number of milliseconds of inactivity a server needs to wait for additional + * incoming data, after it has finished writing the last response, before a socket + * will be destroyed. If the server receives new data before the keep-alive + * timeout has fired, it will reset the regular inactivity timeout, i.e.,`server.timeout`. 
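+         *
+         * A configuration sketch; the millisecond values are placeholders, not
+         * recommended defaults:
+         *
+         * ```js
+         * import { createServer } from 'node:http';
+         *
+         * const server = createServer((req, res) => res.end('ok'));
+         * server.keepAliveTimeout = 65000;
+         * // headersTimeout (documented above) is often set slightly larger.
+         * server.headersTimeout = 66000;
+         * server.listen(0);
+         * ```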
+ * + * A value of `0` will disable the keep-alive timeout behavior on incoming + * connections. + * A value of `0` makes the http server behave similarly to Node.js versions prior + * to 8.0.0, which did not have a keep-alive timeout. + * + * The socket timeout logic is set up on connection, so changing this value only + * affects new connections to the server, not any existing connections. + * @since v8.0.0 + */ + keepAliveTimeout: number; + /** + * Sets the timeout value in milliseconds for receiving the entire request from + * the client. + * + * If the timeout expires, the server responds with status 408 without + * forwarding the request to the request listener and then closes the connection. + * + * It must be set to a non-zero value (e.g. 120 seconds) to protect against + * potential Denial-of-Service attacks in case the server is deployed without a + * reverse proxy in front. + * @since v14.11.0 + */ + requestTimeout: number; + /** + * Closes all connections connected to this server. + * @since v18.2.0 + */ + closeAllConnections(): void; + /** + * Closes all connections connected to this server which are not sending a request + * or waiting for a response. + * @since v18.2.0 + */ + closeIdleConnections(): void; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connection", listener: (socket: Socket) => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "checkContinue", listener: RequestListener): this; + addListener(event: "checkExpectation", listener: RequestListener): this; + addListener(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + addListener( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + addListener(event: "dropRequest", listener: (req: InstanceType, socket: stream.Duplex) => void): this; + addListener(event: "request", listener: RequestListener): this; + addListener( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + emit(event: string, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "connection", socket: Socket): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit( + event: "checkContinue", + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit( + event: "checkExpectation", + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: "clientError", err: Error, socket: stream.Duplex): boolean; + emit(event: "connect", req: InstanceType, socket: stream.Duplex, head: Buffer): boolean; + emit(event: "dropRequest", req: InstanceType, socket: stream.Duplex): boolean; + emit( + event: "request", + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: "upgrade", req: InstanceType, socket: stream.Duplex, head: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connection", listener: (socket: Socket) => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "checkContinue", listener: RequestListener): this; + on(event: "checkExpectation", listener: 
RequestListener): this; + on(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + on(event: "connect", listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void): this; + on(event: "dropRequest", listener: (req: InstanceType, socket: stream.Duplex) => void): this; + on(event: "request", listener: RequestListener): this; + on(event: "upgrade", listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connection", listener: (socket: Socket) => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "checkContinue", listener: RequestListener): this; + once(event: "checkExpectation", listener: RequestListener): this; + once(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + once( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + once(event: "dropRequest", listener: (req: InstanceType, socket: stream.Duplex) => void): this; + once(event: "request", listener: RequestListener): this; + once( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connection", listener: (socket: Socket) => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "checkContinue", listener: RequestListener): this; + prependListener(event: "checkExpectation", listener: RequestListener): this; + prependListener(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + prependListener( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependListener( + event: "dropRequest", + listener: (req: InstanceType, socket: stream.Duplex) => void, + ): this; + prependListener(event: "request", listener: RequestListener): this; + prependListener( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "connection", listener: (socket: Socket) => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "checkContinue", listener: RequestListener): this; + prependOnceListener(event: "checkExpectation", listener: RequestListener): this; + prependOnceListener(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + prependOnceListener( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependOnceListener( + event: "dropRequest", + listener: (req: InstanceType, socket: stream.Duplex) => void, + ): this; + prependOnceListener(event: "request", listener: RequestListener): this; + prependOnceListener( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => 
void, + ): this; + } + /** + * This class serves as the parent class of {@link ClientRequest} and {@link ServerResponse}. It is an abstract outgoing message from + * the perspective of the participants of an HTTP transaction. + * @since v0.1.17 + */ + class OutgoingMessage extends stream.Writable { + readonly req: Request; + chunkedEncoding: boolean; + shouldKeepAlive: boolean; + useChunkedEncodingByDefault: boolean; + sendDate: boolean; + /** + * @deprecated Use `writableEnded` instead. + */ + finished: boolean; + /** + * Read-only. `true` if the headers were sent, otherwise `false`. + * @since v0.9.3 + */ + readonly headersSent: boolean; + /** + * Alias of `outgoingMessage.socket`. + * @since v0.3.0 + * @deprecated Since v15.12.0,v14.17.1 - Use `socket` instead. + */ + readonly connection: Socket | null; + /** + * Reference to the underlying socket. Usually, users will not want to access + * this property. + * + * After calling `outgoingMessage.end()`, this property will be nulled. + * @since v0.3.0 + */ + readonly socket: Socket | null; + constructor(); + /** + * Once a socket is associated with the message and is connected,`socket.setTimeout()` will be called with `msecs` as the first parameter. + * @since v0.9.12 + * @param callback Optional function to be called when a timeout occurs. Same as binding to the `timeout` event. + */ + setTimeout(msecs: number, callback?: () => void): this; + /** + * Sets a single header value. If the header already exists in the to-be-sent + * headers, its value will be replaced. Use an array of strings to send multiple + * headers with the same name. + * @since v0.4.0 + * @param name Header name + * @param value Header value + */ + setHeader(name: string, value: number | string | readonly string[]): this; + /** + * Append a single header value for the header object. + * + * If the value is an array, this is equivalent of calling this method multiple + * times. + * + * If there were no previous value for the header, this is equivalent of calling `outgoingMessage.setHeader(name, value)`. + * + * Depending of the value of `options.uniqueHeaders` when the client request or the + * server were created, this will end up in the header being sent multiple times or + * a single time with values joined using `; `. + * @since v18.3.0, v16.17.0 + * @param name Header name + * @param value Header value + */ + appendHeader(name: string, value: string | readonly string[]): this; + /** + * Gets the value of the HTTP header with the given name. If that header is not + * set, the returned value will be `undefined`. + * @since v0.4.0 + * @param name Name of header + */ + getHeader(name: string): number | string | string[] | undefined; + /** + * Returns a shallow copy of the current outgoing headers. Since a shallow + * copy is used, array values may be mutated without additional calls to + * various header-related HTTP module methods. The keys of the returned + * object are the header names and the values are the respective header + * values. All header names are lowercase. + * + * The object returned by the `outgoingMessage.getHeaders()` method does + * not prototypically inherit from the JavaScript `Object`. This means that + * typical `Object` methods such as `obj.toString()`, `obj.hasOwnProperty()`, + * and others are not defined and will not work. 
+ * + * ```js + * outgoingMessage.setHeader('Foo', 'bar'); + * outgoingMessage.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headers = outgoingMessage.getHeaders(); + * // headers === { foo: 'bar', 'set-cookie': ['foo=bar', 'bar=baz'] } + * ``` + * @since v7.7.0 + */ + getHeaders(): OutgoingHttpHeaders; + /** + * Returns an array containing the unique names of the current outgoing headers. + * All names are lowercase. + * @since v7.7.0 + */ + getHeaderNames(): string[]; + /** + * Returns `true` if the header identified by `name` is currently set in the + * outgoing headers. The header name is case-insensitive. + * + * ```js + * const hasContentType = outgoingMessage.hasHeader('content-type'); + * ``` + * @since v7.7.0 + */ + hasHeader(name: string): boolean; + /** + * Removes a header that is queued for implicit sending. + * + * ```js + * outgoingMessage.removeHeader('Content-Encoding'); + * ``` + * @since v0.4.0 + * @param name Header name + */ + removeHeader(name: string): void; + /** + * Adds HTTP trailers (headers but at the end of the message) to the message. + * + * Trailers will **only** be emitted if the message is chunked encoded. If not, + * the trailers will be silently discarded. + * + * HTTP requires the `Trailer` header to be sent to emit trailers, + * with a list of header field names in its value, e.g. + * + * ```js + * message.writeHead(200, { 'Content-Type': 'text/plain', + * 'Trailer': 'Content-MD5' }); + * message.write(fileData); + * message.addTrailers({ 'Content-MD5': '7895bf4b8828b55ceaf47747b4bca667' }); + * message.end(); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v0.3.0 + */ + addTrailers(headers: OutgoingHttpHeaders | ReadonlyArray<[string, string]>): void; + /** + * Flushes the message headers. + * + * For efficiency reason, Node.js normally buffers the message headers + * until `outgoingMessage.end()` is called or the first chunk of message data + * is written. It then tries to pack the headers and data into a single TCP + * packet. + * + * It is usually desired (it saves a TCP round-trip), but not when the first + * data is not sent until possibly much later. `outgoingMessage.flushHeaders()`bypasses the optimization and kickstarts the message. + * @since v1.6.0 + */ + flushHeaders(): void; + } + /** + * This object is created internally by an HTTP server, not by the user. It is + * passed as the second parameter to the `'request'` event. + * @since v0.1.17 + */ + class ServerResponse extends OutgoingMessage { + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status code that will be sent to the client when + * the headers get flushed. + * + * ```js + * response.statusCode = 404; + * ``` + * + * After response header was sent to the client, this property indicates the + * status code which was sent out. + * @since v0.4.0 + */ + statusCode: number; + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status message that will be sent to the client when + * the headers get flushed. If this is left as `undefined` then the standard + * message for the status code will be used. + * + * ```js + * response.statusMessage = 'Not found'; + * ``` + * + * After response header was sent to the client, this property indicates the + * status message which was sent out. 
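+ *
+ * A minimal sketch of setting the status line through these two properties,
+ * assuming port 8000 is free:
+ *
+ * ```js
+ * import http from 'node:http';
+ *
+ * const server = http.createServer((req, res) => {
+ *   // Implicit headers: set the status line before the first write flushes it.
+ *   res.statusCode = 404;
+ *   res.statusMessage = 'Not Found';
+ *   res.end();
+ * });
+ * server.listen(8000);
+ * ```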
+ * @since v0.11.8 + */ + statusMessage: string; + /** + * If set to `true`, Node.js will check whether the `Content-Length`header value and the size of the body, in bytes, are equal. + * Mismatching the `Content-Length` header value will result + * in an `Error` being thrown, identified by `code:``'ERR_HTTP_CONTENT_LENGTH_MISMATCH'`. + * @since v18.10.0, v16.18.0 + */ + strictContentLength: boolean; + constructor(req: Request); + assignSocket(socket: Socket): void; + detachSocket(socket: Socket): void; + /** + * Sends an HTTP/1.1 100 Continue message to the client, indicating that + * the request body should be sent. See the `'checkContinue'` event on`Server`. + * @since v0.3.0 + */ + writeContinue(callback?: () => void): void; + /** + * Sends an HTTP/1.1 103 Early Hints message to the client with a Link header, + * indicating that the user agent can preload/preconnect the linked resources. + * The `hints` is an object containing the values of headers to be sent with + * early hints message. The optional `callback` argument will be called when + * the response message has been written. + * + * **Example** + * + * ```js + * const earlyHintsLink = '; rel=preload; as=style'; + * response.writeEarlyHints({ + * 'link': earlyHintsLink, + * }); + * + * const earlyHintsLinks = [ + * '; rel=preload; as=style', + * '; rel=preload; as=script', + * ]; + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * 'x-trace-id': 'id for diagnostics', + * }); + * + * const earlyHintsCallback = () => console.log('early hints message sent'); + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * }, earlyHintsCallback); + * ``` + * @since v18.11.0 + * @param hints An object containing the values of headers + * @param callback Will be called when the response message has been written + */ + writeEarlyHints(hints: Record, callback?: () => void): void; + /** + * Sends a response header to the request. The status code is a 3-digit HTTP + * status code, like `404`. The last argument, `headers`, are the response headers. + * Optionally one can give a human-readable `statusMessage` as the second + * argument. + * + * `headers` may be an `Array` where the keys and values are in the same list. + * It is _not_ a list of tuples. So, the even-numbered offsets are key values, + * and the odd-numbered offsets are the associated values. The array is in the same + * format as `request.rawHeaders`. + * + * Returns a reference to the `ServerResponse`, so that calls can be chained. + * + * ```js + * const body = 'hello world'; + * response + * .writeHead(200, { + * 'Content-Length': Buffer.byteLength(body), + * 'Content-Type': 'text/plain', + * }) + * .end(body); + * ``` + * + * This method must only be called once on a message and it must + * be called before `response.end()` is called. + * + * If `response.write()` or `response.end()` are called before calling + * this, the implicit/mutable headers will be calculated and call this function. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * If this method is called and `response.setHeader()` has not been called, + * it will directly write the supplied header values onto the network channel + * without caching internally, and the `response.getHeader()` on the header + * will not yield the expected result. 
If progressive population of headers is + * desired with potential future retrieval and modification, use `response.setHeader()` instead. + * + * ```js + * // Returns content-type = text/plain + * const server = http.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain' }); + * res.end('ok'); + * }); + * ``` + * + * `Content-Length` is read in bytes, not characters. Use `Buffer.byteLength()` to determine the length of the body in bytes. Node.js + * will check whether `Content-Length` and the length of the body which has + * been transmitted are equal or not. + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a \[`Error`\]\[\] being thrown. + * @since v0.1.30 + */ + writeHead( + statusCode: number, + statusMessage?: string, + headers?: OutgoingHttpHeaders | OutgoingHttpHeader[], + ): this; + writeHead(statusCode: number, headers?: OutgoingHttpHeaders | OutgoingHttpHeader[]): this; + /** + * Sends a HTTP/1.1 102 Processing message to the client, indicating that + * the request body should be sent. + * @since v10.0.0 + */ + writeProcessing(): void; + } + interface InformationEvent { + statusCode: number; + statusMessage: string; + httpVersion: string; + httpVersionMajor: number; + httpVersionMinor: number; + headers: IncomingHttpHeaders; + rawHeaders: string[]; + } + /** + * This object is created internally and returned from {@link request}. It + * represents an _in-progress_ request whose header has already been queued. The + * header is still mutable using the `setHeader(name, value)`,`getHeader(name)`, `removeHeader(name)` API. The actual header will + * be sent along with the first data chunk or when calling `request.end()`. + * + * To get the response, add a listener for `'response'` to the request object.`'response'` will be emitted from the request object when the response + * headers have been received. The `'response'` event is executed with one + * argument which is an instance of {@link IncomingMessage}. + * + * During the `'response'` event, one can add listeners to the + * response object; particularly to listen for the `'data'` event. + * + * If no `'response'` handler is added, then the response will be + * entirely discarded. However, if a `'response'` event handler is added, + * then the data from the response object **must** be consumed, either by + * calling `response.read()` whenever there is a `'readable'` event, or + * by adding a `'data'` handler, or by calling the `.resume()` method. + * Until the data is consumed, the `'end'` event will not fire. Also, until + * the data is read it will consume memory that can eventually lead to a + * 'process out of memory' error. + * + * For backward compatibility, `res` will only emit `'error'` if there is an`'error'` listener registered. + * + * Set `Content-Length` header to limit the response body size. + * If `response.strictContentLength` is set to `true`, mismatching the`Content-Length` header value will result in an `Error` being thrown, + * identified by `code:``'ERR_HTTP_CONTENT_LENGTH_MISMATCH'`. + * + * `Content-Length` value should be in bytes, not characters. Use `Buffer.byteLength()` to determine the length of the body in bytes. + * @since v0.1.17 + */ + class ClientRequest extends OutgoingMessage { + /** + * The `request.aborted` property will be `true` if the request has + * been aborted. 
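+ *
+ * As described for {@link ClientRequest} above, response data must be
+ * consumed; a minimal sketch, assuming a server is listening on
+ * `localhost:8000`:
+ *
+ * ```js
+ * import http from 'node:http';
+ *
+ * const req = http.request('http://localhost:8000/', (res) => {
+ *   // Drain the body so 'end' can fire and memory is released.
+ *   res.resume();
+ *   res.on('end', () => console.log('response fully consumed'));
+ * });
+ * req.end();
+ * ```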
+ * @since v0.11.14 + * @deprecated Since v17.0.0,v16.12.0 - Check `destroyed` instead. + */ + aborted: boolean; + /** + * The request host. + * @since v14.5.0, v12.19.0 + */ + host: string; + /** + * The request protocol. + * @since v14.5.0, v12.19.0 + */ + protocol: string; + /** + * When sending request through a keep-alive enabled agent, the underlying socket + * might be reused. But if server closes connection at unfortunate time, client + * may run into a 'ECONNRESET' error. + * + * ```js + * import http from 'node:http'; + * + * // Server has a 5 seconds keep-alive timeout by default + * http + * .createServer((req, res) => { + * res.write('hello\n'); + * res.end(); + * }) + * .listen(3000); + * + * setInterval(() => { + * // Adapting a keep-alive agent + * http.get('http://localhost:3000', { agent }, (res) => { + * res.on('data', (data) => { + * // Do nothing + * }); + * }); + * }, 5000); // Sending request on 5s interval so it's easy to hit idle timeout + * ``` + * + * By marking a request whether it reused socket or not, we can do + * automatic error retry base on it. + * + * ```js + * import http from 'node:http'; + * const agent = new http.Agent({ keepAlive: true }); + * + * function retriableRequest() { + * const req = http + * .get('http://localhost:3000', { agent }, (res) => { + * // ... + * }) + * .on('error', (err) => { + * // Check if retry is needed + * if (req.reusedSocket && err.code === 'ECONNRESET') { + * retriableRequest(); + * } + * }); + * } + * + * retriableRequest(); + * ``` + * @since v13.0.0, v12.16.0 + */ + reusedSocket: boolean; + /** + * Limits maximum response headers count. If set to 0, no limit will be applied. + */ + maxHeadersCount: number; + constructor(url: string | URL | ClientRequestArgs, cb?: (res: IncomingMessage) => void); + /** + * The request method. + * @since v0.1.97 + */ + method: string; + /** + * The request path. + * @since v0.4.0 + */ + path: string; + /** + * Marks the request as aborting. Calling this will cause remaining data + * in the response to be dropped and the socket to be destroyed. + * @since v0.3.8 + * @deprecated Since v14.1.0,v13.14.0 - Use `destroy` instead. + */ + abort(): void; + onSocket(socket: Socket): void; + /** + * Once a socket is assigned to this request and is connected `socket.setTimeout()` will be called. + * @since v0.5.9 + * @param timeout Milliseconds before a request times out. + * @param callback Optional function to be called when a timeout occurs. Same as binding to the `'timeout'` event. + */ + setTimeout(timeout: number, callback?: () => void): this; + /** + * Once a socket is assigned to this request and is connected `socket.setNoDelay()` will be called. + * @since v0.5.9 + */ + setNoDelay(noDelay?: boolean): void; + /** + * Once a socket is assigned to this request and is connected `socket.setKeepAlive()` will be called. + * @since v0.5.9 + */ + setSocketKeepAlive(enable?: boolean, initialDelay?: number): void; + /** + * Returns an array containing the unique names of the current outgoing raw + * headers. Header names are returned with their exact casing being set. 
+ * + * ```js + * request.setHeader('Foo', 'bar'); + * request.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headerNames = request.getRawHeaderNames(); + * // headerNames === ['Foo', 'Set-Cookie'] + * ``` + * @since v15.13.0, v14.17.0 + */ + getRawHeaderNames(): string[]; + /** + * @deprecated + */ + addListener(event: "abort", listener: () => void): this; + addListener( + event: "connect", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + addListener(event: "continue", listener: () => void): this; + addListener(event: "information", listener: (info: InformationEvent) => void): this; + addListener(event: "response", listener: (response: IncomingMessage) => void): this; + addListener(event: "socket", listener: (socket: Socket) => void): this; + addListener(event: "timeout", listener: () => void): this; + addListener( + event: "upgrade", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + on(event: "abort", listener: () => void): this; + on(event: "connect", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + on(event: "continue", listener: () => void): this; + on(event: "information", listener: (info: InformationEvent) => void): this; + on(event: "response", listener: (response: IncomingMessage) => void): this; + on(event: "socket", listener: (socket: Socket) => void): this; + on(event: "timeout", listener: () => void): this; + on(event: "upgrade", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + once(event: "abort", listener: () => void): this; + once(event: "connect", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + once(event: "continue", listener: () => void): this; + once(event: "information", listener: (info: InformationEvent) => void): this; + once(event: "response", listener: (response: IncomingMessage) => void): this; + once(event: "socket", listener: (socket: Socket) => void): this; + once(event: "timeout", listener: () => void): this; + once(event: "upgrade", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string | symbol, 
listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + prependListener(event: "abort", listener: () => void): this; + prependListener( + event: "connect", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependListener(event: "continue", listener: () => void): this; + prependListener(event: "information", listener: (info: InformationEvent) => void): this; + prependListener(event: "response", listener: (response: IncomingMessage) => void): this; + prependListener(event: "socket", listener: (socket: Socket) => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener( + event: "upgrade", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + prependOnceListener(event: "abort", listener: () => void): this; + prependOnceListener( + event: "connect", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependOnceListener(event: "continue", listener: () => void): this; + prependOnceListener(event: "information", listener: (info: InformationEvent) => void): this; + prependOnceListener(event: "response", listener: (response: IncomingMessage) => void): this; + prependOnceListener(event: "socket", listener: (socket: Socket) => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener( + event: "upgrade", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * An `IncomingMessage` object is created by {@link Server} or {@link ClientRequest} and passed as the first argument to the `'request'` and `'response'` event respectively. It may be used to + * access response + * status, headers, and data. + * + * Different from its `socket` value which is a subclass of `stream.Duplex`, the`IncomingMessage` itself extends `stream.Readable` and is created separately to + * parse and emit the incoming HTTP headers and payload, as the underlying socket + * may be reused multiple times in case of keep-alive. + * @since v0.1.17 + */ + class IncomingMessage extends stream.Readable { + constructor(socket: Socket); + /** + * The `message.aborted` property will be `true` if the request has + * been aborted. + * @since v10.1.0 + * @deprecated Since v17.0.0,v16.12.0 - Check `message.destroyed` from stream.Readable. 
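+ *
+ * A minimal sketch of reading an incoming request inside a `'request'`
+ * listener, assuming port 8000 is free:
+ *
+ * ```js
+ * import http from 'node:http';
+ *
+ * const server = http.createServer((req, res) => {
+ *   // `req` is an IncomingMessage; header names arrive lower-cased.
+ *   console.log(req.method, req.url, req.headers['user-agent']);
+ *   req.resume();                       // drain any request body
+ *   req.on('end', () => res.end('ok')); // reply once the message is complete
+ * });
+ * server.listen(8000);
+ * ```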
+ */ + aborted: boolean; + /** + * In case of server request, the HTTP version sent by the client. In the case of + * client response, the HTTP version of the connected-to server. + * Probably either `'1.1'` or `'1.0'`. + * + * Also `message.httpVersionMajor` is the first integer and`message.httpVersionMinor` is the second. + * @since v0.1.1 + */ + httpVersion: string; + httpVersionMajor: number; + httpVersionMinor: number; + /** + * The `message.complete` property will be `true` if a complete HTTP message has + * been received and successfully parsed. + * + * This property is particularly useful as a means of determining if a client or + * server fully transmitted a message before a connection was terminated: + * + * ```js + * const req = http.request({ + * host: '127.0.0.1', + * port: 8080, + * method: 'POST', + * }, (res) => { + * res.resume(); + * res.on('end', () => { + * if (!res.complete) + * console.error( + * 'The connection was terminated while the message was still being sent'); + * }); + * }); + * ``` + * @since v0.3.0 + */ + complete: boolean; + /** + * Alias for `message.socket`. + * @since v0.1.90 + * @deprecated Since v16.0.0 - Use `socket`. + */ + connection: Socket; + /** + * The `net.Socket` object associated with the connection. + * + * With HTTPS support, use `request.socket.getPeerCertificate()` to obtain the + * client's authentication details. + * + * This property is guaranteed to be an instance of the `net.Socket` class, + * a subclass of `stream.Duplex`, unless the user specified a socket + * type other than `net.Socket` or internally nulled. + * @since v0.3.0 + */ + socket: Socket; + /** + * The request/response headers object. + * + * Key-value pairs of header names and values. Header names are lower-cased. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': 'curl/7.22.0', + * // host: '127.0.0.1:8000', + * // accept: '*' } + * console.log(request.headers); + * ``` + * + * Duplicates in raw headers are handled in the following ways, depending on the + * header name: + * + * * Duplicates of `age`, `authorization`, `content-length`, `content-type`,`etag`, `expires`, `from`, `host`, `if-modified-since`, `if-unmodified-since`,`last-modified`, `location`, + * `max-forwards`, `proxy-authorization`, `referer`,`retry-after`, `server`, or `user-agent` are discarded. + * To allow duplicate values of the headers listed above to be joined, + * use the option `joinDuplicateHeaders` in {@link request} and {@link createServer}. See RFC 9110 Section 5.3 for more + * information. + * * `set-cookie` is always an array. Duplicates are added to the array. + * * For duplicate `cookie` headers, the values are joined together with `; `. + * * For all other headers, the values are joined together with `, `. + * @since v0.1.5 + */ + headers: IncomingHttpHeaders; + /** + * Similar to `message.headers`, but there is no join logic and the values are + * always arrays of strings, even for headers received just once. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': ['curl/7.22.0'], + * // host: ['127.0.0.1:8000'], + * // accept: ['*'] } + * console.log(request.headersDistinct); + * ``` + * @since v18.3.0, v16.17.0 + */ + headersDistinct: NodeJS.Dict; + /** + * The raw request/response headers list exactly as they were received. + * + * The keys and values are in the same list. It is _not_ a + * list of tuples. So, the even-numbered offsets are key values, and the + * odd-numbered offsets are the associated values. 
+ * + * Header names are not lowercased, and duplicates are not merged. + * + * ```js + * // Prints something like: + * // + * // [ 'user-agent', + * // 'this is invalid because there can be only one', + * // 'User-Agent', + * // 'curl/7.22.0', + * // 'Host', + * // '127.0.0.1:8000', + * // 'ACCEPT', + * // '*' ] + * console.log(request.rawHeaders); + * ``` + * @since v0.11.6 + */ + rawHeaders: string[]; + /** + * The request/response trailers object. Only populated at the `'end'` event. + * @since v0.3.0 + */ + trailers: NodeJS.Dict; + /** + * Similar to `message.trailers`, but there is no join logic and the values are + * always arrays of strings, even for headers received just once. + * Only populated at the `'end'` event. + * @since v18.3.0, v16.17.0 + */ + trailersDistinct: NodeJS.Dict; + /** + * The raw request/response trailer keys and values exactly as they were + * received. Only populated at the `'end'` event. + * @since v0.11.6 + */ + rawTrailers: string[]; + /** + * Calls `message.socket.setTimeout(msecs, callback)`. + * @since v0.5.9 + */ + setTimeout(msecs: number, callback?: () => void): this; + /** + * **Only valid for request obtained from {@link Server}.** + * + * The request method as a string. Read only. Examples: `'GET'`, `'DELETE'`. + * @since v0.1.1 + */ + method?: string | undefined; + /** + * **Only valid for request obtained from {@link Server}.** + * + * Request URL string. This contains only the URL that is present in the actual + * HTTP request. Take the following request: + * + * ```http + * GET /status?name=ryan HTTP/1.1 + * Accept: text/plain + * ``` + * + * To parse the URL into its parts: + * + * ```js + * new URL(request.url, `http://${request.headers.host}`); + * ``` + * + * When `request.url` is `'/status?name=ryan'` and `request.headers.host` is`'localhost:3000'`: + * + * ```console + * $ node + * > new URL(request.url, `http://${request.headers.host}`) + * URL { + * href: 'http://localhost:3000/status?name=ryan', + * origin: 'http://localhost:3000', + * protocol: 'http:', + * username: '', + * password: '', + * host: 'localhost:3000', + * hostname: 'localhost', + * port: '3000', + * pathname: '/status', + * search: '?name=ryan', + * searchParams: URLSearchParams { 'name' => 'ryan' }, + * hash: '' + * } + * ``` + * @since v0.1.90 + */ + url?: string | undefined; + /** + * **Only valid for response obtained from {@link ClientRequest}.** + * + * The 3-digit HTTP response status code. E.G. `404`. + * @since v0.1.1 + */ + statusCode?: number | undefined; + /** + * **Only valid for response obtained from {@link ClientRequest}.** + * + * The HTTP response status message (reason phrase). E.G. `OK` or `Internal Server Error`. + * @since v0.11.10 + */ + statusMessage?: string | undefined; + /** + * Calls `destroy()` on the socket that received the `IncomingMessage`. If `error`is provided, an `'error'` event is emitted on the socket and `error` is passed + * as an argument to any listeners on the event. + * @since v0.3.0 + */ + destroy(error?: Error): this; + } + interface AgentOptions extends Partial { + /** + * Keep sockets around in a pool to be used by other requests in the future. Default = false + */ + keepAlive?: boolean | undefined; + /** + * When using HTTP KeepAlive, how often to send TCP KeepAlive packets over sockets being kept alive. Default = 1000. + * Only relevant if keepAlive is set to true. + */ + keepAliveMsecs?: number | undefined; + /** + * Maximum number of sockets to allow per host. 
Default for Node 0.10 is 5, default for Node 0.12 is Infinity + */ + maxSockets?: number | undefined; + /** + * Maximum number of sockets allowed for all hosts in total. Each request will use a new socket until the maximum is reached. Default: Infinity. + */ + maxTotalSockets?: number | undefined; + /** + * Maximum number of sockets to leave open in a free state. Only relevant if keepAlive is set to true. Default = 256. + */ + maxFreeSockets?: number | undefined; + /** + * Socket timeout in milliseconds. This will set the timeout after the socket is connected. + */ + timeout?: number | undefined; + /** + * Scheduling strategy to apply when picking the next free socket to use. + * @default `lifo` + */ + scheduling?: "fifo" | "lifo" | undefined; + } + /** + * An `Agent` is responsible for managing connection persistence + * and reuse for HTTP clients. It maintains a queue of pending requests + * for a given host and port, reusing a single socket connection for each + * until the queue is empty, at which time the socket is either destroyed + * or put into a pool where it is kept to be used again for requests to the + * same host and port. Whether it is destroyed or pooled depends on the`keepAlive` `option`. + * + * Pooled connections have TCP Keep-Alive enabled for them, but servers may + * still close idle connections, in which case they will be removed from the + * pool and a new connection will be made when a new HTTP request is made for + * that host and port. Servers may also refuse to allow multiple requests + * over the same connection, in which case the connection will have to be + * remade for every request and cannot be pooled. The `Agent` will still make + * the requests to that server, but each one will occur over a new connection. + * + * When a connection is closed by the client or the server, it is removed + * from the pool. Any unused sockets in the pool will be unrefed so as not + * to keep the Node.js process running when there are no outstanding requests. + * (see `socket.unref()`). + * + * It is good practice, to `destroy()` an `Agent` instance when it is no + * longer in use, because unused sockets consume OS resources. + * + * Sockets are removed from an agent when the socket emits either + * a `'close'` event or an `'agentRemove'` event. When intending to keep one + * HTTP request open for a long time without keeping it in the agent, something + * like the following may be done: + * + * ```js + * http.get(options, (res) => { + * // Do stuff + * }).on('socket', (socket) => { + * socket.emit('agentRemove'); + * }); + * ``` + * + * An agent may also be used for an individual request. By providing`{agent: false}` as an option to the `http.get()` or `http.request()`functions, a one-time use `Agent` with default options + * will be used + * for the client connection. + * + * `agent:false`: + * + * ```js + * http.get({ + * hostname: 'localhost', + * port: 80, + * path: '/', + * agent: false, // Create a new agent just for this one request + * }, (res) => { + * // Do stuff with response + * }); + * ``` + * @since v0.3.4 + */ + class Agent extends EventEmitter { + /** + * By default set to 256. For agents with `keepAlive` enabled, this + * sets the maximum number of sockets that will be left open in the free + * state. + * @since v0.11.7 + */ + maxFreeSockets: number; + /** + * By default set to `Infinity`. Determines how many concurrent sockets the agent + * can have open per origin. Origin is the returned value of `agent.getName()`. 
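+ *
+ * A minimal sketch of a dedicated keep-alive agent, assuming a server is
+ * listening on `localhost:8000`:
+ *
+ * ```js
+ * import http from 'node:http';
+ *
+ * const agent = new http.Agent({ keepAlive: true, maxSockets: 10 });
+ * http.get({ hostname: 'localhost', port: 8000, path: '/', agent }, (res) => {
+ *   res.resume(); // consume the body so the socket can return to the pool
+ * });
+ * // Call agent.destroy() once the agent is no longer needed.
+ * ```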
+ * @since v0.3.6 + */ + maxSockets: number; + /** + * By default set to `Infinity`. Determines how many concurrent sockets the agent + * can have open. Unlike `maxSockets`, this parameter applies across all origins. + * @since v14.5.0, v12.19.0 + */ + maxTotalSockets: number; + /** + * An object which contains arrays of sockets currently awaiting use by + * the agent when `keepAlive` is enabled. Do not modify. + * + * Sockets in the `freeSockets` list will be automatically destroyed and + * removed from the array on `'timeout'`. + * @since v0.11.4 + */ + readonly freeSockets: NodeJS.ReadOnlyDict; + /** + * An object which contains arrays of sockets currently in use by the + * agent. Do not modify. + * @since v0.3.6 + */ + readonly sockets: NodeJS.ReadOnlyDict; + /** + * An object which contains queues of requests that have not yet been assigned to + * sockets. Do not modify. + * @since v0.5.9 + */ + readonly requests: NodeJS.ReadOnlyDict; + constructor(opts?: AgentOptions); + /** + * Destroy any sockets that are currently in use by the agent. + * + * It is usually not necessary to do this. However, if using an + * agent with `keepAlive` enabled, then it is best to explicitly shut down + * the agent when it is no longer needed. Otherwise, + * sockets might stay open for quite a long time before the server + * terminates them. + * @since v0.11.4 + */ + destroy(): void; + } + const METHODS: string[]; + const STATUS_CODES: { + [errorCode: number]: string | undefined; + [errorCode: string]: string | undefined; + }; + /** + * Returns a new instance of {@link Server}. + * + * The `requestListener` is a function which is automatically + * added to the `'request'` event. + * + * ```js + * import http from 'node:http'; + * + * // Create a local server to receive data from + * const server = http.createServer((req, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!', + * })); + * }); + * + * server.listen(8000); + * ``` + * + * ```js + * import http from 'node:http'; + * + * // Create a local server to receive data from + * const server = http.createServer(); + * + * // Listen to the request event + * server.on('request', (request, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!', + * })); + * }); + * + * server.listen(8000); + * ``` + * @since v0.1.13 + */ + function createServer< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + >(requestListener?: RequestListener): Server; + function createServer< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + >( + options: ServerOptions, + requestListener?: RequestListener, + ): Server; + // although RequestOptions are passed as ClientRequestArgs to ClientRequest directly, + // create interface RequestOptions would make the naming more clear to developers + interface RequestOptions extends ClientRequestArgs {} + /** + * `options` in `socket.connect()` are also supported. + * + * Node.js maintains several connections per server to make HTTP requests. + * This function allows one to transparently issue requests. + * + * `url` can be a string or a `URL` object. If `url` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. 
+ * + * If both `url` and `options` are specified, the objects are merged, with the`options` properties taking precedence. + * + * The optional `callback` parameter will be added as a one-time listener for + * the `'response'` event. + * + * `http.request()` returns an instance of the {@link ClientRequest} class. The `ClientRequest` instance is a writable stream. If one needs to + * upload a file with a POST request, then write to the `ClientRequest` object. + * + * ```js + * import http from 'node:http'; + * import { Buffer } from 'node:buffer'; + * + * const postData = JSON.stringify({ + * 'msg': 'Hello World!', + * }); + * + * const options = { + * hostname: 'www.google.com', + * port: 80, + * path: '/upload', + * method: 'POST', + * headers: { + * 'Content-Type': 'application/json', + * 'Content-Length': Buffer.byteLength(postData), + * }, + * }; + * + * const req = http.request(options, (res) => { + * console.log(`STATUS: ${res.statusCode}`); + * console.log(`HEADERS: ${JSON.stringify(res.headers)}`); + * res.setEncoding('utf8'); + * res.on('data', (chunk) => { + * console.log(`BODY: ${chunk}`); + * }); + * res.on('end', () => { + * console.log('No more data in response.'); + * }); + * }); + * + * req.on('error', (e) => { + * console.error(`problem with request: ${e.message}`); + * }); + * + * // Write data to request body + * req.write(postData); + * req.end(); + * ``` + * + * In the example `req.end()` was called. With `http.request()` one + * must always call `req.end()` to signify the end of the request - + * even if there is no data being written to the request body. + * + * If any error is encountered during the request (be that with DNS resolution, + * TCP level errors, or actual HTTP parse errors) an `'error'` event is emitted + * on the returned request object. As with all `'error'` events, if no listeners + * are registered the error will be thrown. + * + * There are a few special headers that should be noted. + * + * * Sending a 'Connection: keep-alive' will notify Node.js that the connection to + * the server should be persisted until the next request. + * * Sending a 'Content-Length' header will disable the default chunked encoding. + * * Sending an 'Expect' header will immediately send the request headers. + * Usually, when sending 'Expect: 100-continue', both a timeout and a listener + * for the `'continue'` event should be set. See RFC 2616 Section 8.2.3 for more + * information. + * * Sending an Authorization header will override using the `auth` option + * to compute basic authentication. + * + * Example using a `URL` as `options`: + * + * ```js + * const options = new URL('http://abc:xyz@example.com'); + * + * const req = http.request(options, (res) => { + * // ... 
+ * }); + * ``` + * + * In a successful request, the following events will be emitted in the following + * order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * (`'data'` will not be emitted at all if the response body is empty, for + * instance, in most redirects) + * * `'end'` on the `res` object + * * `'close'` + * + * In the case of a connection error, the following events will be emitted: + * + * * `'socket'` + * * `'error'` + * * `'close'` + * + * In the case of a premature connection close before the response is received, + * the following events will be emitted in the following order: + * + * * `'socket'` + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * In the case of a premature connection close after the response is received, + * the following events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (connection closed here) + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message`'Error: aborted'` and code `'ECONNRESET'` + * * `'close'` + * * `'close'` on the `res` object + * + * If `req.destroy()` is called before a socket is assigned, the following + * events will be emitted in the following order: + * + * * (`req.destroy()` called here) + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'`, or the error with which `req.destroy()` was called + * * `'close'` + * + * If `req.destroy()` is called before the connection succeeds, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * (`req.destroy()` called here) + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'`, or the error with which `req.destroy()` was called + * * `'close'` + * + * If `req.destroy()` is called after the response is received, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (`req.destroy()` called here) + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message `'Error: aborted'`and code `'ECONNRESET'`, or the error with which `req.destroy()` was called + * * `'close'` + * * `'close'` on the `res` object + * + * If `req.abort()` is called before a socket is assigned, the following + * events will be emitted in the following order: + * + * * (`req.abort()` called here) + * * `'abort'` + * * `'close'` + * + * If `req.abort()` is called before the connection succeeds, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * (`req.abort()` called here) + * * `'abort'` + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * If `req.abort()` is called after the response is received, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (`req.abort()` called here) + * * `'abort'` + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message`'Error: aborted'` and code `'ECONNRESET'`. 
+ * * `'close'` + * * `'close'` on the `res` object + * + * Setting the `timeout` option or using the `setTimeout()` function will + * not abort the request or do anything besides add a `'timeout'` event. + * + * Passing an `AbortSignal` and then calling `abort()` on the corresponding`AbortController` will behave the same way as calling `.destroy()` on the + * request. Specifically, the `'error'` event will be emitted with an error with + * the message `'AbortError: The operation was aborted'`, the code `'ABORT_ERR'`and the `cause`, if one was provided. + * @since v0.3.6 + */ + function request(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; + function request( + url: string | URL, + options: RequestOptions, + callback?: (res: IncomingMessage) => void, + ): ClientRequest; + /** + * Since most requests are GET requests without bodies, Node.js provides this + * convenience method. The only difference between this method and {@link request} is that it sets the method to GET by default and calls `req.end()`automatically. The callback must take care to + * consume the response + * data for reasons stated in {@link ClientRequest} section. + * + * The `callback` is invoked with a single argument that is an instance of {@link IncomingMessage}. + * + * JSON fetching example: + * + * ```js + * http.get('http://localhost:8000/', (res) => { + * const { statusCode } = res; + * const contentType = res.headers['content-type']; + * + * let error; + * // Any 2xx status code signals a successful response but + * // here we're only checking for 200. + * if (statusCode !== 200) { + * error = new Error('Request Failed.\n' + + * `Status Code: ${statusCode}`); + * } else if (!/^application\/json/.test(contentType)) { + * error = new Error('Invalid content-type.\n' + + * `Expected application/json but received ${contentType}`); + * } + * if (error) { + * console.error(error.message); + * // Consume response data to free up memory + * res.resume(); + * return; + * } + * + * res.setEncoding('utf8'); + * let rawData = ''; + * res.on('data', (chunk) => { rawData += chunk; }); + * res.on('end', () => { + * try { + * const parsedData = JSON.parse(rawData); + * console.log(parsedData); + * } catch (e) { + * console.error(e.message); + * } + * }); + * }).on('error', (e) => { + * console.error(`Got error: ${e.message}`); + * }); + * + * // Create a local server to receive data from + * const server = http.createServer((req, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!', + * })); + * }); + * + * server.listen(8000); + * ``` + * @since v0.3.6 + * @param options Accepts the same `options` as {@link request}, with the method set to GET by default. + */ + function get(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; + function get(url: string | URL, options: RequestOptions, callback?: (res: IncomingMessage) => void): ClientRequest; + /** + * Performs the low-level validations on the provided `name` that are done when`res.setHeader(name, value)` is called. + * + * Passing illegal value as `name` will result in a `TypeError` being thrown, + * identified by `code: 'ERR_INVALID_HTTP_TOKEN'`. + * + * It is not necessary to use this method before passing headers to an HTTP request + * or response. The HTTP module will automatically validate such headers. 
+ * + * Example: + * + * ```js + * import { validateHeaderName } from 'node:http'; + * + * try { + * validateHeaderName(''); + * } catch (err) { + * console.error(err instanceof TypeError); // --> true + * console.error(err.code); // --> 'ERR_INVALID_HTTP_TOKEN' + * console.error(err.message); // --> 'Header name must be a valid HTTP token [""]' + * } + * ``` + * @since v14.3.0 + * @param [label='Header name'] Label for error message. + */ + function validateHeaderName(name: string): void; + /** + * Performs the low-level validations on the provided `value` that are done when`res.setHeader(name, value)` is called. + * + * Passing illegal value as `value` will result in a `TypeError` being thrown. + * + * * Undefined value error is identified by `code: 'ERR_HTTP_INVALID_HEADER_VALUE'`. + * * Invalid value character error is identified by `code: 'ERR_INVALID_CHAR'`. + * + * It is not necessary to use this method before passing headers to an HTTP request + * or response. The HTTP module will automatically validate such headers. + * + * Examples: + * + * ```js + * import { validateHeaderValue } from 'node:http'; + * + * try { + * validateHeaderValue('x-my-header', undefined); + * } catch (err) { + * console.error(err instanceof TypeError); // --> true + * console.error(err.code === 'ERR_HTTP_INVALID_HEADER_VALUE'); // --> true + * console.error(err.message); // --> 'Invalid value "undefined" for header "x-my-header"' + * } + * + * try { + * validateHeaderValue('x-my-header', 'oʊmɪɡə'); + * } catch (err) { + * console.error(err instanceof TypeError); // --> true + * console.error(err.code === 'ERR_INVALID_CHAR'); // --> true + * console.error(err.message); // --> 'Invalid character in header content ["x-my-header"]' + * } + * ``` + * @since v14.3.0 + * @param name Header name + * @param value Header value + */ + function validateHeaderValue(name: string, value: string): void; + /** + * Set the maximum number of idle HTTP parsers. + * @since v18.8.0, v16.18.0 + * @param [max=1000] + */ + function setMaxIdleHTTPParsers(max: number): void; + let globalAgent: Agent; + /** + * Read-only property specifying the maximum allowed size of HTTP headers in bytes. + * Defaults to 16KB. Configurable using the `--max-http-header-size` CLI option. + */ + const maxHeaderSize: number; +} +declare module "node:http" { + export * from "http"; +} diff --git a/dist/node_modules/@types/node/http2.d.ts b/dist/node_modules/@types/node/http2.d.ts new file mode 100644 index 0000000..c3b3e8e --- /dev/null +++ b/dist/node_modules/@types/node/http2.d.ts @@ -0,0 +1,2382 @@ +/** + * The `node:http2` module provides an implementation of the [HTTP/2](https://tools.ietf.org/html/rfc7540) protocol. 
+ * It can be accessed using: + * + * ```js + * const http2 = require('node:http2'); + * ``` + * @since v8.4.0 + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/http2.js) + */ +declare module "http2" { + import EventEmitter = require("node:events"); + import * as fs from "node:fs"; + import * as net from "node:net"; + import * as stream from "node:stream"; + import * as tls from "node:tls"; + import * as url from "node:url"; + import { + IncomingHttpHeaders as Http1IncomingHttpHeaders, + IncomingMessage, + OutgoingHttpHeaders, + ServerResponse, + } from "node:http"; + export { OutgoingHttpHeaders } from "node:http"; + export interface IncomingHttpStatusHeader { + ":status"?: number | undefined; + } + export interface IncomingHttpHeaders extends Http1IncomingHttpHeaders { + ":path"?: string | undefined; + ":method"?: string | undefined; + ":authority"?: string | undefined; + ":scheme"?: string | undefined; + } + // Http2Stream + export interface StreamPriorityOptions { + exclusive?: boolean | undefined; + parent?: number | undefined; + weight?: number | undefined; + silent?: boolean | undefined; + } + export interface StreamState { + localWindowSize?: number | undefined; + state?: number | undefined; + localClose?: number | undefined; + remoteClose?: number | undefined; + sumDependencyWeight?: number | undefined; + weight?: number | undefined; + } + export interface ServerStreamResponseOptions { + endStream?: boolean | undefined; + waitForTrailers?: boolean | undefined; + } + export interface StatOptions { + offset: number; + length: number; + } + export interface ServerStreamFileResponseOptions { + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + statCheck?(stats: fs.Stats, headers: OutgoingHttpHeaders, statOptions: StatOptions): void | boolean; + waitForTrailers?: boolean | undefined; + offset?: number | undefined; + length?: number | undefined; + } + export interface ServerStreamFileResponseOptionsWithError extends ServerStreamFileResponseOptions { + onError?(err: NodeJS.ErrnoException): void; + } + export interface Http2Stream extends stream.Duplex { + /** + * Set to `true` if the `Http2Stream` instance was aborted abnormally. When set, + * the `'aborted'` event will have been emitted. + * @since v8.4.0 + */ + readonly aborted: boolean; + /** + * This property shows the number of characters currently buffered to be written. + * See `net.Socket.bufferSize` for details. + * @since v11.2.0, v10.16.0 + */ + readonly bufferSize: number; + /** + * Set to `true` if the `Http2Stream` instance has been closed. + * @since v9.4.0 + */ + readonly closed: boolean; + /** + * Set to `true` if the `Http2Stream` instance has been destroyed and is no longer + * usable. + * @since v8.4.0 + */ + readonly destroyed: boolean; + /** + * Set to `true` if the `END_STREAM` flag was set in the request or response + * HEADERS frame received, indicating that no additional data should be received + * and the readable side of the `Http2Stream` will be closed. + * @since v10.11.0 + */ + readonly endAfterHeaders: boolean; + /** + * The numeric stream identifier of this `Http2Stream` instance. Set to `undefined`if the stream identifier has not yet been assigned. + * @since v8.4.0 + */ + readonly id?: number | undefined; + /** + * Set to `true` if the `Http2Stream` instance has not yet been assigned a + * numeric stream identifier. 
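+     * A minimal sketch of checking for a pending stream (the local server address is an
+     * assumption for illustration):
+     *
+     * ```js
+     * const http2 = require('node:http2');
+     * const client = http2.connect('http://localhost:8000');
+     * const req = client.request({ ':path': '/' });
+     * if (req.pending) {
+     *   // No stream identifier has been assigned yet; wait for the stream to open.
+     *   req.once('ready', () => console.log(`stream id: ${req.id}`));
+     * }
+     * req.end();
+     * ```
+     *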
+ * @since v9.4.0 + */ + readonly pending: boolean; + /** + * Set to the `RST_STREAM` `error code` reported when the `Http2Stream` is + * destroyed after either receiving an `RST_STREAM` frame from the connected peer, + * calling `http2stream.close()`, or `http2stream.destroy()`. Will be`undefined` if the `Http2Stream` has not been closed. + * @since v8.4.0 + */ + readonly rstCode: number; + /** + * An object containing the outbound headers sent for this `Http2Stream`. + * @since v9.5.0 + */ + readonly sentHeaders: OutgoingHttpHeaders; + /** + * An array of objects containing the outbound informational (additional) headers + * sent for this `Http2Stream`. + * @since v9.5.0 + */ + readonly sentInfoHeaders?: OutgoingHttpHeaders[] | undefined; + /** + * An object containing the outbound trailers sent for this `HttpStream`. + * @since v9.5.0 + */ + readonly sentTrailers?: OutgoingHttpHeaders | undefined; + /** + * A reference to the `Http2Session` instance that owns this `Http2Stream`. The + * value will be `undefined` after the `Http2Stream` instance is destroyed. + * @since v8.4.0 + */ + readonly session: Http2Session | undefined; + /** + * Provides miscellaneous information about the current state of the`Http2Stream`. + * + * A current state of this `Http2Stream`. + * @since v8.4.0 + */ + readonly state: StreamState; + /** + * Closes the `Http2Stream` instance by sending an `RST_STREAM` frame to the + * connected HTTP/2 peer. + * @since v8.4.0 + * @param [code=http2.constants.NGHTTP2_NO_ERROR] Unsigned 32-bit integer identifying the error code. + * @param callback An optional function registered to listen for the `'close'` event. + */ + close(code?: number, callback?: () => void): void; + /** + * Updates the priority for this `Http2Stream` instance. + * @since v8.4.0 + */ + priority(options: StreamPriorityOptions): void; + /** + * ```js + * const http2 = require('node:http2'); + * const client = http2.connect('http://example.org:8000'); + * const { NGHTTP2_CANCEL } = http2.constants; + * const req = client.request({ ':path': '/' }); + * + * // Cancel the stream if there's no activity after 5 seconds + * req.setTimeout(5000, () => req.close(NGHTTP2_CANCEL)); + * ``` + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * Sends a trailing `HEADERS` frame to the connected HTTP/2 peer. This method + * will cause the `Http2Stream` to be immediately closed and must only be + * called after the `'wantTrailers'` event has been emitted. When sending a + * request or sending a response, the `options.waitForTrailers` option must be set + * in order to keep the `Http2Stream` open after the final `DATA` frame so that + * trailers can be sent. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond(undefined, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ xyz: 'abc' }); + * }); + * stream.end('Hello World'); + * }); + * ``` + * + * The HTTP/1 specification forbids trailers from containing HTTP/2 pseudo-header + * fields (e.g. `':method'`, `':path'`, etc). 
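+     *
+     * The same pattern applies on the client side when a request is opened with the
+     * `waitForTrailers` option; a minimal sketch (the server address and trailer field
+     * are assumptions for illustration):
+     *
+     * ```js
+     * const http2 = require('node:http2');
+     * const client = http2.connect('http://localhost:8000');
+     * // Keep the stream open after the final DATA frame so trailers can be sent.
+     * const req = client.request({ ':path': '/', ':method': 'POST' }, { waitForTrailers: true });
+     * req.on('wantTrailers', () => {
+     *   req.sendTrailers({ 'trailer-field': 'some value' });
+     * });
+     * req.end('request body');
+     * ```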
+ * @since v10.0.0 + */ + sendTrailers(headers: OutgoingHttpHeaders): void; + addListener(event: "aborted", listener: () => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: "streamClosed", listener: (code: number) => void): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: "wantTrailers", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "aborted"): boolean; + emit(event: "close"): boolean; + emit(event: "data", chunk: Buffer | string): boolean; + emit(event: "drain"): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "frameError", frameType: number, errorCode: number): boolean; + emit(event: "pipe", src: stream.Readable): boolean; + emit(event: "unpipe", src: stream.Readable): boolean; + emit(event: "streamClosed", code: number): boolean; + emit(event: "timeout"): boolean; + emit(event: "trailers", trailers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "wantTrailers"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "aborted", listener: () => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "drain", listener: () => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: "streamClosed", listener: (code: number) => void): this; + on(event: "timeout", listener: () => void): this; + on(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + on(event: "wantTrailers", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "aborted", listener: () => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "drain", listener: () => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: "streamClosed", listener: (code: number) => void): 
this; + once(event: "timeout", listener: () => void): this; + once(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + once(event: "wantTrailers", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "aborted", listener: () => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "streamClosed", listener: (code: number) => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: "wantTrailers", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "aborted", listener: () => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "streamClosed", listener: (code: number) => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: "wantTrailers", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ClientHttp2Stream extends Http2Stream { + addListener(event: "continue", listener: () => {}): this; + addListener( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + addListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + addListener( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "continue"): boolean; + emit(event: "headers", headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number): boolean; + emit(event: "push", headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "response", headers: IncomingHttpHeaders & IncomingHttpStatusHeader, 
flags: number): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "continue", listener: () => {}): this; + on( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + on(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + on( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "continue", listener: () => {}): this; + once( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + once(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + once( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "continue", listener: () => {}): this; + prependListener( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + prependListener( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "continue", listener: () => {}): this; + prependOnceListener( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependOnceListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ServerHttp2Stream extends Http2Stream { + /** + * True if headers were sent, false otherwise (read-only). + * @since v8.4.0 + */ + readonly headersSent: boolean; + /** + * Read-only property mapped to the `SETTINGS_ENABLE_PUSH` flag of the remote + * client's most recent `SETTINGS` frame. Will be `true` if the remote peer + * accepts push streams, `false` otherwise. Settings are the same for every`Http2Stream` in the same `Http2Session`. + * @since v8.4.0 + */ + readonly pushAllowed: boolean; + /** + * Sends an additional informational `HEADERS` frame to the connected HTTP/2 peer. + * @since v8.4.0 + */ + additionalHeaders(headers: OutgoingHttpHeaders): void; + /** + * Initiates a push stream. The callback is invoked with the new `Http2Stream`instance created for the push stream passed as the second argument, or an`Error` passed as the first argument. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }); + * stream.pushStream({ ':path': '/' }, (err, pushStream, headers) => { + * if (err) throw err; + * pushStream.respond({ ':status': 200 }); + * pushStream.end('some pushed data'); + * }); + * stream.end('some data'); + * }); + * ``` + * + * Setting the weight of a push stream is not allowed in the `HEADERS` frame. 
Pass + * a `weight` value to `http2stream.priority` with the `silent` option set to`true` to enable server-side bandwidth balancing between concurrent streams. + * + * Calling `http2stream.pushStream()` from within a pushed stream is not permitted + * and will throw an error. + * @since v8.4.0 + * @param callback Callback that is called once the push stream has been initiated. + */ + pushStream( + headers: OutgoingHttpHeaders, + callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void, + ): void; + pushStream( + headers: OutgoingHttpHeaders, + options?: StreamPriorityOptions, + callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void, + ): void; + /** + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }); + * stream.end('some data'); + * }); + * ``` + * + * Initiates a response. When the `options.waitForTrailers` option is set, the`'wantTrailers'` event will be emitted immediately after queuing the last chunk + * of payload data to be sent. The `http2stream.sendTrailers()` method can then be + * used to sent trailing header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * stream.end('some data'); + * }); + * ``` + * @since v8.4.0 + */ + respond(headers?: OutgoingHttpHeaders, options?: ServerStreamResponseOptions): void; + /** + * Initiates a response whose data is read from the given file descriptor. No + * validation is performed on the given file descriptor. If an error occurs while + * attempting to read data using the file descriptor, the `Http2Stream` will be + * closed using an `RST_STREAM` frame using the standard `INTERNAL_ERROR` code. + * + * When used, the `Http2Stream` object's `Duplex` interface will be closed + * automatically. + * + * ```js + * const http2 = require('node:http2'); + * const fs = require('node:fs'); + * + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * const fd = fs.openSync('/some/file', 'r'); + * + * const stat = fs.fstatSync(fd); + * const headers = { + * 'content-length': stat.size, + * 'last-modified': stat.mtime.toUTCString(), + * 'content-type': 'text/plain; charset=utf-8', + * }; + * stream.respondWithFD(fd, headers); + * stream.on('close', () => fs.closeSync(fd)); + * }); + * ``` + * + * The optional `options.statCheck` function may be specified to give user code + * an opportunity to set additional content headers based on the `fs.Stat` details + * of the given fd. If the `statCheck` function is provided, the`http2stream.respondWithFD()` method will perform an `fs.fstat()` call to + * collect details on the provided file descriptor. + * + * The `offset` and `length` options may be used to limit the response to a + * specific range subset. This can be used, for instance, to support HTTP Range + * requests. 
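+         *
+         * For instance, a sketch that serves only the first 100 bytes of a file (the byte
+         * range here is an illustrative assumption):
+         *
+         * ```js
+         * const http2 = require('node:http2');
+         * const fs = require('node:fs');
+         *
+         * const server = http2.createServer();
+         * server.on('stream', (stream) => {
+         *   const fd = fs.openSync('/some/file', 'r');
+         *   // Send bytes 0-99 only; content-length should describe the slice, not the whole file.
+         *   stream.respondWithFD(fd, { 'content-length': 100 }, { offset: 0, length: 100 });
+         *   stream.on('close', () => fs.closeSync(fd));
+         * });
+         * ```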
+ * + * The file descriptor or `FileHandle` is not closed when the stream is closed, + * so it will need to be closed manually once it is no longer needed. + * Using the same file descriptor concurrently for multiple streams + * is not supported and may result in data loss. Re-using a file descriptor + * after a stream has finished is supported. + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code _must_ call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('node:http2'); + * const fs = require('node:fs'); + * + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * const fd = fs.openSync('/some/file', 'r'); + * + * const stat = fs.fstatSync(fd); + * const headers = { + * 'content-length': stat.size, + * 'last-modified': stat.mtime.toUTCString(), + * 'content-type': 'text/plain; charset=utf-8', + * }; + * stream.respondWithFD(fd, headers, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * + * stream.on('close', () => fs.closeSync(fd)); + * }); + * ``` + * @since v8.4.0 + * @param fd A readable file descriptor. + */ + respondWithFD( + fd: number | fs.promises.FileHandle, + headers?: OutgoingHttpHeaders, + options?: ServerStreamFileResponseOptions, + ): void; + /** + * Sends a regular file as the response. The `path` must specify a regular file + * or an `'error'` event will be emitted on the `Http2Stream` object. + * + * When used, the `Http2Stream` object's `Duplex` interface will be closed + * automatically. + * + * The optional `options.statCheck` function may be specified to give user code + * an opportunity to set additional content headers based on the `fs.Stat` details + * of the given file: + * + * If an error occurs while attempting to read the file data, the `Http2Stream`will be closed using an `RST_STREAM` frame using the standard `INTERNAL_ERROR`code. If the `onError` callback is + * defined, then it will be called. Otherwise + * the stream will be destroyed. + * + * Example using a file path: + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * function statCheck(stat, headers) { + * headers['last-modified'] = stat.mtime.toUTCString(); + * } + * + * function onError(err) { + * // stream.respond() can throw if the stream has been destroyed by + * // the other side. + * try { + * if (err.code === 'ENOENT') { + * stream.respond({ ':status': 404 }); + * } else { + * stream.respond({ ':status': 500 }); + * } + * } catch (err) { + * // Perform actual error handling. + * console.error(err); + * } + * stream.end(); + * } + * + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { statCheck, onError }); + * }); + * ``` + * + * The `options.statCheck` function may also be used to cancel the send operation + * by returning `false`. 
For instance, a conditional request may check the stat + * results to determine if the file has been modified to return an appropriate`304` response: + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * function statCheck(stat, headers) { + * // Check the stat here... + * stream.respond({ ':status': 304 }); + * return false; // Cancel the send operation + * } + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { statCheck }); + * }); + * ``` + * + * The `content-length` header field will be automatically set. + * + * The `offset` and `length` options may be used to limit the response to a + * specific range subset. This can be used, for instance, to support HTTP Range + * requests. + * + * The `options.onError` function may also be used to handle all the errors + * that could happen before the delivery of the file is initiated. The + * default behavior is to destroy the stream. + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * }); + * ``` + * @since v8.4.0 + */ + respondWithFile( + path: string, + headers?: OutgoingHttpHeaders, + options?: ServerStreamFileResponseOptionsWithError, + ): void; + } + // Http2Session + export interface Settings { + headerTableSize?: number | undefined; + enablePush?: boolean | undefined; + initialWindowSize?: number | undefined; + maxFrameSize?: number | undefined; + maxConcurrentStreams?: number | undefined; + maxHeaderListSize?: number | undefined; + enableConnectProtocol?: boolean | undefined; + } + export interface ClientSessionRequestOptions { + endStream?: boolean | undefined; + exclusive?: boolean | undefined; + parent?: number | undefined; + weight?: number | undefined; + waitForTrailers?: boolean | undefined; + signal?: AbortSignal | undefined; + } + export interface SessionState { + effectiveLocalWindowSize?: number | undefined; + effectiveRecvDataLength?: number | undefined; + nextStreamID?: number | undefined; + localWindowSize?: number | undefined; + lastProcStreamID?: number | undefined; + remoteWindowSize?: number | undefined; + outboundQueueSize?: number | undefined; + deflateDynamicTableSize?: number | undefined; + inflateDynamicTableSize?: number | undefined; + } + export interface Http2Session extends EventEmitter { + /** + * Value will be `undefined` if the `Http2Session` is not yet connected to a + * socket, `h2c` if the `Http2Session` is not connected to a `TLSSocket`, or + * will return the value of the connected `TLSSocket`'s own `alpnProtocol`property. 
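+         *
+         * A minimal sketch of inspecting the negotiated protocol (the target origin is an
+         * assumption for illustration):
+         *
+         * ```js
+         * const http2 = require('node:http2');
+         * const session = http2.connect('https://example.org');
+         * session.on('connect', () => {
+         *   // 'h2' over TLS, 'h2c' over a plain socket, or undefined before connecting.
+         *   console.log(session.alpnProtocol);
+         * });
+         * ```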
+ * @since v9.4.0 + */ + readonly alpnProtocol?: string | undefined; + /** + * Will be `true` if this `Http2Session` instance has been closed, otherwise`false`. + * @since v9.4.0 + */ + readonly closed: boolean; + /** + * Will be `true` if this `Http2Session` instance is still connecting, will be set + * to `false` before emitting `connect` event and/or calling the `http2.connect`callback. + * @since v10.0.0 + */ + readonly connecting: boolean; + /** + * Will be `true` if this `Http2Session` instance has been destroyed and must no + * longer be used, otherwise `false`. + * @since v8.4.0 + */ + readonly destroyed: boolean; + /** + * Value is `undefined` if the `Http2Session` session socket has not yet been + * connected, `true` if the `Http2Session` is connected with a `TLSSocket`, + * and `false` if the `Http2Session` is connected to any other kind of socket + * or stream. + * @since v9.4.0 + */ + readonly encrypted?: boolean | undefined; + /** + * A prototype-less object describing the current local settings of this`Http2Session`. The local settings are local to _this_`Http2Session` instance. + * @since v8.4.0 + */ + readonly localSettings: Settings; + /** + * If the `Http2Session` is connected to a `TLSSocket`, the `originSet` property + * will return an `Array` of origins for which the `Http2Session` may be + * considered authoritative. + * + * The `originSet` property is only available when using a secure TLS connection. + * @since v9.4.0 + */ + readonly originSet?: string[] | undefined; + /** + * Indicates whether the `Http2Session` is currently waiting for acknowledgment of + * a sent `SETTINGS` frame. Will be `true` after calling the`http2session.settings()` method. Will be `false` once all sent `SETTINGS`frames have been acknowledged. + * @since v8.4.0 + */ + readonly pendingSettingsAck: boolean; + /** + * A prototype-less object describing the current remote settings of this`Http2Session`. The remote settings are set by the _connected_ HTTP/2 peer. + * @since v8.4.0 + */ + readonly remoteSettings: Settings; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * limits available methods to ones safe to use with HTTP/2. + * + * `destroy`, `emit`, `end`, `pause`, `read`, `resume`, and `write` will throw + * an error with code `ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for more information. + * + * `setTimeout` method will be called on this `Http2Session`. + * + * All other interactions will be routed directly to the socket. + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * Provides miscellaneous information about the current state of the`Http2Session`. + * + * An object describing the current status of this `Http2Session`. + * @since v8.4.0 + */ + readonly state: SessionState; + /** + * The `http2session.type` will be equal to`http2.constants.NGHTTP2_SESSION_SERVER` if this `Http2Session` instance is a + * server, and `http2.constants.NGHTTP2_SESSION_CLIENT` if the instance is a + * client. + * @since v8.4.0 + */ + readonly type: number; + /** + * Gracefully closes the `Http2Session`, allowing any existing streams to + * complete on their own and preventing new `Http2Stream` instances from being + * created. Once closed, `http2session.destroy()`_might_ be called if there + * are no open `Http2Stream` instances. + * + * If specified, the `callback` function is registered as a handler for the`'close'` event. 
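+         *
+         * A minimal sketch of a graceful shutdown (the server address is an assumption
+         * for illustration):
+         *
+         * ```js
+         * const http2 = require('node:http2');
+         * const client = http2.connect('http://localhost:8000');
+         * const req = client.request({ ':path': '/' });
+         * req.resume();
+         * req.end();
+         * // No new streams may be created; the session closes once `req` has completed.
+         * client.close(() => console.log('session closed'));
+         * ```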
+ * @since v9.4.0 + */ + close(callback?: () => void): void; + /** + * Immediately terminates the `Http2Session` and the associated `net.Socket` or`tls.TLSSocket`. + * + * Once destroyed, the `Http2Session` will emit the `'close'` event. If `error`is not undefined, an `'error'` event will be emitted immediately before the`'close'` event. + * + * If there are any remaining open `Http2Streams` associated with the`Http2Session`, those will also be destroyed. + * @since v8.4.0 + * @param error An `Error` object if the `Http2Session` is being destroyed due to an error. + * @param code The HTTP/2 error code to send in the final `GOAWAY` frame. If unspecified, and `error` is not undefined, the default is `INTERNAL_ERROR`, otherwise defaults to `NO_ERROR`. + */ + destroy(error?: Error, code?: number): void; + /** + * Transmits a `GOAWAY` frame to the connected peer _without_ shutting down the`Http2Session`. + * @since v9.4.0 + * @param code An HTTP/2 error code + * @param lastStreamID The numeric ID of the last processed `Http2Stream` + * @param opaqueData A `TypedArray` or `DataView` instance containing additional data to be carried within the `GOAWAY` frame. + */ + goaway(code?: number, lastStreamID?: number, opaqueData?: NodeJS.ArrayBufferView): void; + /** + * Sends a `PING` frame to the connected HTTP/2 peer. A `callback` function must + * be provided. The method will return `true` if the `PING` was sent, `false`otherwise. + * + * The maximum number of outstanding (unacknowledged) pings is determined by the`maxOutstandingPings` configuration option. The default maximum is 10. + * + * If provided, the `payload` must be a `Buffer`, `TypedArray`, or `DataView`containing 8 bytes of data that will be transmitted with the `PING` and + * returned with the ping acknowledgment. + * + * The callback will be invoked with three arguments: an error argument that will + * be `null` if the `PING` was successfully acknowledged, a `duration` argument + * that reports the number of milliseconds elapsed since the ping was sent and the + * acknowledgment was received, and a `Buffer` containing the 8-byte `PING`payload. + * + * ```js + * session.ping(Buffer.from('abcdefgh'), (err, duration, payload) => { + * if (!err) { + * console.log(`Ping acknowledged in ${duration} milliseconds`); + * console.log(`With payload '${payload.toString()}'`); + * } + * }); + * ``` + * + * If the `payload` argument is not specified, the default payload will be the + * 64-bit timestamp (little endian) marking the start of the `PING` duration. + * @since v8.9.3 + * @param payload Optional ping payload. + */ + ping(callback: (err: Error | null, duration: number, payload: Buffer) => void): boolean; + ping( + payload: NodeJS.ArrayBufferView, + callback: (err: Error | null, duration: number, payload: Buffer) => void, + ): boolean; + /** + * Calls `ref()` on this `Http2Session`instance's underlying `net.Socket`. + * @since v9.4.0 + */ + ref(): void; + /** + * Sets the local endpoint's window size. + * The `windowSize` is the total window size to set, not + * the delta. 
+ * + * ```js + * const http2 = require('node:http2'); + * + * const server = http2.createServer(); + * const expectedWindowSize = 2 ** 20; + * server.on('connect', (session) => { + * + * // Set local window size to be 2 ** 20 + * session.setLocalWindowSize(expectedWindowSize); + * }); + * ``` + * @since v15.3.0, v14.18.0 + */ + setLocalWindowSize(windowSize: number): void; + /** + * Used to set a callback function that is called when there is no activity on + * the `Http2Session` after `msecs` milliseconds. The given `callback` is + * registered as a listener on the `'timeout'` event. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * Updates the current local settings for this `Http2Session` and sends a new`SETTINGS` frame to the connected HTTP/2 peer. + * + * Once called, the `http2session.pendingSettingsAck` property will be `true`while the session is waiting for the remote peer to acknowledge the new + * settings. + * + * The new settings will not become effective until the `SETTINGS` acknowledgment + * is received and the `'localSettings'` event is emitted. It is possible to send + * multiple `SETTINGS` frames while acknowledgment is still pending. + * @since v8.4.0 + * @param callback Callback that is called once the session is connected or right away if the session is already connected. + */ + settings( + settings: Settings, + callback?: (err: Error | null, settings: Settings, duration: number) => void, + ): void; + /** + * Calls `unref()` on this `Http2Session`instance's underlying `net.Socket`. + * @since v9.4.0 + */ + unref(): void; + addListener(event: "close", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener( + event: "frameError", + listener: (frameType: number, errorCode: number, streamID: number) => void, + ): this; + addListener( + event: "goaway", + listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void, + ): this; + addListener(event: "localSettings", listener: (settings: Settings) => void): this; + addListener(event: "ping", listener: () => void): this; + addListener(event: "remoteSettings", listener: (settings: Settings) => void): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "frameError", frameType: number, errorCode: number, streamID: number): boolean; + emit(event: "goaway", errorCode: number, lastStreamID: number, opaqueData?: Buffer): boolean; + emit(event: "localSettings", settings: Settings): boolean; + emit(event: "ping"): boolean; + emit(event: "remoteSettings", settings: Settings): boolean; + emit(event: "timeout"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "frameError", listener: (frameType: number, errorCode: number, streamID: number) => void): this; + on(event: "goaway", listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void): this; + on(event: "localSettings", listener: (settings: Settings) => void): this; + on(event: "ping", listener: () => void): this; + on(event: "remoteSettings", listener: (settings: Settings) => void): this; + on(event: "timeout", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 
"close", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "frameError", listener: (frameType: number, errorCode: number, streamID: number) => void): this; + once(event: "goaway", listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void): this; + once(event: "localSettings", listener: (settings: Settings) => void): this; + once(event: "ping", listener: () => void): this; + once(event: "remoteSettings", listener: (settings: Settings) => void): this; + once(event: "timeout", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener( + event: "frameError", + listener: (frameType: number, errorCode: number, streamID: number) => void, + ): this; + prependListener( + event: "goaway", + listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void, + ): this; + prependListener(event: "localSettings", listener: (settings: Settings) => void): this; + prependListener(event: "ping", listener: () => void): this; + prependListener(event: "remoteSettings", listener: (settings: Settings) => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener( + event: "frameError", + listener: (frameType: number, errorCode: number, streamID: number) => void, + ): this; + prependOnceListener( + event: "goaway", + listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void, + ): this; + prependOnceListener(event: "localSettings", listener: (settings: Settings) => void): this; + prependOnceListener(event: "ping", listener: () => void): this; + prependOnceListener(event: "remoteSettings", listener: (settings: Settings) => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ClientHttp2Session extends Http2Session { + /** + * For HTTP/2 Client `Http2Session` instances only, the `http2session.request()`creates and returns an `Http2Stream` instance that can be used to send an + * HTTP/2 request to the connected server. + * + * When a `ClientHttp2Session` is first created, the socket may not yet be + * connected. if `clienthttp2session.request()` is called during this time, the + * actual request will be deferred until the socket is ready to go. + * If the `session` is closed before the actual request be executed, an`ERR_HTTP2_GOAWAY_SESSION` is thrown. + * + * This method is only available if `http2session.type` is equal to`http2.constants.NGHTTP2_SESSION_CLIENT`. + * + * ```js + * const http2 = require('node:http2'); + * const clientSession = http2.connect('https://localhost:1234'); + * const { + * HTTP2_HEADER_PATH, + * HTTP2_HEADER_STATUS, + * } = http2.constants; + * + * const req = clientSession.request({ [HTTP2_HEADER_PATH]: '/' }); + * req.on('response', (headers) => { + * console.log(headers[HTTP2_HEADER_STATUS]); + * req.on('data', (chunk) => { // .. }); + * req.on('end', () => { // .. 
}); + * }); + * ``` + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * is emitted immediately after queuing the last chunk of payload data to be sent. + * The `http2stream.sendTrailers()` method can then be called to send trailing + * headers to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * When `options.signal` is set with an `AbortSignal` and then `abort` on the + * corresponding `AbortController` is called, the request will emit an `'error'`event with an `AbortError` error. + * + * The `:method` and `:path` pseudo-headers are not specified within `headers`, + * they respectively default to: + * + * * `:method` \= `'GET'` + * * `:path` \= `/` + * @since v8.4.0 + */ + request(headers?: OutgoingHttpHeaders, options?: ClientSessionRequestOptions): ClientHttp2Stream; + addListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + addListener(event: "origin", listener: (origins: string[]) => void): this; + addListener( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + addListener( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "altsvc", alt: string, origin: string, stream: number): boolean; + emit(event: "origin", origins: readonly string[]): boolean; + emit(event: "connect", session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket): boolean; + emit( + event: "stream", + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + on(event: "origin", listener: (origins: string[]) => void): this; + on(event: "connect", listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + on( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + once(event: "origin", listener: (origins: string[]) => void): this; + once( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + once( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + prependListener(event: "origin", listener: (origins: string[]) => void): this; + prependListener( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + prependListener( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & 
IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + prependOnceListener(event: "origin", listener: (origins: string[]) => void): this; + prependOnceListener( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + prependOnceListener( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface AlternativeServiceOptions { + origin: number | string | url.URL; + } + export interface ServerHttp2Session extends Http2Session { + readonly server: Http2Server | Http2SecureServer; + /** + * Submits an `ALTSVC` frame (as defined by [RFC 7838](https://tools.ietf.org/html/rfc7838)) to the connected client. + * + * ```js + * const http2 = require('node:http2'); + * + * const server = http2.createServer(); + * server.on('session', (session) => { + * // Set altsvc for origin https://example.org:80 + * session.altsvc('h2=":8000"', 'https://example.org:80'); + * }); + * + * server.on('stream', (stream) => { + * // Set altsvc for a specific stream + * stream.session.altsvc('h2=":8000"', stream.id); + * }); + * ``` + * + * Sending an `ALTSVC` frame with a specific stream ID indicates that the alternate + * service is associated with the origin of the given `Http2Stream`. + * + * The `alt` and origin string _must_ contain only ASCII bytes and are + * strictly interpreted as a sequence of ASCII bytes. The special value `'clear'`may be passed to clear any previously set alternative service for a given + * domain. + * + * When a string is passed for the `originOrStream` argument, it will be parsed as + * a URL and the origin will be derived. For instance, the origin for the + * HTTP URL `'https://example.org/foo/bar'` is the ASCII string`'https://example.org'`. An error will be thrown if either the given string + * cannot be parsed as a URL or if a valid origin cannot be derived. + * + * A `URL` object, or any object with an `origin` property, may be passed as`originOrStream`, in which case the value of the `origin` property will be + * used. The value of the `origin` property _must_ be a properly serialized + * ASCII origin. + * @since v9.4.0 + * @param alt A description of the alternative service configuration as defined by `RFC 7838`. + * @param originOrStream Either a URL string specifying the origin (or an `Object` with an `origin` property) or the numeric identifier of an active `Http2Stream` as given by the + * `http2stream.id` property. + */ + altsvc(alt: string, originOrStream: number | string | url.URL | AlternativeServiceOptions): void; + /** + * Submits an `ORIGIN` frame (as defined by [RFC 8336](https://tools.ietf.org/html/rfc8336)) to the connected client + * to advertise the set of origins for which the server is capable of providing + * authoritative responses. 
+ * + * ```js + * const http2 = require('node:http2'); + * const options = getSecureOptionsSomehow(); + * const server = http2.createSecureServer(options); + * server.on('stream', (stream) => { + * stream.respond(); + * stream.end('ok'); + * }); + * server.on('session', (session) => { + * session.origin('https://example.com', 'https://example.org'); + * }); + * ``` + * + * When a string is passed as an `origin`, it will be parsed as a URL and the + * origin will be derived. For instance, the origin for the HTTP URL`'https://example.org/foo/bar'` is the ASCII string`'https://example.org'`. An error will be thrown if either the given + * string + * cannot be parsed as a URL or if a valid origin cannot be derived. + * + * A `URL` object, or any object with an `origin` property, may be passed as + * an `origin`, in which case the value of the `origin` property will be + * used. The value of the `origin` property _must_ be a properly serialized + * ASCII origin. + * + * Alternatively, the `origins` option may be used when creating a new HTTP/2 + * server using the `http2.createSecureServer()` method: + * + * ```js + * const http2 = require('node:http2'); + * const options = getSecureOptionsSomehow(); + * options.origins = ['https://example.com', 'https://example.org']; + * const server = http2.createSecureServer(options); + * server.on('stream', (stream) => { + * stream.respond(); + * stream.end('ok'); + * }); + * ``` + * @since v10.12.0 + * @param origins One or more URL Strings passed as separate arguments. + */ + origin( + ...origins: Array< + | string + | url.URL + | { + origin: string; + } + > + ): void; + addListener( + event: "connect", + listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + addListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "connect", session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket): boolean; + emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "connect", listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + on( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once( + event: "connect", + listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + once( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener( + event: "connect", + listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + prependListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "connect", + listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + prependOnceListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + 
prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + // Http2Server + export interface SessionOptions { + maxDeflateDynamicTableSize?: number | undefined; + maxSessionMemory?: number | undefined; + maxHeaderListPairs?: number | undefined; + maxOutstandingPings?: number | undefined; + maxSendHeaderBlockLength?: number | undefined; + paddingStrategy?: number | undefined; + peerMaxConcurrentStreams?: number | undefined; + settings?: Settings | undefined; + /** + * Specifies a timeout in milliseconds that + * a server should wait when an [`'unknownProtocol'`][] is emitted. If the + * socket has not been destroyed by that time the server will destroy it. + * @default 100000 + */ + unknownProtocolTimeout?: number | undefined; + selectPadding?(frameLen: number, maxFrameLen: number): number; + } + export interface ClientSessionOptions extends SessionOptions { + maxReservedRemoteStreams?: number | undefined; + createConnection?: ((authority: url.URL, option: SessionOptions) => stream.Duplex) | undefined; + protocol?: "http:" | "https:" | undefined; + } + export interface ServerSessionOptions extends SessionOptions { + Http1IncomingMessage?: typeof IncomingMessage | undefined; + Http1ServerResponse?: typeof ServerResponse | undefined; + Http2ServerRequest?: typeof Http2ServerRequest | undefined; + Http2ServerResponse?: typeof Http2ServerResponse | undefined; + } + export interface SecureClientSessionOptions extends ClientSessionOptions, tls.ConnectionOptions {} + export interface SecureServerSessionOptions extends ServerSessionOptions, tls.TlsOptions {} + export interface ServerOptions extends ServerSessionOptions {} + export interface SecureServerOptions extends SecureServerSessionOptions { + allowHTTP1?: boolean | undefined; + origins?: string[] | undefined; + } + interface HTTP2ServerCommon { + setTimeout(msec?: number, callback?: () => void): this; + /** + * Throws ERR_HTTP2_INVALID_SETTING_VALUE for invalid settings values. + * Throws ERR_INVALID_ARG_TYPE for invalid settings argument. 
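+         *
+         * A minimal sketch (the chosen limit is an illustrative assumption):
+         *
+         * ```js
+         * const http2 = require('node:http2');
+         * const server = http2.createServer();
+         * try {
+         *   server.updateSettings({ maxConcurrentStreams: 100 });
+         * } catch (err) {
+         *   // e.g. ERR_HTTP2_INVALID_SETTING_VALUE for an out-of-range value
+         *   console.error(err.code);
+         * }
+         * ```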
+ */ + updateSettings(settings: Settings): void; + } + export interface Http2Server extends net.Server, HTTP2ServerCommon { + addListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + addListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + addListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + addListener(event: "sessionError", listener: (err: Error) => void): this; + addListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "checkContinue", request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: "request", request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: "session", session: ServerHttp2Session): boolean; + emit(event: "sessionError", err: Error): boolean; + emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "timeout"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + on(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + on(event: "session", listener: (session: ServerHttp2Session) => void): this; + on(event: "sessionError", listener: (err: Error) => void): this; + on( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + on(event: "timeout", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + once(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + once(event: "session", listener: (session: ServerHttp2Session) => void): this; + once(event: "sessionError", listener: (err: Error) => void): this; + once( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + once(event: "timeout", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + prependListener(event: "sessionError", listener: (err: Error) => void): this; + prependListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependOnceListener( + event: "request", + 
listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependOnceListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + prependOnceListener(event: "sessionError", listener: (err: Error) => void): this; + prependOnceListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface Http2SecureServer extends tls.Server, HTTP2ServerCommon { + addListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + addListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + addListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + addListener(event: "sessionError", listener: (err: Error) => void): this; + addListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "checkContinue", request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: "request", request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: "session", session: ServerHttp2Session): boolean; + emit(event: "sessionError", err: Error): boolean; + emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "timeout"): boolean; + emit(event: "unknownProtocol", socket: tls.TLSSocket): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + on(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + on(event: "session", listener: (session: ServerHttp2Session) => void): this; + on(event: "sessionError", listener: (err: Error) => void): this; + on( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + on(event: "timeout", listener: () => void): this; + on(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + once(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + once(event: "session", listener: (session: ServerHttp2Session) => void): this; + once(event: "sessionError", listener: (err: Error) => void): this; + once( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + once(event: "timeout", listener: () => void): this; + once(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener( + event: "checkContinue", + listener: (request: 
Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + prependListener(event: "sessionError", listener: (err: Error) => void): this; + prependListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependOnceListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependOnceListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + prependOnceListener(event: "sessionError", listener: (err: Error) => void): this; + prependOnceListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * A `Http2ServerRequest` object is created by {@link Server} or {@link SecureServer} and passed as the first argument to the `'request'` event. It may be used to access a request status, + * headers, and + * data. + * @since v8.4.0 + */ + export class Http2ServerRequest extends stream.Readable { + constructor( + stream: ServerHttp2Stream, + headers: IncomingHttpHeaders, + options: stream.ReadableOptions, + rawHeaders: readonly string[], + ); + /** + * The `request.aborted` property will be `true` if the request has + * been aborted. + * @since v10.1.0 + */ + readonly aborted: boolean; + /** + * The request authority pseudo header field. Because HTTP/2 allows requests + * to set either `:authority` or `host`, this value is derived from`req.headers[':authority']` if present. Otherwise, it is derived from`req.headers['host']`. + * @since v8.4.0 + */ + readonly authority: string; + /** + * See `request.socket`. + * @since v8.4.0 + * @deprecated Since v13.0.0 - Use `socket`. + */ + readonly connection: net.Socket | tls.TLSSocket; + /** + * The `request.complete` property will be `true` if the request has + * been completed, aborted, or destroyed. + * @since v12.10.0 + */ + readonly complete: boolean; + /** + * The request/response headers object. + * + * Key-value pairs of header names and values. Header names are lower-cased. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': 'curl/7.22.0', + * // host: '127.0.0.1:8000', + * // accept: '*' } + * console.log(request.headers); + * ``` + * + * See `HTTP/2 Headers Object`. + * + * In HTTP/2, the request path, host name, protocol, and method are represented as + * special headers prefixed with the `:` character (e.g. `':path'`). These special + * headers will be included in the `request.headers` object. Care must be taken not + * to inadvertently modify these special headers or errors may occur. 
For instance, + * removing all headers from the request will cause errors to occur: + * + * ```js + * removeAllHeaders(request.headers); + * assert(request.url); // Fails because the :path header has been removed + * ``` + * @since v8.4.0 + */ + readonly headers: IncomingHttpHeaders; + /** + * In case of server request, the HTTP version sent by the client. In the case of + * client response, the HTTP version of the connected-to server. Returns`'2.0'`. + * + * Also `message.httpVersionMajor` is the first integer and`message.httpVersionMinor` is the second. + * @since v8.4.0 + */ + readonly httpVersion: string; + readonly httpVersionMinor: number; + readonly httpVersionMajor: number; + /** + * The request method as a string. Read-only. Examples: `'GET'`, `'DELETE'`. + * @since v8.4.0 + */ + readonly method: string; + /** + * The raw request/response headers list exactly as they were received. + * + * The keys and values are in the same list. It is _not_ a + * list of tuples. So, the even-numbered offsets are key values, and the + * odd-numbered offsets are the associated values. + * + * Header names are not lowercased, and duplicates are not merged. + * + * ```js + * // Prints something like: + * // + * // [ 'user-agent', + * // 'this is invalid because there can be only one', + * // 'User-Agent', + * // 'curl/7.22.0', + * // 'Host', + * // '127.0.0.1:8000', + * // 'ACCEPT', + * // '*' ] + * console.log(request.rawHeaders); + * ``` + * @since v8.4.0 + */ + readonly rawHeaders: string[]; + /** + * The raw request/response trailer keys and values exactly as they were + * received. Only populated at the `'end'` event. + * @since v8.4.0 + */ + readonly rawTrailers: string[]; + /** + * The request scheme pseudo header field indicating the scheme + * portion of the target URL. + * @since v8.4.0 + */ + readonly scheme: string; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * applies getters, setters, and methods based on HTTP/2 logic. + * + * `destroyed`, `readable`, and `writable` properties will be retrieved from and + * set on `request.stream`. + * + * `destroy`, `emit`, `end`, `on` and `once` methods will be called on`request.stream`. + * + * `setTimeout` method will be called on `request.stream.session`. + * + * `pause`, `read`, `resume`, and `write` will throw an error with code`ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for + * more information. + * + * All other interactions will be routed directly to the socket. With TLS support, + * use `request.socket.getPeerCertificate()` to obtain the client's + * authentication details. + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * The `Http2Stream` object backing the request. + * @since v8.4.0 + */ + readonly stream: ServerHttp2Stream; + /** + * The request/response trailers object. Only populated at the `'end'` event. + * @since v8.4.0 + */ + readonly trailers: IncomingHttpHeaders; + /** + * Request URL string. This contains only the URL that is present in the actual + * HTTP request. 
If the request is: + * + * ```http + * GET /status?name=ryan HTTP/1.1 + * Accept: text/plain + * ``` + * + * Then `request.url` will be: + * + * ```js + * '/status?name=ryan' + * ``` + * + * To parse the url into its parts, `new URL()` can be used: + * + * ```console + * $ node + * > new URL('/status?name=ryan', 'http://example.com') + * URL { + * href: 'http://example.com/status?name=ryan', + * origin: 'http://example.com', + * protocol: 'http:', + * username: '', + * password: '', + * host: 'example.com', + * hostname: 'example.com', + * port: '', + * pathname: '/status', + * search: '?name=ryan', + * searchParams: URLSearchParams { 'name' => 'ryan' }, + * hash: '' + * } + * ``` + * @since v8.4.0 + */ + url: string; + /** + * Sets the `Http2Stream`'s timeout value to `msecs`. If a callback is + * provided, then it is added as a listener on the `'timeout'` event on + * the response object. + * + * If no `'timeout'` listener is added to the request, the response, or + * the server, then `Http2Stream` s are destroyed when they time out. If a + * handler is assigned to the request, the response, or the server's `'timeout'`events, timed out sockets must be handled explicitly. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + read(size?: number): Buffer | string | null; + addListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "aborted", hadError: boolean, code: number): boolean; + emit(event: "close"): boolean; + emit(event: "data", chunk: Buffer | string): boolean; + emit(event: "end"): boolean; + emit(event: "readable"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "end", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "end", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => 
void): this; + prependOnceListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * This object is created internally by an HTTP server, not by the user. It is + * passed as the second parameter to the `'request'` event. + * @since v8.4.0 + */ + export class Http2ServerResponse extends stream.Writable { + constructor(stream: ServerHttp2Stream); + /** + * See `response.socket`. + * @since v8.4.0 + * @deprecated Since v13.0.0 - Use `socket`. + */ + readonly connection: net.Socket | tls.TLSSocket; + /** + * Boolean value that indicates whether the response has completed. Starts + * as `false`. After `response.end()` executes, the value will be `true`. + * @since v8.4.0 + * @deprecated Since v13.4.0,v12.16.0 - Use `writableEnded`. + */ + readonly finished: boolean; + /** + * True if headers were sent, false otherwise (read-only). + * @since v8.4.0 + */ + readonly headersSent: boolean; + /** + * A reference to the original HTTP2 `request` object. + * @since v15.7.0 + */ + readonly req: Http2ServerRequest; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * applies getters, setters, and methods based on HTTP/2 logic. + * + * `destroyed`, `readable`, and `writable` properties will be retrieved from and + * set on `response.stream`. + * + * `destroy`, `emit`, `end`, `on` and `once` methods will be called on`response.stream`. + * + * `setTimeout` method will be called on `response.stream.session`. + * + * `pause`, `read`, `resume`, and `write` will throw an error with code`ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for + * more information. + * + * All other interactions will be routed directly to the socket. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer((req, res) => { + * const ip = req.socket.remoteAddress; + * const port = req.socket.remotePort; + * res.end(`Your IP address is ${ip} and your source port is ${port}.`); + * }).listen(3000); + * ``` + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * The `Http2Stream` object backing the response. + * @since v8.4.0 + */ + readonly stream: ServerHttp2Stream; + /** + * When true, the Date header will be automatically generated and sent in + * the response if it is not already present in the headers. Defaults to true. + * + * This should only be disabled for testing; HTTP requires the Date header + * in responses. + * @since v8.4.0 + */ + sendDate: boolean; + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status code that will be sent to the client when + * the headers get flushed. + * + * ```js + * response.statusCode = 404; + * ``` + * + * After response header was sent to the client, this property indicates the + * status code which was sent out. + * @since v8.4.0 + */ + statusCode: number; + /** + * Status message is not supported by HTTP/2 (RFC 7540 8.1.2.4). It returns + * an empty string. 
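The `Http2ServerRequest` and `Http2ServerResponse` members documented in this stretch make up the Compatibility API handed to the `'request'` event. A small, hedged sketch of a handler written against these typings (the port and response text are illustrative only):

```ts
// Hedged example: reading pseudo-headers and using implicit-header mode.
import { createServer } from "node:http2";

const server = createServer((req, res) => {
    // req.headers includes pseudo-headers such as ':path'; they should only be
    // read, never removed or rewritten.
    const path = req.headers[":path"];

    res.statusCode = 200; // implicit headers; flushed on the first write/end
    res.setHeader("content-type", "text/plain; charset=utf-8");
    res.end(`You requested ${path} with ${req.method}\n`);
});

server.listen(8000);
```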
+ * @since v8.4.0 + */ + statusMessage: ""; + /** + * This method adds HTTP trailing headers (a header but at the end of the + * message) to the response. + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v8.4.0 + */ + addTrailers(trailers: OutgoingHttpHeaders): void; + /** + * This method signals to the server that all of the response headers and body + * have been sent; that server should consider this message complete. + * The method, `response.end()`, MUST be called on each response. + * + * If `data` is specified, it is equivalent to calling `response.write(data, encoding)` followed by `response.end(callback)`. + * + * If `callback` is specified, it will be called when the response stream + * is finished. + * @since v8.4.0 + */ + end(callback?: () => void): this; + end(data: string | Uint8Array, callback?: () => void): this; + end(data: string | Uint8Array, encoding: BufferEncoding, callback?: () => void): this; + /** + * Reads out a header that has already been queued but not sent to the client. + * The name is case-insensitive. + * + * ```js + * const contentType = response.getHeader('content-type'); + * ``` + * @since v8.4.0 + */ + getHeader(name: string): string; + /** + * Returns an array containing the unique names of the current outgoing headers. + * All header names are lowercase. + * + * ```js + * response.setHeader('Foo', 'bar'); + * response.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headerNames = response.getHeaderNames(); + * // headerNames === ['foo', 'set-cookie'] + * ``` + * @since v8.4.0 + */ + getHeaderNames(): string[]; + /** + * Returns a shallow copy of the current outgoing headers. Since a shallow copy + * is used, array values may be mutated without additional calls to various + * header-related http module methods. The keys of the returned object are the + * header names and the values are the respective header values. All header names + * are lowercase. + * + * The object returned by the `response.getHeaders()` method _does not_prototypically inherit from the JavaScript `Object`. This means that typical`Object` methods such as `obj.toString()`, + * `obj.hasOwnProperty()`, and others + * are not defined and _will not work_. + * + * ```js + * response.setHeader('Foo', 'bar'); + * response.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headers = response.getHeaders(); + * // headers === { foo: 'bar', 'set-cookie': ['foo=bar', 'bar=baz'] } + * ``` + * @since v8.4.0 + */ + getHeaders(): OutgoingHttpHeaders; + /** + * Returns `true` if the header identified by `name` is currently set in the + * outgoing headers. The header name matching is case-insensitive. + * + * ```js + * const hasContentType = response.hasHeader('content-type'); + * ``` + * @since v8.4.0 + */ + hasHeader(name: string): boolean; + /** + * Removes a header that has been queued for implicit sending. + * + * ```js + * response.removeHeader('Content-Encoding'); + * ``` + * @since v8.4.0 + */ + removeHeader(name: string): void; + /** + * Sets a single header value for implicit headers. If this header already exists + * in the to-be-sent headers, its value will be replaced. Use an array of strings + * here to send multiple headers with the same name. 
+ * + * ```js + * response.setHeader('Content-Type', 'text/html; charset=utf-8'); + * ``` + * + * or + * + * ```js + * response.setHeader('Set-Cookie', ['type=ninja', 'language=javascript']); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * ```js + * // Returns content-type = text/plain + * const server = http2.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html; charset=utf-8'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' }); + * res.end('ok'); + * }); + * ``` + * @since v8.4.0 + */ + setHeader(name: string, value: number | string | readonly string[]): void; + /** + * Sets the `Http2Stream`'s timeout value to `msecs`. If a callback is + * provided, then it is added as a listener on the `'timeout'` event on + * the response object. + * + * If no `'timeout'` listener is added to the request, the response, or + * the server, then `Http2Stream` s are destroyed when they time out. If a + * handler is assigned to the request, the response, or the server's `'timeout'`events, timed out sockets must be handled explicitly. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * If this method is called and `response.writeHead()` has not been called, + * it will switch to implicit header mode and flush the implicit headers. + * + * This sends a chunk of the response body. This method may + * be called multiple times to provide successive parts of the body. + * + * In the `node:http` module, the response body is omitted when the + * request is a HEAD request. Similarly, the `204` and `304` responses _must not_ include a message body. + * + * `chunk` can be a string or a buffer. If `chunk` is a string, + * the second parameter specifies how to encode it into a byte stream. + * By default the `encoding` is `'utf8'`. `callback` will be called when this chunk + * of data is flushed. + * + * This is the raw HTTP body and has nothing to do with higher-level multi-part + * body encodings that may be used. + * + * The first time `response.write()` is called, it will send the buffered + * header information and the first chunk of the body to the client. The second + * time `response.write()` is called, Node.js assumes data will be streamed, + * and sends the new data separately. That is, the response is buffered up to the + * first chunk of the body. + * + * Returns `true` if the entire data was flushed successfully to the kernel + * buffer. Returns `false` if all or part of the data was queued in user memory.`'drain'` will be emitted when the buffer is free again. + * @since v8.4.0 + */ + write(chunk: string | Uint8Array, callback?: (err: Error) => void): boolean; + write(chunk: string | Uint8Array, encoding: BufferEncoding, callback?: (err: Error) => void): boolean; + /** + * Sends a status `100 Continue` to the client, indicating that the request body + * should be sent. See the `'checkContinue'` event on `Http2Server` and`Http2SecureServer`. + * @since v8.4.0 + */ + writeContinue(): void; + /** + * Sends a status `103 Early Hints` to the client with a Link header, + * indicating that the user agent can preload/preconnect the linked resources. 
+ * The `hints` is an object containing the values of headers to be sent with + * early hints message. + * + * **Example** + * + * ```js + * const earlyHintsLink = '</styles.css>; rel=preload; as=style'; + * response.writeEarlyHints({ + * 'link': earlyHintsLink, + * }); + * + * const earlyHintsLinks = [ + * '</styles.css>; rel=preload; as=style', + * '</scripts.js>; rel=preload; as=script', + * ]; + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * }); + * ``` + * @since v18.11.0 + */ + writeEarlyHints(hints: Record<string, string | string[]>): void; + /** + * Sends a response header to the request. The status code is a 3-digit HTTP + * status code, like `404`. The last argument, `headers`, are the response headers. + * + * Returns a reference to the `Http2ServerResponse`, so that calls can be chained. + * + * For compatibility with `HTTP/1`, a human-readable `statusMessage` may be + * passed as the second argument. However, because the `statusMessage` has no + * meaning within HTTP/2, the argument will have no effect and a process warning + * will be emitted. + * + * ```js + * const body = 'hello world'; + * response.writeHead(200, { + * 'Content-Length': Buffer.byteLength(body), + * 'Content-Type': 'text/plain; charset=utf-8', + * }); + * ``` + * + * `Content-Length` is given in bytes not characters. The`Buffer.byteLength()` API may be used to determine the number of bytes in a + * given encoding. On outbound messages, Node.js does not check if Content-Length + * and the length of the body being transmitted are equal or not. However, when + * receiving messages, Node.js will automatically reject messages when the`Content-Length` does not match the actual payload size. + * + * This method may be called at most one time on a message before `response.end()` is called. + * + * If `response.write()` or `response.end()` are called before calling + * this, the implicit/mutable headers will be calculated and call this function. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * ```js + * // Returns content-type = text/plain + * const server = http2.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html; charset=utf-8'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' }); + * res.end('ok'); + * }); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v8.4.0 + */ + writeHead(statusCode: number, headers?: OutgoingHttpHeaders): this; + writeHead(statusCode: number, statusMessage: string, headers?: OutgoingHttpHeaders): this; + /** + * Call `http2stream.pushStream()` with the given headers, and wrap the + * given `Http2Stream` on a newly created `Http2ServerResponse` as the callback + * parameter if successful. When `Http2ServerRequest` is closed, the callback is + * called with an error `ERR_HTTP2_INVALID_STREAM`.
+ * @since v8.4.0 + * @param headers An object describing the headers + * @param callback Called once `http2stream.pushStream()` is finished, or either when the attempt to create the pushed `Http2Stream` has failed or has been rejected, or the state of + * `Http2ServerRequest` is closed prior to calling the `http2stream.pushStream()` method + */ + createPushResponse( + headers: OutgoingHttpHeaders, + callback: (err: Error | null, res: Http2ServerResponse) => void, + ): void; + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (error: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "drain"): boolean; + emit(event: "error", error: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "pipe", src: stream.Readable): boolean; + emit(event: "unpipe", src: stream.Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (error: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (error: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (error: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (error: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export namespace constants { + const NGHTTP2_SESSION_SERVER: number; + const NGHTTP2_SESSION_CLIENT: number; + const NGHTTP2_STREAM_STATE_IDLE: number; + const NGHTTP2_STREAM_STATE_OPEN: number; + const NGHTTP2_STREAM_STATE_RESERVED_LOCAL: number; + const NGHTTP2_STREAM_STATE_RESERVED_REMOTE: number; + const 
NGHTTP2_STREAM_STATE_HALF_CLOSED_LOCAL: number; + const NGHTTP2_STREAM_STATE_HALF_CLOSED_REMOTE: number; + const NGHTTP2_STREAM_STATE_CLOSED: number; + const NGHTTP2_NO_ERROR: number; + const NGHTTP2_PROTOCOL_ERROR: number; + const NGHTTP2_INTERNAL_ERROR: number; + const NGHTTP2_FLOW_CONTROL_ERROR: number; + const NGHTTP2_SETTINGS_TIMEOUT: number; + const NGHTTP2_STREAM_CLOSED: number; + const NGHTTP2_FRAME_SIZE_ERROR: number; + const NGHTTP2_REFUSED_STREAM: number; + const NGHTTP2_CANCEL: number; + const NGHTTP2_COMPRESSION_ERROR: number; + const NGHTTP2_CONNECT_ERROR: number; + const NGHTTP2_ENHANCE_YOUR_CALM: number; + const NGHTTP2_INADEQUATE_SECURITY: number; + const NGHTTP2_HTTP_1_1_REQUIRED: number; + const NGHTTP2_ERR_FRAME_SIZE_ERROR: number; + const NGHTTP2_FLAG_NONE: number; + const NGHTTP2_FLAG_END_STREAM: number; + const NGHTTP2_FLAG_END_HEADERS: number; + const NGHTTP2_FLAG_ACK: number; + const NGHTTP2_FLAG_PADDED: number; + const NGHTTP2_FLAG_PRIORITY: number; + const DEFAULT_SETTINGS_HEADER_TABLE_SIZE: number; + const DEFAULT_SETTINGS_ENABLE_PUSH: number; + const DEFAULT_SETTINGS_INITIAL_WINDOW_SIZE: number; + const DEFAULT_SETTINGS_MAX_FRAME_SIZE: number; + const MAX_MAX_FRAME_SIZE: number; + const MIN_MAX_FRAME_SIZE: number; + const MAX_INITIAL_WINDOW_SIZE: number; + const NGHTTP2_DEFAULT_WEIGHT: number; + const NGHTTP2_SETTINGS_HEADER_TABLE_SIZE: number; + const NGHTTP2_SETTINGS_ENABLE_PUSH: number; + const NGHTTP2_SETTINGS_MAX_CONCURRENT_STREAMS: number; + const NGHTTP2_SETTINGS_INITIAL_WINDOW_SIZE: number; + const NGHTTP2_SETTINGS_MAX_FRAME_SIZE: number; + const NGHTTP2_SETTINGS_MAX_HEADER_LIST_SIZE: number; + const PADDING_STRATEGY_NONE: number; + const PADDING_STRATEGY_MAX: number; + const PADDING_STRATEGY_CALLBACK: number; + const HTTP2_HEADER_STATUS: string; + const HTTP2_HEADER_METHOD: string; + const HTTP2_HEADER_AUTHORITY: string; + const HTTP2_HEADER_SCHEME: string; + const HTTP2_HEADER_PATH: string; + const HTTP2_HEADER_ACCEPT_CHARSET: string; + const HTTP2_HEADER_ACCEPT_ENCODING: string; + const HTTP2_HEADER_ACCEPT_LANGUAGE: string; + const HTTP2_HEADER_ACCEPT_RANGES: string; + const HTTP2_HEADER_ACCEPT: string; + const HTTP2_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN: string; + const HTTP2_HEADER_AGE: string; + const HTTP2_HEADER_ALLOW: string; + const HTTP2_HEADER_AUTHORIZATION: string; + const HTTP2_HEADER_CACHE_CONTROL: string; + const HTTP2_HEADER_CONNECTION: string; + const HTTP2_HEADER_CONTENT_DISPOSITION: string; + const HTTP2_HEADER_CONTENT_ENCODING: string; + const HTTP2_HEADER_CONTENT_LANGUAGE: string; + const HTTP2_HEADER_CONTENT_LENGTH: string; + const HTTP2_HEADER_CONTENT_LOCATION: string; + const HTTP2_HEADER_CONTENT_MD5: string; + const HTTP2_HEADER_CONTENT_RANGE: string; + const HTTP2_HEADER_CONTENT_TYPE: string; + const HTTP2_HEADER_COOKIE: string; + const HTTP2_HEADER_DATE: string; + const HTTP2_HEADER_ETAG: string; + const HTTP2_HEADER_EXPECT: string; + const HTTP2_HEADER_EXPIRES: string; + const HTTP2_HEADER_FROM: string; + const HTTP2_HEADER_HOST: string; + const HTTP2_HEADER_IF_MATCH: string; + const HTTP2_HEADER_IF_MODIFIED_SINCE: string; + const HTTP2_HEADER_IF_NONE_MATCH: string; + const HTTP2_HEADER_IF_RANGE: string; + const HTTP2_HEADER_IF_UNMODIFIED_SINCE: string; + const HTTP2_HEADER_LAST_MODIFIED: string; + const HTTP2_HEADER_LINK: string; + const HTTP2_HEADER_LOCATION: string; + const HTTP2_HEADER_MAX_FORWARDS: string; + const HTTP2_HEADER_PREFER: string; + const HTTP2_HEADER_PROXY_AUTHENTICATE: string; + const 
HTTP2_HEADER_PROXY_AUTHORIZATION: string; + const HTTP2_HEADER_RANGE: string; + const HTTP2_HEADER_REFERER: string; + const HTTP2_HEADER_REFRESH: string; + const HTTP2_HEADER_RETRY_AFTER: string; + const HTTP2_HEADER_SERVER: string; + const HTTP2_HEADER_SET_COOKIE: string; + const HTTP2_HEADER_STRICT_TRANSPORT_SECURITY: string; + const HTTP2_HEADER_TRANSFER_ENCODING: string; + const HTTP2_HEADER_TE: string; + const HTTP2_HEADER_UPGRADE: string; + const HTTP2_HEADER_USER_AGENT: string; + const HTTP2_HEADER_VARY: string; + const HTTP2_HEADER_VIA: string; + const HTTP2_HEADER_WWW_AUTHENTICATE: string; + const HTTP2_HEADER_HTTP2_SETTINGS: string; + const HTTP2_HEADER_KEEP_ALIVE: string; + const HTTP2_HEADER_PROXY_CONNECTION: string; + const HTTP2_METHOD_ACL: string; + const HTTP2_METHOD_BASELINE_CONTROL: string; + const HTTP2_METHOD_BIND: string; + const HTTP2_METHOD_CHECKIN: string; + const HTTP2_METHOD_CHECKOUT: string; + const HTTP2_METHOD_CONNECT: string; + const HTTP2_METHOD_COPY: string; + const HTTP2_METHOD_DELETE: string; + const HTTP2_METHOD_GET: string; + const HTTP2_METHOD_HEAD: string; + const HTTP2_METHOD_LABEL: string; + const HTTP2_METHOD_LINK: string; + const HTTP2_METHOD_LOCK: string; + const HTTP2_METHOD_MERGE: string; + const HTTP2_METHOD_MKACTIVITY: string; + const HTTP2_METHOD_MKCALENDAR: string; + const HTTP2_METHOD_MKCOL: string; + const HTTP2_METHOD_MKREDIRECTREF: string; + const HTTP2_METHOD_MKWORKSPACE: string; + const HTTP2_METHOD_MOVE: string; + const HTTP2_METHOD_OPTIONS: string; + const HTTP2_METHOD_ORDERPATCH: string; + const HTTP2_METHOD_PATCH: string; + const HTTP2_METHOD_POST: string; + const HTTP2_METHOD_PRI: string; + const HTTP2_METHOD_PROPFIND: string; + const HTTP2_METHOD_PROPPATCH: string; + const HTTP2_METHOD_PUT: string; + const HTTP2_METHOD_REBIND: string; + const HTTP2_METHOD_REPORT: string; + const HTTP2_METHOD_SEARCH: string; + const HTTP2_METHOD_TRACE: string; + const HTTP2_METHOD_UNBIND: string; + const HTTP2_METHOD_UNCHECKOUT: string; + const HTTP2_METHOD_UNLINK: string; + const HTTP2_METHOD_UNLOCK: string; + const HTTP2_METHOD_UPDATE: string; + const HTTP2_METHOD_UPDATEREDIRECTREF: string; + const HTTP2_METHOD_VERSION_CONTROL: string; + const HTTP_STATUS_CONTINUE: number; + const HTTP_STATUS_SWITCHING_PROTOCOLS: number; + const HTTP_STATUS_PROCESSING: number; + const HTTP_STATUS_OK: number; + const HTTP_STATUS_CREATED: number; + const HTTP_STATUS_ACCEPTED: number; + const HTTP_STATUS_NON_AUTHORITATIVE_INFORMATION: number; + const HTTP_STATUS_NO_CONTENT: number; + const HTTP_STATUS_RESET_CONTENT: number; + const HTTP_STATUS_PARTIAL_CONTENT: number; + const HTTP_STATUS_MULTI_STATUS: number; + const HTTP_STATUS_ALREADY_REPORTED: number; + const HTTP_STATUS_IM_USED: number; + const HTTP_STATUS_MULTIPLE_CHOICES: number; + const HTTP_STATUS_MOVED_PERMANENTLY: number; + const HTTP_STATUS_FOUND: number; + const HTTP_STATUS_SEE_OTHER: number; + const HTTP_STATUS_NOT_MODIFIED: number; + const HTTP_STATUS_USE_PROXY: number; + const HTTP_STATUS_TEMPORARY_REDIRECT: number; + const HTTP_STATUS_PERMANENT_REDIRECT: number; + const HTTP_STATUS_BAD_REQUEST: number; + const HTTP_STATUS_UNAUTHORIZED: number; + const HTTP_STATUS_PAYMENT_REQUIRED: number; + const HTTP_STATUS_FORBIDDEN: number; + const HTTP_STATUS_NOT_FOUND: number; + const HTTP_STATUS_METHOD_NOT_ALLOWED: number; + const HTTP_STATUS_NOT_ACCEPTABLE: number; + const HTTP_STATUS_PROXY_AUTHENTICATION_REQUIRED: number; + const HTTP_STATUS_REQUEST_TIMEOUT: number; + const HTTP_STATUS_CONFLICT: number; + 
const HTTP_STATUS_GONE: number; + const HTTP_STATUS_LENGTH_REQUIRED: number; + const HTTP_STATUS_PRECONDITION_FAILED: number; + const HTTP_STATUS_PAYLOAD_TOO_LARGE: number; + const HTTP_STATUS_URI_TOO_LONG: number; + const HTTP_STATUS_UNSUPPORTED_MEDIA_TYPE: number; + const HTTP_STATUS_RANGE_NOT_SATISFIABLE: number; + const HTTP_STATUS_EXPECTATION_FAILED: number; + const HTTP_STATUS_TEAPOT: number; + const HTTP_STATUS_MISDIRECTED_REQUEST: number; + const HTTP_STATUS_UNPROCESSABLE_ENTITY: number; + const HTTP_STATUS_LOCKED: number; + const HTTP_STATUS_FAILED_DEPENDENCY: number; + const HTTP_STATUS_UNORDERED_COLLECTION: number; + const HTTP_STATUS_UPGRADE_REQUIRED: number; + const HTTP_STATUS_PRECONDITION_REQUIRED: number; + const HTTP_STATUS_TOO_MANY_REQUESTS: number; + const HTTP_STATUS_REQUEST_HEADER_FIELDS_TOO_LARGE: number; + const HTTP_STATUS_UNAVAILABLE_FOR_LEGAL_REASONS: number; + const HTTP_STATUS_INTERNAL_SERVER_ERROR: number; + const HTTP_STATUS_NOT_IMPLEMENTED: number; + const HTTP_STATUS_BAD_GATEWAY: number; + const HTTP_STATUS_SERVICE_UNAVAILABLE: number; + const HTTP_STATUS_GATEWAY_TIMEOUT: number; + const HTTP_STATUS_HTTP_VERSION_NOT_SUPPORTED: number; + const HTTP_STATUS_VARIANT_ALSO_NEGOTIATES: number; + const HTTP_STATUS_INSUFFICIENT_STORAGE: number; + const HTTP_STATUS_LOOP_DETECTED: number; + const HTTP_STATUS_BANDWIDTH_LIMIT_EXCEEDED: number; + const HTTP_STATUS_NOT_EXTENDED: number; + const HTTP_STATUS_NETWORK_AUTHENTICATION_REQUIRED: number; + } + /** + * This symbol can be set as a property on the HTTP/2 headers object with + * an array value in order to provide a list of headers considered sensitive. + */ + export const sensitiveHeaders: symbol; + /** + * Returns an object containing the default settings for an `Http2Session`instance. This method returns a new object instance every time it is called + * so instances returned may be safely modified for use. + * @since v8.4.0 + */ + export function getDefaultSettings(): Settings; + /** + * Returns a `Buffer` instance containing serialized representation of the given + * HTTP/2 settings as specified in the [HTTP/2](https://tools.ietf.org/html/rfc7540) specification. This is intended + * for use with the `HTTP2-Settings` header field. + * + * ```js + * const http2 = require('node:http2'); + * + * const packed = http2.getPackedSettings({ enablePush: false }); + * + * console.log(packed.toString('base64')); + * // Prints: AAIAAAAA + * ``` + * @since v8.4.0 + */ + export function getPackedSettings(settings: Settings): Buffer; + /** + * Returns a `HTTP/2 Settings Object` containing the deserialized settings from + * the given `Buffer` as generated by `http2.getPackedSettings()`. + * @since v8.4.0 + * @param buf The packed settings. + */ + export function getUnpackedSettings(buf: Uint8Array): Settings; + /** + * Returns a `net.Server` instance that creates and manages `Http2Session`instances. + * + * Since there are no browsers known that support [unencrypted HTTP/2](https://http2.github.io/faq/#does-http2-require-encryption), the use of {@link createSecureServer} is necessary when + * communicating + * with browser clients. + * + * ```js + * const http2 = require('node:http2'); + * + * // Create an unencrypted HTTP/2 server. + * // Since there are no browsers known that support + * // unencrypted HTTP/2, the use of `http2.createSecureServer()` + * // is necessary when communicating with browser clients. 
+ * const server = http2.createServer(); + * + * server.on('stream', (stream, headers) => { + * stream.respond({ + * 'content-type': 'text/html; charset=utf-8', + * ':status': 200, + * }); + * stream.end('

<h1>Hello World</h1>
'); + * }); + * + * server.listen(8000); + * ``` + * @since v8.4.0 + * @param onRequestHandler See `Compatibility API` + */ + export function createServer( + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2Server; + export function createServer( + options: ServerOptions, + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2Server; + /** + * Returns a `tls.Server` instance that creates and manages `Http2Session`instances. + * + * ```js + * const http2 = require('node:http2'); + * const fs = require('node:fs'); + * + * const options = { + * key: fs.readFileSync('server-key.pem'), + * cert: fs.readFileSync('server-cert.pem'), + * }; + * + * // Create a secure HTTP/2 server + * const server = http2.createSecureServer(options); + * + * server.on('stream', (stream, headers) => { + * stream.respond({ + * 'content-type': 'text/html; charset=utf-8', + * ':status': 200, + * }); + * stream.end('

<h1>Hello World</h1>
'); + * }); + * + * server.listen(8443); + * ``` + * @since v8.4.0 + * @param onRequestHandler See `Compatibility API` + */ + export function createSecureServer( + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2SecureServer; + export function createSecureServer( + options: SecureServerOptions, + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2SecureServer; + /** + * Returns a `ClientHttp2Session` instance. + * + * ```js + * const http2 = require('node:http2'); + * const client = http2.connect('https://localhost:1234'); + * + * // Use the client + * + * client.close(); + * ``` + * @since v8.4.0 + * @param authority The remote HTTP/2 server to connect to. This must be in the form of a minimal, valid URL with the `http://` or `https://` prefix, host name, and IP port (if a non-default port + * is used). Userinfo (user ID and password), path, querystring, and fragment details in the URL will be ignored. + * @param listener Will be registered as a one-time listener of the {@link 'connect'} event. + */ + export function connect( + authority: string | url.URL, + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): ClientHttp2Session; + export function connect( + authority: string | url.URL, + options?: ClientSessionOptions | SecureClientSessionOptions, + listener?: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): ClientHttp2Session; +} +declare module "node:http2" { + export * from "http2"; +} diff --git a/dist/node_modules/@types/node/https.d.ts b/dist/node_modules/@types/node/https.d.ts new file mode 100644 index 0000000..36ae5b2 --- /dev/null +++ b/dist/node_modules/@types/node/https.d.ts @@ -0,0 +1,550 @@ +/** + * HTTPS is the HTTP protocol over TLS/SSL. In Node.js this is implemented as a + * separate module. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/https.js) + */ +declare module "https" { + import { Duplex } from "node:stream"; + import * as tls from "node:tls"; + import * as http from "node:http"; + import { URL } from "node:url"; + type ServerOptions< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > = tls.SecureContextOptions & tls.TlsOptions & http.ServerOptions; + type RequestOptions = + & http.RequestOptions + & tls.SecureContextOptions + & { + checkServerIdentity?: typeof tls.checkServerIdentity | undefined; + rejectUnauthorized?: boolean | undefined; // Defaults to true + servername?: string | undefined; // SNI TLS Extension + }; + interface AgentOptions extends http.AgentOptions, tls.ConnectionOptions { + rejectUnauthorized?: boolean | undefined; + maxCachedSessions?: number | undefined; + } + /** + * An `Agent` object for HTTPS similar to `http.Agent`. See {@link request} for more information. + * @since v0.4.5 + */ + class Agent extends http.Agent { + constructor(options?: AgentOptions); + options: AgentOptions; + } + interface Server< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > extends http.Server {} + /** + * See `http.Server` for more information. 
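For orientation, a hedged sketch of how the `AgentOptions` and `RequestOptions` shapes declared above are commonly combined (the host name is a placeholder, and `rejectUnauthorized: true` merely restates the default):

```ts
// Hedged example: a keep-alive Agent plus per-request TLS options.
import * as https from "node:https";

const agent = new https.Agent({
    keepAlive: true,       // from http.AgentOptions
    maxCachedSessions: 50, // AgentOptions: size of the TLS session cache
});

const req = https.request(
    {
        hostname: "example.com",   // placeholder host
        path: "/",
        method: "GET",
        agent,
        servername: "example.com", // SNI (RequestOptions)
        rejectUnauthorized: true,  // default behaviour, shown explicitly
    },
    (res) => {
        console.log("status:", res.statusCode);
        res.resume(); // drain the body so the socket can be reused
    },
);

req.on("error", (err) => console.error(err));
req.end();
```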
+ * @since v0.3.4 + */ + class Server< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > extends tls.Server { + constructor(requestListener?: http.RequestListener); + constructor( + options: ServerOptions, + requestListener?: http.RequestListener, + ); + /** + * Closes all connections connected to this server. + * @since v18.2.0 + */ + closeAllConnections(): void; + /** + * Closes all connections connected to this server which are not sending a request or waiting for a response. + * @since v18.2.0 + */ + closeIdleConnections(): void; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + addListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + addListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + addListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + addListener(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + addListener(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connection", listener: (socket: Duplex) => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "checkContinue", listener: http.RequestListener): this; + addListener(event: "checkExpectation", listener: http.RequestListener): this; + addListener(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + addListener( + event: "connect", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + addListener(event: "request", listener: http.RequestListener): this; + addListener( + event: "upgrade", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + emit(event: string, ...args: any[]): boolean; + emit(event: "keylog", line: Buffer, tlsSocket: tls.TLSSocket): boolean; + emit( + event: "newSession", + sessionId: Buffer, + sessionData: Buffer, + callback: (err: Error, resp: Buffer) => void, + ): boolean; + emit( + event: "OCSPRequest", + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ): boolean; + emit(event: "resumeSession", sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void): boolean; + emit(event: "secureConnection", tlsSocket: tls.TLSSocket): boolean; + emit(event: "tlsClientError", err: Error, tlsSocket: tls.TLSSocket): boolean; + emit(event: "close"): boolean; + emit(event: "connection", socket: Duplex): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit( + event: "checkContinue", + req: InstanceType, + res: InstanceType & { + req: InstanceType; + }, + ): boolean; + emit( + event: "checkExpectation", + req: InstanceType, + res: InstanceType & { + req: InstanceType; + }, + ): boolean; + emit(event: "clientError", err: Error, socket: Duplex): boolean; + emit(event: "connect", req: InstanceType, socket: Duplex, head: Buffer): boolean; + emit( + event: 
"request", + req: InstanceType, + res: InstanceType & { + req: InstanceType; + }, + ): boolean; + emit(event: "upgrade", req: InstanceType, socket: Duplex, head: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + on( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + on( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + on( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + on(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + on(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connection", listener: (socket: Duplex) => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "checkContinue", listener: http.RequestListener): this; + on(event: "checkExpectation", listener: http.RequestListener): this; + on(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + on(event: "connect", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + on(event: "request", listener: http.RequestListener): this; + on(event: "upgrade", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + once( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + once( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + once( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + once(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + once(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connection", listener: (socket: Duplex) => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "checkContinue", listener: http.RequestListener): this; + once(event: "checkExpectation", listener: http.RequestListener): this; + once(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + once(event: "connect", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + once(event: "request", listener: http.RequestListener): this; + once(event: "upgrade", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + prependListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + 
prependListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + prependListener(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + prependListener(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connection", listener: (socket: Duplex) => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "checkContinue", listener: http.RequestListener): this; + prependListener(event: "checkExpectation", listener: http.RequestListener): this; + prependListener(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + prependListener( + event: "connect", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependListener(event: "request", listener: http.RequestListener): this; + prependListener( + event: "upgrade", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + prependOnceListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependOnceListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + prependOnceListener(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "connection", listener: (socket: Duplex) => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "checkContinue", listener: http.RequestListener): this; + prependOnceListener(event: "checkExpectation", listener: http.RequestListener): this; + prependOnceListener(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + prependOnceListener( + event: "connect", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: "request", listener: http.RequestListener): this; + prependOnceListener( + event: "upgrade", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + } + /** + * ```js + * // curl -k https://localhost:8000/ + * const https = require('node:https'); + * const fs = require('node:fs'); + * + * const options = { + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * }; + * + * https.createServer(options, (req, res) => { + * 
res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * ``` + * + * Or + * + * ```js + * const https = require('node:https'); + * const fs = require('node:fs'); + * + * const options = { + * pfx: fs.readFileSync('test/fixtures/test_cert.pfx'), + * passphrase: 'sample', + * }; + * + * https.createServer(options, (req, res) => { + * res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * ``` + * @since v0.3.4 + * @param options Accepts `options` from `createServer`, `createSecureContext` and `createServer`. + * @param requestListener A listener to be added to the `'request'` event. + */ + function createServer< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + >(requestListener?: http.RequestListener): Server; + function createServer< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + >( + options: ServerOptions, + requestListener?: http.RequestListener, + ): Server; + /** + * Makes a request to a secure web server. + * + * The following additional `options` from `tls.connect()` are also accepted:`ca`, `cert`, `ciphers`, `clientCertEngine`, `crl`, `dhparam`, `ecdhCurve`,`honorCipherOrder`, `key`, `passphrase`, + * `pfx`, `rejectUnauthorized`,`secureOptions`, `secureProtocol`, `servername`, `sessionIdContext`,`highWaterMark`. + * + * `options` can be an object, a string, or a `URL` object. If `options` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * `https.request()` returns an instance of the `http.ClientRequest` class. The `ClientRequest` instance is a writable stream. If one needs to + * upload a file with a POST request, then write to the `ClientRequest` object. + * + * ```js + * const https = require('node:https'); + * + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * }; + * + * const req = https.request(options, (res) => { + * console.log('statusCode:', res.statusCode); + * console.log('headers:', res.headers); + * + * res.on('data', (d) => { + * process.stdout.write(d); + * }); + * }); + * + * req.on('error', (e) => { + * console.error(e); + * }); + * req.end(); + * ``` + * + * Example using options from `tls.connect()`: + * + * ```js + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * }; + * options.agent = new https.Agent(options); + * + * const req = https.request(options, (res) => { + * // ... + * }); + * ``` + * + * Alternatively, opt out of connection pooling by not using an `Agent`. + * + * ```js + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * agent: false, + * }; + * + * const req = https.request(options, (res) => { + * // ... + * }); + * ``` + * + * Example using a `URL` as `options`: + * + * ```js + * const options = new URL('https://abc:xyz@example.com'); + * + * const req = https.request(options, (res) => { + * // ... 
+ * }); + * ``` + * + * Example pinning on certificate fingerprint, or the public key (similar to`pin-sha256`): + * + * ```js + * const tls = require('node:tls'); + * const https = require('node:https'); + * const crypto = require('node:crypto'); + * + * function sha256(s) { + * return crypto.createHash('sha256').update(s).digest('base64'); + * } + * const options = { + * hostname: 'github.com', + * port: 443, + * path: '/', + * method: 'GET', + * checkServerIdentity: function(host, cert) { + * // Make sure the certificate is issued to the host we are connected to + * const err = tls.checkServerIdentity(host, cert); + * if (err) { + * return err; + * } + * + * // Pin the public key, similar to HPKP pin-sha256 pinning + * const pubkey256 = 'pL1+qb9HTMRZJmuC/bB/ZI9d302BYrrqiVuRyW+DGrU='; + * if (sha256(cert.pubkey) !== pubkey256) { + * const msg = 'Certificate verification error: ' + + * `The public key of '${cert.subject.CN}' ` + + * 'does not match our pinned fingerprint'; + * return new Error(msg); + * } + * + * // Pin the exact certificate, rather than the pub key + * const cert256 = '25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:' + + * 'D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16'; + * if (cert.fingerprint256 !== cert256) { + * const msg = 'Certificate verification error: ' + + * `The certificate of '${cert.subject.CN}' ` + + * 'does not match our pinned fingerprint'; + * return new Error(msg); + * } + * + * // This loop is informational only. + * // Print the certificate and public key fingerprints of all certs in the + * // chain. Its common to pin the public key of the issuer on the public + * // internet, while pinning the public key of the service in sensitive + * // environments. + * do { + * console.log('Subject Common Name:', cert.subject.CN); + * console.log(' Certificate SHA256 fingerprint:', cert.fingerprint256); + * + * hash = crypto.createHash('sha256'); + * console.log(' Public key ping-sha256:', sha256(cert.pubkey)); + * + * lastprint256 = cert.fingerprint256; + * cert = cert.issuerCertificate; + * } while (cert.fingerprint256 !== lastprint256); + * + * }, + * }; + * + * options.agent = new https.Agent(options); + * const req = https.request(options, (res) => { + * console.log('All OK. Server matched our pinned cert or public key'); + * console.log('statusCode:', res.statusCode); + * // Print the HPKP values + * console.log('headers:', res.headers['public-key-pins']); + * + * res.on('data', (d) => {}); + * }); + * + * req.on('error', (e) => { + * console.error(e.message); + * }); + * req.end(); + * ``` + * + * Outputs for example: + * + * ```text + * Subject Common Name: github.com + * Certificate SHA256 fingerprint: 25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16 + * Public key ping-sha256: pL1+qb9HTMRZJmuC/bB/ZI9d302BYrrqiVuRyW+DGrU= + * Subject Common Name: DigiCert SHA2 Extended Validation Server CA + * Certificate SHA256 fingerprint: 40:3E:06:2A:26:53:05:91:13:28:5B:AF:80:A0:D4:AE:42:2C:84:8C:9F:78:FA:D0:1F:C9:4B:C5:B8:7F:EF:1A + * Public key ping-sha256: RRM1dGqnDFsCJXBTHky16vi1obOlCgFFn/yOhI/y+ho= + * Subject Common Name: DigiCert High Assurance EV Root CA + * Certificate SHA256 fingerprint: 74:31:E5:F4:C3:C1:CE:46:90:77:4F:0B:61:E0:54:40:88:3B:A9:A0:1E:D0:0B:A6:AB:D7:80:6E:D3:B1:18:CF + * Public key ping-sha256: WoiWRyIOVNa9ihaBciRSC7XHjliYS9VwUGOIud4PB18= + * All OK. 
Server matched our pinned cert or public key + * statusCode: 200 + * headers: max-age=0; pin-sha256="WoiWRyIOVNa9ihaBciRSC7XHjliYS9VwUGOIud4PB18="; pin-sha256="RRM1dGqnDFsCJXBTHky16vi1obOlCgFFn/yOhI/y+ho="; + * pin-sha256="k2v657xBsOVe1PQRwOsHsw3bsGT2VzIqz5K+59sNQws="; pin-sha256="K87oWBWM9UZfyddvDfoxL+8lpNyoUB2ptGtn0fv6G2Q="; pin-sha256="IQBnNBEiFuhj+8x6X8XLgh01V9Ic5/V3IRQLNFFc7v4="; + * pin-sha256="iie1VXtL7HzAMF+/PVPR9xzT80kQxdZeJ+zduCB3uj0="; pin-sha256="LvRiGEjRqfzurezaWuj8Wie2gyHMrW5Q06LspMnox7A="; includeSubDomains + * ``` + * @since v0.3.6 + * @param options Accepts all `options` from `request`, with some differences in default values: + */ + function request( + options: RequestOptions | string | URL, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + function request( + url: string | URL, + options: RequestOptions, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + /** + * Like `http.get()` but for HTTPS. + * + * `options` can be an object, a string, or a `URL` object. If `options` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * ```js + * const https = require('node:https'); + * + * https.get('https://encrypted.google.com/', (res) => { + * console.log('statusCode:', res.statusCode); + * console.log('headers:', res.headers); + * + * res.on('data', (d) => { + * process.stdout.write(d); + * }); + * + * }).on('error', (e) => { + * console.error(e); + * }); + * ``` + * @since v0.3.6 + * @param options Accepts the same `options` as {@link request}, with the `method` always set to `GET`. + */ + function get( + options: RequestOptions | string | URL, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + function get( + url: string | URL, + options: RequestOptions, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + let globalAgent: Agent; +} +declare module "node:https" { + export * from "https"; +} diff --git a/dist/node_modules/@types/node/index.d.ts b/dist/node_modules/@types/node/index.d.ts new file mode 100644 index 0000000..a596cad --- /dev/null +++ b/dist/node_modules/@types/node/index.d.ts @@ -0,0 +1,88 @@ +/** + * License for programmatically and manually incorporated + * documentation aka. `JSDoc` from https://github.com/nodejs/node/tree/master/doc + * + * Copyright Node.js contributors. All rights reserved. + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + */ + +// NOTE: These definitions support NodeJS and TypeScript 4.9+. + +// Reference required types from the default lib: +/// +/// +/// +/// + +// Base definitions for all NodeJS modules that are not specific to any version of TypeScript: +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// + +/// diff --git a/dist/node_modules/@types/node/inspector.d.ts b/dist/node_modules/@types/node/inspector.d.ts new file mode 100644 index 0000000..3927b81 --- /dev/null +++ b/dist/node_modules/@types/node/inspector.d.ts @@ -0,0 +1,2747 @@ +// Type definitions for inspector + +// These definitions are auto-generated. +// Please see https://github.com/DefinitelyTyped/DefinitelyTyped/pull/19330 +// for more information. + + +/** + * The `node:inspector` module provides an API for interacting with the V8 + * inspector. + * + * It can be accessed using: + * + * ```js + * import * as inspector from 'node:inspector/promises'; + * ``` + * + * or + * + * ```js + * import * as inspector from 'node:inspector'; + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/inspector.js) + */ +declare module 'inspector' { + import EventEmitter = require('node:events'); + interface InspectorNotification { + method: string; + params: T; + } + namespace Schema { + /** + * Description of the protocol domain. + */ + interface Domain { + /** + * Domain name. + */ + name: string; + /** + * Domain version. + */ + version: string; + } + interface GetDomainsReturnType { + /** + * List of supported domains. + */ + domains: Domain[]; + } + } + namespace Runtime { + /** + * Unique script identifier. + */ + type ScriptId = string; + /** + * Unique object identifier. + */ + type RemoteObjectId = string; + /** + * Primitive value which cannot be JSON-stringified. + */ + type UnserializableValue = string; + /** + * Mirror object referencing original JavaScript object. + */ + interface RemoteObject { + /** + * Object type. + */ + type: string; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + /** + * Object class (constructor) name. Specified for object type values only. + */ + className?: string | undefined; + /** + * Remote object value in case of primitive values or JSON values (if it was requested). + */ + value?: any; + /** + * Primitive value which can not be JSON-stringified does not have value, but gets this property. + */ + unserializableValue?: UnserializableValue | undefined; + /** + * String representation of the object. + */ + description?: string | undefined; + /** + * Unique object identifier (for non-primitive values). + */ + objectId?: RemoteObjectId | undefined; + /** + * Preview containing abbreviated property values. Specified for object type values only. 
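Since this chunk introduces the `Runtime.RemoteObject` mirror that most `Runtime` commands hand back, here is a brief sketch (illustrative only, not part of the generated typings) of consuming it through the `Session.post('Runtime.evaluate', ...)` overload declared later in this file; the expression is arbitrary.

```ts
import * as inspector from 'node:inspector';

const session = new inspector.Session();
session.connect();

// Evaluate an expression in the main context and inspect the RemoteObject mirror.
// returnByValue asks the backend to inline the JSON value instead of returning
// only an objectId handle; generatePreview adds the (experimental) ObjectPreview.
session.post(
  'Runtime.evaluate',
  { expression: '({ answer: 42 })', returnByValue: true, generatePreview: true },
  (err, { result, exceptionDetails }) => {
    if (err || exceptionDetails) {
      console.error(err ?? exceptionDetails);
      return;
    }
    console.log(result.type, result.subtype, result.value); // 'object', undefined, { answer: 42 }
  },
);
```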
+ * @experimental + */ + preview?: ObjectPreview | undefined; + /** + * @experimental + */ + customPreview?: CustomPreview | undefined; + } + /** + * @experimental + */ + interface CustomPreview { + header: string; + hasBody: boolean; + formatterObjectId: RemoteObjectId; + bindRemoteObjectFunctionId: RemoteObjectId; + configObjectId?: RemoteObjectId | undefined; + } + /** + * Object containing abbreviated remote object value. + * @experimental + */ + interface ObjectPreview { + /** + * Object type. + */ + type: string; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + /** + * String representation of the object. + */ + description?: string | undefined; + /** + * True iff some of the properties or entries of the original object did not fit. + */ + overflow: boolean; + /** + * List of the properties. + */ + properties: PropertyPreview[]; + /** + * List of the entries. Specified for map and set subtype values only. + */ + entries?: EntryPreview[] | undefined; + } + /** + * @experimental + */ + interface PropertyPreview { + /** + * Property name. + */ + name: string; + /** + * Object type. Accessor means that the property itself is an accessor property. + */ + type: string; + /** + * User-friendly property value string. + */ + value?: string | undefined; + /** + * Nested value preview. + */ + valuePreview?: ObjectPreview | undefined; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + } + /** + * @experimental + */ + interface EntryPreview { + /** + * Preview of the key. Specified for map-like collection entries. + */ + key?: ObjectPreview | undefined; + /** + * Preview of the value. + */ + value: ObjectPreview; + } + /** + * Object property descriptor. + */ + interface PropertyDescriptor { + /** + * Property name or symbol description. + */ + name: string; + /** + * The value associated with the property. + */ + value?: RemoteObject | undefined; + /** + * True if the value associated with the property may be changed (data descriptors only). + */ + writable?: boolean | undefined; + /** + * A function which serves as a getter for the property, or undefined if there is no getter (accessor descriptors only). + */ + get?: RemoteObject | undefined; + /** + * A function which serves as a setter for the property, or undefined if there is no setter (accessor descriptors only). + */ + set?: RemoteObject | undefined; + /** + * True if the type of this property descriptor may be changed and if the property may be deleted from the corresponding object. + */ + configurable: boolean; + /** + * True if this property shows up during enumeration of the properties on the corresponding object. + */ + enumerable: boolean; + /** + * True if the result was thrown during the evaluation. + */ + wasThrown?: boolean | undefined; + /** + * True if the property is owned for the object. + */ + isOwn?: boolean | undefined; + /** + * Property symbol object, if the property is of the symbol type. + */ + symbol?: RemoteObject | undefined; + } + /** + * Object internal property descriptor. This property isn't normally visible in JavaScript code. + */ + interface InternalPropertyDescriptor { + /** + * Conventional property name. + */ + name: string; + /** + * The value associated with the property. + */ + value?: RemoteObject | undefined; + } + /** + * Represents function call argument. 
Either remote object id objectId, primitive value, unserializable primitive value or neither of (for undefined) them should be specified. + */ + interface CallArgument { + /** + * Primitive value or serializable javascript object. + */ + value?: any; + /** + * Primitive value which can not be JSON-stringified. + */ + unserializableValue?: UnserializableValue | undefined; + /** + * Remote object handle. + */ + objectId?: RemoteObjectId | undefined; + } + /** + * Id of an execution context. + */ + type ExecutionContextId = number; + /** + * Description of an isolated world. + */ + interface ExecutionContextDescription { + /** + * Unique id of the execution context. It can be used to specify in which execution context script evaluation should be performed. + */ + id: ExecutionContextId; + /** + * Execution context origin. + */ + origin: string; + /** + * Human readable name describing given context. + */ + name: string; + /** + * Embedder-specific auxiliary data. + */ + auxData?: {} | undefined; + } + /** + * Detailed information about exception (or error) that was thrown during script compilation or execution. + */ + interface ExceptionDetails { + /** + * Exception id. + */ + exceptionId: number; + /** + * Exception text, which should be used together with exception object when available. + */ + text: string; + /** + * Line number of the exception location (0-based). + */ + lineNumber: number; + /** + * Column number of the exception location (0-based). + */ + columnNumber: number; + /** + * Script ID of the exception location. + */ + scriptId?: ScriptId | undefined; + /** + * URL of the exception location, to be used when the script was not reported. + */ + url?: string | undefined; + /** + * JavaScript stack trace if available. + */ + stackTrace?: StackTrace | undefined; + /** + * Exception object if available. + */ + exception?: RemoteObject | undefined; + /** + * Identifier of the context where exception happened. + */ + executionContextId?: ExecutionContextId | undefined; + } + /** + * Number of milliseconds since epoch. + */ + type Timestamp = number; + /** + * Stack entry for runtime errors and assertions. + */ + interface CallFrame { + /** + * JavaScript function name. + */ + functionName: string; + /** + * JavaScript script id. + */ + scriptId: ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * JavaScript script line number (0-based). + */ + lineNumber: number; + /** + * JavaScript script column number (0-based). + */ + columnNumber: number; + } + /** + * Call frames for assertions or error messages. + */ + interface StackTrace { + /** + * String label of this stack trace. For async traces this may be a name of the function that initiated the async call. + */ + description?: string | undefined; + /** + * JavaScript function name. + */ + callFrames: CallFrame[]; + /** + * Asynchronous JavaScript stack trace that preceded this stack, if available. + */ + parent?: StackTrace | undefined; + /** + * Asynchronous JavaScript stack trace that preceded this stack, if available. + * @experimental + */ + parentId?: StackTraceId | undefined; + } + /** + * Unique identifier of current debugger. + * @experimental + */ + type UniqueDebuggerId = string; + /** + * If debuggerId is set stack trace comes from another debugger and can be resolved there. This allows to track cross-debugger calls. See Runtime.StackTrace and Debugger.paused for usages. 
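A short, illustrative sketch of how `ExceptionDetails` and its nested `StackTrace`/`CallFrame` shapes surface when an evaluation throws, relying on the typed `Runtime.evaluate` overload further down in this file.

```ts
import * as inspector from 'node:inspector';

const session = new inspector.Session();
session.connect();

// A throwing evaluation does not reject: the error is reported through
// ExceptionDetails, whose stackTrace lists Runtime.CallFrame entries
// (0-based line/column numbers).
session.post(
  'Runtime.evaluate',
  { expression: 'JSON.parse("not json")' },
  (err, { exceptionDetails }) => {
    if (err) throw err;
    if (exceptionDetails) {
      console.log(exceptionDetails.text, exceptionDetails.lineNumber);
      for (const frame of exceptionDetails.stackTrace?.callFrames ?? []) {
        console.log(`  at ${frame.functionName || '<anonymous>'} (${frame.url}:${frame.lineNumber})`);
      }
    }
  },
);
```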
+ * @experimental + */ + interface StackTraceId { + id: string; + debuggerId?: UniqueDebuggerId | undefined; + } + interface EvaluateParameterType { + /** + * Expression to evaluate. + */ + expression: string; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + /** + * Determines whether Command Line API should be available during the evaluation. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Specifies in which execution context to perform evaluation. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + contextId?: ExecutionContextId | undefined; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should be treated as initiated by user in the UI. + */ + userGesture?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + } + interface AwaitPromiseParameterType { + /** + * Identifier of the promise. + */ + promiseObjectId: RemoteObjectId; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + */ + generatePreview?: boolean | undefined; + } + interface CallFunctionOnParameterType { + /** + * Declaration of the function to call. + */ + functionDeclaration: string; + /** + * Identifier of the object to call function on. Either objectId or executionContextId should be specified. + */ + objectId?: RemoteObjectId | undefined; + /** + * Call arguments. All call arguments must belong to the same JavaScript world as the target object. + */ + arguments?: CallArgument[] | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object which should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should be treated as initiated by user in the UI. + */ + userGesture?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + /** + * Specifies execution context which global object will be used to call function on. Either executionContextId or objectId should be specified. + */ + executionContextId?: ExecutionContextId | undefined; + /** + * Symbolic group name that can be used to release multiple objects. If objectGroup is not specified and objectId is, objectGroup will be inherited from object. + */ + objectGroup?: string | undefined; + } + interface GetPropertiesParameterType { + /** + * Identifier of the object to return properties for. 
+ */ + objectId: RemoteObjectId; + /** + * If true, returns properties belonging only to the element itself, not to its prototype chain. + */ + ownProperties?: boolean | undefined; + /** + * If true, returns accessor properties (with getter/setter) only; internal properties are not returned either. + * @experimental + */ + accessorPropertiesOnly?: boolean | undefined; + /** + * Whether preview should be generated for the results. + * @experimental + */ + generatePreview?: boolean | undefined; + } + interface ReleaseObjectParameterType { + /** + * Identifier of the object to release. + */ + objectId: RemoteObjectId; + } + interface ReleaseObjectGroupParameterType { + /** + * Symbolic object group name. + */ + objectGroup: string; + } + interface SetCustomObjectFormatterEnabledParameterType { + enabled: boolean; + } + interface CompileScriptParameterType { + /** + * Expression to compile. + */ + expression: string; + /** + * Source url to be set for the script. + */ + sourceURL: string; + /** + * Specifies whether the compiled script should be persisted. + */ + persistScript: boolean; + /** + * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + executionContextId?: ExecutionContextId | undefined; + } + interface RunScriptParameterType { + /** + * Id of the script to run. + */ + scriptId: ScriptId; + /** + * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + executionContextId?: ExecutionContextId | undefined; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Determines whether Command Line API should be available during the evaluation. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object which should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + } + interface QueryObjectsParameterType { + /** + * Identifier of the prototype to return objects for. + */ + prototypeObjectId: RemoteObjectId; + } + interface GlobalLexicalScopeNamesParameterType { + /** + * Specifies in which execution context to lookup global scope variables. + */ + executionContextId?: ExecutionContextId | undefined; + } + interface EvaluateReturnType { + /** + * Evaluation result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface AwaitPromiseReturnType { + /** + * Promise result. Will contain rejected value if promise was rejected. + */ + result: RemoteObject; + /** + * Exception details if stack strace is available. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface CallFunctionOnReturnType { + /** + * Call result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface GetPropertiesReturnType { + /** + * Object properties. 
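The `GetProperties*` shapes are typically used as a pair with `Runtime.evaluate`: evaluate to obtain an `objectId`, then enumerate `PropertyDescriptor` entries. A minimal sketch (the expression and names are arbitrary):

```ts
import * as inspector from 'node:inspector';

const session = new inspector.Session();
session.connect();

// Without returnByValue, evaluate() returns only an objectId handle; feed it to
// Runtime.getProperties to enumerate PropertyDescriptor entries for the object.
session.post('Runtime.evaluate', { expression: 'process.versions' }, (err, { result }) => {
  if (err || !result.objectId) return;
  session.post(
    'Runtime.getProperties',
    { objectId: result.objectId, ownProperties: true },
    (err2, { result: props }) => {
      if (err2) throw err2;
      for (const p of props) {
        console.log(p.name, p.value?.description);
      }
    },
  );
});
```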
+ */ + result: PropertyDescriptor[]; + /** + * Internal object properties (only of the element itself). + */ + internalProperties?: InternalPropertyDescriptor[] | undefined; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface CompileScriptReturnType { + /** + * Id of the script. + */ + scriptId?: ScriptId | undefined; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface RunScriptReturnType { + /** + * Run result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface QueryObjectsReturnType { + /** + * Array with objects. + */ + objects: RemoteObject; + } + interface GlobalLexicalScopeNamesReturnType { + names: string[]; + } + interface ExecutionContextCreatedEventDataType { + /** + * A newly created execution context. + */ + context: ExecutionContextDescription; + } + interface ExecutionContextDestroyedEventDataType { + /** + * Id of the destroyed context + */ + executionContextId: ExecutionContextId; + } + interface ExceptionThrownEventDataType { + /** + * Timestamp of the exception. + */ + timestamp: Timestamp; + exceptionDetails: ExceptionDetails; + } + interface ExceptionRevokedEventDataType { + /** + * Reason describing why exception was revoked. + */ + reason: string; + /** + * The id of revoked exception, as reported in exceptionThrown. + */ + exceptionId: number; + } + interface ConsoleAPICalledEventDataType { + /** + * Type of the call. + */ + type: string; + /** + * Call arguments. + */ + args: RemoteObject[]; + /** + * Identifier of the context where the call was made. + */ + executionContextId: ExecutionContextId; + /** + * Call timestamp. + */ + timestamp: Timestamp; + /** + * Stack trace captured when the call was made. + */ + stackTrace?: StackTrace | undefined; + /** + * Console context descriptor for calls on non-default console context (not console.*): 'anonymous#unique-logger-id' for call on unnamed context, 'name#unique-logger-id' for call on named context. + * @experimental + */ + context?: string | undefined; + } + interface InspectRequestedEventDataType { + object: RemoteObject; + hints: {}; + } + } + namespace Debugger { + /** + * Breakpoint identifier. + */ + type BreakpointId = string; + /** + * Call frame identifier. + */ + type CallFrameId = string; + /** + * Location in the source code. + */ + interface Location { + /** + * Script identifier as reported in the Debugger.scriptParsed. + */ + scriptId: Runtime.ScriptId; + /** + * Line number in the script (0-based). + */ + lineNumber: number; + /** + * Column number in the script (0-based). + */ + columnNumber?: number | undefined; + } + /** + * Location in the source code. + * @experimental + */ + interface ScriptPosition { + lineNumber: number; + columnNumber: number; + } + /** + * JavaScript call frame. Array of call frames form the call stack. + */ + interface CallFrame { + /** + * Call frame identifier. This identifier is only valid while the virtual machine is paused. + */ + callFrameId: CallFrameId; + /** + * Name of the JavaScript function called on this call frame. + */ + functionName: string; + /** + * Location in the source code. + */ + functionLocation?: Location | undefined; + /** + * Location in the source code. + */ + location: Location; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Scope chain for this call frame. + */ + scopeChain: Scope[]; + /** + * this object for this call frame. 
+ */ + this: Runtime.RemoteObject; + /** + * The value being returned, if the function is at return point. + */ + returnValue?: Runtime.RemoteObject | undefined; + } + /** + * Scope description. + */ + interface Scope { + /** + * Scope type. + */ + type: string; + /** + * Object representing the scope. For global and with scopes it represents the actual object; for the rest of the scopes, it is artificial transient object enumerating scope variables as its properties. + */ + object: Runtime.RemoteObject; + name?: string | undefined; + /** + * Location in the source code where scope starts + */ + startLocation?: Location | undefined; + /** + * Location in the source code where scope ends + */ + endLocation?: Location | undefined; + } + /** + * Search match for resource. + */ + interface SearchMatch { + /** + * Line number in resource content. + */ + lineNumber: number; + /** + * Line with match content. + */ + lineContent: string; + } + interface BreakLocation { + /** + * Script identifier as reported in the Debugger.scriptParsed. + */ + scriptId: Runtime.ScriptId; + /** + * Line number in the script (0-based). + */ + lineNumber: number; + /** + * Column number in the script (0-based). + */ + columnNumber?: number | undefined; + type?: string | undefined; + } + interface SetBreakpointsActiveParameterType { + /** + * New value for breakpoints active state. + */ + active: boolean; + } + interface SetSkipAllPausesParameterType { + /** + * New value for skip pauses state. + */ + skip: boolean; + } + interface SetBreakpointByUrlParameterType { + /** + * Line number to set breakpoint at. + */ + lineNumber: number; + /** + * URL of the resources to set breakpoint on. + */ + url?: string | undefined; + /** + * Regex pattern for the URLs of the resources to set breakpoints on. Either url or urlRegex must be specified. + */ + urlRegex?: string | undefined; + /** + * Script hash of the resources to set breakpoint on. + */ + scriptHash?: string | undefined; + /** + * Offset in the line to set breakpoint at. + */ + columnNumber?: number | undefined; + /** + * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. + */ + condition?: string | undefined; + } + interface SetBreakpointParameterType { + /** + * Location to set breakpoint in. + */ + location: Location; + /** + * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. + */ + condition?: string | undefined; + } + interface RemoveBreakpointParameterType { + breakpointId: BreakpointId; + } + interface GetPossibleBreakpointsParameterType { + /** + * Start of range to search possible breakpoint locations in. + */ + start: Location; + /** + * End of range to search possible breakpoint locations in (excluding). When not specified, end of scripts is used as end of range. + */ + end?: Location | undefined; + /** + * Only consider locations which are in the same (non-nested) function as start. + */ + restrictToFunction?: boolean | undefined; + } + interface ContinueToLocationParameterType { + /** + * Location to continue to. + */ + location: Location; + targetCallFrames?: string | undefined; + } + interface PauseOnAsyncCallParameterType { + /** + * Debugger will pause when async call with given stack trace is started. 
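To show `SetBreakpointByUrlParameterType` in practice, a hedged sketch follows; the file name, line number, and condition are hypothetical.

```ts
import * as inspector from 'node:inspector';

const session = new inspector.Session();
session.connect();

session.post('Debugger.enable', (err) => {
  if (err) throw err;
  // Either `url` or `urlRegex` must be given; the optional condition string is
  // evaluated at the breakpoint and must be truthy for the debugger to stop.
  session.post(
    'Debugger.setBreakpointByUrl',
    { lineNumber: 41, urlRegex: 'worker\\.js$', condition: 'jobs.length > 100' },
    (err2, { breakpointId, locations }) => {
      if (err2) throw err2;
      console.log(breakpointId, locations.map((l) => `${l.lineNumber}:${l.columnNumber ?? 0}`));
    },
  );
});
```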
+ */ + parentStackTraceId: Runtime.StackTraceId; + } + interface StepIntoParameterType { + /** + * Debugger will issue additional Debugger.paused notification if any async task is scheduled before next pause. + * @experimental + */ + breakOnAsyncCall?: boolean | undefined; + } + interface GetStackTraceParameterType { + stackTraceId: Runtime.StackTraceId; + } + interface SearchInContentParameterType { + /** + * Id of the script to search in. + */ + scriptId: Runtime.ScriptId; + /** + * String to search for. + */ + query: string; + /** + * If true, search is case sensitive. + */ + caseSensitive?: boolean | undefined; + /** + * If true, treats string parameter as regex. + */ + isRegex?: boolean | undefined; + } + interface SetScriptSourceParameterType { + /** + * Id of the script to edit. + */ + scriptId: Runtime.ScriptId; + /** + * New content of the script. + */ + scriptSource: string; + /** + * If true the change will not actually be applied. Dry run may be used to get result description without actually modifying the code. + */ + dryRun?: boolean | undefined; + } + interface RestartFrameParameterType { + /** + * Call frame identifier to evaluate on. + */ + callFrameId: CallFrameId; + } + interface GetScriptSourceParameterType { + /** + * Id of the script to get source for. + */ + scriptId: Runtime.ScriptId; + } + interface SetPauseOnExceptionsParameterType { + /** + * Pause on exceptions mode. + */ + state: string; + } + interface EvaluateOnCallFrameParameterType { + /** + * Call frame identifier to evaluate on. + */ + callFrameId: CallFrameId; + /** + * Expression to evaluate. + */ + expression: string; + /** + * String object group name to put result into (allows rapid releasing resulting object handles using releaseObjectGroup). + */ + objectGroup?: string | undefined; + /** + * Specifies whether command line API should be available to the evaluated expression, defaults to false. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether to throw an exception if side effect cannot be ruled out during evaluation. + */ + throwOnSideEffect?: boolean | undefined; + } + interface SetVariableValueParameterType { + /** + * 0-based number of scope as was listed in scope chain. Only 'local', 'closure' and 'catch' scope types are allowed. Other scopes could be manipulated manually. + */ + scopeNumber: number; + /** + * Variable name. + */ + variableName: string; + /** + * New variable value. + */ + newValue: Runtime.CallArgument; + /** + * Id of callframe that holds variable. + */ + callFrameId: CallFrameId; + } + interface SetReturnValueParameterType { + /** + * New return value. + */ + newValue: Runtime.CallArgument; + } + interface SetAsyncCallStackDepthParameterType { + /** + * Maximum depth of async call stacks. Setting to 0 will effectively disable collecting async call stacks (default). + */ + maxDepth: number; + } + interface SetBlackboxPatternsParameterType { + /** + * Array of regexps that will be used to check script url for blackbox state. 
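A small sketch of `SetBlackboxPatternsParameterType` and `SetPauseOnExceptionsParameterType` in use, assuming the matching typed `post` overloads that appear further down in this file; the patterns shown are only examples.

```ts
import * as inspector from 'node:inspector';

const session = new inspector.Session();
session.connect();

session.post('Debugger.enable', () => {
  // Skip library code while stepping: any script whose URL matches one of these
  // regexps is treated as blackboxed by the debugger.
  session.post('Debugger.setBlackboxPatterns', { patterns: ['/node_modules/', '^node:'] });

  // Pause only on exceptions nothing catches ('none' | 'uncaught' | 'all').
  session.post('Debugger.setPauseOnExceptions', { state: 'uncaught' });
});
```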
+ */ + patterns: string[]; + } + interface SetBlackboxedRangesParameterType { + /** + * Id of the script. + */ + scriptId: Runtime.ScriptId; + positions: ScriptPosition[]; + } + interface EnableReturnType { + /** + * Unique identifier of the debugger. + * @experimental + */ + debuggerId: Runtime.UniqueDebuggerId; + } + interface SetBreakpointByUrlReturnType { + /** + * Id of the created breakpoint for further reference. + */ + breakpointId: BreakpointId; + /** + * List of the locations this breakpoint resolved into upon addition. + */ + locations: Location[]; + } + interface SetBreakpointReturnType { + /** + * Id of the created breakpoint for further reference. + */ + breakpointId: BreakpointId; + /** + * Location this breakpoint resolved into. + */ + actualLocation: Location; + } + interface GetPossibleBreakpointsReturnType { + /** + * List of the possible breakpoint locations. + */ + locations: BreakLocation[]; + } + interface GetStackTraceReturnType { + stackTrace: Runtime.StackTrace; + } + interface SearchInContentReturnType { + /** + * List of search matches. + */ + result: SearchMatch[]; + } + interface SetScriptSourceReturnType { + /** + * New stack trace in case editing has happened while VM was stopped. + */ + callFrames?: CallFrame[] | undefined; + /** + * Whether current call stack was modified after applying the changes. + */ + stackChanged?: boolean | undefined; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + /** + * Exception details if any. + */ + exceptionDetails?: Runtime.ExceptionDetails | undefined; + } + interface RestartFrameReturnType { + /** + * New stack trace. + */ + callFrames: CallFrame[]; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + } + interface GetScriptSourceReturnType { + /** + * Script source. + */ + scriptSource: string; + } + interface EvaluateOnCallFrameReturnType { + /** + * Object wrapper for the evaluation result. + */ + result: Runtime.RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: Runtime.ExceptionDetails | undefined; + } + interface ScriptParsedEventDataType { + /** + * Identifier of the script parsed. + */ + scriptId: Runtime.ScriptId; + /** + * URL or name of the script parsed (if any). + */ + url: string; + /** + * Line offset of the script within the resource with given URL (for script tags). + */ + startLine: number; + /** + * Column offset of the script within the resource with given URL. + */ + startColumn: number; + /** + * Last line of the script. + */ + endLine: number; + /** + * Length of the last line of the script. + */ + endColumn: number; + /** + * Specifies script creation context. + */ + executionContextId: Runtime.ExecutionContextId; + /** + * Content hash of the script. + */ + hash: string; + /** + * Embedder-specific auxiliary data. + */ + executionContextAuxData?: {} | undefined; + /** + * True, if this script is generated as a result of the live edit operation. + * @experimental + */ + isLiveEdit?: boolean | undefined; + /** + * URL of source map associated with script (if any). + */ + sourceMapURL?: string | undefined; + /** + * True, if this script has sourceURL. + */ + hasSourceURL?: boolean | undefined; + /** + * True, if this script is ES6 module. 
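Illustrative use of `ScriptParsedEventDataType`: once `Debugger.enable` is posted, every script is announced as a `Debugger.scriptParsed` notification (typed upstream as `InspectorNotification<Debugger.ScriptParsedEventDataType>`).

```ts
import * as inspector from 'node:inspector';

const session = new inspector.Session();
session.connect();

// Every parsed script is announced once the debugger is enabled; the notification
// params follow Debugger.ScriptParsedEventDataType (scriptId, url, sourceMapURL, ...).
session.on('Debugger.scriptParsed', ({ params }) => {
  if (!params.url.startsWith('node:')) {
    console.log(params.scriptId, params.url, params.isModule ? 'module' : 'classic script');
  }
});
session.post('Debugger.enable');
```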
+ */ + isModule?: boolean | undefined; + /** + * This script length. + */ + length?: number | undefined; + /** + * JavaScript top stack frame of where the script parsed event was triggered if available. + * @experimental + */ + stackTrace?: Runtime.StackTrace | undefined; + } + interface ScriptFailedToParseEventDataType { + /** + * Identifier of the script parsed. + */ + scriptId: Runtime.ScriptId; + /** + * URL or name of the script parsed (if any). + */ + url: string; + /** + * Line offset of the script within the resource with given URL (for script tags). + */ + startLine: number; + /** + * Column offset of the script within the resource with given URL. + */ + startColumn: number; + /** + * Last line of the script. + */ + endLine: number; + /** + * Length of the last line of the script. + */ + endColumn: number; + /** + * Specifies script creation context. + */ + executionContextId: Runtime.ExecutionContextId; + /** + * Content hash of the script. + */ + hash: string; + /** + * Embedder-specific auxiliary data. + */ + executionContextAuxData?: {} | undefined; + /** + * URL of source map associated with script (if any). + */ + sourceMapURL?: string | undefined; + /** + * True, if this script has sourceURL. + */ + hasSourceURL?: boolean | undefined; + /** + * True, if this script is ES6 module. + */ + isModule?: boolean | undefined; + /** + * This script length. + */ + length?: number | undefined; + /** + * JavaScript top stack frame of where the script parsed event was triggered if available. + * @experimental + */ + stackTrace?: Runtime.StackTrace | undefined; + } + interface BreakpointResolvedEventDataType { + /** + * Breakpoint unique identifier. + */ + breakpointId: BreakpointId; + /** + * Actual breakpoint location. + */ + location: Location; + } + interface PausedEventDataType { + /** + * Call stack the virtual machine stopped on. + */ + callFrames: CallFrame[]; + /** + * Pause reason. + */ + reason: string; + /** + * Object containing break-specific auxiliary properties. + */ + data?: {} | undefined; + /** + * Hit breakpoints IDs + */ + hitBreakpoints?: string[] | undefined; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + /** + * Just scheduled async call will have this stack trace as parent stack during async execution. This field is available only after Debugger.stepInto call with breakOnAsynCall flag. + * @experimental + */ + asyncCallStackTraceId?: Runtime.StackTraceId | undefined; + } + } + namespace Console { + /** + * Console message. + */ + interface ConsoleMessage { + /** + * Message source. + */ + source: string; + /** + * Message severity. + */ + level: string; + /** + * Message text. + */ + text: string; + /** + * URL of the message origin. + */ + url?: string | undefined; + /** + * Line number in the resource that generated this message (1-based). + */ + line?: number | undefined; + /** + * Column number in the resource that generated this message (1-based). + */ + column?: number | undefined; + } + interface MessageAddedEventDataType { + /** + * Console message that has been added. + */ + message: ConsoleMessage; + } + } + namespace Profiler { + /** + * Profile node. Holds callsite information, execution statistics and child nodes. + */ + interface ProfileNode { + /** + * Unique id of the node. + */ + id: number; + /** + * Function location. 
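A sketch of reacting to `PausedEventDataType` via the `'Debugger.paused'` notification; purely illustrative.

```ts
import * as inspector from 'node:inspector';

const session = new inspector.Session();
session.connect();

// When the VM stops, the notification carries the call stack, the pause reason
// and any breakpoint ids that were hit (Debugger.PausedEventDataType).
session.on('Debugger.paused', ({ params }) => {
  const top = params.callFrames[0];
  console.log(
    `paused (${params.reason}) in ${top.functionName || '<anonymous>'}`,
    `${top.url}:${top.location.lineNumber}`,
  );
  session.post('Debugger.resume');
});

session.post('Debugger.enable', () => session.post('Debugger.pause'));
```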
+ */ + callFrame: Runtime.CallFrame; + /** + * Number of samples where this node was on top of the call stack. + */ + hitCount?: number | undefined; + /** + * Child node ids. + */ + children?: number[] | undefined; + /** + * The reason of being not optimized. The function may be deoptimized or marked as don't optimize. + */ + deoptReason?: string | undefined; + /** + * An array of source position ticks. + */ + positionTicks?: PositionTickInfo[] | undefined; + } + /** + * Profile. + */ + interface Profile { + /** + * The list of profile nodes. First item is the root node. + */ + nodes: ProfileNode[]; + /** + * Profiling start timestamp in microseconds. + */ + startTime: number; + /** + * Profiling end timestamp in microseconds. + */ + endTime: number; + /** + * Ids of samples top nodes. + */ + samples?: number[] | undefined; + /** + * Time intervals between adjacent samples in microseconds. The first delta is relative to the profile startTime. + */ + timeDeltas?: number[] | undefined; + } + /** + * Specifies a number of samples attributed to a certain source position. + */ + interface PositionTickInfo { + /** + * Source line number (1-based). + */ + line: number; + /** + * Number of samples attributed to the source line. + */ + ticks: number; + } + /** + * Coverage data for a source range. + */ + interface CoverageRange { + /** + * JavaScript script source offset for the range start. + */ + startOffset: number; + /** + * JavaScript script source offset for the range end. + */ + endOffset: number; + /** + * Collected execution count of the source range. + */ + count: number; + } + /** + * Coverage data for a JavaScript function. + */ + interface FunctionCoverage { + /** + * JavaScript function name. + */ + functionName: string; + /** + * Source ranges inside the function with coverage data. + */ + ranges: CoverageRange[]; + /** + * Whether coverage data for this function has block granularity. + */ + isBlockCoverage: boolean; + } + /** + * Coverage data for a JavaScript script. + */ + interface ScriptCoverage { + /** + * JavaScript script id. + */ + scriptId: Runtime.ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Functions contained in the script that has coverage data. + */ + functions: FunctionCoverage[]; + } + /** + * Describes a type collected during runtime. + * @experimental + */ + interface TypeObject { + /** + * Name of a type collected with type profiling. + */ + name: string; + } + /** + * Source offset and types for a parameter or return value. + * @experimental + */ + interface TypeProfileEntry { + /** + * Source offset of the parameter or end of function for return values. + */ + offset: number; + /** + * The types for this parameter or return value. + */ + types: TypeObject[]; + } + /** + * Type profile data collected during runtime for a JavaScript script. + * @experimental + */ + interface ScriptTypeProfile { + /** + * JavaScript script id. + */ + scriptId: Runtime.ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Type profile entries for parameters and return values of the functions in the script. + */ + entries: TypeProfileEntry[]; + } + interface SetSamplingIntervalParameterType { + /** + * New sampling interval in microseconds. + */ + interval: number; + } + interface StartPreciseCoverageParameterType { + /** + * Collect accurate call counts beyond simple 'covered' or 'not covered'. + */ + callCount?: boolean | undefined; + /** + * Collect block-based coverage. 
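A hedged sketch of collecting precise coverage with the `Profiler` shapes above, assuming the corresponding typed `post` overloads declared further down in this file; `runWorkload` is a stand-in.

```ts
import * as inspector from 'node:inspector';

const session = new inspector.Session();
session.connect();

// Stand-in for the code whose coverage we want to measure.
function runWorkload(): void {
  for (let i = 0; i < 1e6; i++) Math.sqrt(i);
}

session.post('Profiler.enable', () => {
  // callCount upgrades the data from covered/not-covered to execution counts;
  // detailed switches on block-level (rather than function-level) ranges.
  session.post('Profiler.startPreciseCoverage', { callCount: true, detailed: true }, () => {
    runWorkload();
    session.post('Profiler.takePreciseCoverage', (err, { result }) => {
      if (err) throw err;
      for (const script of result) {
        for (const fn of script.functions) {
          console.log(script.url, fn.functionName, fn.ranges[0]?.count);
        }
      }
    });
  });
});
```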
+ */ + detailed?: boolean | undefined; + } + interface StopReturnType { + /** + * Recorded profile. + */ + profile: Profile; + } + interface TakePreciseCoverageReturnType { + /** + * Coverage data for the current isolate. + */ + result: ScriptCoverage[]; + } + interface GetBestEffortCoverageReturnType { + /** + * Coverage data for the current isolate. + */ + result: ScriptCoverage[]; + } + interface TakeTypeProfileReturnType { + /** + * Type profile for all scripts since startTypeProfile() was turned on. + */ + result: ScriptTypeProfile[]; + } + interface ConsoleProfileStartedEventDataType { + id: string; + /** + * Location of console.profile(). + */ + location: Debugger.Location; + /** + * Profile title passed as an argument to console.profile(). + */ + title?: string | undefined; + } + interface ConsoleProfileFinishedEventDataType { + id: string; + /** + * Location of console.profileEnd(). + */ + location: Debugger.Location; + profile: Profile; + /** + * Profile title passed as an argument to console.profile(). + */ + title?: string | undefined; + } + } + namespace HeapProfiler { + /** + * Heap snapshot object id. + */ + type HeapSnapshotObjectId = string; + /** + * Sampling Heap Profile node. Holds callsite information, allocation statistics and child nodes. + */ + interface SamplingHeapProfileNode { + /** + * Function location. + */ + callFrame: Runtime.CallFrame; + /** + * Allocations size in bytes for the node excluding children. + */ + selfSize: number; + /** + * Child nodes. + */ + children: SamplingHeapProfileNode[]; + } + /** + * Profile. + */ + interface SamplingHeapProfile { + head: SamplingHeapProfileNode; + } + interface StartTrackingHeapObjectsParameterType { + trackAllocations?: boolean | undefined; + } + interface StopTrackingHeapObjectsParameterType { + /** + * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken when the tracking is stopped. + */ + reportProgress?: boolean | undefined; + } + interface TakeHeapSnapshotParameterType { + /** + * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken. + */ + reportProgress?: boolean | undefined; + } + interface GetObjectByHeapObjectIdParameterType { + objectId: HeapSnapshotObjectId; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + } + interface AddInspectedHeapObjectParameterType { + /** + * Heap snapshot object id to be accessible by means of $x command line API. + */ + heapObjectId: HeapSnapshotObjectId; + } + interface GetHeapObjectIdParameterType { + /** + * Identifier of the object to get heap object id for. + */ + objectId: Runtime.RemoteObjectId; + } + interface StartSamplingParameterType { + /** + * Average sample interval in bytes. Poisson distribution is used for the intervals. The default value is 32768 bytes. + */ + samplingInterval?: number | undefined; + } + interface GetObjectByHeapObjectIdReturnType { + /** + * Evaluation result. + */ + result: Runtime.RemoteObject; + } + interface GetHeapObjectIdReturnType { + /** + * Id of the heap snapshot object corresponding to the passed remote object id. + */ + heapSnapshotObjectId: HeapSnapshotObjectId; + } + interface StopSamplingReturnType { + /** + * Recorded sampling heap profile. + */ + profile: SamplingHeapProfile; + } + interface GetSamplingProfileReturnType { + /** + * Return the sampling profile being collected. 
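A sketch of the sampling heap profiler types in use, again assuming the matching `HeapProfiler.*` post overloads further down in this file; the interval, file name, and workload are illustrative.

```ts
import * as inspector from 'node:inspector';
import { writeFileSync } from 'node:fs';

const session = new inspector.Session();
session.connect();

// Stand-in allocation-heavy workload.
function allocateSomething(): void {
  const junk: string[] = [];
  for (let i = 0; i < 1e5; i++) junk.push('x'.repeat(64));
}

// samplingInterval is the average number of bytes between samples (default 32768).
session.post('HeapProfiler.startSampling', { samplingInterval: 65536 }, (err) => {
  if (err) throw err;
  allocateSomething();
  session.post('HeapProfiler.stopSampling', (err2, { profile }) => {
    if (err2) throw err2;
    // `profile.head` is the root SamplingHeapProfileNode; its children form the call tree.
    console.log('top-level callsites:', profile.head.children.length);
    writeFileSync('sampling-heap.heapprofile', JSON.stringify(profile));
  });
});
```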
+ */ + profile: SamplingHeapProfile; + } + interface AddHeapSnapshotChunkEventDataType { + chunk: string; + } + interface ReportHeapSnapshotProgressEventDataType { + done: number; + total: number; + finished?: boolean | undefined; + } + interface LastSeenObjectIdEventDataType { + lastSeenObjectId: number; + timestamp: number; + } + interface HeapStatsUpdateEventDataType { + /** + * An array of triplets. Each triplet describes a fragment. The first integer is the fragment index, the second integer is a total count of objects for the fragment, the third integer is a total size of the objects for the fragment. + */ + statsUpdate: number[]; + } + } + namespace NodeTracing { + interface TraceConfig { + /** + * Controls how the trace buffer stores data. + */ + recordMode?: string | undefined; + /** + * Included category filters. + */ + includedCategories: string[]; + } + interface StartParameterType { + traceConfig: TraceConfig; + } + interface GetCategoriesReturnType { + /** + * A list of supported tracing categories. + */ + categories: string[]; + } + interface DataCollectedEventDataType { + value: Array<{}>; + } + } + namespace NodeWorker { + type WorkerID = string; + /** + * Unique identifier of attached debugging session. + */ + type SessionID = string; + interface WorkerInfo { + workerId: WorkerID; + type: string; + title: string; + url: string; + } + interface SendMessageToWorkerParameterType { + message: string; + /** + * Identifier of the session. + */ + sessionId: SessionID; + } + interface EnableParameterType { + /** + * Whether to new workers should be paused until the frontend sends `Runtime.runIfWaitingForDebugger` + * message to run them. + */ + waitForDebuggerOnStart: boolean; + } + interface DetachParameterType { + sessionId: SessionID; + } + interface AttachedToWorkerEventDataType { + /** + * Identifier assigned to the session used to send/receive messages. + */ + sessionId: SessionID; + workerInfo: WorkerInfo; + waitingForDebugger: boolean; + } + interface DetachedFromWorkerEventDataType { + /** + * Detached session identifier. + */ + sessionId: SessionID; + } + interface ReceivedMessageFromWorkerEventDataType { + /** + * Identifier of a session which sends a message. + */ + sessionId: SessionID; + message: string; + } + } + namespace NodeRuntime { + interface NotifyWhenWaitingForDisconnectParameterType { + enabled: boolean; + } + } + /** + * The `inspector.Session` is used for dispatching messages to the V8 inspector + * back-end and receiving message responses and notifications. + */ + class Session extends EventEmitter { + /** + * Create a new instance of the inspector.Session class. + * The inspector session needs to be connected through session.connect() before the messages can be dispatched to the inspector backend. + */ + constructor(); + /** + * Connects a session to the inspector back-end. + * @since v8.0.0 + */ + connect(): void; + /** + * Immediately close the session. All pending message callbacks will be called + * with an error. `session.connect()` will need to be called to be able to send + * messages again. Reconnected session will lose all inspector state, such as + * enabled agents or configured breakpoints. + * @since v8.0.0 + */ + disconnect(): void; + /** + * Posts a message to the inspector back-end. `callback` will be notified when + * a response is received. `callback` is a function that accepts two optional + * arguments: error and message-specific result. 
+ * + * ```js + * session.post('Runtime.evaluate', { expression: '2 + 2' }, + * (error, { result }) => console.log(result)); + * // Output: { type: 'number', value: 4, description: '4' } + * ``` + * + * The latest version of the V8 inspector protocol is published on the [Chrome DevTools Protocol Viewer](https://chromedevtools.github.io/devtools-protocol/v8/). + * + * Node.js inspector supports all the Chrome DevTools Protocol domains declared + * by V8\. Chrome DevTools Protocol domain provides an interface for interacting + * with one of the runtime agents used to inspect the application state and listen + * to the run-time events. + * + * ## Example usage + * + * Apart from the debugger, various V8 Profilers are available through the DevTools + * protocol. + * @since v8.0.0 + */ + post(method: string, params?: {}, callback?: (err: Error | null, params?: {}) => void): void; + post(method: string, callback?: (err: Error | null, params?: {}) => void): void; + /** + * Returns supported domains. + */ + post(method: 'Schema.getDomains', callback?: (err: Error | null, params: Schema.GetDomainsReturnType) => void): void; + /** + * Evaluates expression on global object. + */ + post(method: 'Runtime.evaluate', params?: Runtime.EvaluateParameterType, callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; + post(method: 'Runtime.evaluate', callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; + /** + * Add handler to promise with given promise object id. + */ + post(method: 'Runtime.awaitPromise', params?: Runtime.AwaitPromiseParameterType, callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; + post(method: 'Runtime.awaitPromise', callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; + /** + * Calls function with given declaration on the given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.callFunctionOn', params?: Runtime.CallFunctionOnParameterType, callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; + post(method: 'Runtime.callFunctionOn', callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; + /** + * Returns properties of a given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.getProperties', params?: Runtime.GetPropertiesParameterType, callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; + post(method: 'Runtime.getProperties', callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; + /** + * Releases remote object with given id. + */ + post(method: 'Runtime.releaseObject', params?: Runtime.ReleaseObjectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.releaseObject', callback?: (err: Error | null) => void): void; + /** + * Releases all remote objects that belong to a given group. + */ + post(method: 'Runtime.releaseObjectGroup', params?: Runtime.ReleaseObjectGroupParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.releaseObjectGroup', callback?: (err: Error | null) => void): void; + /** + * Tells inspected instance to run if it was waiting for debugger to attach. 
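Building on the typed `Runtime.*` overloads above, a sketch of `Runtime.callFunctionOn` with `CallArgument` values; the function body is arbitrary.

```ts
import * as inspector from 'node:inspector';

const session = new inspector.Session();
session.connect();

// Grab a handle to the global `process` object, then call a function on it.
// Arguments are passed as Runtime.CallArgument values (a plain `value`,
// an `unserializableValue`, or another objectId).
session.post('Runtime.evaluate', { expression: 'process' }, (err, { result }) => {
  if (err || !result.objectId) return;
  session.post(
    'Runtime.callFunctionOn',
    {
      objectId: result.objectId,
      functionDeclaration: 'function (key) { return this.memoryUsage()[key]; }',
      arguments: [{ value: 'heapUsed' }],
      returnByValue: true,
    },
    (err2, { result: callResult }) => {
      if (err2) throw err2;
      console.log('heapUsed:', callResult.value);
    },
  );
});
```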
+ */ + post(method: 'Runtime.runIfWaitingForDebugger', callback?: (err: Error | null) => void): void; + /** + * Enables reporting of execution contexts creation by means of executionContextCreated event. When the reporting gets enabled the event will be sent immediately for each existing execution context. + */ + post(method: 'Runtime.enable', callback?: (err: Error | null) => void): void; + /** + * Disables reporting of execution contexts creation. + */ + post(method: 'Runtime.disable', callback?: (err: Error | null) => void): void; + /** + * Discards collected exceptions and console API calls. + */ + post(method: 'Runtime.discardConsoleEntries', callback?: (err: Error | null) => void): void; + /** + * @experimental + */ + post(method: 'Runtime.setCustomObjectFormatterEnabled', params?: Runtime.SetCustomObjectFormatterEnabledParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.setCustomObjectFormatterEnabled', callback?: (err: Error | null) => void): void; + /** + * Compiles expression. + */ + post(method: 'Runtime.compileScript', params?: Runtime.CompileScriptParameterType, callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; + post(method: 'Runtime.compileScript', callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; + /** + * Runs script with given id in a given context. + */ + post(method: 'Runtime.runScript', params?: Runtime.RunScriptParameterType, callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; + post(method: 'Runtime.runScript', callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; + post(method: 'Runtime.queryObjects', params?: Runtime.QueryObjectsParameterType, callback?: (err: Error | null, params: Runtime.QueryObjectsReturnType) => void): void; + post(method: 'Runtime.queryObjects', callback?: (err: Error | null, params: Runtime.QueryObjectsReturnType) => void): void; + /** + * Returns all let, const and class variables from global scope. + */ + post( + method: 'Runtime.globalLexicalScopeNames', + params?: Runtime.GlobalLexicalScopeNamesParameterType, + callback?: (err: Error | null, params: Runtime.GlobalLexicalScopeNamesReturnType) => void + ): void; + post(method: 'Runtime.globalLexicalScopeNames', callback?: (err: Error | null, params: Runtime.GlobalLexicalScopeNamesReturnType) => void): void; + /** + * Enables debugger for the given page. Clients should not assume that the debugging has been enabled until the result for this command is received. + */ + post(method: 'Debugger.enable', callback?: (err: Error | null, params: Debugger.EnableReturnType) => void): void; + /** + * Disables debugger for given page. + */ + post(method: 'Debugger.disable', callback?: (err: Error | null) => void): void; + /** + * Activates / deactivates all breakpoints on the page. + */ + post(method: 'Debugger.setBreakpointsActive', params?: Debugger.SetBreakpointsActiveParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBreakpointsActive', callback?: (err: Error | null) => void): void; + /** + * Makes page not interrupt on any pauses (breakpoint, exception, dom exception etc). 
+ */ + post(method: 'Debugger.setSkipAllPauses', params?: Debugger.SetSkipAllPausesParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setSkipAllPauses', callback?: (err: Error | null) => void): void; + /** + * Sets JavaScript breakpoint at given location specified either by URL or URL regex. Once this command is issued, all existing parsed scripts will have breakpoints resolved and returned in locations property. Further matching script parsing will result in subsequent breakpointResolved events issued. This logical breakpoint will survive page reloads. + */ + post(method: 'Debugger.setBreakpointByUrl', params?: Debugger.SetBreakpointByUrlParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; + post(method: 'Debugger.setBreakpointByUrl', callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; + /** + * Sets JavaScript breakpoint at a given location. + */ + post(method: 'Debugger.setBreakpoint', params?: Debugger.SetBreakpointParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; + post(method: 'Debugger.setBreakpoint', callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; + /** + * Removes JavaScript breakpoint. + */ + post(method: 'Debugger.removeBreakpoint', params?: Debugger.RemoveBreakpointParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.removeBreakpoint', callback?: (err: Error | null) => void): void; + /** + * Returns possible locations for breakpoint. scriptId in start and end range locations should be the same. + */ + post( + method: 'Debugger.getPossibleBreakpoints', + params?: Debugger.GetPossibleBreakpointsParameterType, + callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void + ): void; + post(method: 'Debugger.getPossibleBreakpoints', callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void): void; + /** + * Continues execution until specific location is reached. + */ + post(method: 'Debugger.continueToLocation', params?: Debugger.ContinueToLocationParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.continueToLocation', callback?: (err: Error | null) => void): void; + /** + * @experimental + */ + post(method: 'Debugger.pauseOnAsyncCall', params?: Debugger.PauseOnAsyncCallParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.pauseOnAsyncCall', callback?: (err: Error | null) => void): void; + /** + * Steps over the statement. + */ + post(method: 'Debugger.stepOver', callback?: (err: Error | null) => void): void; + /** + * Steps into the function call. + */ + post(method: 'Debugger.stepInto', params?: Debugger.StepIntoParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.stepInto', callback?: (err: Error | null) => void): void; + /** + * Steps out of the function call. + */ + post(method: 'Debugger.stepOut', callback?: (err: Error | null) => void): void; + /** + * Stops on the next JavaScript statement. + */ + post(method: 'Debugger.pause', callback?: (err: Error | null) => void): void; + /** + * This method is deprecated - use Debugger.stepInto with breakOnAsyncCall and Debugger.pauseOnAsyncTask instead. Steps into next scheduled async task if any is scheduled before next pause. 
Returns success when async task is actually scheduled, returns error if no task were scheduled or another scheduleStepIntoAsync was called. + * @experimental + */ + post(method: 'Debugger.scheduleStepIntoAsync', callback?: (err: Error | null) => void): void; + /** + * Resumes JavaScript execution. + */ + post(method: 'Debugger.resume', callback?: (err: Error | null) => void): void; + /** + * Returns stack trace with given stackTraceId. + * @experimental + */ + post(method: 'Debugger.getStackTrace', params?: Debugger.GetStackTraceParameterType, callback?: (err: Error | null, params: Debugger.GetStackTraceReturnType) => void): void; + post(method: 'Debugger.getStackTrace', callback?: (err: Error | null, params: Debugger.GetStackTraceReturnType) => void): void; + /** + * Searches for given string in script content. + */ + post(method: 'Debugger.searchInContent', params?: Debugger.SearchInContentParameterType, callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; + post(method: 'Debugger.searchInContent', callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; + /** + * Edits JavaScript source live. + */ + post(method: 'Debugger.setScriptSource', params?: Debugger.SetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; + post(method: 'Debugger.setScriptSource', callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; + /** + * Restarts particular call frame from the beginning. + */ + post(method: 'Debugger.restartFrame', params?: Debugger.RestartFrameParameterType, callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; + post(method: 'Debugger.restartFrame', callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; + /** + * Returns source for the script with given id. + */ + post(method: 'Debugger.getScriptSource', params?: Debugger.GetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; + post(method: 'Debugger.getScriptSource', callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; + /** + * Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions or no exceptions. Initial pause on exceptions state is none. + */ + post(method: 'Debugger.setPauseOnExceptions', params?: Debugger.SetPauseOnExceptionsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setPauseOnExceptions', callback?: (err: Error | null) => void): void; + /** + * Evaluates expression on a given call frame. + */ + post(method: 'Debugger.evaluateOnCallFrame', params?: Debugger.EvaluateOnCallFrameParameterType, callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; + post(method: 'Debugger.evaluateOnCallFrame', callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; + /** + * Changes value of variable in a callframe. Object-based scopes are not supported and must be mutated manually. + */ + post(method: 'Debugger.setVariableValue', params?: Debugger.SetVariableValueParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setVariableValue', callback?: (err: Error | null) => void): void; + /** + * Changes return value in top frame. Available only at return break position. 
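+ *
+ * Illustrative sketch only (not from the upstream docs). It assumes the debugger is paused
+ * at a return break position and that `newValue` takes a `Runtime.CallArgument`, for example
+ * a plain `{ value: ... }`:
+ *
+ * ```js
+ * session.on('Debugger.paused', () => {
+ *   // Replace the value about to be returned by the top frame with 42.
+ *   session.post('Debugger.setReturnValue', { newValue: { value: 42 } });
+ *   session.post('Debugger.resume');
+ * });
+ * ```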
+ * @experimental + */ + post(method: 'Debugger.setReturnValue', params?: Debugger.SetReturnValueParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setReturnValue', callback?: (err: Error | null) => void): void; + /** + * Enables or disables async call stacks tracking. + */ + post(method: 'Debugger.setAsyncCallStackDepth', params?: Debugger.SetAsyncCallStackDepthParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setAsyncCallStackDepth', callback?: (err: Error | null) => void): void; + /** + * Replace previous blackbox patterns with passed ones. Forces backend to skip stepping/pausing in scripts with url matching one of the patterns. VM will try to leave blackboxed script by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. + * @experimental + */ + post(method: 'Debugger.setBlackboxPatterns', params?: Debugger.SetBlackboxPatternsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBlackboxPatterns', callback?: (err: Error | null) => void): void; + /** + * Makes backend skip steps in the script in blackboxed ranges. VM will try leave blacklisted scripts by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. Positions array contains positions where blackbox state is changed. First interval isn't blackboxed. Array should be sorted. + * @experimental + */ + post(method: 'Debugger.setBlackboxedRanges', params?: Debugger.SetBlackboxedRangesParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBlackboxedRanges', callback?: (err: Error | null) => void): void; + /** + * Enables console domain, sends the messages collected so far to the client by means of the messageAdded notification. + */ + post(method: 'Console.enable', callback?: (err: Error | null) => void): void; + /** + * Disables console domain, prevents further console messages from being reported to the client. + */ + post(method: 'Console.disable', callback?: (err: Error | null) => void): void; + /** + * Does nothing. + */ + post(method: 'Console.clearMessages', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.enable', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.disable', callback?: (err: Error | null) => void): void; + /** + * Changes CPU profiler sampling interval. Must be called before CPU profiles recording started. + */ + post(method: 'Profiler.setSamplingInterval', params?: Profiler.SetSamplingIntervalParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Profiler.setSamplingInterval', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.start', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.stop', callback?: (err: Error | null, params: Profiler.StopReturnType) => void): void; + /** + * Enable precise code coverage. Coverage data for JavaScript executed before enabling precise code coverage may be incomplete. Enabling prevents running optimized code and resets execution counters. + */ + post(method: 'Profiler.startPreciseCoverage', params?: Profiler.StartPreciseCoverageParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Profiler.startPreciseCoverage', callback?: (err: Error | null) => void): void; + /** + * Disable precise code coverage. Disabling releases unnecessary execution count records and allows executing optimized code. 
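+ *
+ * A minimal sketch of the precise-coverage round trip (illustrative, not from the upstream
+ * docs), assuming a connected `Session` and the `callCount`/`detailed` fields of
+ * `Profiler.StartPreciseCoverageParameterType`:
+ *
+ * ```js
+ * session.post('Profiler.enable', () => {
+ *   session.post('Profiler.startPreciseCoverage', { callCount: true, detailed: true }, () => {
+ *     // ... run the code to be measured ...
+ *     session.post('Profiler.takePreciseCoverage', (err, { result }) => {
+ *       if (!err) console.log(JSON.stringify(result));
+ *       // Release coverage bookkeeping once the data has been collected.
+ *       session.post('Profiler.stopPreciseCoverage');
+ *     });
+ *   });
+ * });
+ * ```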
+ */ + post(method: 'Profiler.stopPreciseCoverage', callback?: (err: Error | null) => void): void; + /** + * Collect coverage data for the current isolate, and resets execution counters. Precise code coverage needs to have started. + */ + post(method: 'Profiler.takePreciseCoverage', callback?: (err: Error | null, params: Profiler.TakePreciseCoverageReturnType) => void): void; + /** + * Collect coverage data for the current isolate. The coverage data may be incomplete due to garbage collection. + */ + post(method: 'Profiler.getBestEffortCoverage', callback?: (err: Error | null, params: Profiler.GetBestEffortCoverageReturnType) => void): void; + /** + * Enable type profile. + * @experimental + */ + post(method: 'Profiler.startTypeProfile', callback?: (err: Error | null) => void): void; + /** + * Disable type profile. Disabling releases type profile data collected so far. + * @experimental + */ + post(method: 'Profiler.stopTypeProfile', callback?: (err: Error | null) => void): void; + /** + * Collect type profile. + * @experimental + */ + post(method: 'Profiler.takeTypeProfile', callback?: (err: Error | null, params: Profiler.TakeTypeProfileReturnType) => void): void; + post(method: 'HeapProfiler.enable', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.disable', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startTrackingHeapObjects', params?: HeapProfiler.StartTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startTrackingHeapObjects', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopTrackingHeapObjects', params?: HeapProfiler.StopTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopTrackingHeapObjects', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.takeHeapSnapshot', params?: HeapProfiler.TakeHeapSnapshotParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.takeHeapSnapshot', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.collectGarbage', callback?: (err: Error | null) => void): void; + post( + method: 'HeapProfiler.getObjectByHeapObjectId', + params?: HeapProfiler.GetObjectByHeapObjectIdParameterType, + callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void + ): void; + post(method: 'HeapProfiler.getObjectByHeapObjectId', callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void): void; + /** + * Enables console to refer to the node with given id via $x (see Command Line API for more details $x functions). 
+ */ + post(method: 'HeapProfiler.addInspectedHeapObject', params?: HeapProfiler.AddInspectedHeapObjectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.addInspectedHeapObject', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.getHeapObjectId', params?: HeapProfiler.GetHeapObjectIdParameterType, callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void; + post(method: 'HeapProfiler.getHeapObjectId', callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void; + post(method: 'HeapProfiler.startSampling', params?: HeapProfiler.StartSamplingParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startSampling', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopSampling', callback?: (err: Error | null, params: HeapProfiler.StopSamplingReturnType) => void): void; + post(method: 'HeapProfiler.getSamplingProfile', callback?: (err: Error | null, params: HeapProfiler.GetSamplingProfileReturnType) => void): void; + /** + * Gets supported tracing categories. + */ + post(method: 'NodeTracing.getCategories', callback?: (err: Error | null, params: NodeTracing.GetCategoriesReturnType) => void): void; + /** + * Start trace events collection. + */ + post(method: 'NodeTracing.start', params?: NodeTracing.StartParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeTracing.start', callback?: (err: Error | null) => void): void; + /** + * Stop trace events collection. Remaining collected events will be sent as a sequence of + * dataCollected events followed by tracingComplete event. + */ + post(method: 'NodeTracing.stop', callback?: (err: Error | null) => void): void; + /** + * Sends protocol message over session with given id. + */ + post(method: 'NodeWorker.sendMessageToWorker', params?: NodeWorker.SendMessageToWorkerParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.sendMessageToWorker', callback?: (err: Error | null) => void): void; + /** + * Instructs the inspector to attach to running workers. Will also attach to new workers + * as they start + */ + post(method: 'NodeWorker.enable', params?: NodeWorker.EnableParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.enable', callback?: (err: Error | null) => void): void; + /** + * Detaches from all running workers and disables attaching to new workers as they are started. + */ + post(method: 'NodeWorker.disable', callback?: (err: Error | null) => void): void; + /** + * Detached from the worker with given sessionId. + */ + post(method: 'NodeWorker.detach', params?: NodeWorker.DetachParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.detach', callback?: (err: Error | null) => void): void; + /** + * Enable the `NodeRuntime.waitingForDisconnect`. + */ + post(method: 'NodeRuntime.notifyWhenWaitingForDisconnect', params?: NodeRuntime.NotifyWhenWaitingForDisconnectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeRuntime.notifyWhenWaitingForDisconnect', callback?: (err: Error | null) => void): void; + // Events + addListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. 
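+ *
+ * For example (an illustrative sketch, not part of the upstream documentation), a catch-all
+ * listener that logs the `method` field carried by each notification object:
+ *
+ * ```js
+ * session.addListener('inspectorNotification', (message) => {
+ *   console.log(message.method);
+ * });
+ * ```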
+ */ + addListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + addListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + addListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + addListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + addListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + addListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + addListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + addListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + addListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + addListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + addListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + addListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + addListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + addListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + addListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + addListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + addListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. 
+ */ + addListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + addListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + addListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + addListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + addListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + addListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + addListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + addListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'inspectorNotification', message: InspectorNotification<{}>): boolean; + emit(event: 'Runtime.executionContextCreated', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextDestroyed', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextsCleared'): boolean; + emit(event: 'Runtime.exceptionThrown', message: InspectorNotification): boolean; + emit(event: 'Runtime.exceptionRevoked', message: InspectorNotification): boolean; + emit(event: 'Runtime.consoleAPICalled', message: InspectorNotification): boolean; + emit(event: 'Runtime.inspectRequested', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptParsed', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptFailedToParse', message: InspectorNotification): boolean; + emit(event: 'Debugger.breakpointResolved', message: InspectorNotification): boolean; + emit(event: 'Debugger.paused', message: InspectorNotification): boolean; + emit(event: 'Debugger.resumed'): boolean; + emit(event: 'Console.messageAdded', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileStarted', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileFinished', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.addHeapSnapshotChunk', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.resetProfiles'): boolean; + emit(event: 'HeapProfiler.reportHeapSnapshotProgress', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.lastSeenObjectId', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.heapStatsUpdate', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.dataCollected', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.tracingComplete'): boolean; + 
emit(event: 'NodeWorker.attachedToWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.detachedFromWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.receivedMessageFromWorker', message: InspectorNotification): boolean; + emit(event: 'NodeRuntime.waitingForDisconnect'): boolean; + on(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + on(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + on(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + on(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + on(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + on(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + on(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + on(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + on(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + on(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + on(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + on(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + on(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + on(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + on(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. 
+ */ + on(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + on(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + on(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + on(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + on(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + on(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + on(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + on(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + on(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + on(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + on(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + once(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + once(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + once(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + once(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + once(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + once(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. 
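+ *
+ * An illustrative sketch (not from the upstream docs): capture the first console API call
+ * reported after the Runtime domain is enabled. The `type` and `args` fields are assumed
+ * from `Runtime.ConsoleAPICalledEventDataType`:
+ *
+ * ```js
+ * session.once('Runtime.consoleAPICalled', ({ params }) => {
+ *   console.log(params.type, params.args);
+ * });
+ * session.post('Runtime.enable');
+ * ```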
+ */ + once(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + once(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + once(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + once(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + once(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + once(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + once(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + once(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + once(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + once(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + once(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + once(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + once(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + once(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + once(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + once(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + once(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). 
+ */ + once(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + once(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + prependListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + prependListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + prependListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. 
+ */ + prependListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + prependListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + prependListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + prependListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + prependListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + prependListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependOnceListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + prependOnceListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependOnceListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependOnceListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. 
+ */ + prependOnceListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependOnceListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependOnceListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependOnceListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependOnceListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependOnceListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependOnceListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependOnceListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependOnceListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + prependOnceListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + prependOnceListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependOnceListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependOnceListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependOnceListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependOnceListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. 
+ */
+ prependOnceListener(event: 'NodeTracing.tracingComplete', listener: () => void): this;
+ /**
+ * Issued when attached to a worker.
+ */
+ prependOnceListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification<NodeWorker.AttachedToWorkerEventDataType>) => void): this;
+ /**
+ * Issued when detached from the worker.
+ */
+ prependOnceListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification<NodeWorker.DetachedFromWorkerEventDataType>) => void): this;
+ /**
+ * Notifies about a new protocol message received from the session
+ * (session ID is provided in attachedToWorker notification).
+ */
+ prependOnceListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification<NodeWorker.ReceivedMessageFromWorkerEventDataType>) => void): this;
+ /**
+ * This event is fired instead of `Runtime.executionContextDestroyed` when
+ * enabled.
+ * It is fired when the Node process finished all code execution and is
+ * waiting for all frontends to disconnect.
+ */
+ prependOnceListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this;
+ }
+ /**
+ * Activate inspector on host and port. Equivalent to `node --inspect=[[host:]port]`, but can be done programmatically after node has
+ * started.
+ *
+ * If wait is `true`, will block until a client has connected to the inspect port
+ * and flow control has been passed to the debugger client.
+ *
+ * See the `security warning` regarding the `host` parameter usage.
+ * @param [port='what was specified on the CLI'] Port to listen on for inspector connections. Optional.
+ * @param [host='what was specified on the CLI'] Host to listen on for inspector connections. Optional.
+ * @param [wait=false] Block until a client has connected. Optional.
+ * @returns Disposable that calls `inspector.close()`.
+ */
+ function open(port?: number, host?: string, wait?: boolean): Disposable;
+ /**
+ * Deactivate the inspector. Blocks until there are no active connections.
+ */
+ function close(): void;
+ /**
+ * Return the URL of the active inspector, or `undefined` if there is none.
+ *
+ * ```console
+ * $ node --inspect -p 'inspector.url()'
+ * Debugger listening on ws://127.0.0.1:9229/166e272e-7a30-4d09-97ce-f1c012b43c34
+ * For help, see: https://nodejs.org/en/docs/inspector
+ * ws://127.0.0.1:9229/166e272e-7a30-4d09-97ce-f1c012b43c34
+ *
+ * $ node --inspect=localhost:3000 -p 'inspector.url()'
+ * Debugger listening on ws://localhost:3000/51cf8d0e-3c36-4c59-8efd-54519839e56a
+ * For help, see: https://nodejs.org/en/docs/inspector
+ * ws://localhost:3000/51cf8d0e-3c36-4c59-8efd-54519839e56a
+ *
+ * $ node -p 'inspector.url()'
+ * undefined
+ * ```
+ */
+ function url(): string | undefined;
+ /**
+ * Blocks until a client (existing or connected later) has sent the `Runtime.runIfWaitingForDebugger` command.
+ *
+ * An exception will be thrown if there is no active inspector.
+ * @since v12.7.0
+ */
+ function waitForDebugger(): void;
+}
+/**
+ * The inspector module provides an API for interacting with the V8 inspector.
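+ *
+ * A small illustrative sketch (not part of the upstream documentation) of activating the
+ * inspector programmatically and reading its WebSocket URL; the port and host below are
+ * arbitrary example values.
+ *
+ * ```js
+ * const inspector = require('node:inspector');
+ * inspector.open(9229, '127.0.0.1', false);
+ * console.log(inspector.url()); // e.g. ws://127.0.0.1:9229/<uuid>
+ * inspector.close();
+ * ```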
+ */ +declare module 'node:inspector' { + import inspector = require('inspector'); + export = inspector; +} diff --git a/dist/node_modules/@types/node/module.d.ts b/dist/node_modules/@types/node/module.d.ts new file mode 100644 index 0000000..c38a9b8 --- /dev/null +++ b/dist/node_modules/@types/node/module.d.ts @@ -0,0 +1,315 @@ +/** + * @since v0.3.7 + * @experimental + */ +declare module "module" { + import { URL } from "node:url"; + import { MessagePort } from "node:worker_threads"; + namespace Module { + /** + * The `module.syncBuiltinESMExports()` method updates all the live bindings for + * builtin `ES Modules` to match the properties of the `CommonJS` exports. It + * does not add or remove exported names from the `ES Modules`. + * + * ```js + * const fs = require('node:fs'); + * const assert = require('node:assert'); + * const { syncBuiltinESMExports } = require('node:module'); + * + * fs.readFile = newAPI; + * + * delete fs.readFileSync; + * + * function newAPI() { + * // ... + * } + * + * fs.newAPI = newAPI; + * + * syncBuiltinESMExports(); + * + * import('node:fs').then((esmFS) => { + * // It syncs the existing readFile property with the new value + * assert.strictEqual(esmFS.readFile, newAPI); + * // readFileSync has been deleted from the required fs + * assert.strictEqual('readFileSync' in fs, false); + * // syncBuiltinESMExports() does not remove readFileSync from esmFS + * assert.strictEqual('readFileSync' in esmFS, true); + * // syncBuiltinESMExports() does not add names + * assert.strictEqual(esmFS.newAPI, undefined); + * }); + * ``` + * @since v12.12.0 + */ + function syncBuiltinESMExports(): void; + /** + * `path` is the resolved path for the file for which a corresponding source map + * should be fetched. + * @since v13.7.0, v12.17.0 + * @return Returns `module.SourceMap` if a source map is found, `undefined` otherwise. + */ + function findSourceMap(path: string, error?: Error): SourceMap; + interface SourceMapPayload { + file: string; + version: number; + sources: string[]; + sourcesContent: string[]; + names: string[]; + mappings: string; + sourceRoot: string; + } + interface SourceMapping { + generatedLine: number; + generatedColumn: number; + originalSource: string; + originalLine: number; + originalColumn: number; + } + interface SourceOrigin { + /** + * The name of the range in the source map, if one was provided + */ + name?: string; + /** + * The file name of the original source, as reported in the SourceMap + */ + fileName: string; + /** + * The 1-indexed lineNumber of the corresponding call site in the original source + */ + lineNumber: number; + /** + * The 1-indexed columnNumber of the corresponding call site in the original source + */ + columnNumber: number; + } + /** + * @since v13.7.0, v12.17.0 + */ + class SourceMap { + /** + * Getter for the payload used to construct the `SourceMap` instance. + */ + readonly payload: SourceMapPayload; + constructor(payload: SourceMapPayload); + /** + * Given a line offset and column offset in the generated source + * file, returns an object representing the SourceMap range in the + * original file if found, or an empty object if not. + * + * The object returned contains the following keys: + * + * The returned value represents the raw range as it appears in the + * SourceMap, based on zero-indexed offsets, _not_ 1-indexed line and + * column numbers as they appear in Error messages and CallSite + * objects. 
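+ *
+ * An illustrative sketch (not from the upstream docs); `./transpiled.js` is a hypothetical
+ * file generated together with a source map:
+ *
+ * ```js
+ * const { findSourceMap } = require('node:module');
+ * const sm = findSourceMap(require.resolve('./transpiled.js'));
+ * if (sm) {
+ *   // Zero-indexed offsets into the generated file.
+ *   console.log(sm.findEntry(0, 0));
+ * }
+ * ```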
+ *
+ * To get the corresponding 1-indexed line and column numbers from a
+ * lineNumber and columnNumber as they are reported by Error stacks
+ * and CallSite objects, use `sourceMap.findOrigin(lineNumber, columnNumber)`
+ * @param lineOffset The zero-indexed line number offset in the generated source
+ * @param columnOffset The zero-indexed column number offset in the generated source
+ */
+ findEntry(lineOffset: number, columnOffset: number): SourceMapping;
+ /**
+ * Given a 1-indexed `lineNumber` and `columnNumber` from a call site in the generated source,
+ * find the corresponding call site location in the original source.
+ *
+ * If the `lineNumber` and `columnNumber` provided are not found in any source map,
+ * then an empty object is returned.
+ * @param lineNumber The 1-indexed line number of the call site in the generated source
+ * @param columnNumber The 1-indexed column number of the call site in the generated source
+ */
+ findOrigin(lineNumber: number, columnNumber: number): SourceOrigin | {};
+ }
+ /** @deprecated Use `ImportAttributes` instead */
+ interface ImportAssertions extends ImportAttributes {}
+ interface ImportAttributes extends NodeJS.Dict<string> {
+ type?: string | undefined;
+ }
+ type ModuleFormat = "builtin" | "commonjs" | "json" | "module" | "wasm";
+ type ModuleSource = string | ArrayBuffer | NodeJS.TypedArray;
+ interface GlobalPreloadContext {
+ port: MessagePort;
+ }
+ /**
+ * @deprecated This hook will be removed in a future version.
+ * Use `initialize` instead. When a loader has an `initialize` export, `globalPreload` will be ignored.
+ *
+ * Sometimes it might be necessary to run some code inside of the same global scope that the application runs in.
+ * This hook allows the return of a string that is run as a sloppy-mode script on startup.
+ *
+ * @param context Information to assist the preload code
+ * @return Code to run before application startup
+ */
+ type GlobalPreloadHook = (context: GlobalPreloadContext) => string;
+ /**
+ * The `initialize` hook provides a way to define a custom function that runs in the hooks thread
+ * when the hooks module is initialized. Initialization happens when the hooks module is registered via `register`.
+ *
+ * This hook can receive data from a `register` invocation, including ports and other transferrable objects.
+ * The return value of `initialize` can be a `Promise`, in which case it will be awaited before the main application thread execution resumes.
+ */
+ type InitializeHook<Data = any> = (data: Data) => void | Promise<void>;
+ interface ResolveHookContext {
+ /**
+ * Export conditions of the relevant `package.json`
+ */
+ conditions: string[];
+ /**
+ * @deprecated Use `importAttributes` instead
+ */
+ importAssertions: ImportAttributes;
+ /**
+ * An object whose key-value pairs represent the assertions for the module to import
+ */
+ importAttributes: ImportAttributes;
+ /**
+ * The module importing this one, or undefined if this is the Node.js entry point
+ */
+ parentURL: string | undefined;
+ }
+ interface ResolveFnOutput {
+ /**
+ * A hint to the load hook (it might be ignored)
+ */
+ format?: ModuleFormat | null | undefined;
+ /**
+ * @deprecated Use `importAttributes` instead
+ */
+ importAssertions?: ImportAttributes | undefined;
+ /**
+ * The import attributes to use when caching the module (optional; if excluded the input will be used)
+ */
+ importAttributes?: ImportAttributes | undefined;
+ /**
+ * A signal that this hook intends to terminate the chain of `resolve` hooks.
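+ *
+ * For illustration only (not from the upstream docs): a resolve hook in a hypothetical
+ * `hooks.mjs` that aliases a made-up bare specifier and short-circuits the rest of the chain.
+ *
+ * ```js
+ * // hooks.mjs (registered e.g. via module.register('./hooks.mjs', import.meta.url))
+ * export async function resolve(specifier, context, nextResolve) {
+ *   if (specifier === 'my-alias') {
+ *     return {
+ *       url: new URL('./real-module.mjs', context.parentURL).href,
+ *       shortCircuit: true,
+ *     };
+ *   }
+ *   return nextResolve(specifier, context);
+ * }
+ * ```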
+ * @default false
+ */
+ shortCircuit?: boolean | undefined;
+ /**
+ * The absolute URL to which this input resolves
+ */
+ url: string;
+ }
+ /**
+ * The `resolve` hook chain is responsible for resolving file URL for a given module specifier and parent URL, and optionally its format (such as `'module'`) as a hint to the `load` hook.
+ * If a format is specified, the load hook is ultimately responsible for providing the final `format` value (and it is free to ignore the hint provided by `resolve`);
+ * if `resolve` provides a format, a custom `load` hook is required even if only to pass the value to the Node.js default `load` hook.
+ *
+ * @param specifier The specified URL path of the module to be resolved
+ * @param context
+ * @param nextResolve The subsequent `resolve` hook in the chain, or the Node.js default `resolve` hook after the last user-supplied resolve hook
+ */
+ type ResolveHook = (
+ specifier: string,
+ context: ResolveHookContext,
+ nextResolve: (
+ specifier: string,
+ context?: ResolveHookContext,
+ ) => ResolveFnOutput | Promise<ResolveFnOutput>,
+ ) => ResolveFnOutput | Promise<ResolveFnOutput>;
+ interface LoadHookContext {
+ /**
+ * Export conditions of the relevant `package.json`
+ */
+ conditions: string[];
+ /**
+ * The format optionally supplied by the `resolve` hook chain
+ */
+ format: ModuleFormat;
+ /**
+ * @deprecated Use `importAttributes` instead
+ */
+ importAssertions: ImportAttributes;
+ /**
+ * An object whose key-value pairs represent the assertions for the module to import
+ */
+ importAttributes: ImportAttributes;
+ }
+ interface LoadFnOutput {
+ format: ModuleFormat;
+ /**
+ * A signal that this hook intends to terminate the chain of `load` hooks.
+ * @default false
+ */
+ shortCircuit?: boolean | undefined;
+ /**
+ * The source for Node.js to evaluate
+ */
+ source?: ModuleSource;
+ }
+ /**
+ * The `load` hook provides a way to define a custom method of determining how a URL should be interpreted, retrieved, and parsed.
+ * It is also in charge of validating the import assertion.
+ *
+ * @param url The URL/path of the module to be loaded
+ * @param context Metadata about the module
+ * @param nextLoad The subsequent `load` hook in the chain, or the Node.js default `load` hook after the last user-supplied `load` hook
+ */
+ type LoadHook = (
+ url: string,
+ context: LoadHookContext,
+ nextLoad: (url: string, context?: LoadHookContext) => LoadFnOutput | Promise<LoadFnOutput>,
+ ) => LoadFnOutput | Promise<LoadFnOutput>;
+ }
+ interface RegisterOptions<Data> {
+ parentURL: string | URL;
+ data?: Data | undefined;
+ transferList?: any[] | undefined;
+ }
+ interface Module extends NodeModule {}
+ class Module {
+ static runMain(): void;
+ static wrap(code: string): string;
+ static createRequire(path: string | URL): NodeRequire;
+ static builtinModules: string[];
+ static isBuiltin(moduleName: string): boolean;
+ static Module: typeof Module;
+ static register<Data = any>(
+ specifier: string | URL,
+ parentURL?: string | URL,
+ options?: RegisterOptions<Data>,
+ ): void;
+ static register<Data = any>(specifier: string | URL, options?: RegisterOptions<Data>): void;
+ constructor(id: string, parent?: Module);
+ }
+ global {
+ interface ImportMeta {
+ /**
+ * The directory name of the current module. This is the same as the `path.dirname()` of the `import.meta.filename`.
+ * **Caveat:** only present on `file:` modules.
+ */
+ dirname: string;
+ /**
+ * The full absolute path and filename of the current module, with symlinks resolved.
+ * This is the same as the `url.fileURLToPath()` of the `import.meta.url`.
+ * **Caveat:** only local modules support this property. Modules not using the `file:` protocol will not provide it. + */ + filename: string; + /** + * The absolute `file:` URL of the module. + */ + url: string; + /** + * Provides a module-relative resolution function scoped to each module, returning + * the URL string. + * + * Second `parent` parameter is only used when the `--experimental-import-meta-resolve` + * command flag enabled. + * + * @since v20.6.0 + * + * @param specifier The module specifier to resolve relative to `parent`. + * @param parent The absolute parent module URL to resolve from. + * @returns The absolute (`file:`) URL string for the resolved module. + */ + resolve(specifier: string, parent?: string | URL | undefined): string; + } + } + export = Module; +} +declare module "node:module" { + import module = require("module"); + export = module; +} diff --git a/dist/node_modules/@types/node/net.d.ts b/dist/node_modules/@types/node/net.d.ts new file mode 100644 index 0000000..c09b95d --- /dev/null +++ b/dist/node_modules/@types/node/net.d.ts @@ -0,0 +1,954 @@ +/** + * > Stability: 2 - Stable + * + * The `node:net` module provides an asynchronous network API for creating stream-based + * TCP or `IPC` servers ({@link createServer}) and clients + * ({@link createConnection}). + * + * It can be accessed using: + * + * ```js + * const net = require('node:net'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/net.js) + */ +declare module "net" { + import * as stream from "node:stream"; + import { Abortable, EventEmitter } from "node:events"; + import * as dns from "node:dns"; + type LookupFunction = ( + hostname: string, + options: dns.LookupOptions, + callback: (err: NodeJS.ErrnoException | null, address: string | dns.LookupAddress[], family?: number) => void, + ) => void; + interface AddressInfo { + address: string; + family: string; + port: number; + } + interface SocketConstructorOpts { + fd?: number | undefined; + allowHalfOpen?: boolean | undefined; + readable?: boolean | undefined; + writable?: boolean | undefined; + signal?: AbortSignal; + } + interface OnReadOpts { + buffer: Uint8Array | (() => Uint8Array); + /** + * This function is called for every chunk of incoming data. + * Two arguments are passed to it: the number of bytes written to buffer and a reference to buffer. + * Return false from this function to implicitly pause() the socket. + */ + callback(bytesWritten: number, buf: Uint8Array): boolean; + } + interface ConnectOpts { + /** + * If specified, incoming data is stored in a single buffer and passed to the supplied callback when data arrives on the socket. + * Note: this will cause the streaming functionality to not provide any data, however events like 'error', 'end', and 'close' will + * still be emitted as normal and methods like pause() and resume() will also behave as expected. 
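+         *
+         * For illustration only (not part of the upstream typings), a minimal sketch of
+         * using `onread`; the host and port are placeholders:
+         *
+         * ```js
+         * const net = require('node:net');
+         * net.connect({
+         *   port: 80,            // placeholder port
+         *   host: 'example.org', // placeholder host
+         *   onread: {
+         *     // Reuse a single 4 KiB buffer for every read on this socket.
+         *     buffer: Buffer.alloc(4 * 1024),
+         *     callback: (bytesWritten, buf) => {
+         *       console.log(buf.toString('utf8', 0, bytesWritten));
+         *       return true; // returning false would implicitly pause() the socket
+         *     },
+         *   },
+         * });
+         * ```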
+ */ + onread?: OnReadOpts | undefined; + } + interface TcpSocketConnectOpts extends ConnectOpts { + port: number; + host?: string | undefined; + localAddress?: string | undefined; + localPort?: number | undefined; + hints?: number | undefined; + family?: number | undefined; + lookup?: LookupFunction | undefined; + noDelay?: boolean | undefined; + keepAlive?: boolean | undefined; + keepAliveInitialDelay?: number | undefined; + /** + * @since v18.13.0 + */ + autoSelectFamily?: boolean | undefined; + /** + * @since v18.13.0 + */ + autoSelectFamilyAttemptTimeout?: number | undefined; + } + interface IpcSocketConnectOpts extends ConnectOpts { + path: string; + } + type SocketConnectOpts = TcpSocketConnectOpts | IpcSocketConnectOpts; + type SocketReadyState = "opening" | "open" | "readOnly" | "writeOnly" | "closed"; + /** + * This class is an abstraction of a TCP socket or a streaming `IPC` endpoint + * (uses named pipes on Windows, and Unix domain sockets otherwise). It is also + * an `EventEmitter`. + * + * A `net.Socket` can be created by the user and used directly to interact with + * a server. For example, it is returned by {@link createConnection}, + * so the user can use it to talk to the server. + * + * It can also be created by Node.js and passed to the user when a connection + * is received. For example, it is passed to the listeners of a `'connection'` event emitted on a {@link Server}, so the user can use + * it to interact with the client. + * @since v0.3.4 + */ + class Socket extends stream.Duplex { + constructor(options?: SocketConstructorOpts); + /** + * Destroys the socket after all data is written. If the `finish` event was already emitted the socket is destroyed immediately. + * If the socket is still writable it implicitly calls `socket.end()`. + * @since v0.3.4 + */ + destroySoon(): void; + /** + * Sends data on the socket. The second parameter specifies the encoding in the + * case of a string. It defaults to UTF8 encoding. + * + * Returns `true` if the entire data was flushed successfully to the kernel + * buffer. Returns `false` if all or part of the data was queued in user memory.`'drain'` will be emitted when the buffer is again free. + * + * The optional `callback` parameter will be executed when the data is finally + * written out, which may not be immediately. + * + * See `Writable` stream `write()` method for more + * information. + * @since v0.1.90 + * @param [encoding='utf8'] Only used when data is `string`. + */ + write(buffer: Uint8Array | string, cb?: (err?: Error) => void): boolean; + write(str: Uint8Array | string, encoding?: BufferEncoding, cb?: (err?: Error) => void): boolean; + /** + * Initiate a connection on a given socket. + * + * Possible signatures: + * + * * `socket.connect(options[, connectListener])` + * * `socket.connect(path[, connectListener])` for `IPC` connections. + * * `socket.connect(port[, host][, connectListener])` for TCP connections. + * * Returns: `net.Socket` The socket itself. + * + * This function is asynchronous. When the connection is established, the `'connect'` event will be emitted. If there is a problem connecting, + * instead of a `'connect'` event, an `'error'` event will be emitted with + * the error passed to the `'error'` listener. + * The last parameter `connectListener`, if supplied, will be added as a listener + * for the `'connect'` event **once**. + * + * This function should only be used for reconnecting a socket after`'close'` has been emitted or otherwise it may lead to undefined + * behavior. 
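+         *
+         * For illustration only (not part of the upstream typings), a minimal sketch;
+         * the host and port are placeholders:
+         *
+         * ```js
+         * const net = require('node:net');
+         * const socket = new net.Socket();
+         * socket.on('error', (err) => console.error(err));
+         * socket.connect(80, 'example.org', () => {
+         *   console.log('connected');
+         *   socket.end();
+         * });
+         * ```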
+ */ + connect(options: SocketConnectOpts, connectionListener?: () => void): this; + connect(port: number, host: string, connectionListener?: () => void): this; + connect(port: number, connectionListener?: () => void): this; + connect(path: string, connectionListener?: () => void): this; + /** + * Set the encoding for the socket as a `Readable Stream`. See `readable.setEncoding()` for more information. + * @since v0.1.90 + * @return The socket itself. + */ + setEncoding(encoding?: BufferEncoding): this; + /** + * Pauses the reading of data. That is, `'data'` events will not be emitted. + * Useful to throttle back an upload. + * @return The socket itself. + */ + pause(): this; + /** + * Close the TCP connection by sending an RST packet and destroy the stream. + * If this TCP socket is in connecting status, it will send an RST packet and destroy this TCP socket once it is connected. + * Otherwise, it will call `socket.destroy` with an `ERR_SOCKET_CLOSED` Error. + * If this is not a TCP socket (for example, a pipe), calling this method will immediately throw an `ERR_INVALID_HANDLE_TYPE` Error. + * @since v18.3.0, v16.17.0 + */ + resetAndDestroy(): this; + /** + * Resumes reading after a call to `socket.pause()`. + * @return The socket itself. + */ + resume(): this; + /** + * Sets the socket to timeout after `timeout` milliseconds of inactivity on + * the socket. By default `net.Socket` do not have a timeout. + * + * When an idle timeout is triggered the socket will receive a `'timeout'` event but the connection will not be severed. The user must manually call `socket.end()` or `socket.destroy()` to + * end the connection. + * + * ```js + * socket.setTimeout(3000); + * socket.on('timeout', () => { + * console.log('socket timeout'); + * socket.end(); + * }); + * ``` + * + * If `timeout` is 0, then the existing idle timeout is disabled. + * + * The optional `callback` parameter will be added as a one-time listener for the `'timeout'` event. + * @since v0.1.90 + * @return The socket itself. + */ + setTimeout(timeout: number, callback?: () => void): this; + /** + * Enable/disable the use of Nagle's algorithm. + * + * When a TCP connection is created, it will have Nagle's algorithm enabled. + * + * Nagle's algorithm delays data before it is sent via the network. It attempts + * to optimize throughput at the expense of latency. + * + * Passing `true` for `noDelay` or not passing an argument will disable Nagle's + * algorithm for the socket. Passing `false` for `noDelay` will enable Nagle's + * algorithm. + * @since v0.1.90 + * @param [noDelay=true] + * @return The socket itself. + */ + setNoDelay(noDelay?: boolean): this; + /** + * Enable/disable keep-alive functionality, and optionally set the initial + * delay before the first keepalive probe is sent on an idle socket. + * + * Set `initialDelay` (in milliseconds) to set the delay between the last + * data packet received and the first keepalive probe. Setting `0` for`initialDelay` will leave the value unchanged from the default + * (or previous) setting. + * + * Enabling the keep-alive functionality will set the following socket options: + * + * * `SO_KEEPALIVE=1` + * * `TCP_KEEPIDLE=initialDelay` + * * `TCP_KEEPCNT=10` + * * `TCP_KEEPINTVL=1` + * @since v0.1.92 + * @param [enable=false] + * @param [initialDelay=0] + * @return The socket itself. 
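+         *
+         * For illustration only (not part of the upstream typings):
+         *
+         * ```js
+         * // Send the first keep-alive probe after 60 seconds of idle time.
+         * socket.setKeepAlive(true, 60000);
+         * ```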
+ */ + setKeepAlive(enable?: boolean, initialDelay?: number): this; + /** + * Returns the bound `address`, the address `family` name and `port` of the + * socket as reported by the operating system:`{ port: 12346, family: 'IPv4', address: '127.0.0.1' }` + * @since v0.1.90 + */ + address(): AddressInfo | {}; + /** + * Calling `unref()` on a socket will allow the program to exit if this is the only + * active socket in the event system. If the socket is already `unref`ed calling`unref()` again will have no effect. + * @since v0.9.1 + * @return The socket itself. + */ + unref(): this; + /** + * Opposite of `unref()`, calling `ref()` on a previously `unref`ed socket will _not_ let the program exit if it's the only socket left (the default behavior). + * If the socket is `ref`ed calling `ref` again will have no effect. + * @since v0.9.1 + * @return The socket itself. + */ + ref(): this; + /** + * This property is only present if the family autoselection algorithm is enabled in `socket.connect(options)` + * and it is an array of the addresses that have been attempted. + * + * Each address is a string in the form of `$IP:$PORT`. + * If the connection was successful, then the last address is the one that the socket is currently connected to. + * @since v19.4.0 + */ + readonly autoSelectFamilyAttemptedAddresses: string[]; + /** + * This property shows the number of characters buffered for writing. The buffer + * may contain strings whose length after encoding is not yet known. So this number + * is only an approximation of the number of bytes in the buffer. + * + * `net.Socket` has the property that `socket.write()` always works. This is to + * help users get up and running quickly. The computer cannot always keep up + * with the amount of data that is written to a socket. The network connection + * simply might be too slow. Node.js will internally queue up the data written to a + * socket and send it out over the wire when it is possible. + * + * The consequence of this internal buffering is that memory may grow. + * Users who experience large or growing `bufferSize` should attempt to + * "throttle" the data flows in their program with `socket.pause()` and `socket.resume()`. + * @since v0.3.8 + * @deprecated Since v14.6.0 - Use `writableLength` instead. + */ + readonly bufferSize: number; + /** + * The amount of received bytes. + * @since v0.5.3 + */ + readonly bytesRead: number; + /** + * The amount of bytes sent. + * @since v0.5.3 + */ + readonly bytesWritten: number; + /** + * If `true`,`socket.connect(options[, connectListener])` was + * called and has not yet finished. It will stay `true` until the socket becomes + * connected, then it is set to `false` and the `'connect'` event is emitted. Note + * that the `socket.connect(options[, connectListener])` callback is a listener for the `'connect'` event. + * @since v6.1.0 + */ + readonly connecting: boolean; + /** + * This is `true` if the socket is not connected yet, either because `.connect()`has not yet been called or because it is still in the process of connecting + * (see `socket.connecting`). + * @since v11.2.0, v10.16.0 + */ + readonly pending: boolean; + /** + * See `writable.destroyed` for further details. + */ + readonly destroyed: boolean; + /** + * The string representation of the local IP address the remote client is + * connecting on. For example, in a server listening on `'0.0.0.0'`, if a client + * connects on `'192.168.1.1'`, the value of `socket.localAddress` would be`'192.168.1.1'`. 
+ * @since v0.9.6 + */ + readonly localAddress?: string; + /** + * The numeric representation of the local port. For example, `80` or `21`. + * @since v0.9.6 + */ + readonly localPort?: number; + /** + * The string representation of the local IP family. `'IPv4'` or `'IPv6'`. + * @since v18.8.0, v16.18.0 + */ + readonly localFamily?: string; + /** + * This property represents the state of the connection as a string. + * + * * If the stream is connecting `socket.readyState` is `opening`. + * * If the stream is readable and writable, it is `open`. + * * If the stream is readable and not writable, it is `readOnly`. + * * If the stream is not readable and writable, it is `writeOnly`. + * @since v0.5.0 + */ + readonly readyState: SocketReadyState; + /** + * The string representation of the remote IP address. For example,`'74.125.127.100'` or `'2001:4860:a005::68'`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.5.10 + */ + readonly remoteAddress?: string | undefined; + /** + * The string representation of the remote IP family. `'IPv4'` or `'IPv6'`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.11.14 + */ + readonly remoteFamily?: string | undefined; + /** + * The numeric representation of the remote port. For example, `80` or `21`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.5.10 + */ + readonly remotePort?: number | undefined; + /** + * The socket timeout in milliseconds as set by `socket.setTimeout()`. + * It is `undefined` if a timeout has not been set. + * @since v10.7.0 + */ + readonly timeout?: number | undefined; + /** + * Half-closes the socket. i.e., it sends a FIN packet. It is possible the + * server will still send some data. + * + * See `writable.end()` for further details. + * @since v0.1.90 + * @param [encoding='utf8'] Only used when data is `string`. + * @param callback Optional callback for when the socket is finished. + * @return The socket itself. + */ + end(callback?: () => void): this; + end(buffer: Uint8Array | string, callback?: () => void): this; + end(str: Uint8Array | string, encoding?: BufferEncoding, callback?: () => void): this; + /** + * events.EventEmitter + * 1. close + * 2. connect + * 3. data + * 4. drain + * 5. end + * 6. error + * 7. lookup + * 8. ready + * 9. 
timeout + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: (hadError: boolean) => void): this; + addListener(event: "connect", listener: () => void): this; + addListener(event: "data", listener: (data: Buffer) => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + addListener(event: "ready", listener: () => void): this; + addListener(event: "timeout", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close", hadError: boolean): boolean; + emit(event: "connect"): boolean; + emit(event: "data", data: Buffer): boolean; + emit(event: "drain"): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "lookup", err: Error, address: string, family: string | number, host: string): boolean; + emit(event: "ready"): boolean; + emit(event: "timeout"): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: (hadError: boolean) => void): this; + on(event: "connect", listener: () => void): this; + on(event: "data", listener: (data: Buffer) => void): this; + on(event: "drain", listener: () => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + on(event: "ready", listener: () => void): this; + on(event: "timeout", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: (hadError: boolean) => void): this; + once(event: "connect", listener: () => void): this; + once(event: "data", listener: (data: Buffer) => void): this; + once(event: "drain", listener: () => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + once(event: "ready", listener: () => void): this; + once(event: "timeout", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: (hadError: boolean) => void): this; + prependListener(event: "connect", listener: () => void): this; + prependListener(event: "data", listener: (data: Buffer) => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + prependListener(event: "ready", listener: () => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: (hadError: boolean) => void): this; + prependOnceListener(event: "connect", listener: () => void): this; + prependOnceListener(event: "data", listener: (data: Buffer) => void): this; + 
prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + prependOnceListener(event: "ready", listener: () => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + } + interface ListenOptions extends Abortable { + port?: number | undefined; + host?: string | undefined; + backlog?: number | undefined; + path?: string | undefined; + exclusive?: boolean | undefined; + readableAll?: boolean | undefined; + writableAll?: boolean | undefined; + /** + * @default false + */ + ipv6Only?: boolean | undefined; + } + interface ServerOpts { + /** + * Indicates whether half-opened TCP connections are allowed. + * @default false + */ + allowHalfOpen?: boolean | undefined; + /** + * Indicates whether the socket should be paused on incoming connections. + * @default false + */ + pauseOnConnect?: boolean | undefined; + /** + * If set to `true`, it disables the use of Nagle's algorithm immediately after a new incoming connection is received. + * @default false + * @since v16.5.0 + */ + noDelay?: boolean | undefined; + /** + * If set to `true`, it enables keep-alive functionality on the socket immediately after a new incoming connection is received, + * similarly on what is done in `socket.setKeepAlive([enable][, initialDelay])`. + * @default false + * @since v16.5.0 + */ + keepAlive?: boolean | undefined; + /** + * If set to a positive number, it sets the initial delay before the first keepalive probe is sent on an idle socket. + * @default 0 + * @since v16.5.0 + */ + keepAliveInitialDelay?: number | undefined; + } + interface DropArgument { + localAddress?: string; + localPort?: number; + localFamily?: string; + remoteAddress?: string; + remotePort?: number; + remoteFamily?: string; + } + /** + * This class is used to create a TCP or `IPC` server. + * @since v0.1.90 + */ + class Server extends EventEmitter { + constructor(connectionListener?: (socket: Socket) => void); + constructor(options?: ServerOpts, connectionListener?: (socket: Socket) => void); + /** + * Start a server listening for connections. A `net.Server` can be a TCP or + * an `IPC` server depending on what it listens to. + * + * Possible signatures: + * + * * `server.listen(handle[, backlog][, callback])` + * * `server.listen(options[, callback])` + * * `server.listen(path[, backlog][, callback])` for `IPC` servers + * * `server.listen([port[, host[, backlog]]][, callback])` for TCP servers + * + * This function is asynchronous. When the server starts listening, the `'listening'` event will be emitted. The last parameter `callback`will be added as a listener for the `'listening'` + * event. + * + * All `listen()` methods can take a `backlog` parameter to specify the maximum + * length of the queue of pending connections. The actual length will be determined + * by the OS through sysctl settings such as `tcp_max_syn_backlog` and `somaxconn`on Linux. The default value of this parameter is 511 (not 512). + * + * All {@link Socket} are set to `SO_REUSEADDR` (see [`socket(7)`](https://man7.org/linux/man-pages/man7/socket.7.html) for + * details). + * + * The `server.listen()` method can be called again if and only if there was an + * error during the first `server.listen()` call or `server.close()` has been + * called. 
Otherwise, an `ERR_SERVER_ALREADY_LISTEN` error will be thrown. + * + * One of the most common errors raised when listening is `EADDRINUSE`. + * This happens when another server is already listening on the requested`port`/`path`/`handle`. One way to handle this would be to retry + * after a certain amount of time: + * + * ```js + * server.on('error', (e) => { + * if (e.code === 'EADDRINUSE') { + * console.error('Address in use, retrying...'); + * setTimeout(() => { + * server.close(); + * server.listen(PORT, HOST); + * }, 1000); + * } + * }); + * ``` + */ + listen(port?: number, hostname?: string, backlog?: number, listeningListener?: () => void): this; + listen(port?: number, hostname?: string, listeningListener?: () => void): this; + listen(port?: number, backlog?: number, listeningListener?: () => void): this; + listen(port?: number, listeningListener?: () => void): this; + listen(path: string, backlog?: number, listeningListener?: () => void): this; + listen(path: string, listeningListener?: () => void): this; + listen(options: ListenOptions, listeningListener?: () => void): this; + listen(handle: any, backlog?: number, listeningListener?: () => void): this; + listen(handle: any, listeningListener?: () => void): this; + /** + * Stops the server from accepting new connections and keeps existing + * connections. This function is asynchronous, the server is finally closed + * when all connections are ended and the server emits a `'close'` event. + * The optional `callback` will be called once the `'close'` event occurs. Unlike + * that event, it will be called with an `Error` as its only argument if the server + * was not open when it was closed. + * @since v0.1.90 + * @param callback Called when the server is closed. + */ + close(callback?: (err?: Error) => void): this; + /** + * Returns the bound `address`, the address `family` name, and `port` of the server + * as reported by the operating system if listening on an IP socket + * (useful to find which port was assigned when getting an OS-assigned address):`{ port: 12346, family: 'IPv4', address: '127.0.0.1' }`. + * + * For a server listening on a pipe or Unix domain socket, the name is returned + * as a string. + * + * ```js + * const server = net.createServer((socket) => { + * socket.end('goodbye\n'); + * }).on('error', (err) => { + * // Handle errors here. + * throw err; + * }); + * + * // Grab an arbitrary unused port. + * server.listen(() => { + * console.log('opened server on', server.address()); + * }); + * ``` + * + * `server.address()` returns `null` before the `'listening'` event has been + * emitted or after calling `server.close()`. + * @since v0.1.90 + */ + address(): AddressInfo | string | null; + /** + * Asynchronously get the number of concurrent connections on the server. Works + * when sockets were sent to forks. + * + * Callback should take two arguments `err` and `count`. + * @since v0.9.7 + */ + getConnections(cb: (error: Error | null, count: number) => void): void; + /** + * Opposite of `unref()`, calling `ref()` on a previously `unref`ed server will _not_ let the program exit if it's the only server left (the default behavior). + * If the server is `ref`ed calling `ref()` again will have no effect. + * @since v0.9.1 + */ + ref(): this; + /** + * Calling `unref()` on a server will allow the program to exit if this is the only + * active server in the event system. If the server is already `unref`ed calling`unref()` again will have no effect. 
+ * @since v0.9.1 + */ + unref(): this; + /** + * Set this property to reject connections when the server's connection count gets + * high. + * + * It is not recommended to use this option once a socket has been sent to a child + * with `child_process.fork()`. + * @since v0.2.0 + */ + maxConnections: number; + connections: number; + /** + * Indicates whether or not the server is listening for connections. + * @since v5.7.0 + */ + readonly listening: boolean; + /** + * events.EventEmitter + * 1. close + * 2. connection + * 3. error + * 4. listening + * 5. drop + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connection", listener: (socket: Socket) => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "drop", listener: (data?: DropArgument) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "connection", socket: Socket): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit(event: "drop", data?: DropArgument): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connection", listener: (socket: Socket) => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "drop", listener: (data?: DropArgument) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connection", listener: (socket: Socket) => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "drop", listener: (data?: DropArgument) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connection", listener: (socket: Socket) => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "drop", listener: (data?: DropArgument) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "connection", listener: (socket: Socket) => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "drop", listener: (data?: DropArgument) => void): this; + /** + * Calls {@link Server.close()} and returns a promise that fulfills when the server has closed. + * @since v20.5.0 + */ + [Symbol.asyncDispose](): Promise; + } + type IPVersion = "ipv4" | "ipv6"; + /** + * The `BlockList` object can be used with some network APIs to specify rules for + * disabling inbound or outbound access to specific IP addresses, IP ranges, or + * IP subnets. + * @since v15.0.0, v14.18.0 + */ + class BlockList { + /** + * Adds a rule to block the given IP address. + * @since v15.0.0, v14.18.0 + * @param address An IPv4 or IPv6 address. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. 
+ */ + addAddress(address: string, type?: IPVersion): void; + addAddress(address: SocketAddress): void; + /** + * Adds a rule to block a range of IP addresses from `start` (inclusive) to`end` (inclusive). + * @since v15.0.0, v14.18.0 + * @param start The starting IPv4 or IPv6 address in the range. + * @param end The ending IPv4 or IPv6 address in the range. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + addRange(start: string, end: string, type?: IPVersion): void; + addRange(start: SocketAddress, end: SocketAddress): void; + /** + * Adds a rule to block a range of IP addresses specified as a subnet mask. + * @since v15.0.0, v14.18.0 + * @param net The network IPv4 or IPv6 address. + * @param prefix The number of CIDR prefix bits. For IPv4, this must be a value between `0` and `32`. For IPv6, this must be between `0` and `128`. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + addSubnet(net: SocketAddress, prefix: number): void; + addSubnet(net: string, prefix: number, type?: IPVersion): void; + /** + * Returns `true` if the given IP address matches any of the rules added to the`BlockList`. + * + * ```js + * const blockList = new net.BlockList(); + * blockList.addAddress('123.123.123.123'); + * blockList.addRange('10.0.0.1', '10.0.0.10'); + * blockList.addSubnet('8592:757c:efae:4e45::', 64, 'ipv6'); + * + * console.log(blockList.check('123.123.123.123')); // Prints: true + * console.log(blockList.check('10.0.0.3')); // Prints: true + * console.log(blockList.check('222.111.111.222')); // Prints: false + * + * // IPv6 notation for IPv4 addresses works: + * console.log(blockList.check('::ffff:7b7b:7b7b', 'ipv6')); // Prints: true + * console.log(blockList.check('::ffff:123.123.123.123', 'ipv6')); // Prints: true + * ``` + * @since v15.0.0, v14.18.0 + * @param address The IP address to check + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + check(address: SocketAddress): boolean; + check(address: string, type?: IPVersion): boolean; + /** + * The list of rules added to the blocklist. + * @since v15.0.0, v14.18.0 + */ + rules: readonly string[]; + } + interface TcpNetConnectOpts extends TcpSocketConnectOpts, SocketConstructorOpts { + timeout?: number | undefined; + } + interface IpcNetConnectOpts extends IpcSocketConnectOpts, SocketConstructorOpts { + timeout?: number | undefined; + } + type NetConnectOpts = TcpNetConnectOpts | IpcNetConnectOpts; + /** + * Creates a new TCP or `IPC` server. + * + * If `allowHalfOpen` is set to `true`, when the other end of the socket + * signals the end of transmission, the server will only send back the end of + * transmission when `socket.end()` is explicitly called. For example, in the + * context of TCP, when a FIN packed is received, a FIN packed is sent + * back only when `socket.end()` is explicitly called. Until then the + * connection is half-closed (non-readable but still writable). See `'end'` event and [RFC 1122](https://tools.ietf.org/html/rfc1122) (section 4.2.2.13) for more information. + * + * If `pauseOnConnect` is set to `true`, then the socket associated with each + * incoming connection will be paused, and no data will be read from its handle. + * This allows connections to be passed between processes without any data being + * read by the original process. To begin reading data from a paused socket, call `socket.resume()`. + * + * The server can be a TCP server or an `IPC` server, depending on what it `listen()` to. 
+ * + * Here is an example of a TCP echo server which listens for connections + * on port 8124: + * + * ```js + * const net = require('node:net'); + * const server = net.createServer((c) => { + * // 'connection' listener. + * console.log('client connected'); + * c.on('end', () => { + * console.log('client disconnected'); + * }); + * c.write('hello\r\n'); + * c.pipe(c); + * }); + * server.on('error', (err) => { + * throw err; + * }); + * server.listen(8124, () => { + * console.log('server bound'); + * }); + * ``` + * + * Test this by using `telnet`: + * + * ```bash + * telnet localhost 8124 + * ``` + * + * To listen on the socket `/tmp/echo.sock`: + * + * ```js + * server.listen('/tmp/echo.sock', () => { + * console.log('server bound'); + * }); + * ``` + * + * Use `nc` to connect to a Unix domain socket server: + * + * ```bash + * nc -U /tmp/echo.sock + * ``` + * @since v0.5.0 + * @param connectionListener Automatically set as a listener for the {@link 'connection'} event. + */ + function createServer(connectionListener?: (socket: Socket) => void): Server; + function createServer(options?: ServerOpts, connectionListener?: (socket: Socket) => void): Server; + /** + * Aliases to {@link createConnection}. + * + * Possible signatures: + * + * * {@link connect} + * * {@link connect} for `IPC` connections. + * * {@link connect} for TCP connections. + */ + function connect(options: NetConnectOpts, connectionListener?: () => void): Socket; + function connect(port: number, host?: string, connectionListener?: () => void): Socket; + function connect(path: string, connectionListener?: () => void): Socket; + /** + * A factory function, which creates a new {@link Socket}, + * immediately initiates connection with `socket.connect()`, + * then returns the `net.Socket` that starts the connection. + * + * When the connection is established, a `'connect'` event will be emitted + * on the returned socket. The last parameter `connectListener`, if supplied, + * will be added as a listener for the `'connect'` event **once**. + * + * Possible signatures: + * + * * {@link createConnection} + * * {@link createConnection} for `IPC` connections. + * * {@link createConnection} for TCP connections. + * + * The {@link connect} function is an alias to this function. + */ + function createConnection(options: NetConnectOpts, connectionListener?: () => void): Socket; + function createConnection(port: number, host?: string, connectionListener?: () => void): Socket; + function createConnection(path: string, connectionListener?: () => void): Socket; + /** + * Gets the current default value of the `autoSelectFamily` option of `socket.connect(options)`. + * The initial default value is `true`, unless the command line option`--no-network-family-autoselection` is provided. + * @since v19.4.0 + */ + function getDefaultAutoSelectFamily(): boolean; + /** + * Sets the default value of the `autoSelectFamily` option of `socket.connect(options)`. + * @since v19.4.0 + */ + function setDefaultAutoSelectFamily(value: boolean): void; + /** + * Gets the current default value of the `autoSelectFamilyAttemptTimeout` option of `socket.connect(options)`. + * The initial default value is `250`. + * @since v19.8.0 + */ + function getDefaultAutoSelectFamilyAttemptTimeout(): number; + /** + * Sets the default value of the `autoSelectFamilyAttemptTimeout` option of `socket.connect(options)`. + * @since v19.8.0 + */ + function setDefaultAutoSelectFamilyAttemptTimeout(value: number): void; + /** + * Returns `6` if `input` is an IPv6 address. 
Returns `4` if `input` is an IPv4 + * address in [dot-decimal notation](https://en.wikipedia.org/wiki/Dot-decimal_notation) with no leading zeroes. Otherwise, returns`0`. + * + * ```js + * net.isIP('::1'); // returns 6 + * net.isIP('127.0.0.1'); // returns 4 + * net.isIP('127.000.000.001'); // returns 0 + * net.isIP('127.0.0.1/24'); // returns 0 + * net.isIP('fhqwhgads'); // returns 0 + * ``` + * @since v0.3.0 + */ + function isIP(input: string): number; + /** + * Returns `true` if `input` is an IPv4 address in [dot-decimal notation](https://en.wikipedia.org/wiki/Dot-decimal_notation) with no + * leading zeroes. Otherwise, returns `false`. + * + * ```js + * net.isIPv4('127.0.0.1'); // returns true + * net.isIPv4('127.000.000.001'); // returns false + * net.isIPv4('127.0.0.1/24'); // returns false + * net.isIPv4('fhqwhgads'); // returns false + * ``` + * @since v0.3.0 + */ + function isIPv4(input: string): boolean; + /** + * Returns `true` if `input` is an IPv6 address. Otherwise, returns `false`. + * + * ```js + * net.isIPv6('::1'); // returns true + * net.isIPv6('fhqwhgads'); // returns false + * ``` + * @since v0.3.0 + */ + function isIPv6(input: string): boolean; + interface SocketAddressInitOptions { + /** + * The network address as either an IPv4 or IPv6 string. + * @default 127.0.0.1 + */ + address?: string | undefined; + /** + * @default `'ipv4'` + */ + family?: IPVersion | undefined; + /** + * An IPv6 flow-label used only if `family` is `'ipv6'`. + * @default 0 + */ + flowlabel?: number | undefined; + /** + * An IP port. + * @default 0 + */ + port?: number | undefined; + } + /** + * @since v15.14.0, v14.18.0 + */ + class SocketAddress { + constructor(options: SocketAddressInitOptions); + /** + * Either \`'ipv4'\` or \`'ipv6'\`. + * @since v15.14.0, v14.18.0 + */ + readonly address: string; + /** + * Either \`'ipv4'\` or \`'ipv6'\`. + * @since v15.14.0, v14.18.0 + */ + readonly family: IPVersion; + /** + * @since v15.14.0, v14.18.0 + */ + readonly port: number; + /** + * @since v15.14.0, v14.18.0 + */ + readonly flowlabel: number; + } +} +declare module "node:net" { + export * from "net"; +} diff --git a/dist/node_modules/@types/node/os.d.ts b/dist/node_modules/@types/node/os.d.ts new file mode 100644 index 0000000..39a33f7 --- /dev/null +++ b/dist/node_modules/@types/node/os.d.ts @@ -0,0 +1,478 @@ +/** + * The `node:os` module provides operating system-related utility methods and + * properties. It can be accessed using: + * + * ```js + * const os = require('node:os'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/os.js) + */ +declare module "os" { + interface CpuInfo { + model: string; + speed: number; + times: { + user: number; + nice: number; + sys: number; + idle: number; + irq: number; + }; + } + interface NetworkInterfaceBase { + address: string; + netmask: string; + mac: string; + internal: boolean; + cidr: string | null; + } + interface NetworkInterfaceInfoIPv4 extends NetworkInterfaceBase { + family: "IPv4"; + scopeid?: undefined; + } + interface NetworkInterfaceInfoIPv6 extends NetworkInterfaceBase { + family: "IPv6"; + scopeid: number; + } + interface UserInfo { + username: T; + uid: number; + gid: number; + shell: T | null; + homedir: T; + } + type NetworkInterfaceInfo = NetworkInterfaceInfoIPv4 | NetworkInterfaceInfoIPv6; + /** + * Returns the host name of the operating system as a string. + * @since v0.3.3 + */ + function hostname(): string; + /** + * Returns an array containing the 1, 5, and 15 minute load averages. 
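+ *
+ * For illustration only (not part of the upstream typings):
+ *
+ * ```js
+ * const os = require('node:os');
+ * const [oneMin, fiveMin, fifteenMin] = os.loadavg();
+ * ```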
+ * + * The load average is a measure of system activity calculated by the operating + * system and expressed as a fractional number. + * + * The load average is a Unix-specific concept. On Windows, the return value is + * always `[0, 0, 0]`. + * @since v0.3.3 + */ + function loadavg(): number[]; + /** + * Returns the system uptime in number of seconds. + * @since v0.3.3 + */ + function uptime(): number; + /** + * Returns the amount of free system memory in bytes as an integer. + * @since v0.3.3 + */ + function freemem(): number; + /** + * Returns the total amount of system memory in bytes as an integer. + * @since v0.3.3 + */ + function totalmem(): number; + /** + * Returns an array of objects containing information about each logical CPU core. + * The array will be empty if no CPU information is available, such as if the`/proc` file system is unavailable. + * + * The properties included on each object include: + * + * ```js + * [ + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 252020, + * nice: 0, + * sys: 30340, + * idle: 1070356870, + * irq: 0, + * }, + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 306960, + * nice: 0, + * sys: 26980, + * idle: 1071569080, + * irq: 0, + * }, + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 248450, + * nice: 0, + * sys: 21750, + * idle: 1070919370, + * irq: 0, + * }, + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 256880, + * nice: 0, + * sys: 19430, + * idle: 1070905480, + * irq: 20, + * }, + * }, + * ] + * ``` + * + * `nice` values are POSIX-only. On Windows, the `nice` values of all processors + * are always 0. + * + * `os.cpus().length` should not be used to calculate the amount of parallelism + * available to an application. Use {@link availableParallelism} for this purpose. + * @since v0.3.3 + */ + function cpus(): CpuInfo[]; + /** + * Returns an estimate of the default amount of parallelism a program should use. + * Always returns a value greater than zero. + * + * This function is a small wrapper about libuv's [`uv_available_parallelism()`](https://docs.libuv.org/en/v1.x/misc.html#c.uv_available_parallelism). + * @since v19.4.0, v18.14.0 + */ + function availableParallelism(): number; + /** + * Returns the operating system name as returned by [`uname(3)`](https://linux.die.net/man/3/uname). For example, it + * returns `'Linux'` on Linux, `'Darwin'` on macOS, and `'Windows_NT'` on Windows. + * + * See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for additional information + * about the output of running [`uname(3)`](https://linux.die.net/man/3/uname) on various operating systems. + * @since v0.3.3 + */ + function type(): string; + /** + * Returns the operating system as a string. + * + * On POSIX systems, the operating system release is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `GetVersionExW()` is used. See + * [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v0.3.3 + */ + function release(): string; + /** + * Returns an object containing network interfaces that have been assigned a + * network address. + * + * Each key on the returned object identifies a network interface. 
The associated + * value is an array of objects that each describe an assigned network address. + * + * The properties available on the assigned network address object include: + * + * ```js + * { + * lo: [ + * { + * address: '127.0.0.1', + * netmask: '255.0.0.0', + * family: 'IPv4', + * mac: '00:00:00:00:00:00', + * internal: true, + * cidr: '127.0.0.1/8' + * }, + * { + * address: '::1', + * netmask: 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', + * family: 'IPv6', + * mac: '00:00:00:00:00:00', + * scopeid: 0, + * internal: true, + * cidr: '::1/128' + * } + * ], + * eth0: [ + * { + * address: '192.168.1.108', + * netmask: '255.255.255.0', + * family: 'IPv4', + * mac: '01:02:03:0a:0b:0c', + * internal: false, + * cidr: '192.168.1.108/24' + * }, + * { + * address: 'fe80::a00:27ff:fe4e:66a1', + * netmask: 'ffff:ffff:ffff:ffff::', + * family: 'IPv6', + * mac: '01:02:03:0a:0b:0c', + * scopeid: 1, + * internal: false, + * cidr: 'fe80::a00:27ff:fe4e:66a1/64' + * } + * ] + * } + * ``` + * @since v0.6.0 + */ + function networkInterfaces(): NodeJS.Dict; + /** + * Returns the string path of the current user's home directory. + * + * On POSIX, it uses the `$HOME` environment variable if defined. Otherwise it + * uses the [effective UID](https://en.wikipedia.org/wiki/User_identifier#Effective_user_ID) to look up the user's home directory. + * + * On Windows, it uses the `USERPROFILE` environment variable if defined. + * Otherwise it uses the path to the profile directory of the current user. + * @since v2.3.0 + */ + function homedir(): string; + /** + * Returns information about the currently effective user. On POSIX platforms, + * this is typically a subset of the password file. The returned object includes + * the `username`, `uid`, `gid`, `shell`, and `homedir`. On Windows, the `uid` and`gid` fields are `-1`, and `shell` is `null`. + * + * The value of `homedir` returned by `os.userInfo()` is provided by the operating + * system. This differs from the result of `os.homedir()`, which queries + * environment variables for the home directory before falling back to the + * operating system response. + * + * Throws a `SystemError` if a user has no `username` or `homedir`. 
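+ *
+ * For illustration only (not part of the upstream typings):
+ *
+ * ```js
+ * const os = require('node:os');
+ * const { username, homedir, shell } = os.userInfo();
+ * console.log(`${username} -> ${homedir}`);
+ * ```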
+ * @since v6.0.0 + */ + function userInfo(options: { encoding: "buffer" }): UserInfo; + function userInfo(options?: { encoding: BufferEncoding }): UserInfo; + type SignalConstants = { + [key in NodeJS.Signals]: number; + }; + namespace constants { + const UV_UDP_REUSEADDR: number; + namespace signals {} + const signals: SignalConstants; + namespace errno { + const E2BIG: number; + const EACCES: number; + const EADDRINUSE: number; + const EADDRNOTAVAIL: number; + const EAFNOSUPPORT: number; + const EAGAIN: number; + const EALREADY: number; + const EBADF: number; + const EBADMSG: number; + const EBUSY: number; + const ECANCELED: number; + const ECHILD: number; + const ECONNABORTED: number; + const ECONNREFUSED: number; + const ECONNRESET: number; + const EDEADLK: number; + const EDESTADDRREQ: number; + const EDOM: number; + const EDQUOT: number; + const EEXIST: number; + const EFAULT: number; + const EFBIG: number; + const EHOSTUNREACH: number; + const EIDRM: number; + const EILSEQ: number; + const EINPROGRESS: number; + const EINTR: number; + const EINVAL: number; + const EIO: number; + const EISCONN: number; + const EISDIR: number; + const ELOOP: number; + const EMFILE: number; + const EMLINK: number; + const EMSGSIZE: number; + const EMULTIHOP: number; + const ENAMETOOLONG: number; + const ENETDOWN: number; + const ENETRESET: number; + const ENETUNREACH: number; + const ENFILE: number; + const ENOBUFS: number; + const ENODATA: number; + const ENODEV: number; + const ENOENT: number; + const ENOEXEC: number; + const ENOLCK: number; + const ENOLINK: number; + const ENOMEM: number; + const ENOMSG: number; + const ENOPROTOOPT: number; + const ENOSPC: number; + const ENOSR: number; + const ENOSTR: number; + const ENOSYS: number; + const ENOTCONN: number; + const ENOTDIR: number; + const ENOTEMPTY: number; + const ENOTSOCK: number; + const ENOTSUP: number; + const ENOTTY: number; + const ENXIO: number; + const EOPNOTSUPP: number; + const EOVERFLOW: number; + const EPERM: number; + const EPIPE: number; + const EPROTO: number; + const EPROTONOSUPPORT: number; + const EPROTOTYPE: number; + const ERANGE: number; + const EROFS: number; + const ESPIPE: number; + const ESRCH: number; + const ESTALE: number; + const ETIME: number; + const ETIMEDOUT: number; + const ETXTBSY: number; + const EWOULDBLOCK: number; + const EXDEV: number; + const WSAEINTR: number; + const WSAEBADF: number; + const WSAEACCES: number; + const WSAEFAULT: number; + const WSAEINVAL: number; + const WSAEMFILE: number; + const WSAEWOULDBLOCK: number; + const WSAEINPROGRESS: number; + const WSAEALREADY: number; + const WSAENOTSOCK: number; + const WSAEDESTADDRREQ: number; + const WSAEMSGSIZE: number; + const WSAEPROTOTYPE: number; + const WSAENOPROTOOPT: number; + const WSAEPROTONOSUPPORT: number; + const WSAESOCKTNOSUPPORT: number; + const WSAEOPNOTSUPP: number; + const WSAEPFNOSUPPORT: number; + const WSAEAFNOSUPPORT: number; + const WSAEADDRINUSE: number; + const WSAEADDRNOTAVAIL: number; + const WSAENETDOWN: number; + const WSAENETUNREACH: number; + const WSAENETRESET: number; + const WSAECONNABORTED: number; + const WSAECONNRESET: number; + const WSAENOBUFS: number; + const WSAEISCONN: number; + const WSAENOTCONN: number; + const WSAESHUTDOWN: number; + const WSAETOOMANYREFS: number; + const WSAETIMEDOUT: number; + const WSAECONNREFUSED: number; + const WSAELOOP: number; + const WSAENAMETOOLONG: number; + const WSAEHOSTDOWN: number; + const WSAEHOSTUNREACH: number; + const WSAENOTEMPTY: number; + const WSAEPROCLIM: number; + 
const WSAEUSERS: number; + const WSAEDQUOT: number; + const WSAESTALE: number; + const WSAEREMOTE: number; + const WSASYSNOTREADY: number; + const WSAVERNOTSUPPORTED: number; + const WSANOTINITIALISED: number; + const WSAEDISCON: number; + const WSAENOMORE: number; + const WSAECANCELLED: number; + const WSAEINVALIDPROCTABLE: number; + const WSAEINVALIDPROVIDER: number; + const WSAEPROVIDERFAILEDINIT: number; + const WSASYSCALLFAILURE: number; + const WSASERVICE_NOT_FOUND: number; + const WSATYPE_NOT_FOUND: number; + const WSA_E_NO_MORE: number; + const WSA_E_CANCELLED: number; + const WSAEREFUSED: number; + } + namespace priority { + const PRIORITY_LOW: number; + const PRIORITY_BELOW_NORMAL: number; + const PRIORITY_NORMAL: number; + const PRIORITY_ABOVE_NORMAL: number; + const PRIORITY_HIGH: number; + const PRIORITY_HIGHEST: number; + } + } + const devNull: string; + const EOL: string; + /** + * Returns the operating system CPU architecture for which the Node.js binary was + * compiled. Possible values are `'arm'`, `'arm64'`, `'ia32'`, `'loong64'`,`'mips'`, `'mipsel'`, `'ppc'`, `'ppc64'`, `'riscv64'`, `'s390'`, `'s390x'`, + * and `'x64'`. + * + * The return value is equivalent to `process.arch`. + * @since v0.5.0 + */ + function arch(): string; + /** + * Returns a string identifying the kernel version. + * + * On POSIX systems, the operating system release is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `RtlGetVersion()` is used, and if it is not + * available, `GetVersionExW()` will be used. See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v13.11.0, v12.17.0 + */ + function version(): string; + /** + * Returns a string identifying the operating system platform for which + * the Node.js binary was compiled. The value is set at compile time. + * Possible values are `'aix'`, `'darwin'`, `'freebsd'`,`'linux'`,`'openbsd'`, `'sunos'`, and `'win32'`. + * + * The return value is equivalent to `process.platform`. + * + * The value `'android'` may also be returned if Node.js is built on the Android + * operating system. [Android support is experimental](https://github.com/nodejs/node/blob/HEAD/BUILDING.md#androidandroid-based-devices-eg-firefox-os). + * @since v0.5.0 + */ + function platform(): NodeJS.Platform; + /** + * Returns the machine type as a string, such as `arm`, `arm64`, `aarch64`,`mips`, `mips64`, `ppc64`, `ppc64le`, `s390`, `s390x`, `i386`, `i686`, `x86_64`. + * + * On POSIX systems, the machine type is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `RtlGetVersion()` is used, and if it is not + * available, `GetVersionExW()` will be used. See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v18.9.0, v16.18.0 + */ + function machine(): string; + /** + * Returns the operating system's default directory for temporary files as a + * string. + * @since v0.9.9 + */ + function tmpdir(): string; + /** + * Returns a string identifying the endianness of the CPU for which the Node.js + * binary was compiled. + * + * Possible values are `'BE'` for big endian and `'LE'` for little endian. + * @since v0.9.4 + */ + function endianness(): "BE" | "LE"; + /** + * Returns the scheduling priority for the process specified by `pid`. If `pid` is + * not provided or is `0`, the priority of the current process is returned. 
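+ *
+ * For illustration only (not part of the upstream typings):
+ *
+ * ```js
+ * const os = require('node:os');
+ * const current = os.getPriority(); // priority of the current process
+ * os.setPriority(os.constants.priority.PRIORITY_BELOW_NORMAL);
+ * ```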
+ * @since v10.10.0 + * @param [pid=0] The process ID to retrieve scheduling priority for. + */ + function getPriority(pid?: number): number; + /** + * Attempts to set the scheduling priority for the process specified by `pid`. If`pid` is not provided or is `0`, the process ID of the current process is used. + * + * The `priority` input must be an integer between `-20` (high priority) and `19`(low priority). Due to differences between Unix priority levels and Windows + * priority classes, `priority` is mapped to one of six priority constants in`os.constants.priority`. When retrieving a process priority level, this range + * mapping may cause the return value to be slightly different on Windows. To avoid + * confusion, set `priority` to one of the priority constants. + * + * On Windows, setting priority to `PRIORITY_HIGHEST` requires elevated user + * privileges. Otherwise the set priority will be silently reduced to`PRIORITY_HIGH`. + * @since v10.10.0 + * @param [pid=0] The process ID to set scheduling priority for. + * @param priority The scheduling priority to assign to the process. + */ + function setPriority(priority: number): void; + function setPriority(pid: number, priority: number): void; +} +declare module "node:os" { + export * from "os"; +} diff --git a/dist/node_modules/@types/node/package.json b/dist/node_modules/@types/node/package.json new file mode 100644 index 0000000..c1755fb --- /dev/null +++ b/dist/node_modules/@types/node/package.json @@ -0,0 +1,224 @@ +{ + "name": "@types/node", + "version": "20.11.25", + "description": "TypeScript definitions for node", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node", + "license": "MIT", + "contributors": [ + { + "name": "Microsoft TypeScript", + "githubUsername": "Microsoft", + "url": "https://github.com/Microsoft" + }, + { + "name": "Alberto Schiabel", + "githubUsername": "jkomyno", + "url": "https://github.com/jkomyno" + }, + { + "name": "Alvis HT Tang", + "githubUsername": "alvis", + "url": "https://github.com/alvis" + }, + { + "name": "Andrew Makarov", + "githubUsername": "r3nya", + "url": "https://github.com/r3nya" + }, + { + "name": "Benjamin Toueg", + "githubUsername": "btoueg", + "url": "https://github.com/btoueg" + }, + { + "name": "Chigozirim C.", + "githubUsername": "smac89", + "url": "https://github.com/smac89" + }, + { + "name": "David Junger", + "githubUsername": "touffy", + "url": "https://github.com/touffy" + }, + { + "name": "Deividas Bakanas", + "githubUsername": "DeividasBakanas", + "url": "https://github.com/DeividasBakanas" + }, + { + "name": "Eugene Y. Q. 
Shen", + "githubUsername": "eyqs", + "url": "https://github.com/eyqs" + }, + { + "name": "Hannes Magnusson", + "githubUsername": "Hannes-Magnusson-CK", + "url": "https://github.com/Hannes-Magnusson-CK" + }, + { + "name": "Huw", + "githubUsername": "hoo29", + "url": "https://github.com/hoo29" + }, + { + "name": "Kelvin Jin", + "githubUsername": "kjin", + "url": "https://github.com/kjin" + }, + { + "name": "Klaus Meinhardt", + "githubUsername": "ajafff", + "url": "https://github.com/ajafff" + }, + { + "name": "Lishude", + "githubUsername": "islishude", + "url": "https://github.com/islishude" + }, + { + "name": "Mariusz Wiktorczyk", + "githubUsername": "mwiktorczyk", + "url": "https://github.com/mwiktorczyk" + }, + { + "name": "Mohsen Azimi", + "githubUsername": "mohsen1", + "url": "https://github.com/mohsen1" + }, + { + "name": "Nikita Galkin", + "githubUsername": "galkin", + "url": "https://github.com/galkin" + }, + { + "name": "Parambir Singh", + "githubUsername": "parambirs", + "url": "https://github.com/parambirs" + }, + { + "name": "Sebastian Silbermann", + "githubUsername": "eps1lon", + "url": "https://github.com/eps1lon" + }, + { + "name": "Thomas den Hollander", + "githubUsername": "ThomasdenH", + "url": "https://github.com/ThomasdenH" + }, + { + "name": "Wilco Bakker", + "githubUsername": "WilcoBakker", + "url": "https://github.com/WilcoBakker" + }, + { + "name": "wwwy3y3", + "githubUsername": "wwwy3y3", + "url": "https://github.com/wwwy3y3" + }, + { + "name": "Samuel Ainsworth", + "githubUsername": "samuela", + "url": "https://github.com/samuela" + }, + { + "name": "Kyle Uehlein", + "githubUsername": "kuehlein", + "url": "https://github.com/kuehlein" + }, + { + "name": "Thanik Bhongbhibhat", + "githubUsername": "bhongy", + "url": "https://github.com/bhongy" + }, + { + "name": "Marcin Kopacz", + "githubUsername": "chyzwar", + "url": "https://github.com/chyzwar" + }, + { + "name": "Trivikram Kamat", + "githubUsername": "trivikr", + "url": "https://github.com/trivikr" + }, + { + "name": "Junxiao Shi", + "githubUsername": "yoursunny", + "url": "https://github.com/yoursunny" + }, + { + "name": "Ilia Baryshnikov", + "githubUsername": "qwelias", + "url": "https://github.com/qwelias" + }, + { + "name": "ExE Boss", + "githubUsername": "ExE-Boss", + "url": "https://github.com/ExE-Boss" + }, + { + "name": "Piotr Błażejewicz", + "githubUsername": "peterblazejewicz", + "url": "https://github.com/peterblazejewicz" + }, + { + "name": "Anna Henningsen", + "githubUsername": "addaleax", + "url": "https://github.com/addaleax" + }, + { + "name": "Victor Perin", + "githubUsername": "victorperin", + "url": "https://github.com/victorperin" + }, + { + "name": "Yongsheng Zhang", + "githubUsername": "ZYSzys", + "url": "https://github.com/ZYSzys" + }, + { + "name": "NodeJS Contributors", + "githubUsername": "NodeJS", + "url": "https://github.com/NodeJS" + }, + { + "name": "Linus Unnebäck", + "githubUsername": "LinusU", + "url": "https://github.com/LinusU" + }, + { + "name": "wafuwafu13", + "githubUsername": "wafuwafu13", + "url": "https://github.com/wafuwafu13" + }, + { + "name": "Matteo Collina", + "githubUsername": "mcollina", + "url": "https://github.com/mcollina" + }, + { + "name": "Dmitry Semigradsky", + "githubUsername": "Semigradsky", + "url": "https://github.com/Semigradsky" + } + ], + "main": "", + "types": "index.d.ts", + "typesVersions": { + "<=4.8": { + "*": [ + "ts4.8/*" + ] + } + }, + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + 
"directory": "types/node" + }, + "scripts": {}, + "dependencies": { + "undici-types": "~5.26.4" + }, + "typesPublisherContentHash": "00807065f46490c97b07f39fd3138b5abc64842bd94592b7f9b9fda9b677b321", + "typeScriptVersion": "4.6" +} \ No newline at end of file diff --git a/dist/node_modules/@types/node/path.d.ts b/dist/node_modules/@types/node/path.d.ts new file mode 100644 index 0000000..6f07681 --- /dev/null +++ b/dist/node_modules/@types/node/path.d.ts @@ -0,0 +1,191 @@ +declare module "path/posix" { + import path = require("path"); + export = path; +} +declare module "path/win32" { + import path = require("path"); + export = path; +} +/** + * The `node:path` module provides utilities for working with file and directory + * paths. It can be accessed using: + * + * ```js + * const path = require('node:path'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/path.js) + */ +declare module "path" { + namespace path { + /** + * A parsed path object generated by path.parse() or consumed by path.format(). + */ + interface ParsedPath { + /** + * The root of the path such as '/' or 'c:\' + */ + root: string; + /** + * The full directory path such as '/home/user/dir' or 'c:\path\dir' + */ + dir: string; + /** + * The file name including extension (if any) such as 'index.html' + */ + base: string; + /** + * The file extension (if any) such as '.html' + */ + ext: string; + /** + * The file name without extension (if any) such as 'index' + */ + name: string; + } + interface FormatInputPathObject { + /** + * The root of the path such as '/' or 'c:\' + */ + root?: string | undefined; + /** + * The full directory path such as '/home/user/dir' or 'c:\path\dir' + */ + dir?: string | undefined; + /** + * The file name including extension (if any) such as 'index.html' + */ + base?: string | undefined; + /** + * The file extension (if any) such as '.html' + */ + ext?: string | undefined; + /** + * The file name without extension (if any) such as 'index' + */ + name?: string | undefined; + } + interface PlatformPath { + /** + * Normalize a string path, reducing '..' and '.' parts. + * When multiple slashes are found, they're replaced by a single one; when the path contains a trailing slash, it is preserved. On Windows backslashes are used. + * + * @param path string path to normalize. + * @throws {TypeError} if `path` is not a string. + */ + normalize(path: string): string; + /** + * Join all arguments together and normalize the resulting path. + * + * @param paths paths to join. + * @throws {TypeError} if any of the path segments is not a string. + */ + join(...paths: string[]): string; + /** + * The right-most parameter is considered {to}. Other parameters are considered an array of {from}. + * + * Starting from leftmost {from} parameter, resolves {to} to an absolute path. + * + * If {to} isn't already absolute, {from} arguments are prepended in right to left order, + * until an absolute path is found. If after using all {from} paths still no absolute path is found, + * the current working directory is used as well. The resulting path is normalized, + * and trailing slashes are removed unless the path gets resolved to the root directory. + * + * @param paths A sequence of paths or path segments. + * @throws {TypeError} if any of the arguments is not a string. + */ + resolve(...paths: string[]): string; + /** + * Determines whether {path} is an absolute path. An absolute path will always resolve to the same location, regardless of the working directory. 
+ * + * If the given {path} is a zero-length string, `false` will be returned. + * + * @param path path to test. + * @throws {TypeError} if `path` is not a string. + */ + isAbsolute(path: string): boolean; + /** + * Solve the relative path from {from} to {to} based on the current working directory. + * At times we have two absolute paths, and we need to derive the relative path from one to the other. This is actually the reverse transform of path.resolve. + * + * @throws {TypeError} if either `from` or `to` is not a string. + */ + relative(from: string, to: string): string; + /** + * Return the directory name of a path. Similar to the Unix dirname command. + * + * @param path the path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + dirname(path: string): string; + /** + * Return the last portion of a path. Similar to the Unix basename command. + * Often used to extract the file name from a fully qualified path. + * + * @param path the path to evaluate. + * @param suffix optionally, an extension to remove from the result. + * @throws {TypeError} if `path` is not a string or if `ext` is given and is not a string. + */ + basename(path: string, suffix?: string): string; + /** + * Return the extension of the path, from the last '.' to end of string in the last portion of the path. + * If there is no '.' in the last portion of the path or the first character of it is '.', then it returns an empty string. + * + * @param path the path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + extname(path: string): string; + /** + * The platform-specific file separator. '\\' or '/'. + */ + readonly sep: "\\" | "/"; + /** + * The platform-specific file delimiter. ';' or ':'. + */ + readonly delimiter: ";" | ":"; + /** + * Returns an object from a path string - the opposite of format(). + * + * @param path path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + parse(path: string): ParsedPath; + /** + * Returns a path string from an object - the opposite of parse(). + * + * @param pathObject path to evaluate. + */ + format(pathObject: FormatInputPathObject): string; + /** + * On Windows systems only, returns an equivalent namespace-prefixed path for the given path. + * If path is not a string, path will be returned without modifications. + * This method is meaningful only on Windows system. + * On POSIX systems, the method is non-operational and always returns path without modifications. + */ + toNamespacedPath(path: string): string; + /** + * Posix specific pathing. + * Same as parent object on posix. + */ + readonly posix: PlatformPath; + /** + * Windows specific pathing. + * Same as parent object on windows + */ + readonly win32: PlatformPath; + } + } + const path: path.PlatformPath; + export = path; +} +declare module "node:path" { + import path = require("path"); + export = path; +} +declare module "node:path/posix" { + import path = require("path/posix"); + export = path; +} +declare module "node:path/win32" { + import path = require("path/win32"); + export = path; +} diff --git a/dist/node_modules/@types/node/perf_hooks.d.ts b/dist/node_modules/@types/node/perf_hooks.d.ts new file mode 100644 index 0000000..c0ce852 --- /dev/null +++ b/dist/node_modules/@types/node/perf_hooks.d.ts @@ -0,0 +1,645 @@ +/** + * This module provides an implementation of a subset of the W3C [Web Performance APIs](https://w3c.github.io/perf-timing-primer/) as well as additional APIs for + * Node.js-specific performance measurements. 
+ * + * Node.js supports the following [Web Performance APIs](https://w3c.github.io/perf-timing-primer/): + * + * * [High Resolution Time](https://www.w3.org/TR/hr-time-2) + * * [Performance Timeline](https://w3c.github.io/performance-timeline/) + * * [User Timing](https://www.w3.org/TR/user-timing/) + * * [Resource Timing](https://www.w3.org/TR/resource-timing-2/) + * + * ```js + * const { PerformanceObserver, performance } = require('node:perf_hooks'); + * + * const obs = new PerformanceObserver((items) => { + * console.log(items.getEntries()[0].duration); + * performance.clearMarks(); + * }); + * obs.observe({ type: 'measure' }); + * performance.measure('Start to Now'); + * + * performance.mark('A'); + * doSomeLongRunningProcess(() => { + * performance.measure('A to Now', 'A'); + * + * performance.mark('B'); + * performance.measure('A to B', 'A', 'B'); + * }); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/perf_hooks.js) + */ +declare module "perf_hooks" { + import { AsyncResource } from "node:async_hooks"; + type EntryType = "node" | "mark" | "measure" | "gc" | "function" | "http2" | "http" | "dns" | "net"; + interface NodeGCPerformanceDetail { + /** + * When `performanceEntry.entryType` is equal to 'gc', `the performance.kind` property identifies + * the type of garbage collection operation that occurred. + * See perf_hooks.constants for valid values. + */ + readonly kind?: number | undefined; + /** + * When `performanceEntry.entryType` is equal to 'gc', the `performance.flags` + * property contains additional information about garbage collection operation. + * See perf_hooks.constants for valid values. + */ + readonly flags?: number | undefined; + } + /** + * The constructor of this class is not exposed to users directly. + * @since v8.5.0 + */ + class PerformanceEntry { + protected constructor(); + /** + * The total number of milliseconds elapsed for this entry. This value will not + * be meaningful for all Performance Entry types. + * @since v8.5.0 + */ + readonly duration: number; + /** + * The name of the performance entry. + * @since v8.5.0 + */ + readonly name: string; + /** + * The high resolution millisecond timestamp marking the starting time of the + * Performance Entry. + * @since v8.5.0 + */ + readonly startTime: number; + /** + * The type of the performance entry. It may be one of: + * + * * `'node'` (Node.js only) + * * `'mark'` (available on the Web) + * * `'measure'` (available on the Web) + * * `'gc'` (Node.js only) + * * `'function'` (Node.js only) + * * `'http2'` (Node.js only) + * * `'http'` (Node.js only) + * @since v8.5.0 + */ + readonly entryType: EntryType; + /** + * Additional detail specific to the `entryType`. + * @since v16.0.0 + */ + readonly detail?: NodeGCPerformanceDetail | unknown | undefined; // TODO: Narrow this based on entry type. + toJSON(): any; + } + /** + * Exposes marks created via the `Performance.mark()` method. + * @since v18.2.0, v16.17.0 + */ + class PerformanceMark extends PerformanceEntry { + readonly duration: 0; + readonly entryType: "mark"; + } + /** + * Exposes measures created via the `Performance.measure()` method. + * + * The constructor of this class is not exposed to users directly. + * @since v18.2.0, v16.17.0 + */ + class PerformanceMeasure extends PerformanceEntry { + readonly entryType: "measure"; + } + /** + * _This property is an extension by Node.js. It is not available in Web browsers._ + * + * Provides timing details for Node.js itself. 
The constructor of this class + * is not exposed to users. + * @since v8.5.0 + */ + class PerformanceNodeTiming extends PerformanceEntry { + /** + * The high resolution millisecond timestamp at which the Node.js process + * completed bootstrapping. If bootstrapping has not yet finished, the property + * has the value of -1. + * @since v8.5.0 + */ + readonly bootstrapComplete: number; + /** + * The high resolution millisecond timestamp at which the Node.js environment was + * initialized. + * @since v8.5.0 + */ + readonly environment: number; + /** + * The high resolution millisecond timestamp of the amount of time the event loop + * has been idle within the event loop's event provider (e.g. `epoll_wait`). This + * does not take CPU usage into consideration. If the event loop has not yet + * started (e.g., in the first tick of the main script), the property has the + * value of 0. + * @since v14.10.0, v12.19.0 + */ + readonly idleTime: number; + /** + * The high resolution millisecond timestamp at which the Node.js event loop + * exited. If the event loop has not yet exited, the property has the value of -1\. + * It can only have a value of not -1 in a handler of the `'exit'` event. + * @since v8.5.0 + */ + readonly loopExit: number; + /** + * The high resolution millisecond timestamp at which the Node.js event loop + * started. If the event loop has not yet started (e.g., in the first tick of the + * main script), the property has the value of -1. + * @since v8.5.0 + */ + readonly loopStart: number; + /** + * The high resolution millisecond timestamp at which the V8 platform was + * initialized. + * @since v8.5.0 + */ + readonly v8Start: number; + } + interface EventLoopUtilization { + idle: number; + active: number; + utilization: number; + } + /** + * @param util1 The result of a previous call to eventLoopUtilization() + * @param util2 The result of a previous call to eventLoopUtilization() prior to util1 + */ + type EventLoopUtilityFunction = ( + util1?: EventLoopUtilization, + util2?: EventLoopUtilization, + ) => EventLoopUtilization; + interface MarkOptions { + /** + * Additional optional detail to include with the mark. + */ + detail?: unknown | undefined; + /** + * An optional timestamp to be used as the mark time. + * @default `performance.now()`. + */ + startTime?: number | undefined; + } + interface MeasureOptions { + /** + * Additional optional detail to include with the mark. + */ + detail?: unknown | undefined; + /** + * Duration between start and end times. + */ + duration?: number | undefined; + /** + * Timestamp to be used as the end time, or a string identifying a previously recorded mark. + */ + end?: number | string | undefined; + /** + * Timestamp to be used as the start time, or a string identifying a previously recorded mark. + */ + start?: number | string | undefined; + } + interface TimerifyOptions { + /** + * A histogram object created using + * `perf_hooks.createHistogram()` that will record runtime durations in + * nanoseconds. + */ + histogram?: RecordableHistogram | undefined; + } + interface Performance { + /** + * If name is not provided, removes all PerformanceMark objects from the Performance Timeline. + * If name is provided, removes only the named mark. + * @param name + */ + clearMarks(name?: string): void; + /** + * If name is not provided, removes all PerformanceMeasure objects from the Performance Timeline. + * If name is provided, removes only the named measure. 
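+ *
+ * A minimal usage sketch (illustrative; the measure name 'startup' is arbitrary):
+ *
+ * ```js
+ * const { performance } = require('node:perf_hooks');
+ *
+ * performance.mark('start');
+ * performance.measure('startup', 'start');
+ *
+ * performance.clearMeasures('startup'); // remove only the named measure
+ * performance.clearMeasures();          // remove all remaining measures
+ * ```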
+ * @param name + * @since v16.7.0 + */ + clearMeasures(name?: string): void; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime`. + * If you are only interested in performance entries of certain types or that have certain names, see + * `performance.getEntriesByType()` and `performance.getEntriesByName()`. + * @since v16.7.0 + */ + getEntries(): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime` + * whose `performanceEntry.name` is equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to `type`. + * @param name + * @param type + * @since v16.7.0 + */ + getEntriesByName(name: string, type?: EntryType): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime` + * whose `performanceEntry.entryType` is equal to `type`. + * @param type + * @since v16.7.0 + */ + getEntriesByType(type: EntryType): PerformanceEntry[]; + /** + * Creates a new PerformanceMark entry in the Performance Timeline. + * A PerformanceMark is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'mark', + * and whose performanceEntry.duration is always 0. + * Performance marks are used to mark specific significant moments in the Performance Timeline. + * @param name + * @return The PerformanceMark entry that was created + */ + mark(name?: string, options?: MarkOptions): PerformanceMark; + /** + * Creates a new PerformanceMeasure entry in the Performance Timeline. + * A PerformanceMeasure is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'measure', + * and whose performanceEntry.duration measures the number of milliseconds elapsed since startMark and endMark. + * + * The startMark argument may identify any existing PerformanceMark in the the Performance Timeline, or may identify + * any of the timestamp properties provided by the PerformanceNodeTiming class. If the named startMark does not exist, + * then startMark is set to timeOrigin by default. + * + * The endMark argument must identify any existing PerformanceMark in the the Performance Timeline or any of the timestamp + * properties provided by the PerformanceNodeTiming class. If the named endMark does not exist, an error will be thrown. + * @param name + * @param startMark + * @param endMark + * @return The PerformanceMeasure entry that was created + */ + measure(name: string, startMark?: string, endMark?: string): PerformanceMeasure; + measure(name: string, options: MeasureOptions): PerformanceMeasure; + /** + * An instance of the PerformanceNodeTiming class that provides performance metrics for specific Node.js operational milestones. + */ + readonly nodeTiming: PerformanceNodeTiming; + /** + * @return the current high resolution millisecond timestamp + */ + now(): number; + /** + * The timeOrigin specifies the high resolution millisecond timestamp from which all performance metric durations are measured. + */ + readonly timeOrigin: number; + /** + * Wraps a function within a new function that measures the running time of the wrapped function. + * A PerformanceObserver must be subscribed to the 'function' event type in order for the timing details to be accessed. 
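+ *
+ * A minimal sketch of the documented pattern (the wrapped function `someFunction` is a placeholder):
+ *
+ * ```js
+ * const { performance, PerformanceObserver } = require('node:perf_hooks');
+ *
+ * function someFunction() {}
+ * const wrapped = performance.timerify(someFunction);
+ *
+ * const obs = new PerformanceObserver((list) => {
+ * console.log(list.getEntries()[0].duration);
+ * obs.disconnect();
+ * });
+ * obs.observe({ entryTypes: ['function'] });
+ *
+ * // A performance timeline entry is created when wrapped() returns.
+ * wrapped();
+ * ```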
+ * @param fn
+ */
+ timerify<T extends (...params: any[]) => any>(fn: T, options?: TimerifyOptions): T;
+ /**
+ * eventLoopUtilization is similar to CPU utilization except that it is calculated using high precision wall-clock time.
+ * It represents the percentage of time the event loop has spent outside the event loop's event provider (e.g. epoll_wait).
+ * No other CPU idle time is taken into consideration.
+ */
+ eventLoopUtilization: EventLoopUtilityFunction;
+ }
+ interface PerformanceObserverEntryList {
+ /**
+ * Returns a list of `PerformanceEntry` objects in chronological order
+ * with respect to `performanceEntry.startTime`.
+ *
+ * ```js
+ * const {
+ * performance,
+ * PerformanceObserver,
+ * } = require('node:perf_hooks');
+ *
+ * const obs = new PerformanceObserver((perfObserverList, observer) => {
+ * console.log(perfObserverList.getEntries());
+ *
+ * * [
+ * * PerformanceEntry {
+ * * name: 'test',
+ * * entryType: 'mark',
+ * * startTime: 81.465639,
+ * * duration: 0,
+ * * detail: null
+ * * },
+ * * PerformanceEntry {
+ * * name: 'meow',
+ * * entryType: 'mark',
+ * * startTime: 81.860064,
+ * * duration: 0,
+ * * detail: null
+ * * }
+ * * ]
+ *
+ * performance.clearMarks();
+ * performance.clearMeasures();
+ * observer.disconnect();
+ * });
+ * obs.observe({ type: 'mark' });
+ *
+ * performance.mark('test');
+ * performance.mark('meow');
+ * ```
+ * @since v8.5.0
+ */
+ getEntries(): PerformanceEntry[];
+ /**
+ * Returns a list of `PerformanceEntry` objects in chronological order
+ * with respect to `performanceEntry.startTime` whose `performanceEntry.name` is
+ * equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to `type`.
+ *
+ * ```js
+ * const {
+ * performance,
+ * PerformanceObserver,
+ * } = require('node:perf_hooks');
+ *
+ * const obs = new PerformanceObserver((perfObserverList, observer) => {
+ * console.log(perfObserverList.getEntriesByName('meow'));
+ *
+ * * [
+ * * PerformanceEntry {
+ * * name: 'meow',
+ * * entryType: 'mark',
+ * * startTime: 98.545991,
+ * * duration: 0,
+ * * detail: null
+ * * }
+ * * ]
+ *
+ * console.log(perfObserverList.getEntriesByName('nope')); // []
+ *
+ * console.log(perfObserverList.getEntriesByName('test', 'mark'));
+ *
+ * * [
+ * * PerformanceEntry {
+ * * name: 'test',
+ * * entryType: 'mark',
+ * * startTime: 63.518931,
+ * * duration: 0,
+ * * detail: null
+ * * }
+ * * ]
+ *
+ * console.log(perfObserverList.getEntriesByName('test', 'measure')); // []
+ *
+ * performance.clearMarks();
+ * performance.clearMeasures();
+ * observer.disconnect();
+ * });
+ * obs.observe({ entryTypes: ['mark', 'measure'] });
+ *
+ * performance.mark('test');
+ * performance.mark('meow');
+ * ```
+ * @since v8.5.0
+ */
+ getEntriesByName(name: string, type?: EntryType): PerformanceEntry[];
+ /**
+ * Returns a list of `PerformanceEntry` objects in chronological order
+ * with respect to `performanceEntry.startTime` whose `performanceEntry.entryType` is equal to `type`.
+ *
+ * ```js
+ * const {
+ * performance,
+ * PerformanceObserver,
+ * } = require('node:perf_hooks');
+ *
+ * const obs = new PerformanceObserver((perfObserverList, observer) => {
+ * console.log(perfObserverList.getEntriesByType('mark'));
+ *
+ * * [
+ * * PerformanceEntry {
+ * * name: 'test',
+ * * entryType: 'mark',
+ * * startTime: 55.897834,
+ * * duration: 0,
+ * * detail: null
+ * * },
+ * * PerformanceEntry {
+ * * name: 'meow',
+ * * entryType: 'mark',
+ * * startTime: 56.350146,
+ * * duration: 0,
+ * * detail: null
+ * * }
+ * * ]
+ *
+ * performance.clearMarks();
+ * performance.clearMeasures();
+ * observer.disconnect();
+ * });
+ * obs.observe({ type: 'mark' });
+ *
+ * performance.mark('test');
+ * performance.mark('meow');
+ * ```
+ * @since v8.5.0
+ */
+ getEntriesByType(type: EntryType): PerformanceEntry[];
+ }
+ type PerformanceObserverCallback = (list: PerformanceObserverEntryList, observer: PerformanceObserver) => void;
+ /**
+ * @since v8.5.0
+ */
+ class PerformanceObserver extends AsyncResource {
+ constructor(callback: PerformanceObserverCallback);
+ /**
+ * Disconnects the `PerformanceObserver` instance from all notifications.
+ * @since v8.5.0
+ */
+ disconnect(): void;
+ /**
+ * Subscribes the `PerformanceObserver` instance to notifications of new `PerformanceEntry` instances identified either by `options.entryTypes` or `options.type`:
+ *
+ * ```js
+ * const {
+ * performance,
+ * PerformanceObserver,
+ * } = require('node:perf_hooks');
+ *
+ * const obs = new PerformanceObserver((list, observer) => {
+ * // Called once asynchronously. `list` contains three items.
+ * });
+ * obs.observe({ type: 'mark' });
+ *
+ * for (let n = 0; n < 3; n++)
+ * performance.mark(`test${n}`);
+ * ```
+ * @since v8.5.0
+ */
+ observe(
+ options:
+ | {
+ entryTypes: readonly EntryType[];
+ buffered?: boolean | undefined;
+ }
+ | {
+ type: EntryType;
+ buffered?: boolean | undefined;
+ },
+ ): void;
+ }
+ namespace constants {
+ const NODE_PERFORMANCE_GC_MAJOR: number;
+ const NODE_PERFORMANCE_GC_MINOR: number;
+ const NODE_PERFORMANCE_GC_INCREMENTAL: number;
+ const NODE_PERFORMANCE_GC_WEAKCB: number;
+ const NODE_PERFORMANCE_GC_FLAGS_NO: number;
+ const NODE_PERFORMANCE_GC_FLAGS_CONSTRUCT_RETAINED: number;
+ const NODE_PERFORMANCE_GC_FLAGS_FORCED: number;
+ const NODE_PERFORMANCE_GC_FLAGS_SYNCHRONOUS_PHANTOM_PROCESSING: number;
+ const NODE_PERFORMANCE_GC_FLAGS_ALL_AVAILABLE_GARBAGE: number;
+ const NODE_PERFORMANCE_GC_FLAGS_ALL_EXTERNAL_MEMORY: number;
+ const NODE_PERFORMANCE_GC_FLAGS_SCHEDULE_IDLE: number;
+ }
+ const performance: Performance;
+ interface EventLoopMonitorOptions {
+ /**
+ * The sampling rate in milliseconds.
+ * Must be greater than zero.
+ * @default 10
+ */
+ resolution?: number | undefined;
+ }
+ interface Histogram {
+ /**
+ * Returns a `Map` object detailing the accumulated percentile distribution.
+ * @since v11.10.0
+ */
+ readonly percentiles: Map<number, number>;
+ /**
+ * The number of times the event loop delay exceeded the maximum 1 hour event
+ * loop delay threshold.
+ * @since v11.10.0
+ */
+ readonly exceeds: number;
+ /**
+ * The minimum recorded event loop delay.
+ * @since v11.10.0
+ */
+ readonly min: number;
+ /**
+ * The maximum recorded event loop delay.
+ * @since v11.10.0
+ */
+ readonly max: number;
+ /**
+ * The mean of the recorded event loop delays.
+ * @since v11.10.0
+ */
+ readonly mean: number;
+ /**
+ * The standard deviation of the recorded event loop delays.
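+ *
+ * For example (an illustrative sketch; the one-second sampling window is arbitrary):
+ *
+ * ```js
+ * const { monitorEventLoopDelay } = require('node:perf_hooks');
+ *
+ * const h = monitorEventLoopDelay();
+ * h.enable();
+ * setTimeout(() => {
+ * h.disable();
+ * console.log(h.mean, h.stddev, h.percentile(99)); // nanoseconds
+ * }, 1000);
+ * ```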
+ * @since v11.10.0 + */ + readonly stddev: number; + /** + * Resets the collected histogram data. + * @since v11.10.0 + */ + reset(): void; + /** + * Returns the value at the given percentile. + * @since v11.10.0 + * @param percentile A percentile value in the range (0, 100]. + */ + percentile(percentile: number): number; + } + interface IntervalHistogram extends Histogram { + /** + * Enables the update interval timer. Returns `true` if the timer was + * started, `false` if it was already started. + * @since v11.10.0 + */ + enable(): boolean; + /** + * Disables the update interval timer. Returns `true` if the timer was + * stopped, `false` if it was already stopped. + * @since v11.10.0 + */ + disable(): boolean; + } + interface RecordableHistogram extends Histogram { + /** + * @since v15.9.0, v14.18.0 + * @param val The amount to record in the histogram. + */ + record(val: number | bigint): void; + /** + * Calculates the amount of time (in nanoseconds) that has passed since the + * previous call to `recordDelta()` and records that amount in the histogram. + * + * ## Examples + * @since v15.9.0, v14.18.0 + */ + recordDelta(): void; + /** + * Adds the values from `other` to this histogram. + * @since v17.4.0, v16.14.0 + */ + add(other: RecordableHistogram): void; + } + /** + * _This property is an extension by Node.js. It is not available in Web browsers._ + * + * Creates an `IntervalHistogram` object that samples and reports the event loop + * delay over time. The delays will be reported in nanoseconds. + * + * Using a timer to detect approximate event loop delay works because the + * execution of timers is tied specifically to the lifecycle of the libuv + * event loop. That is, a delay in the loop will cause a delay in the execution + * of the timer, and those delays are specifically what this API is intended to + * detect. + * + * ```js + * const { monitorEventLoopDelay } = require('node:perf_hooks'); + * const h = monitorEventLoopDelay({ resolution: 20 }); + * h.enable(); + * // Do something. + * h.disable(); + * console.log(h.min); + * console.log(h.max); + * console.log(h.mean); + * console.log(h.stddev); + * console.log(h.percentiles); + * console.log(h.percentile(50)); + * console.log(h.percentile(99)); + * ``` + * @since v11.10.0 + */ + function monitorEventLoopDelay(options?: EventLoopMonitorOptions): IntervalHistogram; + interface CreateHistogramOptions { + /** + * The minimum recordable value. Must be an integer value greater than 0. + * @default 1 + */ + min?: number | bigint | undefined; + /** + * The maximum recordable value. Must be an integer value greater than min. + * @default Number.MAX_SAFE_INTEGER + */ + max?: number | bigint | undefined; + /** + * The number of accuracy digits. Must be a number between 1 and 5. + * @default 3 + */ + figures?: number | undefined; + } + /** + * Returns a `RecordableHistogram`. + * @since v15.9.0, v14.18.0 + */ + function createHistogram(options?: CreateHistogramOptions): RecordableHistogram; + import { performance as _performance } from "perf_hooks"; + global { + /** + * `performance` is a global reference for `require('perf_hooks').performance` + * https://nodejs.org/api/globals.html#performance + * @since v16.0.0 + */ + var performance: typeof globalThis extends { + onmessage: any; + performance: infer T; + } ? 
T
+ : typeof _performance;
+ }
+}
+declare module "node:perf_hooks" {
+ export * from "perf_hooks";
+}
diff --git a/dist/node_modules/@types/node/process.d.ts b/dist/node_modules/@types/node/process.d.ts
new file mode 100644
index 0000000..d22308f
--- /dev/null
+++ b/dist/node_modules/@types/node/process.d.ts
@@ -0,0 +1,1561 @@
+declare module "process" {
+ import * as tty from "node:tty";
+ import { Worker } from "node:worker_threads";
+ global {
+ var process: NodeJS.Process;
+ namespace NodeJS {
+ // this namespace merge is here because these are specifically used
+ // as the type for process.stdin, process.stdout, and process.stderr.
+ // they can't live in tty.d.ts because we need to disambiguate the imported name.
+ interface ReadStream extends tty.ReadStream {}
+ interface WriteStream extends tty.WriteStream {}
+ interface MemoryUsageFn {
+ /**
+ * The `process.memoryUsage()` method iterates over each page to gather information about memory
+ * usage, which can be slow depending on the program memory allocations.
+ */
+ (): MemoryUsage;
+ /**
+ * Returns an integer representing the Resident Set Size (RSS) in bytes.
+ */
+ rss(): number;
+ }
+ interface MemoryUsage {
+ rss: number;
+ heapTotal: number;
+ heapUsed: number;
+ external: number;
+ arrayBuffers: number;
+ }
+ interface CpuUsage {
+ user: number;
+ system: number;
+ }
+ interface ProcessRelease {
+ name: string;
+ sourceUrl?: string | undefined;
+ headersUrl?: string | undefined;
+ libUrl?: string | undefined;
+ lts?: string | undefined;
+ }
+ interface ProcessVersions extends Dict<string> {
+ http_parser: string;
+ node: string;
+ v8: string;
+ ares: string;
+ uv: string;
+ zlib: string;
+ modules: string;
+ openssl: string;
+ }
+ type Platform =
+ | "aix"
+ | "android"
+ | "darwin"
+ | "freebsd"
+ | "haiku"
+ | "linux"
+ | "openbsd"
+ | "sunos"
+ | "win32"
+ | "cygwin"
+ | "netbsd";
+ type Architecture =
+ | "arm"
+ | "arm64"
+ | "ia32"
+ | "mips"
+ | "mipsel"
+ | "ppc"
+ | "ppc64"
+ | "riscv64"
+ | "s390"
+ | "s390x"
+ | "x64";
+ type Signals =
+ | "SIGABRT"
+ | "SIGALRM"
+ | "SIGBUS"
+ | "SIGCHLD"
+ | "SIGCONT"
+ | "SIGFPE"
+ | "SIGHUP"
+ | "SIGILL"
+ | "SIGINT"
+ | "SIGIO"
+ | "SIGIOT"
+ | "SIGKILL"
+ | "SIGPIPE"
+ | "SIGPOLL"
+ | "SIGPROF"
+ | "SIGPWR"
+ | "SIGQUIT"
+ | "SIGSEGV"
+ | "SIGSTKFLT"
+ | "SIGSTOP"
+ | "SIGSYS"
+ | "SIGTERM"
+ | "SIGTRAP"
+ | "SIGTSTP"
+ | "SIGTTIN"
+ | "SIGTTOU"
+ | "SIGUNUSED"
+ | "SIGURG"
+ | "SIGUSR1"
+ | "SIGUSR2"
+ | "SIGVTALRM"
+ | "SIGWINCH"
+ | "SIGXCPU"
+ | "SIGXFSZ"
+ | "SIGBREAK"
+ | "SIGLOST"
+ | "SIGINFO";
+ type UncaughtExceptionOrigin = "uncaughtException" | "unhandledRejection";
+ type MultipleResolveType = "resolve" | "reject";
+ type BeforeExitListener = (code: number) => void;
+ type DisconnectListener = () => void;
+ type ExitListener = (code: number) => void;
+ type RejectionHandledListener = (promise: Promise<unknown>) => void;
+ type UncaughtExceptionListener = (error: Error, origin: UncaughtExceptionOrigin) => void;
+ /**
+ * Most of the time the unhandledRejection will be an Error, but this should not be relied upon
+ * as *anything* can be thrown/rejected; it is therefore unsafe to assume that the value is an Error.
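+ *
+ * For instance (an illustrative sketch of a defensive handler):
+ *
+ * ```js
+ * process.on('unhandledRejection', (reason, promise) => {
+ * // `reason` can be any value, so guard before treating it as an Error.
+ * const message = reason instanceof Error ? reason.stack : String(reason);
+ * console.error('Unhandled rejection at:', promise, 'reason:', message);
+ * });
+ * ```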
+ */
+ type UnhandledRejectionListener = (reason: unknown, promise: Promise<unknown>) => void;
+ type WarningListener = (warning: Error) => void;
+ type MessageListener = (message: unknown, sendHandle: unknown) => void;
+ type SignalsListener = (signal: Signals) => void;
+ type MultipleResolveListener = (
+ type: MultipleResolveType,
+ promise: Promise<unknown>,
+ value: unknown,
+ ) => void;
+ type WorkerListener = (worker: Worker) => void;
+ interface Socket extends ReadWriteStream {
+ isTTY?: true | undefined;
+ }
+ // Alias for compatibility
+ interface ProcessEnv extends Dict<string> {
+ /**
+ * Can be used to change the default timezone at runtime
+ */
+ TZ?: string;
+ }
+ interface HRTime {
+ (time?: [number, number]): [number, number];
+ bigint(): bigint;
+ }
+ interface ProcessReport {
+ /**
+ * Directory where the report is written.
+ * @default '' indicating that reports are written to the current
+ * working directory of the Node.js process.
+ */
+ directory: string;
+ /**
+ * Filename where the report is written.
+ * The default value is the empty string.
+ * @default '' in which case the output filename will be comprised of a timestamp,
+ * PID, and sequence number.
+ */
+ filename: string;
+ /**
+ * Returns a JSON-formatted diagnostic report for the running process.
+ * The report's JavaScript stack trace is taken from err, if present.
+ */
+ getReport(err?: Error): string;
+ /**
+ * If true, a diagnostic report is generated on fatal errors,
+ * such as out of memory errors or failed C++ assertions.
+ * @default false
+ */
+ reportOnFatalError: boolean;
+ /**
+ * If true, a diagnostic report is generated when the process
+ * receives the signal specified by process.report.signal.
+ * @default false
+ */
+ reportOnSignal: boolean;
+ /**
+ * If true, a diagnostic report is generated on uncaught exception.
+ * @default false
+ */
+ reportOnUncaughtException: boolean;
+ /**
+ * The signal used to trigger the creation of a diagnostic report.
+ * @default 'SIGUSR2'
+ */
+ signal: Signals;
+ /**
+ * Writes a diagnostic report to a file. If filename is not provided, the default filename
+ * includes the date, time, PID, and a sequence number.
+ * The report's JavaScript stack trace is taken from err, if present.
+ *
+ * @param fileName Name of the file where the report is written.
+ * This should be a relative path that will be appended to the directory specified in
+ * `process.report.directory`, or the current working directory of the Node.js process,
+ * if unspecified.
+ * @param error A custom error used for reporting the JavaScript stack.
+ * @return Filename of the generated report.
+ */
+ writeReport(fileName?: string): string;
+ writeReport(error?: Error): string;
+ writeReport(fileName?: string, err?: Error): string;
+ }
+ interface ResourceUsage {
+ fsRead: number;
+ fsWrite: number;
+ involuntaryContextSwitches: number;
+ ipcReceived: number;
+ ipcSent: number;
+ majorPageFault: number;
+ maxRSS: number;
+ minorPageFault: number;
+ sharedMemorySize: number;
+ signalsCount: number;
+ swappedOut: number;
+ systemCPUTime: number;
+ unsharedDataSize: number;
+ unsharedStackSize: number;
+ userCPUTime: number;
+ voluntaryContextSwitches: number;
+ }
+ interface EmitWarningOptions {
+ /**
+ * When `warning` is a `string`, `type` is the name to use for the _type_ of warning being emitted.
+ *
+ * @default 'Warning'
+ */
+ type?: string | undefined;
+ /**
+ * A unique identifier for the warning instance being emitted.
+ */ + code?: string | undefined; + /** + * When `warning` is a `string`, `ctor` is an optional function used to limit the generated stack trace. + * + * @default process.emitWarning + */ + ctor?: Function | undefined; + /** + * Additional text to include with the error. + */ + detail?: string | undefined; + } + interface ProcessConfig { + readonly target_defaults: { + readonly cflags: any[]; + readonly default_configuration: string; + readonly defines: string[]; + readonly include_dirs: string[]; + readonly libraries: string[]; + }; + readonly variables: { + readonly clang: number; + readonly host_arch: string; + readonly node_install_npm: boolean; + readonly node_install_waf: boolean; + readonly node_prefix: string; + readonly node_shared_openssl: boolean; + readonly node_shared_v8: boolean; + readonly node_shared_zlib: boolean; + readonly node_use_dtrace: boolean; + readonly node_use_etw: boolean; + readonly node_use_openssl: boolean; + readonly target_arch: string; + readonly v8_no_strict_aliasing: number; + readonly v8_use_snapshot: boolean; + readonly visibility: string; + }; + } + interface Process extends EventEmitter { + /** + * The `process.stdout` property returns a stream connected to`stdout` (fd `1`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `1` refers to a file, in which case it is + * a `Writable` stream. + * + * For example, to copy `process.stdin` to `process.stdout`: + * + * ```js + * import { stdin, stdout } from 'node:process'; + * + * stdin.pipe(stdout); + * ``` + * + * `process.stdout` differs from other Node.js streams in important ways. See `note on process I/O` for more information. + */ + stdout: WriteStream & { + fd: 1; + }; + /** + * The `process.stderr` property returns a stream connected to`stderr` (fd `2`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `2` refers to a file, in which case it is + * a `Writable` stream. + * + * `process.stderr` differs from other Node.js streams in important ways. See `note on process I/O` for more information. + */ + stderr: WriteStream & { + fd: 2; + }; + /** + * The `process.stdin` property returns a stream connected to`stdin` (fd `0`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `0` refers to a file, in which case it is + * a `Readable` stream. + * + * For details of how to read from `stdin` see `readable.read()`. + * + * As a `Duplex` stream, `process.stdin` can also be used in "old" mode that + * is compatible with scripts written for Node.js prior to v0.10\. + * For more information see `Stream compatibility`. + * + * In "old" streams mode the `stdin` stream is paused by default, so one + * must call `process.stdin.resume()` to read from it. Note also that calling`process.stdin.resume()` itself would switch stream to "old" mode. + */ + stdin: ReadStream & { + fd: 0; + }; + openStdin(): Socket; + /** + * The `process.argv` property returns an array containing the command-line + * arguments passed when the Node.js process was launched. The first element will + * be {@link execPath}. See `process.argv0` if access to the original value + * of `argv[0]` is needed. The second element will be the path to the JavaScript + * file being executed. The remaining elements will be any additional command-line + * arguments. 
+ * + * For example, assuming the following script for `process-args.js`: + * + * ```js + * import { argv } from 'node:process'; + * + * // print process.argv + * argv.forEach((val, index) => { + * console.log(`${index}: ${val}`); + * }); + * ``` + * + * Launching the Node.js process as: + * + * ```bash + * node process-args.js one two=three four + * ``` + * + * Would generate the output: + * + * ```text + * 0: /usr/local/bin/node + * 1: /Users/mjr/work/node/process-args.js + * 2: one + * 3: two=three + * 4: four + * ``` + * @since v0.1.27 + */ + argv: string[]; + /** + * The `process.argv0` property stores a read-only copy of the original value of`argv[0]` passed when Node.js starts. + * + * ```console + * $ bash -c 'exec -a customArgv0 ./node' + * > process.argv[0] + * '/Volumes/code/external/node/out/Release/node' + * > process.argv0 + * 'customArgv0' + * ``` + * @since v6.4.0 + */ + argv0: string; + /** + * The `process.execArgv` property returns the set of Node.js-specific command-line + * options passed when the Node.js process was launched. These options do not + * appear in the array returned by the {@link argv} property, and do not + * include the Node.js executable, the name of the script, or any options following + * the script name. These options are useful in order to spawn child processes with + * the same execution environment as the parent. + * + * ```bash + * node --harmony script.js --version + * ``` + * + * Results in `process.execArgv`: + * + * ```js + * ['--harmony'] + * ``` + * + * And `process.argv`: + * + * ```js + * ['/usr/local/bin/node', 'script.js', '--version'] + * ``` + * + * Refer to `Worker constructor` for the detailed behavior of worker + * threads with this property. + * @since v0.7.7 + */ + execArgv: string[]; + /** + * The `process.execPath` property returns the absolute pathname of the executable + * that started the Node.js process. Symbolic links, if any, are resolved. + * + * ```js + * '/usr/local/bin/node' + * ``` + * @since v0.1.100 + */ + execPath: string; + /** + * The `process.abort()` method causes the Node.js process to exit immediately and + * generate a core file. + * + * This feature is not available in `Worker` threads. + * @since v0.7.0 + */ + abort(): never; + /** + * The `process.chdir()` method changes the current working directory of the + * Node.js process or throws an exception if doing so fails (for instance, if + * the specified `directory` does not exist). + * + * ```js + * import { chdir, cwd } from 'node:process'; + * + * console.log(`Starting directory: ${cwd()}`); + * try { + * chdir('/tmp'); + * console.log(`New directory: ${cwd()}`); + * } catch (err) { + * console.error(`chdir: ${err}`); + * } + * ``` + * + * This feature is not available in `Worker` threads. + * @since v0.1.17 + */ + chdir(directory: string): void; + /** + * The `process.cwd()` method returns the current working directory of the Node.js + * process. + * + * ```js + * import { cwd } from 'node:process'; + * + * console.log(`Current directory: ${cwd()}`); + * ``` + * @since v0.1.8 + */ + cwd(): string; + /** + * The port used by the Node.js debugger when enabled. + * + * ```js + * import process from 'node:process'; + * + * process.debugPort = 5858; + * ``` + * @since v0.7.2 + */ + debugPort: number; + /** + * The `process.emitWarning()` method can be used to emit custom or application + * specific process warnings. These can be listened for by adding a handler to the `'warning'` event. 
+ * + * ```js + * import { emitWarning } from 'node:process'; + * + * // Emit a warning with a code and additional detail. + * emitWarning('Something happened!', { + * code: 'MY_WARNING', + * detail: 'This is some additional information', + * }); + * // Emits: + * // (node:56338) [MY_WARNING] Warning: Something happened! + * // This is some additional information + * ``` + * + * In this example, an `Error` object is generated internally by`process.emitWarning()` and passed through to the `'warning'` handler. + * + * ```js + * import process from 'node:process'; + * + * process.on('warning', (warning) => { + * console.warn(warning.name); // 'Warning' + * console.warn(warning.message); // 'Something happened!' + * console.warn(warning.code); // 'MY_WARNING' + * console.warn(warning.stack); // Stack trace + * console.warn(warning.detail); // 'This is some additional information' + * }); + * ``` + * + * If `warning` is passed as an `Error` object, the `options` argument is ignored. + * @since v8.0.0 + * @param warning The warning to emit. + */ + emitWarning(warning: string | Error, ctor?: Function): void; + emitWarning(warning: string | Error, type?: string, ctor?: Function): void; + emitWarning(warning: string | Error, type?: string, code?: string, ctor?: Function): void; + emitWarning(warning: string | Error, options?: EmitWarningOptions): void; + /** + * The `process.env` property returns an object containing the user environment. + * See [`environ(7)`](http://man7.org/linux/man-pages/man7/environ.7.html). + * + * An example of this object looks like: + * + * ```js + * { + * TERM: 'xterm-256color', + * SHELL: '/usr/local/bin/bash', + * USER: 'maciej', + * PATH: '~/.bin/:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin', + * PWD: '/Users/maciej', + * EDITOR: 'vim', + * SHLVL: '1', + * HOME: '/Users/maciej', + * LOGNAME: 'maciej', + * _: '/usr/local/bin/node' + * } + * ``` + * + * It is possible to modify this object, but such modifications will not be + * reflected outside the Node.js process, or (unless explicitly requested) + * to other `Worker` threads. + * In other words, the following example would not work: + * + * ```bash + * node -e 'process.env.foo = "bar"' && echo $foo + * ``` + * + * While the following will: + * + * ```js + * import { env } from 'node:process'; + * + * env.foo = 'bar'; + * console.log(env.foo); + * ``` + * + * Assigning a property on `process.env` will implicitly convert the value + * to a string. **This behavior is deprecated.** Future versions of Node.js may + * throw an error when the value is not a string, number, or boolean. + * + * ```js + * import { env } from 'node:process'; + * + * env.test = null; + * console.log(env.test); + * // => 'null' + * env.test = undefined; + * console.log(env.test); + * // => 'undefined' + * ``` + * + * Use `delete` to delete a property from `process.env`. + * + * ```js + * import { env } from 'node:process'; + * + * env.TEST = 1; + * delete env.TEST; + * console.log(env.TEST); + * // => undefined + * ``` + * + * On Windows operating systems, environment variables are case-insensitive. + * + * ```js + * import { env } from 'node:process'; + * + * env.TEST = 1; + * console.log(env.test); + * // => 1 + * ``` + * + * Unless explicitly specified when creating a `Worker` instance, + * each `Worker` thread has its own copy of `process.env`, based on its + * parent thread's `process.env`, or whatever was specified as the `env` option + * to the `Worker` constructor. 
Changes to `process.env` will not be visible + * across `Worker` threads, and only the main thread can make changes that + * are visible to the operating system or to native add-ons. On Windows, a copy of`process.env` on a `Worker` instance operates in a case-sensitive manner + * unlike the main thread. + * @since v0.1.27 + */ + env: ProcessEnv; + /** + * The `process.exit()` method instructs Node.js to terminate the process + * synchronously with an exit status of `code`. If `code` is omitted, exit uses + * either the 'success' code `0` or the value of `process.exitCode` if it has been + * set. Node.js will not terminate until all the `'exit'` event listeners are + * called. + * + * To exit with a 'failure' code: + * + * ```js + * import { exit } from 'node:process'; + * + * exit(1); + * ``` + * + * The shell that executed Node.js should see the exit code as `1`. + * + * Calling `process.exit()` will force the process to exit as quickly as possible + * even if there are still asynchronous operations pending that have not yet + * completed fully, including I/O operations to `process.stdout` and`process.stderr`. + * + * In most situations, it is not actually necessary to call `process.exit()`explicitly. The Node.js process will exit on its own _if there is no additional_ + * _work pending_ in the event loop. The `process.exitCode` property can be set to + * tell the process which exit code to use when the process exits gracefully. + * + * For instance, the following example illustrates a _misuse_ of the`process.exit()` method that could lead to data printed to stdout being + * truncated and lost: + * + * ```js + * import { exit } from 'node:process'; + * + * // This is an example of what *not* to do: + * if (someConditionNotMet()) { + * printUsageToStdout(); + * exit(1); + * } + * ``` + * + * The reason this is problematic is because writes to `process.stdout` in Node.js + * are sometimes _asynchronous_ and may occur over multiple ticks of the Node.js + * event loop. Calling `process.exit()`, however, forces the process to exit _before_ those additional writes to `stdout` can be performed. + * + * Rather than calling `process.exit()` directly, the code _should_ set the`process.exitCode` and allow the process to exit naturally by avoiding + * scheduling any additional work for the event loop: + * + * ```js + * import process from 'node:process'; + * + * // How to properly set the exit code while letting + * // the process exit gracefully. + * if (someConditionNotMet()) { + * printUsageToStdout(); + * process.exitCode = 1; + * } + * ``` + * + * If it is necessary to terminate the Node.js process due to an error condition, + * throwing an _uncaught_ error and allowing the process to terminate accordingly + * is safer than calling `process.exit()`. + * + * In `Worker` threads, this function stops the current thread rather + * than the current process. + * @since v0.1.13 + * @param [code=0] The exit code. For string type, only integer strings (e.g.,'1') are allowed. + */ + exit(code?: number): never; + /** + * A number which will be the process exit code, when the process either + * exits gracefully, or is exited via {@link exit} without specifying + * a code. + * + * Specifying a code to {@link exit} will override any + * previous setting of `process.exitCode`. + * @since v0.11.8 + */ + exitCode?: number | undefined; + /** + * The `process.getgid()` method returns the numerical group identity of the + * process. (See [`getgid(2)`](http://man7.org/linux/man-pages/man2/getgid.2.html).) 
+ * + * ```js + * import process from 'process'; + * + * if (process.getgid) { + * console.log(`Current gid: ${process.getgid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.1.31 + */ + getgid?: () => number; + /** + * The `process.setgid()` method sets the group identity of the process. (See [`setgid(2)`](http://man7.org/linux/man-pages/man2/setgid.2.html).) The `id` can be passed as either a + * numeric ID or a group name + * string. If a group name is specified, this method blocks while resolving the + * associated numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.getgid && process.setgid) { + * console.log(`Current gid: ${process.getgid()}`); + * try { + * process.setgid(501); + * console.log(`New gid: ${process.getgid()}`); + * } catch (err) { + * console.log(`Failed to set gid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.1.31 + * @param id The group name or ID + */ + setgid?: (id: number | string) => void; + /** + * The `process.getuid()` method returns the numeric user identity of the process. + * (See [`getuid(2)`](http://man7.org/linux/man-pages/man2/getuid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.getuid) { + * console.log(`Current uid: ${process.getuid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.1.28 + */ + getuid?: () => number; + /** + * The `process.setuid(id)` method sets the user identity of the process. (See [`setuid(2)`](http://man7.org/linux/man-pages/man2/setuid.2.html).) The `id` can be passed as either a + * numeric ID or a username string. + * If a username is specified, the method blocks while resolving the associated + * numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.getuid && process.setuid) { + * console.log(`Current uid: ${process.getuid()}`); + * try { + * process.setuid(501); + * console.log(`New uid: ${process.getuid()}`); + * } catch (err) { + * console.log(`Failed to set uid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.1.28 + */ + setuid?: (id: number | string) => void; + /** + * The `process.geteuid()` method returns the numerical effective user identity of + * the process. (See [`geteuid(2)`](http://man7.org/linux/man-pages/man2/geteuid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.geteuid) { + * console.log(`Current uid: ${process.geteuid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v2.0.0 + */ + geteuid?: () => number; + /** + * The `process.seteuid()` method sets the effective user identity of the process. + * (See [`seteuid(2)`](http://man7.org/linux/man-pages/man2/seteuid.2.html).) The `id` can be passed as either a numeric ID or a username + * string. If a username is specified, the method blocks while resolving the + * associated numeric ID. 
+ *
+ * ```js
+ * import process from 'process';
+ *
+ * if (process.geteuid && process.seteuid) {
+ * console.log(`Current uid: ${process.geteuid()}`);
+ * try {
+ * process.seteuid(501);
+ * console.log(`New uid: ${process.geteuid()}`);
+ * } catch (err) {
+ * console.log(`Failed to set uid: ${err}`);
+ * }
+ * }
+ * ```
+ *
+ * This function is only available on POSIX platforms (i.e. not Windows or
+ * Android).
+ * This feature is not available in `Worker` threads.
+ * @since v2.0.0
+ * @param id A user name or ID
+ */
+ seteuid?: (id: number | string) => void;
+ /**
+ * The `process.getegid()` method returns the numerical effective group identity
+ * of the Node.js process. (See [`getegid(2)`](http://man7.org/linux/man-pages/man2/getegid.2.html).)
+ *
+ * ```js
+ * import process from 'process';
+ *
+ * if (process.getegid) {
+ * console.log(`Current gid: ${process.getegid()}`);
+ * }
+ * ```
+ *
+ * This function is only available on POSIX platforms (i.e. not Windows or
+ * Android).
+ * @since v2.0.0
+ */
+ getegid?: () => number;
+ /**
+ * The `process.setegid()` method sets the effective group identity of the process.
+ * (See [`setegid(2)`](http://man7.org/linux/man-pages/man2/setegid.2.html).) The `id` can be passed as either a numeric ID or a group
+ * name string. If a group name is specified, this method blocks while resolving
+ * the associated numeric ID.
+ *
+ * ```js
+ * import process from 'process';
+ *
+ * if (process.getegid && process.setegid) {
+ * console.log(`Current gid: ${process.getegid()}`);
+ * try {
+ * process.setegid(501);
+ * console.log(`New gid: ${process.getegid()}`);
+ * } catch (err) {
+ * console.log(`Failed to set gid: ${err}`);
+ * }
+ * }
+ * ```
+ *
+ * This function is only available on POSIX platforms (i.e. not Windows or
+ * Android).
+ * This feature is not available in `Worker` threads.
+ * @since v2.0.0
+ * @param id A group name or ID
+ */
+ setegid?: (id: number | string) => void;
+ /**
+ * The `process.getgroups()` method returns an array with the supplementary group
+ * IDs. POSIX leaves it unspecified if the effective group ID is included but
+ * Node.js ensures it always is.
+ *
+ * ```js
+ * import process from 'process';
+ *
+ * if (process.getgroups) {
+ * console.log(process.getgroups()); // [ 16, 21, 297 ]
+ * }
+ * ```
+ *
+ * This function is only available on POSIX platforms (i.e. not Windows or
+ * Android).
+ * @since v0.9.4
+ */
+ getgroups?: () => number[];
+ /**
+ * The `process.setgroups()` method sets the supplementary group IDs for the
+ * Node.js process. This is a privileged operation that requires the Node.js
+ * process to have `root` or the `CAP_SETGID` capability.
+ *
+ * The `groups` array can contain numeric group IDs, group names, or both.
+ *
+ * ```js
+ * import process from 'process';
+ *
+ * if (process.getgroups && process.setgroups) {
+ * try {
+ * process.setgroups([501]);
+ * console.log(process.getgroups()); // new groups
+ * } catch (err) {
+ * console.log(`Failed to set groups: ${err}`);
+ * }
+ * }
+ * ```
+ *
+ * This function is only available on POSIX platforms (i.e. not Windows or
+ * Android).
+ * This feature is not available in `Worker` threads.
+ * @since v0.9.4
+ */
+ setgroups?: (groups: ReadonlyArray<string | number>) => void;
+ /**
+ * The `process.setUncaughtExceptionCaptureCallback()` function sets a function
+ * that will be invoked when an uncaught exception occurs, which will receive the
+ * exception value itself as its first argument.
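+ *
+ * A minimal illustrative sketch:
+ *
+ * ```js
+ * import process from 'node:process';
+ *
+ * process.setUncaughtExceptionCaptureCallback((err) => {
+ * console.error('Captured exception:', err);
+ * });
+ *
+ * // Later, restore the default 'uncaughtException' behaviour:
+ * process.setUncaughtExceptionCaptureCallback(null);
+ * ```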
+ * + * If such a function is set, the `'uncaughtException'` event will + * not be emitted. If `--abort-on-uncaught-exception` was passed from the + * command line or set through `v8.setFlagsFromString()`, the process will + * not abort. Actions configured to take place on exceptions such as report + * generations will be affected too + * + * To unset the capture function,`process.setUncaughtExceptionCaptureCallback(null)` may be used. Calling this + * method with a non-`null` argument while another capture function is set will + * throw an error. + * + * Using this function is mutually exclusive with using the deprecated `domain` built-in module. + * @since v9.3.0 + */ + setUncaughtExceptionCaptureCallback(cb: ((err: Error) => void) | null): void; + /** + * Indicates whether a callback has been set using {@link setUncaughtExceptionCaptureCallback}. + * @since v9.3.0 + */ + hasUncaughtExceptionCaptureCallback(): boolean; + /** + * The `process.sourceMapsEnabled` property returns whether the [Source Map v3](https://sourcemaps.info/spec.html) support for stack traces is enabled. + * @since v20.7.0 + * @experimental + */ + readonly sourceMapsEnabled: boolean; + /** + * This function enables or disables the [Source Map v3](https://sourcemaps.info/spec.html) support for + * stack traces. + * + * It provides same features as launching Node.js process with commandline options`--enable-source-maps`. + * + * Only source maps in JavaScript files that are loaded after source maps has been + * enabled will be parsed and loaded. + * @since v16.6.0, v14.18.0 + * @experimental + */ + setSourceMapsEnabled(value: boolean): void; + /** + * The `process.version` property contains the Node.js version string. + * + * ```js + * import { version } from 'node:process'; + * + * console.log(`Version: ${version}`); + * // Version: v14.8.0 + * ``` + * + * To get the version string without the prepended _v_, use`process.versions.node`. + * @since v0.1.3 + */ + readonly version: string; + /** + * The `process.versions` property returns an object listing the version strings of + * Node.js and its dependencies. `process.versions.modules` indicates the current + * ABI version, which is increased whenever a C++ API changes. Node.js will refuse + * to load modules that were compiled against a different module ABI version. + * + * ```js + * import { versions } from 'node:process'; + * + * console.log(versions); + * ``` + * + * Will generate an object similar to: + * + * ```console + * { node: '20.2.0', + * acorn: '8.8.2', + * ada: '2.4.0', + * ares: '1.19.0', + * base64: '0.5.0', + * brotli: '1.0.9', + * cjs_module_lexer: '1.2.2', + * cldr: '43.0', + * icu: '73.1', + * llhttp: '8.1.0', + * modules: '115', + * napi: '8', + * nghttp2: '1.52.0', + * nghttp3: '0.7.0', + * ngtcp2: '0.8.1', + * openssl: '3.0.8+quic', + * simdutf: '3.2.9', + * tz: '2023c', + * undici: '5.22.0', + * unicode: '15.0', + * uv: '1.44.2', + * uvwasi: '0.0.16', + * v8: '11.3.244.8-node.9', + * zlib: '1.2.13' } + * ``` + * @since v0.2.0 + */ + readonly versions: ProcessVersions; + /** + * The `process.config` property returns a frozen `Object` containing the + * JavaScript representation of the configure options used to compile the current + * Node.js executable. This is the same as the `config.gypi` file that was produced + * when running the `./configure` script. 
+ * + * An example of the possible output looks like: + * + * ```js + * { + * target_defaults: + * { cflags: [], + * default_configuration: 'Release', + * defines: [], + * include_dirs: [], + * libraries: [] }, + * variables: + * { + * host_arch: 'x64', + * napi_build_version: 5, + * node_install_npm: 'true', + * node_prefix: '', + * node_shared_cares: 'false', + * node_shared_http_parser: 'false', + * node_shared_libuv: 'false', + * node_shared_zlib: 'false', + * node_use_openssl: 'true', + * node_shared_openssl: 'false', + * strict_aliasing: 'true', + * target_arch: 'x64', + * v8_use_snapshot: 1 + * } + * } + * ``` + * @since v0.7.7 + */ + readonly config: ProcessConfig; + /** + * The `process.kill()` method sends the `signal` to the process identified by`pid`. + * + * Signal names are strings such as `'SIGINT'` or `'SIGHUP'`. See `Signal Events` and [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) for more information. + * + * This method will throw an error if the target `pid` does not exist. As a special + * case, a signal of `0` can be used to test for the existence of a process. + * Windows platforms will throw an error if the `pid` is used to kill a process + * group. + * + * Even though the name of this function is `process.kill()`, it is really just a + * signal sender, like the `kill` system call. The signal sent may do something + * other than kill the target process. + * + * ```js + * import process, { kill } from 'node:process'; + * + * process.on('SIGHUP', () => { + * console.log('Got SIGHUP signal.'); + * }); + * + * setTimeout(() => { + * console.log('Exiting.'); + * process.exit(0); + * }, 100); + * + * kill(process.pid, 'SIGHUP'); + * ``` + * + * When `SIGUSR1` is received by a Node.js process, Node.js will start the + * debugger. See `Signal Events`. + * @since v0.0.6 + * @param pid A process ID + * @param [signal='SIGTERM'] The signal to send, either as a string or number. + */ + kill(pid: number, signal?: string | number): true; + /** + * The `process.pid` property returns the PID of the process. + * + * ```js + * import { pid } from 'node:process'; + * + * console.log(`This process is pid ${pid}`); + * ``` + * @since v0.1.15 + */ + readonly pid: number; + /** + * The `process.ppid` property returns the PID of the parent of the + * current process. + * + * ```js + * import { ppid } from 'node:process'; + * + * console.log(`The parent process is pid ${ppid}`); + * ``` + * @since v9.2.0, v8.10.0, v6.13.0 + */ + readonly ppid: number; + /** + * The `process.title` property returns the current process title (i.e. returns + * the current value of `ps`). Assigning a new value to `process.title` modifies + * the current value of `ps`. + * + * When a new value is assigned, different platforms will impose different maximum + * length restrictions on the title. Usually such restrictions are quite limited. + * For instance, on Linux and macOS, `process.title` is limited to the size of the + * binary name plus the length of the command-line arguments because setting the`process.title` overwrites the `argv` memory of the process. Node.js v0.8 + * allowed for longer process title strings by also overwriting the `environ`memory but that was potentially insecure and confusing in some (rather obscure) + * cases. + * + * Assigning a value to `process.title` might not result in an accurate label + * within process manager applications such as macOS Activity Monitor or Windows + * Services Manager. 
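+ *
+ * A short illustrative sketch (the title string is an arbitrary example value):
+ *
+ * ```js
+ * import process from 'node:process';
+ *
+ * console.log(`Current title: ${process.title}`);
+ * process.title = 'my-worker'; // subject to the platform length limits described above
+ * ```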
+ * @since v0.1.104
+ */
+ title: string;
+ /**
+ * The operating system CPU architecture for which the Node.js binary was compiled.
+ * Possible values are: `'arm'`, `'arm64'`, `'ia32'`, `'loong64'`, `'mips'`, `'mipsel'`, `'ppc'`, `'ppc64'`, `'riscv64'`, `'s390'`, `'s390x'`, and `'x64'`.
+ *
+ * ```js
+ * import { arch } from 'node:process';
+ *
+ * console.log(`This processor architecture is ${arch}`);
+ * ```
+ * @since v0.5.0
+ */
+ readonly arch: Architecture;
+ /**
+ * The `process.platform` property returns a string identifying the operating
+ * system platform for which the Node.js binary was compiled.
+ *
+ * Currently possible values are:
+ *
+ * * `'aix'`
+ * * `'darwin'`
+ * * `'freebsd'`
+ * * `'linux'`
+ * * `'openbsd'`
+ * * `'sunos'`
+ * * `'win32'`
+ *
+ * ```js
+ * import { platform } from 'node:process';
+ *
+ * console.log(`This platform is ${platform}`);
+ * ```
+ *
+ * The value `'android'` may also be returned if Node.js is built on the
+ * Android operating system. However, Android support in Node.js [is experimental](https://github.com/nodejs/node/blob/HEAD/BUILDING.md#androidandroid-based-devices-eg-firefox-os).
+ * @since v0.1.16
+ */
+ readonly platform: Platform;
+ /**
+ * The `process.mainModule` property provides an alternative way of retrieving `require.main`. The difference is that if the main module changes at
+ * runtime, `require.main` may still refer to the original main module in
+ * modules that were required before the change occurred. Generally, it's
+ * safe to assume that the two refer to the same module.
+ *
+ * As with `require.main`, `process.mainModule` will be `undefined` if there
+ * is no entry script.
+ * @since v0.1.17
+ * @deprecated Since v14.0.0 - Use `require.main` instead.
+ */
+ mainModule?: Module | undefined;
+ memoryUsage: MemoryUsageFn;
+ /**
+ * Gets the amount of memory available to the process (in bytes) based on
+ * limits imposed by the OS. If there is no such constraint, or the constraint
+ * is unknown, `undefined` is returned.
+ *
+ * See [`uv_get_constrained_memory`](https://docs.libuv.org/en/v1.x/misc.html#c.uv_get_constrained_memory) for more
+ * information.
+ * @since v19.6.0, v18.15.0
+ * @experimental
+ */
+ constrainedMemory(): number | undefined;
+ /**
+ * The `process.cpuUsage()` method returns the user and system CPU time usage of
+ * the current process, in an object with properties `user` and `system`, whose
+ * values are microsecond values (millionth of a second). These values measure time
+ * spent in user and system code respectively, and may end up being greater than
+ * actual elapsed time if multiple CPU cores are performing work for this process.
+ *
+ * The result of a previous call to `process.cpuUsage()` can be passed as the
+ * argument to the function, to get a diff reading.
+ *
+ * ```js
+ * import { cpuUsage } from 'node:process';
+ *
+ * const startUsage = cpuUsage();
+ * // { user: 38579, system: 6986 }
+ *
+ * // spin the CPU for 500 milliseconds
+ * const now = Date.now();
+ * while (Date.now() - now < 500);
+ *
+ * console.log(cpuUsage(startUsage));
+ * // { user: 514883, system: 11226 }
+ * ```
+ * @since v6.1.0
+ * @param previousValue A previous return value from calling `process.cpuUsage()`
+ */
+ cpuUsage(previousValue?: CpuUsage): CpuUsage;
+ /**
+ * `process.nextTick()` adds `callback` to the "next tick queue". This queue is
+ * fully drained after the current operation on the JavaScript stack runs to
+ * completion and before the event loop is allowed to continue.
It's possible to + * create an infinite loop if one were to recursively call `process.nextTick()`. + * See the [Event Loop](https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick/#process-nexttick) guide for more background. + * + * ```js + * import { nextTick } from 'node:process'; + * + * console.log('start'); + * nextTick(() => { + * console.log('nextTick callback'); + * }); + * console.log('scheduled'); + * // Output: + * // start + * // scheduled + * // nextTick callback + * ``` + * + * This is important when developing APIs in order to give users the opportunity + * to assign event handlers _after_ an object has been constructed but before any + * I/O has occurred: + * + * ```js + * import { nextTick } from 'node:process'; + * + * function MyThing(options) { + * this.setupOptions(options); + * + * nextTick(() => { + * this.startDoingStuff(); + * }); + * } + * + * const thing = new MyThing(); + * thing.getReadyForStuff(); + * + * // thing.startDoingStuff() gets called now, not before. + * ``` + * + * It is very important for APIs to be either 100% synchronous or 100% + * asynchronous. Consider this example: + * + * ```js + * // WARNING! DO NOT USE! BAD UNSAFE HAZARD! + * function maybeSync(arg, cb) { + * if (arg) { + * cb(); + * return; + * } + * + * fs.stat('file', cb); + * } + * ``` + * + * This API is hazardous because in the following case: + * + * ```js + * const maybeTrue = Math.random() > 0.5; + * + * maybeSync(maybeTrue, () => { + * foo(); + * }); + * + * bar(); + * ``` + * + * It is not clear whether `foo()` or `bar()` will be called first. + * + * The following approach is much better: + * + * ```js + * import { nextTick } from 'node:process'; + * + * function definitelyAsync(arg, cb) { + * if (arg) { + * nextTick(cb); + * return; + * } + * + * fs.stat('file', cb); + * } + * ``` + * @since v0.1.26 + * @param args Additional arguments to pass when invoking the `callback` + */ + nextTick(callback: Function, ...args: any[]): void; + /** + * The `process.release` property returns an `Object` containing metadata related + * to the current release, including URLs for the source tarball and headers-only + * tarball. + * + * `process.release` contains the following properties: + * + * ```js + * { + * name: 'node', + * lts: 'Hydrogen', + * sourceUrl: 'https://nodejs.org/download/release/v18.12.0/node-v18.12.0.tar.gz', + * headersUrl: 'https://nodejs.org/download/release/v18.12.0/node-v18.12.0-headers.tar.gz', + * libUrl: 'https://nodejs.org/download/release/v18.12.0/win-x64/node.lib' + * } + * ``` + * + * In custom builds from non-release versions of the source tree, only the`name` property may be present. The additional properties should not be + * relied upon to exist. + * @since v3.0.0 + */ + readonly release: ProcessRelease; + features: { + inspector: boolean; + debug: boolean; + uv: boolean; + ipv6: boolean; + tls_alpn: boolean; + tls_sni: boolean; + tls_ocsp: boolean; + tls: boolean; + }; + /** + * `process.umask()` returns the Node.js process's file mode creation mask. Child + * processes inherit the mask from the parent process. + * @since v0.1.19 + * @deprecated Calling `process.umask()` with no argument causes the process-wide umask to be written twice. This introduces a race condition between threads, and is a potential * + * security vulnerability. There is no safe, cross-platform alternative API. + */ + umask(): number; + /** + * Can only be set if not in worker thread. 
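+ *
+ * A brief illustrative sketch (the mask value is an arbitrary example):
+ *
+ * ```js
+ * import process from 'node:process';
+ *
+ * const newmask = 0o022;
+ * const oldmask = process.umask(newmask);
+ * console.log(`Changed umask from ${oldmask.toString(8)} to ${newmask.toString(8)}`);
+ * ```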
+ */ + umask(mask: string | number): number; + /** + * The `process.uptime()` method returns the number of seconds the current Node.js + * process has been running. + * + * The return value includes fractions of a second. Use `Math.floor()` to get whole + * seconds. + * @since v0.5.0 + */ + uptime(): number; + hrtime: HRTime; + /** + * If the Node.js process was spawned with an IPC channel, the process.channel property is a reference to the IPC channel. + * If no IPC channel exists, this property is undefined. + * @since v7.1.0 + */ + channel?: { + /** + * This method makes the IPC channel keep the event loop of the process running if .unref() has been called before. + * @since v7.1.0 + */ + ref(): void; + /** + * This method makes the IPC channel not keep the event loop of the process running, and lets it finish even while the channel is open. + * @since v7.1.0 + */ + unref(): void; + }; + /** + * If Node.js is spawned with an IPC channel, the `process.send()` method can be + * used to send messages to the parent process. Messages will be received as a `'message'` event on the parent's `ChildProcess` object. + * + * If Node.js was not spawned with an IPC channel, `process.send` will be `undefined`. + * + * The message goes through serialization and parsing. The resulting message might + * not be the same as what is originally sent. + * @since v0.5.9 + * @param options used to parameterize the sending of certain types of handles.`options` supports the following properties: + */ + send?( + message: any, + sendHandle?: any, + options?: { + swallowErrors?: boolean | undefined; + }, + callback?: (error: Error | null) => void, + ): boolean; + /** + * If the Node.js process is spawned with an IPC channel (see the `Child Process` and `Cluster` documentation), the `process.disconnect()` method will close the + * IPC channel to the parent process, allowing the child process to exit gracefully + * once there are no other connections keeping it alive. + * + * The effect of calling `process.disconnect()` is the same as calling `ChildProcess.disconnect()` from the parent process. + * + * If the Node.js process was not spawned with an IPC channel,`process.disconnect()` will be `undefined`. + * @since v0.7.2 + */ + disconnect(): void; + /** + * If the Node.js process is spawned with an IPC channel (see the `Child Process` and `Cluster` documentation), the `process.connected` property will return`true` so long as the IPC + * channel is connected and will return `false` after`process.disconnect()` is called. + * + * Once `process.connected` is `false`, it is no longer possible to send messages + * over the IPC channel using `process.send()`. + * @since v0.7.2 + */ + connected: boolean; + /** + * The `process.allowedNodeEnvironmentFlags` property is a special, + * read-only `Set` of flags allowable within the `NODE_OPTIONS` environment variable. + * + * `process.allowedNodeEnvironmentFlags` extends `Set`, but overrides`Set.prototype.has` to recognize several different possible flag + * representations. `process.allowedNodeEnvironmentFlags.has()` will + * return `true` in the following cases: + * + * * Flags may omit leading single (`-`) or double (`--`) dashes; e.g.,`inspect-brk` for `--inspect-brk`, or `r` for `-r`. + * * Flags passed through to V8 (as listed in `--v8-options`) may replace + * one or more _non-leading_ dashes for an underscore, or vice-versa; + * e.g., `--perf_basic_prof`, `--perf-basic-prof`, `--perf_basic-prof`, + * etc. 
+ * * Flags may contain one or more equals (`=`) characters; all
+ * characters after and including the first equals will be ignored;
+ * e.g., `--stack-trace-limit=100`.
+ * * Flags _must_ be allowable within `NODE_OPTIONS`.
+ *
+ * When iterating over `process.allowedNodeEnvironmentFlags`, flags will
+ * appear only _once_; each will begin with one or more dashes. Flags
+ * passed through to V8 will contain underscores instead of non-leading
+ * dashes:
+ *
+ * ```js
+ * import { allowedNodeEnvironmentFlags } from 'node:process';
+ *
+ * allowedNodeEnvironmentFlags.forEach((flag) => {
+ * // -r
+ * // --inspect-brk
+ * // --abort_on_uncaught_exception
+ * // ...
+ * });
+ * ```
+ *
+ * The methods `add()`, `clear()`, and `delete()` of `process.allowedNodeEnvironmentFlags` do nothing, and will fail
+ * silently.
+ *
+ * If Node.js was compiled _without_ `NODE_OPTIONS` support (shown in {@link config}), `process.allowedNodeEnvironmentFlags` will
+ * contain what _would have_ been allowable.
+ * @since v10.10.0
+ */
+ allowedNodeEnvironmentFlags: ReadonlySet<string>;
+ /**
+ * `process.report` is an object whose methods are used to generate diagnostic
+ * reports for the current process. Additional documentation is available in the `report documentation`.
+ * @since v11.8.0
+ */
+ report?: ProcessReport | undefined;
+ /**
+ * ```js
+ * import { resourceUsage } from 'node:process';
+ *
+ * console.log(resourceUsage());
+ * /*
+ * Will output:
+ * {
+ * userCPUTime: 82872,
+ * systemCPUTime: 4143,
+ * maxRSS: 33164,
+ * sharedMemorySize: 0,
+ * unsharedDataSize: 0,
+ * unsharedStackSize: 0,
+ * minorPageFault: 2469,
+ * majorPageFault: 0,
+ * swappedOut: 0,
+ * fsRead: 0,
+ * fsWrite: 8,
+ * ipcSent: 0,
+ * ipcReceived: 0,
+ * signalsCount: 0,
+ * voluntaryContextSwitches: 79,
+ * involuntaryContextSwitches: 1
+ * }
+ * *\/
+ * ```
+ * @since v12.6.0
+ * @return the resource usage for the current process. All of these values come from the `uv_getrusage` call which returns a [`uv_rusage_t` struct][uv_rusage_t].
+ */
+ resourceUsage(): ResourceUsage;
+ /**
+ * The `process.traceDeprecation` property indicates whether the `--trace-deprecation` flag is set on the current Node.js process. See the
+ * documentation for the `'warning' event` and the `emitWarning() method` for more information about this
+ * flag's behavior.
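+ *
+ * A minimal illustrative check (nothing beyond the property itself is assumed):
+ *
+ * ```js
+ * import process from 'node:process';
+ *
+ * if (process.traceDeprecation) {
+ *   console.log('Deprecation warnings will include a stack trace.');
+ * }
+ * ```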
+ * @since v0.8.0 + */ + traceDeprecation: boolean; + /* EventEmitter */ + addListener(event: "beforeExit", listener: BeforeExitListener): this; + addListener(event: "disconnect", listener: DisconnectListener): this; + addListener(event: "exit", listener: ExitListener): this; + addListener(event: "rejectionHandled", listener: RejectionHandledListener): this; + addListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; + addListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + addListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + addListener(event: "warning", listener: WarningListener): this; + addListener(event: "message", listener: MessageListener): this; + addListener(event: Signals, listener: SignalsListener): this; + addListener(event: "multipleResolves", listener: MultipleResolveListener): this; + addListener(event: "worker", listener: WorkerListener): this; + emit(event: "beforeExit", code: number): boolean; + emit(event: "disconnect"): boolean; + emit(event: "exit", code: number): boolean; + emit(event: "rejectionHandled", promise: Promise): boolean; + emit(event: "uncaughtException", error: Error): boolean; + emit(event: "uncaughtExceptionMonitor", error: Error): boolean; + emit(event: "unhandledRejection", reason: unknown, promise: Promise): boolean; + emit(event: "warning", warning: Error): boolean; + emit(event: "message", message: unknown, sendHandle: unknown): this; + emit(event: Signals, signal?: Signals): boolean; + emit( + event: "multipleResolves", + type: MultipleResolveType, + promise: Promise, + value: unknown, + ): this; + emit(event: "worker", listener: WorkerListener): this; + on(event: "beforeExit", listener: BeforeExitListener): this; + on(event: "disconnect", listener: DisconnectListener): this; + on(event: "exit", listener: ExitListener): this; + on(event: "rejectionHandled", listener: RejectionHandledListener): this; + on(event: "uncaughtException", listener: UncaughtExceptionListener): this; + on(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + on(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + on(event: "warning", listener: WarningListener): this; + on(event: "message", listener: MessageListener): this; + on(event: Signals, listener: SignalsListener): this; + on(event: "multipleResolves", listener: MultipleResolveListener): this; + on(event: "worker", listener: WorkerListener): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "beforeExit", listener: BeforeExitListener): this; + once(event: "disconnect", listener: DisconnectListener): this; + once(event: "exit", listener: ExitListener): this; + once(event: "rejectionHandled", listener: RejectionHandledListener): this; + once(event: "uncaughtException", listener: UncaughtExceptionListener): this; + once(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + once(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + once(event: "warning", listener: WarningListener): this; + once(event: "message", listener: MessageListener): this; + once(event: Signals, listener: SignalsListener): this; + once(event: "multipleResolves", listener: MultipleResolveListener): this; + once(event: "worker", listener: WorkerListener): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "beforeExit", listener: BeforeExitListener): this; + 
prependListener(event: "disconnect", listener: DisconnectListener): this; + prependListener(event: "exit", listener: ExitListener): this; + prependListener(event: "rejectionHandled", listener: RejectionHandledListener): this; + prependListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; + prependListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + prependListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + prependListener(event: "warning", listener: WarningListener): this; + prependListener(event: "message", listener: MessageListener): this; + prependListener(event: Signals, listener: SignalsListener): this; + prependListener(event: "multipleResolves", listener: MultipleResolveListener): this; + prependListener(event: "worker", listener: WorkerListener): this; + prependOnceListener(event: "beforeExit", listener: BeforeExitListener): this; + prependOnceListener(event: "disconnect", listener: DisconnectListener): this; + prependOnceListener(event: "exit", listener: ExitListener): this; + prependOnceListener(event: "rejectionHandled", listener: RejectionHandledListener): this; + prependOnceListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; + prependOnceListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + prependOnceListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + prependOnceListener(event: "warning", listener: WarningListener): this; + prependOnceListener(event: "message", listener: MessageListener): this; + prependOnceListener(event: Signals, listener: SignalsListener): this; + prependOnceListener(event: "multipleResolves", listener: MultipleResolveListener): this; + prependOnceListener(event: "worker", listener: WorkerListener): this; + listeners(event: "beforeExit"): BeforeExitListener[]; + listeners(event: "disconnect"): DisconnectListener[]; + listeners(event: "exit"): ExitListener[]; + listeners(event: "rejectionHandled"): RejectionHandledListener[]; + listeners(event: "uncaughtException"): UncaughtExceptionListener[]; + listeners(event: "uncaughtExceptionMonitor"): UncaughtExceptionListener[]; + listeners(event: "unhandledRejection"): UnhandledRejectionListener[]; + listeners(event: "warning"): WarningListener[]; + listeners(event: "message"): MessageListener[]; + listeners(event: Signals): SignalsListener[]; + listeners(event: "multipleResolves"): MultipleResolveListener[]; + listeners(event: "worker"): WorkerListener[]; + } + } + } + export = process; +} +declare module "node:process" { + import process = require("process"); + export = process; +} diff --git a/dist/node_modules/@types/node/punycode.d.ts b/dist/node_modules/@types/node/punycode.d.ts new file mode 100644 index 0000000..d2fc9f9 --- /dev/null +++ b/dist/node_modules/@types/node/punycode.d.ts @@ -0,0 +1,117 @@ +/** + * **The version of the punycode module bundled in Node.js is being deprecated.**In a future major version of Node.js this module will be removed. Users + * currently depending on the `punycode` module should switch to using the + * userland-provided [Punycode.js](https://github.com/bestiejs/punycode.js) module instead. For punycode-based URL + * encoding, see `url.domainToASCII` or, more generally, the `WHATWG URL API`. + * + * The `punycode` module is a bundled version of the [Punycode.js](https://github.com/bestiejs/punycode.js) module. 
It + * can be accessed using: + * + * ```js + * const punycode = require('punycode'); + * ``` + * + * [Punycode](https://tools.ietf.org/html/rfc3492) is a character encoding scheme defined by RFC 3492 that is + * primarily intended for use in Internationalized Domain Names. Because host + * names in URLs are limited to ASCII characters only, Domain Names that contain + * non-ASCII characters must be converted into ASCII using the Punycode scheme. + * For instance, the Japanese character that translates into the English word,`'example'` is `'例'`. The Internationalized Domain Name, `'例.com'` (equivalent + * to `'example.com'`) is represented by Punycode as the ASCII string`'xn--fsq.com'`. + * + * The `punycode` module provides a simple implementation of the Punycode standard. + * + * The `punycode` module is a third-party dependency used by Node.js and + * made available to developers as a convenience. Fixes or other modifications to + * the module must be directed to the [Punycode.js](https://github.com/bestiejs/punycode.js) project. + * @deprecated Since v7.0.0 - Deprecated + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/punycode.js) + */ +declare module "punycode" { + /** + * The `punycode.decode()` method converts a [Punycode](https://tools.ietf.org/html/rfc3492) string of ASCII-only + * characters to the equivalent string of Unicode codepoints. + * + * ```js + * punycode.decode('maana-pta'); // 'mañana' + * punycode.decode('--dqo34k'); // '☃-⌘' + * ``` + * @since v0.5.1 + */ + function decode(string: string): string; + /** + * The `punycode.encode()` method converts a string of Unicode codepoints to a [Punycode](https://tools.ietf.org/html/rfc3492) string of ASCII-only characters. + * + * ```js + * punycode.encode('mañana'); // 'maana-pta' + * punycode.encode('☃-⌘'); // '--dqo34k' + * ``` + * @since v0.5.1 + */ + function encode(string: string): string; + /** + * The `punycode.toUnicode()` method converts a string representing a domain name + * containing [Punycode](https://tools.ietf.org/html/rfc3492) encoded characters into Unicode. Only the [Punycode](https://tools.ietf.org/html/rfc3492) encoded parts of the domain name are be + * converted. + * + * ```js + * // decode domain names + * punycode.toUnicode('xn--maana-pta.com'); // 'mañana.com' + * punycode.toUnicode('xn----dqo34k.com'); // '☃-⌘.com' + * punycode.toUnicode('example.com'); // 'example.com' + * ``` + * @since v0.6.1 + */ + function toUnicode(domain: string): string; + /** + * The `punycode.toASCII()` method converts a Unicode string representing an + * Internationalized Domain Name to [Punycode](https://tools.ietf.org/html/rfc3492). Only the non-ASCII parts of the + * domain name will be converted. Calling `punycode.toASCII()` on a string that + * already only contains ASCII characters will have no effect. + * + * ```js + * // encode domain names + * punycode.toASCII('mañana.com'); // 'xn--maana-pta.com' + * punycode.toASCII('☃-⌘.com'); // 'xn----dqo34k.com' + * punycode.toASCII('example.com'); // 'example.com' + * ``` + * @since v0.6.1 + */ + function toASCII(domain: string): string; + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. 
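+ *
+ * Illustrative usage of the legacy helpers (the values shown are examples):
+ *
+ * ```js
+ * punycode.ucs2.decode('abc'); // [0x61, 0x62, 0x63]
+ * punycode.ucs2.encode([0x61, 0x62, 0x63]); // 'abc'
+ * ```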
+ */ + const ucs2: ucs2; + interface ucs2 { + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + decode(string: string): number[]; + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + encode(codePoints: readonly number[]): string; + } + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + const version: string; +} +declare module "node:punycode" { + export * from "punycode"; +} diff --git a/dist/node_modules/@types/node/querystring.d.ts b/dist/node_modules/@types/node/querystring.d.ts new file mode 100644 index 0000000..b36ea2c --- /dev/null +++ b/dist/node_modules/@types/node/querystring.d.ts @@ -0,0 +1,141 @@ +/** + * The `node:querystring` module provides utilities for parsing and formatting URL + * query strings. It can be accessed using: + * + * ```js + * const querystring = require('node:querystring'); + * ``` + * + * `querystring` is more performant than `URLSearchParams` but is not a + * standardized API. Use `URLSearchParams` when performance is not critical or + * when compatibility with browser code is desirable. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/querystring.js) + */ +declare module "querystring" { + interface StringifyOptions { + encodeURIComponent?: ((str: string) => string) | undefined; + } + interface ParseOptions { + maxKeys?: number | undefined; + decodeURIComponent?: ((str: string) => string) | undefined; + } + interface ParsedUrlQuery extends NodeJS.Dict {} + interface ParsedUrlQueryInput extends + NodeJS.Dict< + | string + | number + | boolean + | readonly string[] + | readonly number[] + | readonly boolean[] + | null + > + {} + /** + * The `querystring.stringify()` method produces a URL query string from a + * given `obj` by iterating through the object's "own properties". + * + * It serializes the following types of values passed in `obj`:[string](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) | + * [number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) | + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) | + * [boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) | + * [string\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) | + * [number\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) | + * [bigint\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) | + * [boolean\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) The numeric values must be finite. Any other input values will be coerced to + * empty strings. 
+ * + * ```js + * querystring.stringify({ foo: 'bar', baz: ['qux', 'quux'], corge: '' }); + * // Returns 'foo=bar&baz=qux&baz=quux&corge=' + * + * querystring.stringify({ foo: 'bar', baz: 'qux' }, ';', ':'); + * // Returns 'foo:bar;baz:qux' + * ``` + * + * By default, characters requiring percent-encoding within the query string will + * be encoded as UTF-8\. If an alternative encoding is required, then an alternative`encodeURIComponent` option will need to be specified: + * + * ```js + * // Assuming gbkEncodeURIComponent function already exists, + * + * querystring.stringify({ w: '中文', foo: 'bar' }, null, null, + * { encodeURIComponent: gbkEncodeURIComponent }); + * ``` + * @since v0.1.25 + * @param obj The object to serialize into a URL query string + * @param [sep='&'] The substring used to delimit key and value pairs in the query string. + * @param [eq='='] . The substring used to delimit keys and values in the query string. + */ + function stringify(obj?: ParsedUrlQueryInput, sep?: string, eq?: string, options?: StringifyOptions): string; + /** + * The `querystring.parse()` method parses a URL query string (`str`) into a + * collection of key and value pairs. + * + * For example, the query string `'foo=bar&abc=xyz&abc=123'` is parsed into: + * + * ```json + * { + * "foo": "bar", + * "abc": ["xyz", "123"] + * } + * ``` + * + * The object returned by the `querystring.parse()` method _does not_prototypically inherit from the JavaScript `Object`. This means that typical`Object` methods such as `obj.toString()`, + * `obj.hasOwnProperty()`, and others + * are not defined and _will not work_. + * + * By default, percent-encoded characters within the query string will be assumed + * to use UTF-8 encoding. If an alternative character encoding is used, then an + * alternative `decodeURIComponent` option will need to be specified: + * + * ```js + * // Assuming gbkDecodeURIComponent function already exists... + * + * querystring.parse('w=%D6%D0%CE%C4&foo=bar', null, null, + * { decodeURIComponent: gbkDecodeURIComponent }); + * ``` + * @since v0.1.25 + * @param str The URL query string to parse + * @param [sep='&'] The substring used to delimit key and value pairs in the query string. + * @param [eq='='] . The substring used to delimit keys and values in the query string. + */ + function parse(str: string, sep?: string, eq?: string, options?: ParseOptions): ParsedUrlQuery; + /** + * The querystring.encode() function is an alias for querystring.stringify(). + */ + const encode: typeof stringify; + /** + * The querystring.decode() function is an alias for querystring.parse(). + */ + const decode: typeof parse; + /** + * The `querystring.escape()` method performs URL percent-encoding on the given`str` in a manner that is optimized for the specific requirements of URL + * query strings. + * + * The `querystring.escape()` method is used by `querystring.stringify()` and is + * generally not expected to be used directly. It is exported primarily to allow + * application code to provide a replacement percent-encoding implementation if + * necessary by assigning `querystring.escape` to an alternative function. + * @since v0.1.25 + */ + function escape(str: string): string; + /** + * The `querystring.unescape()` method performs decoding of URL percent-encoded + * characters on the given `str`. + * + * The `querystring.unescape()` method is used by `querystring.parse()` and is + * generally not expected to be used directly. 
It is exported primarily to allow + * application code to provide a replacement decoding implementation if + * necessary by assigning `querystring.unescape` to an alternative function. + * + * By default, the `querystring.unescape()` method will attempt to use the + * JavaScript built-in `decodeURIComponent()` method to decode. If that fails, + * a safer equivalent that does not throw on malformed URLs will be used. + * @since v0.1.25 + */ + function unescape(str: string): string; +} +declare module "node:querystring" { + export * from "querystring"; +} diff --git a/dist/node_modules/@types/node/readline.d.ts b/dist/node_modules/@types/node/readline.d.ts new file mode 100644 index 0000000..b06d58b --- /dev/null +++ b/dist/node_modules/@types/node/readline.d.ts @@ -0,0 +1,539 @@ +/** + * The `node:readline` module provides an interface for reading data from a `Readable` stream (such as `process.stdin`) one line at a time. + * + * To use the promise-based APIs: + * + * ```js + * import * as readline from 'node:readline/promises'; + * ``` + * + * To use the callback and sync APIs: + * + * ```js + * import * as readline from 'node:readline'; + * ``` + * + * The following simple example illustrates the basic use of the `node:readline`module. + * + * ```js + * import * as readline from 'node:readline/promises'; + * import { stdin as input, stdout as output } from 'node:process'; + * + * const rl = readline.createInterface({ input, output }); + * + * const answer = await rl.question('What do you think of Node.js? '); + * + * console.log(`Thank you for your valuable feedback: ${answer}`); + * + * rl.close(); + * ``` + * + * Once this code is invoked, the Node.js application will not terminate until the`readline.Interface` is closed because the interface waits for data to be + * received on the `input` stream. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/readline.js) + */ +declare module "readline" { + import { Abortable, EventEmitter } from "node:events"; + import * as promises from "node:readline/promises"; + export { promises }; + export interface Key { + sequence?: string | undefined; + name?: string | undefined; + ctrl?: boolean | undefined; + meta?: boolean | undefined; + shift?: boolean | undefined; + } + /** + * Instances of the `readline.Interface` class are constructed using the`readline.createInterface()` method. Every instance is associated with a + * single `input` `Readable` stream and a single `output` `Writable` stream. + * The `output` stream is used to print prompts for user input that arrives on, + * and is read from, the `input` stream. + * @since v0.1.104 + */ + export class Interface extends EventEmitter { + readonly terminal: boolean; + /** + * The current input data being processed by node. + * + * This can be used when collecting input from a TTY stream to retrieve the + * current value that has been processed thus far, prior to the `line` event + * being emitted. Once the `line` event has been emitted, this property will + * be an empty string. + * + * Be aware that modifying the value during the instance runtime may have + * unintended consequences if `rl.cursor` is not also controlled. 
+ * + * **If not using a TTY stream for input, use the `'line'` event.** + * + * One possible use case would be as follows: + * + * ```js + * const values = ['lorem ipsum', 'dolor sit amet']; + * const rl = readline.createInterface(process.stdin); + * const showResults = debounce(() => { + * console.log( + * '\n', + * values.filter((val) => val.startsWith(rl.line)).join(' '), + * ); + * }, 300); + * process.stdin.on('keypress', (c, k) => { + * showResults(); + * }); + * ``` + * @since v0.1.98 + */ + readonly line: string; + /** + * The cursor position relative to `rl.line`. + * + * This will track where the current cursor lands in the input string, when + * reading input from a TTY stream. The position of cursor determines the + * portion of the input string that will be modified as input is processed, + * as well as the column where the terminal caret will be rendered. + * @since v0.1.98 + */ + readonly cursor: number; + /** + * NOTE: According to the documentation: + * + * > Instances of the `readline.Interface` class are constructed using the + * > `readline.createInterface()` method. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/readline.html#class-interfaceconstructor + */ + protected constructor( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer | AsyncCompleter, + terminal?: boolean, + ); + /** + * NOTE: According to the documentation: + * + * > Instances of the `readline.Interface` class are constructed using the + * > `readline.createInterface()` method. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/readline.html#class-interfaceconstructor + */ + protected constructor(options: ReadLineOptions); + /** + * The `rl.getPrompt()` method returns the current prompt used by `rl.prompt()`. + * @since v15.3.0, v14.17.0 + * @return the current prompt string + */ + getPrompt(): string; + /** + * The `rl.setPrompt()` method sets the prompt that will be written to `output`whenever `rl.prompt()` is called. + * @since v0.1.98 + */ + setPrompt(prompt: string): void; + /** + * The `rl.prompt()` method writes the `Interface` instances configured`prompt` to a new line in `output` in order to provide a user with a new + * location at which to provide input. + * + * When called, `rl.prompt()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or`undefined` the prompt is not written. + * @since v0.1.98 + * @param preserveCursor If `true`, prevents the cursor placement from being reset to `0`. + */ + prompt(preserveCursor?: boolean): void; + /** + * The `rl.question()` method displays the `query` by writing it to the `output`, + * waits for user input to be provided on `input`, then invokes the `callback`function passing the provided input as the first argument. + * + * When called, `rl.question()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or`undefined` the `query` is not written. + * + * The `callback` function passed to `rl.question()` does not follow the typical + * pattern of accepting an `Error` object or `null` as the first argument. + * The `callback` is called with the provided answer as the only argument. + * + * An error will be thrown if calling `rl.question()` after `rl.close()`. + * + * Example usage: + * + * ```js + * rl.question('What is your favorite food? 
', (answer) => { + * console.log(`Oh, so your favorite food is ${answer}`); + * }); + * ``` + * + * Using an `AbortController` to cancel a question. + * + * ```js + * const ac = new AbortController(); + * const signal = ac.signal; + * + * rl.question('What is your favorite food? ', { signal }, (answer) => { + * console.log(`Oh, so your favorite food is ${answer}`); + * }); + * + * signal.addEventListener('abort', () => { + * console.log('The food question timed out'); + * }, { once: true }); + * + * setTimeout(() => ac.abort(), 10000); + * ``` + * @since v0.3.3 + * @param query A statement or query to write to `output`, prepended to the prompt. + * @param callback A callback function that is invoked with the user's input in response to the `query`. + */ + question(query: string, callback: (answer: string) => void): void; + question(query: string, options: Abortable, callback: (answer: string) => void): void; + /** + * The `rl.pause()` method pauses the `input` stream, allowing it to be resumed + * later if necessary. + * + * Calling `rl.pause()` does not immediately pause other events (including`'line'`) from being emitted by the `Interface` instance. + * @since v0.3.4 + */ + pause(): this; + /** + * The `rl.resume()` method resumes the `input` stream if it has been paused. + * @since v0.3.4 + */ + resume(): this; + /** + * The `rl.close()` method closes the `Interface` instance and + * relinquishes control over the `input` and `output` streams. When called, + * the `'close'` event will be emitted. + * + * Calling `rl.close()` does not immediately stop other events (including `'line'`) + * from being emitted by the `Interface` instance. + * @since v0.1.98 + */ + close(): void; + /** + * The `rl.write()` method will write either `data` or a key sequence identified + * by `key` to the `output`. The `key` argument is supported only if `output` is + * a `TTY` text terminal. See `TTY keybindings` for a list of key + * combinations. + * + * If `key` is specified, `data` is ignored. + * + * When called, `rl.write()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or`undefined` the `data` and `key` are not written. + * + * ```js + * rl.write('Delete this!'); + * // Simulate Ctrl+U to delete the line written previously + * rl.write(null, { ctrl: true, name: 'u' }); + * ``` + * + * The `rl.write()` method will write the data to the `readline` `Interface`'s`input`_as if it were provided by the user_. + * @since v0.1.98 + */ + write(data: string | Buffer, key?: Key): void; + write(data: undefined | null | string | Buffer, key: Key): void; + /** + * Returns the real position of the cursor in relation to the input + * prompt + string. Long input (wrapping) strings, as well as multiple + * line prompts are included in the calculations. + * @since v13.5.0, v12.16.0 + */ + getCursorPos(): CursorPos; + /** + * events.EventEmitter + * 1. close + * 2. line + * 3. pause + * 4. resume + * 5. SIGCONT + * 6. SIGINT + * 7. SIGTSTP + * 8. 
history + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "line", listener: (input: string) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: "SIGCONT", listener: () => void): this; + addListener(event: "SIGINT", listener: () => void): this; + addListener(event: "SIGTSTP", listener: () => void): this; + addListener(event: "history", listener: (history: string[]) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "line", input: string): boolean; + emit(event: "pause"): boolean; + emit(event: "resume"): boolean; + emit(event: "SIGCONT"): boolean; + emit(event: "SIGINT"): boolean; + emit(event: "SIGTSTP"): boolean; + emit(event: "history", history: string[]): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "line", listener: (input: string) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: "SIGCONT", listener: () => void): this; + on(event: "SIGINT", listener: () => void): this; + on(event: "SIGTSTP", listener: () => void): this; + on(event: "history", listener: (history: string[]) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "line", listener: (input: string) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: "SIGCONT", listener: () => void): this; + once(event: "SIGINT", listener: () => void): this; + once(event: "SIGTSTP", listener: () => void): this; + once(event: "history", listener: (history: string[]) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "line", listener: (input: string) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: "SIGCONT", listener: () => void): this; + prependListener(event: "SIGINT", listener: () => void): this; + prependListener(event: "SIGTSTP", listener: () => void): this; + prependListener(event: "history", listener: (history: string[]) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "line", listener: (input: string) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: "SIGCONT", listener: () => void): this; + prependOnceListener(event: "SIGINT", listener: () => void): this; + prependOnceListener(event: "SIGTSTP", listener: () => void): this; + prependOnceListener(event: "history", listener: (history: string[]) => void): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + export type ReadLine = Interface; // type forwarded for backwards compatibility + export type Completer = (line: string) => CompleterResult; + export type AsyncCompleter = ( + line: string, + callback: (err?: null | Error, result?: CompleterResult) => 
void, + ) => void; + export type CompleterResult = [string[], string]; + export interface ReadLineOptions { + input: NodeJS.ReadableStream; + output?: NodeJS.WritableStream | undefined; + completer?: Completer | AsyncCompleter | undefined; + terminal?: boolean | undefined; + /** + * Initial list of history lines. This option makes sense + * only if `terminal` is set to `true` by the user or by an internal `output` + * check, otherwise the history caching mechanism is not initialized at all. + * @default [] + */ + history?: string[] | undefined; + historySize?: number | undefined; + prompt?: string | undefined; + crlfDelay?: number | undefined; + /** + * If `true`, when a new input line added + * to the history list duplicates an older one, this removes the older line + * from the list. + * @default false + */ + removeHistoryDuplicates?: boolean | undefined; + escapeCodeTimeout?: number | undefined; + tabSize?: number | undefined; + } + /** + * The `readline.createInterface()` method creates a new `readline.Interface`instance. + * + * ```js + * const readline = require('node:readline'); + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout, + * }); + * ``` + * + * Once the `readline.Interface` instance is created, the most common case is to + * listen for the `'line'` event: + * + * ```js + * rl.on('line', (line) => { + * console.log(`Received: ${line}`); + * }); + * ``` + * + * If `terminal` is `true` for this instance then the `output` stream will get + * the best compatibility if it defines an `output.columns` property and emits + * a `'resize'` event on the `output` if or when the columns ever change + * (`process.stdout` does this automatically when it is a TTY). + * + * When creating a `readline.Interface` using `stdin` as input, the program + * will not terminate until it receives an [EOF character](https://en.wikipedia.org/wiki/End-of-file#EOF_character). To exit without + * waiting for user input, call `process.stdin.unref()`. + * @since v0.1.98 + */ + export function createInterface( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer | AsyncCompleter, + terminal?: boolean, + ): Interface; + export function createInterface(options: ReadLineOptions): Interface; + /** + * The `readline.emitKeypressEvents()` method causes the given `Readable` stream to begin emitting `'keypress'` events corresponding to received input. + * + * Optionally, `interface` specifies a `readline.Interface` instance for which + * autocompletion is disabled when copy-pasted input is detected. + * + * If the `stream` is a `TTY`, then it must be in raw mode. + * + * This is automatically called by any readline instance on its `input` if the`input` is a terminal. Closing the `readline` instance does not stop + * the `input` from emitting `'keypress'` events. + * + * ```js + * readline.emitKeypressEvents(process.stdin); + * if (process.stdin.isTTY) + * process.stdin.setRawMode(true); + * ``` + * + * ## Example: Tiny CLI + * + * The following example illustrates the use of `readline.Interface` class to + * implement a small command-line interface: + * + * ```js + * const readline = require('node:readline'); + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout, + * prompt: 'OHAI> ', + * }); + * + * rl.prompt(); + * + * rl.on('line', (line) => { + * switch (line.trim()) { + * case 'hello': + * console.log('world!'); + * break; + * default: + * console.log(`Say what? 
I might have heard '${line.trim()}'`); + * break; + * } + * rl.prompt(); + * }).on('close', () => { + * console.log('Have a great day!'); + * process.exit(0); + * }); + * ``` + * + * ## Example: Read file stream line-by-Line + * + * A common use case for `readline` is to consume an input file one line at a + * time. The easiest way to do so is leveraging the `fs.ReadStream` API as + * well as a `for await...of` loop: + * + * ```js + * const fs = require('node:fs'); + * const readline = require('node:readline'); + * + * async function processLineByLine() { + * const fileStream = fs.createReadStream('input.txt'); + * + * const rl = readline.createInterface({ + * input: fileStream, + * crlfDelay: Infinity, + * }); + * // Note: we use the crlfDelay option to recognize all instances of CR LF + * // ('\r\n') in input.txt as a single line break. + * + * for await (const line of rl) { + * // Each line in input.txt will be successively available here as `line`. + * console.log(`Line from file: ${line}`); + * } + * } + * + * processLineByLine(); + * ``` + * + * Alternatively, one could use the `'line'` event: + * + * ```js + * const fs = require('node:fs'); + * const readline = require('node:readline'); + * + * const rl = readline.createInterface({ + * input: fs.createReadStream('sample.txt'), + * crlfDelay: Infinity, + * }); + * + * rl.on('line', (line) => { + * console.log(`Line from file: ${line}`); + * }); + * ``` + * + * Currently, `for await...of` loop can be a bit slower. If `async` / `await`flow and speed are both essential, a mixed approach can be applied: + * + * ```js + * const { once } = require('node:events'); + * const { createReadStream } = require('node:fs'); + * const { createInterface } = require('node:readline'); + * + * (async function processLineByLine() { + * try { + * const rl = createInterface({ + * input: createReadStream('big-file.txt'), + * crlfDelay: Infinity, + * }); + * + * rl.on('line', (line) => { + * // Process the line. + * }); + * + * await once(rl, 'close'); + * + * console.log('File processed.'); + * } catch (err) { + * console.error(err); + * } + * })(); + * ``` + * @since v0.7.7 + */ + export function emitKeypressEvents(stream: NodeJS.ReadableStream, readlineInterface?: Interface): void; + export type Direction = -1 | 0 | 1; + export interface CursorPos { + rows: number; + cols: number; + } + /** + * The `readline.clearLine()` method clears current line of given `TTY` stream + * in a specified direction identified by `dir`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function clearLine(stream: NodeJS.WritableStream, dir: Direction, callback?: () => void): boolean; + /** + * The `readline.clearScreenDown()` method clears the given `TTY` stream from + * the current position of the cursor down. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function clearScreenDown(stream: NodeJS.WritableStream, callback?: () => void): boolean; + /** + * The `readline.cursorTo()` method moves cursor to the specified position in a + * given `TTY` `stream`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. 
+ * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function cursorTo(stream: NodeJS.WritableStream, x: number, y?: number, callback?: () => void): boolean; + /** + * The `readline.moveCursor()` method moves the cursor _relative_ to its current + * position in a given `TTY` `stream`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function moveCursor(stream: NodeJS.WritableStream, dx: number, dy: number, callback?: () => void): boolean; +} +declare module "node:readline" { + export * from "readline"; +} diff --git a/dist/node_modules/@types/node/readline/promises.d.ts b/dist/node_modules/@types/node/readline/promises.d.ts new file mode 100644 index 0000000..73fb111 --- /dev/null +++ b/dist/node_modules/@types/node/readline/promises.d.ts @@ -0,0 +1,150 @@ +/** + * @since v17.0.0 + * @experimental + */ +declare module "readline/promises" { + import { AsyncCompleter, Completer, Direction, Interface as _Interface, ReadLineOptions } from "node:readline"; + import { Abortable } from "node:events"; + /** + * Instances of the `readlinePromises.Interface` class are constructed using the`readlinePromises.createInterface()` method. Every instance is associated with a + * single `input` `Readable` stream and a single `output` `Writable` stream. + * The `output` stream is used to print prompts for user input that arrives on, + * and is read from, the `input` stream. + * @since v17.0.0 + */ + class Interface extends _Interface { + /** + * The `rl.question()` method displays the `query` by writing it to the `output`, + * waits for user input to be provided on `input`, then invokes the `callback`function passing the provided input as the first argument. + * + * When called, `rl.question()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or`undefined` the `query` is not written. + * + * If the question is called after `rl.close()`, it returns a rejected promise. + * + * Example usage: + * + * ```js + * const answer = await rl.question('What is your favorite food? '); + * console.log(`Oh, so your favorite food is ${answer}`); + * ``` + * + * Using an `AbortSignal` to cancel a question. + * + * ```js + * const signal = AbortSignal.timeout(10_000); + * + * signal.addEventListener('abort', () => { + * console.log('The food question timed out'); + * }, { once: true }); + * + * const answer = await rl.question('What is your favorite food? ', { signal }); + * console.log(`Oh, so your favorite food is ${answer}`); + * ``` + * @since v17.0.0 + * @param query A statement or query to write to `output`, prepended to the prompt. + * @return A promise that is fulfilled with the user's input in response to the `query`. + */ + question(query: string): Promise; + question(query: string, options: Abortable): Promise; + } + /** + * @since v17.0.0 + */ + class Readline { + /** + * @param stream A TTY stream. + */ + constructor( + stream: NodeJS.WritableStream, + options?: { + autoCommit?: boolean; + }, + ); + /** + * The `rl.clearLine()` method adds to the internal list of pending action an + * action that clears current line of the associated `stream` in a specified + * direction identified by `dir`. 
+ * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true`was passed to the constructor. + * @since v17.0.0 + * @return this + */ + clearLine(dir: Direction): this; + /** + * The `rl.clearScreenDown()` method adds to the internal list of pending action an + * action that clears the associated stream from the current position of the + * cursor down. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true`was passed to the constructor. + * @since v17.0.0 + * @return this + */ + clearScreenDown(): this; + /** + * The `rl.commit()` method sends all the pending actions to the associated`stream` and clears the internal list of pending actions. + * @since v17.0.0 + */ + commit(): Promise; + /** + * The `rl.cursorTo()` method adds to the internal list of pending action an action + * that moves cursor to the specified position in the associated `stream`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true`was passed to the constructor. + * @since v17.0.0 + * @return this + */ + cursorTo(x: number, y?: number): this; + /** + * The `rl.moveCursor()` method adds to the internal list of pending action an + * action that moves the cursor _relative_ to its current position in the + * associated `stream`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true`was passed to the constructor. + * @since v17.0.0 + * @return this + */ + moveCursor(dx: number, dy: number): this; + /** + * The `rl.rollback` methods clears the internal list of pending actions without + * sending it to the associated `stream`. + * @since v17.0.0 + * @return this + */ + rollback(): this; + } + /** + * The `readlinePromises.createInterface()` method creates a new `readlinePromises.Interface`instance. + * + * ```js + * const readlinePromises = require('node:readline/promises'); + * const rl = readlinePromises.createInterface({ + * input: process.stdin, + * output: process.stdout, + * }); + * ``` + * + * Once the `readlinePromises.Interface` instance is created, the most common case + * is to listen for the `'line'` event: + * + * ```js + * rl.on('line', (line) => { + * console.log(`Received: ${line}`); + * }); + * ``` + * + * If `terminal` is `true` for this instance then the `output` stream will get + * the best compatibility if it defines an `output.columns` property and emits + * a `'resize'` event on the `output` if or when the columns ever change + * (`process.stdout` does this automatically when it is a TTY). + * @since v17.0.0 + */ + function createInterface( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer | AsyncCompleter, + terminal?: boolean, + ): Interface; + function createInterface(options: ReadLineOptions): Interface; +} +declare module "node:readline/promises" { + export * from "readline/promises"; +} diff --git a/dist/node_modules/@types/node/repl.d.ts b/dist/node_modules/@types/node/repl.d.ts new file mode 100644 index 0000000..6c5f81b --- /dev/null +++ b/dist/node_modules/@types/node/repl.d.ts @@ -0,0 +1,430 @@ +/** + * The `node:repl` module provides a Read-Eval-Print-Loop (REPL) implementation + * that is available both as a standalone program or includible in other + * applications. 
It can be accessed using: + * + * ```js + * const repl = require('node:repl'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/repl.js) + */ +declare module "repl" { + import { AsyncCompleter, Completer, Interface } from "node:readline"; + import { Context } from "node:vm"; + import { InspectOptions } from "node:util"; + interface ReplOptions { + /** + * The input prompt to display. + * @default "> " + */ + prompt?: string | undefined; + /** + * The `Readable` stream from which REPL input will be read. + * @default process.stdin + */ + input?: NodeJS.ReadableStream | undefined; + /** + * The `Writable` stream to which REPL output will be written. + * @default process.stdout + */ + output?: NodeJS.WritableStream | undefined; + /** + * If `true`, specifies that the output should be treated as a TTY terminal, and have + * ANSI/VT100 escape codes written to it. + * Default: checking the value of the `isTTY` property on the output stream upon + * instantiation. + */ + terminal?: boolean | undefined; + /** + * The function to be used when evaluating each given line of input. + * Default: an async wrapper for the JavaScript `eval()` function. An `eval` function can + * error with `repl.Recoverable` to indicate the input was incomplete and prompt for + * additional lines. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_default_evaluation + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_custom_evaluation_functions + */ + eval?: REPLEval | undefined; + /** + * Defines if the repl prints output previews or not. + * @default `true` Always `false` in case `terminal` is falsy. + */ + preview?: boolean | undefined; + /** + * If `true`, specifies that the default `writer` function should include ANSI color + * styling to REPL output. If a custom `writer` function is provided then this has no + * effect. + * Default: the REPL instance's `terminal` value. + */ + useColors?: boolean | undefined; + /** + * If `true`, specifies that the default evaluation function will use the JavaScript + * `global` as the context as opposed to creating a new separate context for the REPL + * instance. The node CLI REPL sets this value to `true`. + * Default: `false`. + */ + useGlobal?: boolean | undefined; + /** + * If `true`, specifies that the default writer will not output the return value of a + * command if it evaluates to `undefined`. + * Default: `false`. + */ + ignoreUndefined?: boolean | undefined; + /** + * The function to invoke to format the output of each command before writing to `output`. + * Default: a wrapper for `util.inspect`. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_customizing_repl_output + */ + writer?: REPLWriter | undefined; + /** + * An optional function used for custom Tab auto completion. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/readline.html#readline_use_of_the_completer_function + */ + completer?: Completer | AsyncCompleter | undefined; + /** + * A flag that specifies whether the default evaluator executes all JavaScript commands in + * strict mode or default (sloppy) mode. + * Accepted values are: + * - `repl.REPL_MODE_SLOPPY` - evaluates expressions in sloppy mode. + * - `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is equivalent to + * prefacing every repl statement with `'use strict'`. + */ + replMode?: typeof REPL_MODE_SLOPPY | typeof REPL_MODE_STRICT | undefined; + /** + * Stop evaluating the current piece of code when `SIGINT` is received, i.e. 
`Ctrl+C` is + * pressed. This cannot be used together with a custom `eval` function. + * Default: `false`. + */ + breakEvalOnSigint?: boolean | undefined; + } + type REPLEval = ( + this: REPLServer, + evalCmd: string, + context: Context, + file: string, + cb: (err: Error | null, result: any) => void, + ) => void; + type REPLWriter = (this: REPLServer, obj: any) => string; + /** + * This is the default "writer" value, if none is passed in the REPL options, + * and it can be overridden by custom print functions. + */ + const writer: REPLWriter & { + options: InspectOptions; + }; + type REPLCommandAction = (this: REPLServer, text: string) => void; + interface REPLCommand { + /** + * Help text to be displayed when `.help` is entered. + */ + help?: string | undefined; + /** + * The function to execute, optionally accepting a single string argument. + */ + action: REPLCommandAction; + } + /** + * Instances of `repl.REPLServer` are created using the {@link start} method + * or directly using the JavaScript `new` keyword. + * + * ```js + * const repl = require('node:repl'); + * + * const options = { useColors: true }; + * + * const firstInstance = repl.start(options); + * const secondInstance = new repl.REPLServer(options); + * ``` + * @since v0.1.91 + */ + class REPLServer extends Interface { + /** + * The `vm.Context` provided to the `eval` function to be used for JavaScript + * evaluation. + */ + readonly context: Context; + /** + * @deprecated since v14.3.0 - Use `input` instead. + */ + readonly inputStream: NodeJS.ReadableStream; + /** + * @deprecated since v14.3.0 - Use `output` instead. + */ + readonly outputStream: NodeJS.WritableStream; + /** + * The `Readable` stream from which REPL input will be read. + */ + readonly input: NodeJS.ReadableStream; + /** + * The `Writable` stream to which REPL output will be written. + */ + readonly output: NodeJS.WritableStream; + /** + * The commands registered via `replServer.defineCommand()`. + */ + readonly commands: NodeJS.ReadOnlyDict; + /** + * A value indicating whether the REPL is currently in "editor mode". + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_commands_and_special_keys + */ + readonly editorMode: boolean; + /** + * A value indicating whether the `_` variable has been assigned. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly underscoreAssigned: boolean; + /** + * The last evaluation result from the REPL (assigned to the `_` variable inside of the REPL). + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly last: any; + /** + * A value indicating whether the `_error` variable has been assigned. + * + * @since v9.8.0 + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly underscoreErrAssigned: boolean; + /** + * The last error raised inside the REPL (assigned to the `_error` variable inside of the REPL). + * + * @since v9.8.0 + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly lastError: any; + /** + * Specified in the REPL options, this is the function to be used when evaluating each + * given line of input. If not specified in the REPL options, this is an async wrapper + * for the JavaScript `eval()` function. 
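+ * For example, a minimal sketch (not taken from the upstream docs; the echo
+ * behaviour and the `echo>` prompt are purely illustrative) of supplying a
+ * custom `eval` function through `repl.start()`:
+ *
+ * ```js
+ * const repl = require('node:repl');
+ *
+ * repl.start({
+ *   prompt: 'echo> ',
+ *   eval: (cmd, context, filename, callback) => {
+ *     // Report the trimmed input back as the evaluation result.
+ *     callback(null, cmd.trim());
+ *   },
+ * });
+ * ```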
+ */ + readonly eval: REPLEval; + /** + * Specified in the REPL options, this is a value indicating whether the default + * `writer` function should include ANSI color styling to REPL output. + */ + readonly useColors: boolean; + /** + * Specified in the REPL options, this is a value indicating whether the default `eval` + * function will use the JavaScript `global` as the context as opposed to creating a new + * separate context for the REPL instance. + */ + readonly useGlobal: boolean; + /** + * Specified in the REPL options, this is a value indicating whether the default `writer` + * function should output the result of a command if it evaluates to `undefined`. + */ + readonly ignoreUndefined: boolean; + /** + * Specified in the REPL options, this is the function to invoke to format the output of + * each command before writing to `outputStream`. If not specified in the REPL options, + * this will be a wrapper for `util.inspect`. + */ + readonly writer: REPLWriter; + /** + * Specified in the REPL options, this is the function to use for custom Tab auto-completion. + */ + readonly completer: Completer | AsyncCompleter; + /** + * Specified in the REPL options, this is a flag that specifies whether the default `eval` + * function should execute all JavaScript commands in strict mode or default (sloppy) mode. + * Possible values are: + * - `repl.REPL_MODE_SLOPPY` - evaluates expressions in sloppy mode. + * - `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is equivalent to + * prefacing every repl statement with `'use strict'`. + */ + readonly replMode: typeof REPL_MODE_SLOPPY | typeof REPL_MODE_STRICT; + /** + * NOTE: According to the documentation: + * + * > Instances of `repl.REPLServer` are created using the `repl.start()` method and + * > _should not_ be created directly using the JavaScript `new` keyword. + * + * `REPLServer` cannot be subclassed due to implementation specifics in NodeJS. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_class_replserver + */ + private constructor(); + /** + * The `replServer.defineCommand()` method is used to add new `.`\-prefixed commands + * to the REPL instance. Such commands are invoked by typing a `.` followed by the`keyword`. The `cmd` is either a `Function` or an `Object` with the following + * properties: + * + * The following example shows two new commands added to the REPL instance: + * + * ```js + * const repl = require('node:repl'); + * + * const replServer = repl.start({ prompt: '> ' }); + * replServer.defineCommand('sayhello', { + * help: 'Say hello', + * action(name) { + * this.clearBufferedCommand(); + * console.log(`Hello, ${name}!`); + * this.displayPrompt(); + * }, + * }); + * replServer.defineCommand('saybye', function saybye() { + * console.log('Goodbye!'); + * this.close(); + * }); + * ``` + * + * The new commands can then be used from within the REPL instance: + * + * ```console + * > .sayhello Node.js User + * Hello, Node.js User! + * > .saybye + * Goodbye! + * ``` + * @since v0.3.0 + * @param keyword The command keyword (_without_ a leading `.` character). + * @param cmd The function to invoke when the command is processed. + */ + defineCommand(keyword: string, cmd: REPLCommandAction | REPLCommand): void; + /** + * The `replServer.displayPrompt()` method readies the REPL instance for input + * from the user, printing the configured `prompt` to a new line in the `output`and resuming the `input` to accept new input. 
+ * + * When multi-line input is being entered, an ellipsis is printed rather than the + * 'prompt'. + * + * When `preserveCursor` is `true`, the cursor placement will not be reset to `0`. + * + * The `replServer.displayPrompt` method is primarily intended to be called from + * within the action function for commands registered using the`replServer.defineCommand()` method. + * @since v0.1.91 + */ + displayPrompt(preserveCursor?: boolean): void; + /** + * The `replServer.clearBufferedCommand()` method clears any command that has been + * buffered but not yet executed. This method is primarily intended to be + * called from within the action function for commands registered using the`replServer.defineCommand()` method. + * @since v9.0.0 + */ + clearBufferedCommand(): void; + /** + * Initializes a history log file for the REPL instance. When executing the + * Node.js binary and using the command-line REPL, a history file is initialized + * by default. However, this is not the case when creating a REPL + * programmatically. Use this method to initialize a history log file when working + * with REPL instances programmatically. + * @since v11.10.0 + * @param historyPath the path to the history file + * @param callback called when history writes are ready or upon error + */ + setupHistory(path: string, callback: (err: Error | null, repl: this) => void): void; + /** + * events.EventEmitter + * 1. close - inherited from `readline.Interface` + * 2. line - inherited from `readline.Interface` + * 3. pause - inherited from `readline.Interface` + * 4. resume - inherited from `readline.Interface` + * 5. SIGCONT - inherited from `readline.Interface` + * 6. SIGINT - inherited from `readline.Interface` + * 7. SIGTSTP - inherited from `readline.Interface` + * 8. exit + * 9. 
reset + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "line", listener: (input: string) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: "SIGCONT", listener: () => void): this; + addListener(event: "SIGINT", listener: () => void): this; + addListener(event: "SIGTSTP", listener: () => void): this; + addListener(event: "exit", listener: () => void): this; + addListener(event: "reset", listener: (context: Context) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "line", input: string): boolean; + emit(event: "pause"): boolean; + emit(event: "resume"): boolean; + emit(event: "SIGCONT"): boolean; + emit(event: "SIGINT"): boolean; + emit(event: "SIGTSTP"): boolean; + emit(event: "exit"): boolean; + emit(event: "reset", context: Context): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "line", listener: (input: string) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: "SIGCONT", listener: () => void): this; + on(event: "SIGINT", listener: () => void): this; + on(event: "SIGTSTP", listener: () => void): this; + on(event: "exit", listener: () => void): this; + on(event: "reset", listener: (context: Context) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "line", listener: (input: string) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: "SIGCONT", listener: () => void): this; + once(event: "SIGINT", listener: () => void): this; + once(event: "SIGTSTP", listener: () => void): this; + once(event: "exit", listener: () => void): this; + once(event: "reset", listener: (context: Context) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "line", listener: (input: string) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: "SIGCONT", listener: () => void): this; + prependListener(event: "SIGINT", listener: () => void): this; + prependListener(event: "SIGTSTP", listener: () => void): this; + prependListener(event: "exit", listener: () => void): this; + prependListener(event: "reset", listener: (context: Context) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "line", listener: (input: string) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: "SIGCONT", listener: () => void): this; + prependOnceListener(event: "SIGINT", listener: () => void): this; + prependOnceListener(event: "SIGTSTP", listener: () => void): this; + prependOnceListener(event: "exit", listener: () => void): this; + prependOnceListener(event: "reset", listener: (context: Context) => void): this; + } + /** + 
* A flag passed in the REPL options. Evaluates expressions in sloppy mode. + */ + const REPL_MODE_SLOPPY: unique symbol; + /** + * A flag passed in the REPL options. Evaluates expressions in strict mode. + * This is equivalent to prefacing every repl statement with `'use strict'`. + */ + const REPL_MODE_STRICT: unique symbol; + /** + * The `repl.start()` method creates and starts a {@link REPLServer} instance. + * + * If `options` is a string, then it specifies the input prompt: + * + * ```js + * const repl = require('node:repl'); + * + * // a Unix style prompt + * repl.start('$ '); + * ``` + * @since v0.1.91 + */ + function start(options?: string | ReplOptions): REPLServer; + /** + * Indicates a recoverable error that a `REPLServer` can use to support multi-line input. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_recoverable_errors + */ + class Recoverable extends SyntaxError { + err: Error; + constructor(err: Error); + } +} +declare module "node:repl" { + export * from "repl"; +} diff --git a/dist/node_modules/@types/node/stream.d.ts b/dist/node_modules/@types/node/stream.d.ts new file mode 100644 index 0000000..947a019 --- /dev/null +++ b/dist/node_modules/@types/node/stream.d.ts @@ -0,0 +1,1701 @@ +/** + * A stream is an abstract interface for working with streaming data in Node.js. + * The `node:stream` module provides an API for implementing the stream interface. + * + * There are many stream objects provided by Node.js. For instance, a `request to an HTTP server` and `process.stdout` are both stream instances. + * + * Streams can be readable, writable, or both. All streams are instances of `EventEmitter`. + * + * To access the `node:stream` module: + * + * ```js + * const stream = require('node:stream'); + * ``` + * + * The `node:stream` module is useful for creating new types of stream instances. + * It is usually not necessary to use the `node:stream` module to consume streams. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/stream.js) + */ +declare module "stream" { + import { Abortable, EventEmitter } from "node:events"; + import { Blob as NodeBlob } from "node:buffer"; + import * as streamPromises from "node:stream/promises"; + import * as streamConsumers from "node:stream/consumers"; + import * as streamWeb from "node:stream/web"; + + type ComposeFnParam = (source: any) => void; + + class internal extends EventEmitter { + pipe( + destination: T, + options?: { + end?: boolean | undefined; + }, + ): T; + compose( + stream: T | ComposeFnParam | Iterable | AsyncIterable, + options?: { signal: AbortSignal }, + ): T; + } + import Stream = internal.Stream; + import Readable = internal.Readable; + import ReadableOptions = internal.ReadableOptions; + interface ArrayOptions { + /** the maximum concurrent invocations of `fn` to call on the stream at once. **Default: 1**. */ + concurrency?: number; + /** allows destroying the stream if the signal is aborted. */ + signal?: AbortSignal; + } + class ReadableBase extends Stream implements NodeJS.ReadableStream { + /** + * A utility method for creating Readable Streams out of iterators. + */ + static from(iterable: Iterable | AsyncIterable, options?: ReadableOptions): Readable; + /** + * Returns whether the stream has been read from or cancelled. + * @since v16.8.0 + */ + static isDisturbed(stream: Readable | NodeJS.ReadableStream): boolean; + /** + * Returns whether the stream was destroyed or errored before emitting `'end'`. 
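+ * For example (a short sketch, not taken from the upstream docs), destroying
+ * a stream before it has emitted `'end'` marks it as aborted:
+ *
+ * ```js
+ * const { Readable } = require('node:stream');
+ *
+ * const readable = Readable.from(['a', 'b']);
+ * readable.destroy();
+ * // Destroyed before 'end' was emitted, so the read was aborted.
+ * console.log(readable.readableAborted); // true
+ * ```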
+ * @since v16.8.0 + * @experimental + */ + readonly readableAborted: boolean; + /** + * Is `true` if it is safe to call `readable.read()`, which means + * the stream has not been destroyed or emitted `'error'` or `'end'`. + * @since v11.4.0 + */ + readable: boolean; + /** + * Returns whether `'data'` has been emitted. + * @since v16.7.0, v14.18.0 + * @experimental + */ + readonly readableDidRead: boolean; + /** + * Getter for the property `encoding` of a given `Readable` stream. The `encoding`property can be set using the `readable.setEncoding()` method. + * @since v12.7.0 + */ + readonly readableEncoding: BufferEncoding | null; + /** + * Becomes `true` when `'end'` event is emitted. + * @since v12.9.0 + */ + readonly readableEnded: boolean; + /** + * This property reflects the current state of a `Readable` stream as described + * in the `Three states` section. + * @since v9.4.0 + */ + readonly readableFlowing: boolean | null; + /** + * Returns the value of `highWaterMark` passed when creating this `Readable`. + * @since v9.3.0 + */ + readonly readableHighWaterMark: number; + /** + * This property contains the number of bytes (or objects) in the queue + * ready to be read. The value provides introspection data regarding + * the status of the `highWaterMark`. + * @since v9.4.0 + */ + readonly readableLength: number; + /** + * Getter for the property `objectMode` of a given `Readable` stream. + * @since v12.3.0 + */ + readonly readableObjectMode: boolean; + /** + * Is `true` after `readable.destroy()` has been called. + * @since v8.0.0 + */ + destroyed: boolean; + /** + * Is `true` after `'close'` has been emitted. + * @since v18.0.0 + */ + readonly closed: boolean; + /** + * Returns error if the stream has been destroyed with an error. + * @since v18.0.0 + */ + readonly errored: Error | null; + constructor(opts?: ReadableOptions); + _construct?(callback: (error?: Error | null) => void): void; + _read(size: number): void; + /** + * The `readable.read()` method reads data out of the internal buffer and + * returns it. If no data is available to be read, `null` is returned. By default, + * the data is returned as a `Buffer` object unless an encoding has been + * specified using the `readable.setEncoding()` method or the stream is operating + * in object mode. + * + * The optional `size` argument specifies a specific number of bytes to read. If`size` bytes are not available to be read, `null` will be returned _unless_the stream has ended, in which + * case all of the data remaining in the internal + * buffer will be returned. + * + * If the `size` argument is not specified, all of the data contained in the + * internal buffer will be returned. + * + * The `size` argument must be less than or equal to 1 GiB. + * + * The `readable.read()` method should only be called on `Readable` streams + * operating in paused mode. In flowing mode, `readable.read()` is called + * automatically until the internal buffer is fully drained. 
+ * + * ```js + * const readable = getReadableStreamSomehow(); + * + * // 'readable' may be triggered multiple times as data is buffered in + * readable.on('readable', () => { + * let chunk; + * console.log('Stream is readable (new data received in buffer)'); + * // Use a loop to make sure we read all currently available data + * while (null !== (chunk = readable.read())) { + * console.log(`Read ${chunk.length} bytes of data...`); + * } + * }); + * + * // 'end' will be triggered once when there is no more data available + * readable.on('end', () => { + * console.log('Reached end of stream.'); + * }); + * ``` + * + * Each call to `readable.read()` returns a chunk of data, or `null`. The chunks + * are not concatenated. A `while` loop is necessary to consume all data + * currently in the buffer. When reading a large file `.read()` may return `null`, + * having consumed all buffered content so far, but there is still more data to + * come not yet buffered. In this case a new `'readable'` event will be emitted + * when there is more data in the buffer. Finally the `'end'` event will be + * emitted when there is no more data to come. + * + * Therefore to read a file's whole contents from a `readable`, it is necessary + * to collect chunks across multiple `'readable'` events: + * + * ```js + * const chunks = []; + * + * readable.on('readable', () => { + * let chunk; + * while (null !== (chunk = readable.read())) { + * chunks.push(chunk); + * } + * }); + * + * readable.on('end', () => { + * const content = chunks.join(''); + * }); + * ``` + * + * A `Readable` stream in object mode will always return a single item from + * a call to `readable.read(size)`, regardless of the value of the`size` argument. + * + * If the `readable.read()` method returns a chunk of data, a `'data'` event will + * also be emitted. + * + * Calling {@link read} after the `'end'` event has + * been emitted will return `null`. No runtime error will be raised. + * @since v0.9.4 + * @param size Optional argument to specify how much data to read. + */ + read(size?: number): any; + /** + * The `readable.setEncoding()` method sets the character encoding for + * data read from the `Readable` stream. + * + * By default, no encoding is assigned and stream data will be returned as`Buffer` objects. Setting an encoding causes the stream data + * to be returned as strings of the specified encoding rather than as `Buffer`objects. For instance, calling `readable.setEncoding('utf8')` will cause the + * output data to be interpreted as UTF-8 data, and passed as strings. Calling`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal + * string format. + * + * The `Readable` stream will properly handle multi-byte characters delivered + * through the stream that would otherwise become improperly decoded if simply + * pulled from the stream as `Buffer` objects. + * + * ```js + * const readable = getReadableStreamSomehow(); + * readable.setEncoding('utf8'); + * readable.on('data', (chunk) => { + * assert.equal(typeof chunk, 'string'); + * console.log('Got %d characters of string data:', chunk.length); + * }); + * ``` + * @since v0.9.4 + * @param encoding The encoding to use. + */ + setEncoding(encoding: BufferEncoding): this; + /** + * The `readable.pause()` method will cause a stream in flowing mode to stop + * emitting `'data'` events, switching out of flowing mode. Any data that + * becomes available will remain in the internal buffer. 
+ * + * ```js + * const readable = getReadableStreamSomehow(); + * readable.on('data', (chunk) => { + * console.log(`Received ${chunk.length} bytes of data.`); + * readable.pause(); + * console.log('There will be no additional data for 1 second.'); + * setTimeout(() => { + * console.log('Now data will start flowing again.'); + * readable.resume(); + * }, 1000); + * }); + * ``` + * + * The `readable.pause()` method has no effect if there is a `'readable'`event listener. + * @since v0.9.4 + */ + pause(): this; + /** + * The `readable.resume()` method causes an explicitly paused `Readable` stream to + * resume emitting `'data'` events, switching the stream into flowing mode. + * + * The `readable.resume()` method can be used to fully consume the data from a + * stream without actually processing any of that data: + * + * ```js + * getReadableStreamSomehow() + * .resume() + * .on('end', () => { + * console.log('Reached the end, but did not read anything.'); + * }); + * ``` + * + * The `readable.resume()` method has no effect if there is a `'readable'`event listener. + * @since v0.9.4 + */ + resume(): this; + /** + * The `readable.isPaused()` method returns the current operating state of the`Readable`. This is used primarily by the mechanism that underlies the`readable.pipe()` method. In most + * typical cases, there will be no reason to + * use this method directly. + * + * ```js + * const readable = new stream.Readable(); + * + * readable.isPaused(); // === false + * readable.pause(); + * readable.isPaused(); // === true + * readable.resume(); + * readable.isPaused(); // === false + * ``` + * @since v0.11.14 + */ + isPaused(): boolean; + /** + * The `readable.unpipe()` method detaches a `Writable` stream previously attached + * using the {@link pipe} method. + * + * If the `destination` is not specified, then _all_ pipes are detached. + * + * If the `destination` is specified, but no pipe is set up for it, then + * the method does nothing. + * + * ```js + * const fs = require('node:fs'); + * const readable = getReadableStreamSomehow(); + * const writable = fs.createWriteStream('file.txt'); + * // All the data from readable goes into 'file.txt', + * // but only for the first second. + * readable.pipe(writable); + * setTimeout(() => { + * console.log('Stop writing to file.txt.'); + * readable.unpipe(writable); + * console.log('Manually close the file stream.'); + * writable.end(); + * }, 1000); + * ``` + * @since v0.9.4 + * @param destination Optional specific stream to unpipe + */ + unpipe(destination?: NodeJS.WritableStream): this; + /** + * Passing `chunk` as `null` signals the end of the stream (EOF) and behaves the + * same as `readable.push(null)`, after which no more data can be written. The EOF + * signal is put at the end of the buffer and any buffered data will still be + * flushed. + * + * The `readable.unshift()` method pushes a chunk of data back into the internal + * buffer. This is useful in certain situations where a stream is being consumed by + * code that needs to "un-consume" some amount of data that it has optimistically + * pulled out of the source, so that the data can be passed on to some other party. + * + * The `stream.unshift(chunk)` method cannot be called after the `'end'` event + * has been emitted or a runtime error will be thrown. + * + * Developers using `stream.unshift()` often should consider switching to + * use of a `Transform` stream instead. See the `API for stream implementers` section for more information. 
+ * + * ```js + * // Pull off a header delimited by \n\n. + * // Use unshift() if we get too much. + * // Call the callback with (error, header, stream). + * const { StringDecoder } = require('node:string_decoder'); + * function parseHeader(stream, callback) { + * stream.on('error', callback); + * stream.on('readable', onReadable); + * const decoder = new StringDecoder('utf8'); + * let header = ''; + * function onReadable() { + * let chunk; + * while (null !== (chunk = stream.read())) { + * const str = decoder.write(chunk); + * if (str.includes('\n\n')) { + * // Found the header boundary. + * const split = str.split(/\n\n/); + * header += split.shift(); + * const remaining = split.join('\n\n'); + * const buf = Buffer.from(remaining, 'utf8'); + * stream.removeListener('error', callback); + * // Remove the 'readable' listener before unshifting. + * stream.removeListener('readable', onReadable); + * if (buf.length) + * stream.unshift(buf); + * // Now the body of the message can be read from the stream. + * callback(null, header, stream); + * return; + * } + * // Still reading the header. + * header += str; + * } + * } + * } + * ``` + * + * Unlike {@link push}, `stream.unshift(chunk)` will not + * end the reading process by resetting the internal reading state of the stream. + * This can cause unexpected results if `readable.unshift()` is called during a + * read (i.e. from within a {@link _read} implementation on a + * custom stream). Following the call to `readable.unshift()` with an immediate {@link push} will reset the reading state appropriately, + * however it is best to simply avoid calling `readable.unshift()` while in the + * process of performing a read. + * @since v0.9.11 + * @param chunk Chunk of data to unshift onto the read queue. For streams not operating in object mode, `chunk` must be a string, `Buffer`, `Uint8Array`, or `null`. For object mode + * streams, `chunk` may be any JavaScript value. + * @param encoding Encoding of string chunks. Must be a valid `Buffer` encoding, such as `'utf8'` or `'ascii'`. + */ + unshift(chunk: any, encoding?: BufferEncoding): void; + /** + * Prior to Node.js 0.10, streams did not implement the entire `node:stream`module API as it is currently defined. (See `Compatibility` for more + * information.) + * + * When using an older Node.js library that emits `'data'` events and has a {@link pause} method that is advisory only, the`readable.wrap()` method can be used to create a `Readable` + * stream that uses + * the old stream as its data source. + * + * It will rarely be necessary to use `readable.wrap()` but the method has been + * provided as a convenience for interacting with older Node.js applications and + * libraries. + * + * ```js + * const { OldReader } = require('./old-api-module.js'); + * const { Readable } = require('node:stream'); + * const oreader = new OldReader(); + * const myReader = new Readable().wrap(oreader); + * + * myReader.on('readable', () => { + * myReader.read(); // etc. + * }); + * ``` + * @since v0.9.4 + * @param stream An "old style" readable stream + */ + wrap(stream: NodeJS.ReadableStream): this; + push(chunk: any, encoding?: BufferEncoding): boolean; + /** + * The iterator created by this method gives users the option to cancel the destruction + * of the stream if the `for await...of` loop is exited by `return`, `break`, or `throw`, + * or if the iterator should destroy the stream if the stream emitted an error during iteration. 
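+ * A short sketch (assumed usage, not taken verbatim from the upstream docs)
+ * that opts out of destroying the stream when the loop is exited early:
+ *
+ * ```js
+ * const { Readable } = require('node:stream');
+ *
+ * async function printFirstChunk(readable) {
+ *   // destroyOnReturn: false means breaking out of the loop
+ *   // leaves the stream intact.
+ *   for await (const chunk of readable.iterator({ destroyOnReturn: false })) {
+ *     console.log(chunk); // 1
+ *     break;
+ *   }
+ *   console.log(readable.destroyed); // false
+ * }
+ *
+ * printFirstChunk(Readable.from([1, 2, 3]));
+ * ```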
+ * @since v16.3.0 + * @param options.destroyOnReturn When set to `false`, calling `return` on the async iterator, + * or exiting a `for await...of` iteration using a `break`, `return`, or `throw` will not destroy the stream. + * **Default: `true`**. + */ + iterator(options?: { destroyOnReturn?: boolean }): AsyncIterableIterator; + /** + * This method allows mapping over the stream. The *fn* function will be called for every chunk in the stream. + * If the *fn* function returns a promise - that promise will be `await`ed before being passed to the result stream. + * @since v17.4.0, v16.14.0 + * @param fn a function to map over every chunk in the stream. Async or not. + * @returns a stream mapped with the function *fn*. + */ + map(fn: (data: any, options?: Pick) => any, options?: ArrayOptions): Readable; + /** + * This method allows filtering the stream. For each chunk in the stream the *fn* function will be called + * and if it returns a truthy value, the chunk will be passed to the result stream. + * If the *fn* function returns a promise - that promise will be `await`ed. + * @since v17.4.0, v16.14.0 + * @param fn a function to filter chunks from the stream. Async or not. + * @returns a stream filtered with the predicate *fn*. + */ + filter( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Readable; + /** + * This method allows iterating a stream. For each chunk in the stream the *fn* function will be called. + * If the *fn* function returns a promise - that promise will be `await`ed. + * + * This method is different from `for await...of` loops in that it can optionally process chunks concurrently. + * In addition, a `forEach` iteration can only be stopped by having passed a `signal` option + * and aborting the related AbortController while `for await...of` can be stopped with `break` or `return`. + * In either case the stream will be destroyed. + * + * This method is different from listening to the `'data'` event in that it uses the `readable` event + * in the underlying machinary and can limit the number of concurrent *fn* calls. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise for when the stream has finished. + */ + forEach( + fn: (data: any, options?: Pick) => void | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method allows easily obtaining the contents of a stream. + * + * As this method reads the entire stream into memory, it negates the benefits of streams. It's intended + * for interoperability and convenience, not as the primary way to consume streams. + * @since v17.5.0 + * @returns a promise containing an array with the contents of the stream. + */ + toArray(options?: Pick): Promise; + /** + * This method is similar to `Array.prototype.some` and calls *fn* on each chunk in the stream + * until the awaited return value is `true` (or any truthy value). Once an *fn* call on a chunk + * `await`ed return value is truthy, the stream is destroyed and the promise is fulfilled with `true`. + * If none of the *fn* calls on the chunks return a truthy value, the promise is fulfilled with `false`. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise evaluating to `true` if *fn* returned a truthy value for at least one of the chunks. 
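+ *
+ * A minimal sketch (assumed usage; the numbers are illustrative):
+ *
+ * ```js
+ * const { Readable } = require('node:stream');
+ *
+ * (async () => {
+ *   // Fulfills with true because at least one chunk is greater than 50.
+ *   const anyBig = await Readable.from([1, 2, 100]).some((x) => x > 50);
+ *   console.log(anyBig); // true
+ * })();
+ * ```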
+ */ + some( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method is similar to `Array.prototype.find` and calls *fn* on each chunk in the stream + * to find a chunk with a truthy value for *fn*. Once an *fn* call's awaited return value is truthy, + * the stream is destroyed and the promise is fulfilled with value for which *fn* returned a truthy value. + * If all of the *fn* calls on the chunks return a falsy value, the promise is fulfilled with `undefined`. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise evaluating to the first chunk for which *fn* evaluated with a truthy value, + * or `undefined` if no element was found. + */ + find( + fn: (data: any, options?: Pick) => data is T, + options?: ArrayOptions, + ): Promise; + find( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method is similar to `Array.prototype.every` and calls *fn* on each chunk in the stream + * to check if all awaited return values are truthy value for *fn*. Once an *fn* call on a chunk + * `await`ed return value is falsy, the stream is destroyed and the promise is fulfilled with `false`. + * If all of the *fn* calls on the chunks return a truthy value, the promise is fulfilled with `true`. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise evaluating to `true` if *fn* returned a truthy value for every one of the chunks. + */ + every( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method returns a new stream by applying the given callback to each chunk of the stream + * and then flattening the result. + * + * It is possible to return a stream or another iterable or async iterable from *fn* and the result streams + * will be merged (flattened) into the returned stream. + * @since v17.5.0 + * @param fn a function to map over every chunk in the stream. May be async. May be a stream or generator. + * @returns a stream flat-mapped with the function *fn*. + */ + flatMap(fn: (data: any, options?: Pick) => any, options?: ArrayOptions): Readable; + /** + * This method returns a new stream with the first *limit* chunks dropped from the start. + * @since v17.5.0 + * @param limit the number of chunks to drop from the readable. + * @returns a stream with *limit* chunks dropped from the start. + */ + drop(limit: number, options?: Pick): Readable; + /** + * This method returns a new stream with the first *limit* chunks. + * @since v17.5.0 + * @param limit the number of chunks to take from the readable. + * @returns a stream with *limit* chunks taken. + */ + take(limit: number, options?: Pick): Readable; + /** + * This method returns a new stream with chunks of the underlying stream paired with a counter + * in the form `[index, chunk]`. The first index value is `0` and it increases by 1 for each chunk produced. + * @since v17.5.0 + * @returns a stream of indexed pairs. + */ + asIndexedPairs(options?: Pick): Readable; + /** + * This method calls *fn* on each chunk of the stream in order, passing it the result from the calculation + * on the previous element. It returns a promise for the final value of the reduction. + * + * If no *initial* value is supplied the first chunk of the stream is used as the initial value. 
+ * If the stream is empty, the promise is rejected with a `TypeError` with the `ERR_INVALID_ARGS` code property. + * + * The reducer function iterates the stream element-by-element which means that there is no *concurrency* parameter + * or parallelism. To perform a reduce concurrently, you can extract the async function to `readable.map` method. + * @since v17.5.0 + * @param fn a reducer function to call over every chunk in the stream. Async or not. + * @param initial the initial value to use in the reduction. + * @returns a promise for the final value of the reduction. + */ + reduce( + fn: (previous: any, data: any, options?: Pick) => T, + initial?: undefined, + options?: Pick, + ): Promise; + reduce( + fn: (previous: T, data: any, options?: Pick) => T, + initial: T, + options?: Pick, + ): Promise; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + /** + * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the readable + * stream will release any internal resources and subsequent calls to `push()`will be ignored. + * + * Once `destroy()` has been called any further calls will be a no-op and no + * further errors except from `_destroy()` may be emitted as `'error'`. + * + * Implementors should not override this method, but instead implement `readable._destroy()`. + * @since v8.0.0 + * @param error Error which will be passed as payload in `'error'` event + */ + destroy(error?: Error): this; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. data + * 3. end + * 4. error + * 5. pause + * 6. readable + * 7. resume + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: any) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "data", chunk: any): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "pause"): boolean; + emit(event: "readable"): boolean; + emit(event: "resume"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: any) => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: any) => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: any) => 
void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: any) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "data", listener: (chunk: any) => void): this; + removeListener(event: "end", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "pause", listener: () => void): this; + removeListener(event: "readable", listener: () => void): this; + removeListener(event: "resume", listener: () => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + /** + * Calls `readable.destroy()` with an `AbortError` and returns a promise that fulfills when the stream is finished. + * @since v20.4.0 + */ + [Symbol.asyncDispose](): Promise; + } + import WritableOptions = internal.WritableOptions; + class WritableBase extends Stream implements NodeJS.WritableStream { + /** + * Is `true` if it is safe to call `writable.write()`, which means + * the stream has not been destroyed, errored, or ended. + * @since v11.4.0 + */ + readonly writable: boolean; + /** + * Is `true` after `writable.end()` has been called. This property + * does not indicate whether the data has been flushed, for this use `writable.writableFinished` instead. + * @since v12.9.0 + */ + readonly writableEnded: boolean; + /** + * Is set to `true` immediately before the `'finish'` event is emitted. + * @since v12.6.0 + */ + readonly writableFinished: boolean; + /** + * Return the value of `highWaterMark` passed when creating this `Writable`. + * @since v9.3.0 + */ + readonly writableHighWaterMark: number; + /** + * This property contains the number of bytes (or objects) in the queue + * ready to be written. The value provides introspection data regarding + * the status of the `highWaterMark`. + * @since v9.4.0 + */ + readonly writableLength: number; + /** + * Getter for the property `objectMode` of a given `Writable` stream. + * @since v12.3.0 + */ + readonly writableObjectMode: boolean; + /** + * Number of times `writable.uncork()` needs to be + * called in order to fully uncork the stream. + * @since v13.2.0, v12.16.0 + */ + readonly writableCorked: number; + /** + * Is `true` after `writable.destroy()` has been called. + * @since v8.0.0 + */ + destroyed: boolean; + /** + * Is `true` after `'close'` has been emitted. + * @since v18.0.0 + */ + readonly closed: boolean; + /** + * Returns error if the stream has been destroyed with an error. 
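+ * For example (a short sketch, not taken from the upstream docs; the
+ * `'boom'` error is illustrative):
+ *
+ * ```js
+ * const { Writable } = require('node:stream');
+ *
+ * const writable = new Writable({
+ *   write(chunk, encoding, callback) { callback(); },
+ * });
+ *
+ * writable.on('error', () => {
+ *   // The error passed to destroy() is exposed on the stream.
+ *   console.log(writable.errored); // Error: boom
+ * });
+ *
+ * writable.destroy(new Error('boom'));
+ * ```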
+ * @since v18.0.0 + */ + readonly errored: Error | null; + /** + * Is `true` if the stream's buffer has been full and stream will emit `'drain'`. + * @since v15.2.0, v14.17.0 + */ + readonly writableNeedDrain: boolean; + constructor(opts?: WritableOptions); + _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + _writev?( + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + _construct?(callback: (error?: Error | null) => void): void; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + _final(callback: (error?: Error | null) => void): void; + /** + * The `writable.write()` method writes some data to the stream, and calls the + * supplied `callback` once the data has been fully handled. If an error + * occurs, the `callback` will be called with the error as its + * first argument. The `callback` is called asynchronously and before `'error'` is + * emitted. + * + * The return value is `true` if the internal buffer is less than the`highWaterMark` configured when the stream was created after admitting `chunk`. + * If `false` is returned, further attempts to write data to the stream should + * stop until the `'drain'` event is emitted. + * + * While a stream is not draining, calls to `write()` will buffer `chunk`, and + * return false. Once all currently buffered chunks are drained (accepted for + * delivery by the operating system), the `'drain'` event will be emitted. + * Once `write()` returns false, do not write more chunks + * until the `'drain'` event is emitted. While calling `write()` on a stream that + * is not draining is allowed, Node.js will buffer all written chunks until + * maximum memory usage occurs, at which point it will abort unconditionally. + * Even before it aborts, high memory usage will cause poor garbage collector + * performance and high RSS (which is not typically released back to the system, + * even after the memory is no longer required). Since TCP sockets may never + * drain if the remote peer does not read the data, writing a socket that is + * not draining may lead to a remotely exploitable vulnerability. + * + * Writing data while the stream is not draining is particularly + * problematic for a `Transform`, because the `Transform` streams are paused + * by default until they are piped or a `'data'` or `'readable'` event handler + * is added. + * + * If the data to be written can be generated or fetched on demand, it is + * recommended to encapsulate the logic into a `Readable` and use {@link pipe}. However, if calling `write()` is preferred, it is + * possible to respect backpressure and avoid memory issues using the `'drain'` event: + * + * ```js + * function write(data, cb) { + * if (!stream.write(data)) { + * stream.once('drain', cb); + * } else { + * process.nextTick(cb); + * } + * } + * + * // Wait for cb to be called before doing any other write. + * write('hello', () => { + * console.log('Write completed, do more writes now.'); + * }); + * ``` + * + * A `Writable` stream in object mode will always ignore the `encoding` argument. + * @since v0.9.4 + * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any + * JavaScript value other than `null`. + * @param [encoding='utf8'] The encoding, if `chunk` is a string. + * @param callback Callback for when this chunk of data is flushed. 
+ * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + write(chunk: any, callback?: (error: Error | null | undefined) => void): boolean; + write(chunk: any, encoding: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean; + /** + * The `writable.setDefaultEncoding()` method sets the default `encoding` for a `Writable` stream. + * @since v0.11.15 + * @param encoding The new default encoding + */ + setDefaultEncoding(encoding: BufferEncoding): this; + /** + * Calling the `writable.end()` method signals that no more data will be written + * to the `Writable`. The optional `chunk` and `encoding` arguments allow one + * final additional chunk of data to be written immediately before closing the + * stream. + * + * Calling the {@link write} method after calling {@link end} will raise an error. + * + * ```js + * // Write 'hello, ' and then end with 'world!'. + * const fs = require('node:fs'); + * const file = fs.createWriteStream('example.txt'); + * file.write('hello, '); + * file.end('world!'); + * // Writing more now is not allowed! + * ``` + * @since v0.9.4 + * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any + * JavaScript value other than `null`. + * @param encoding The encoding if `chunk` is a string + * @param callback Callback for when the stream is finished. + */ + end(cb?: () => void): this; + end(chunk: any, cb?: () => void): this; + end(chunk: any, encoding: BufferEncoding, cb?: () => void): this; + /** + * The `writable.cork()` method forces all written data to be buffered in memory. + * The buffered data will be flushed when either the {@link uncork} or {@link end} methods are called. + * + * The primary intent of `writable.cork()` is to accommodate a situation in which + * several small chunks are written to the stream in rapid succession. Instead of + * immediately forwarding them to the underlying destination, `writable.cork()`buffers all the chunks until `writable.uncork()` is called, which will pass them + * all to `writable._writev()`, if present. This prevents a head-of-line blocking + * situation where data is being buffered while waiting for the first small chunk + * to be processed. However, use of `writable.cork()` without implementing`writable._writev()` may have an adverse effect on throughput. + * + * See also: `writable.uncork()`, `writable._writev()`. + * @since v0.11.2 + */ + cork(): void; + /** + * The `writable.uncork()` method flushes all data buffered since {@link cork} was called. + * + * When using `writable.cork()` and `writable.uncork()` to manage the buffering + * of writes to a stream, defer calls to `writable.uncork()` using`process.nextTick()`. Doing so allows batching of all`writable.write()` calls that occur within a given Node.js event + * loop phase. + * + * ```js + * stream.cork(); + * stream.write('some '); + * stream.write('data '); + * process.nextTick(() => stream.uncork()); + * ``` + * + * If the `writable.cork()` method is called multiple times on a stream, the + * same number of calls to `writable.uncork()` must be called to flush the buffered + * data. 
+ * + * ```js + * stream.cork(); + * stream.write('some '); + * stream.cork(); + * stream.write('data '); + * process.nextTick(() => { + * stream.uncork(); + * // The data will not be flushed until uncork() is called a second time. + * stream.uncork(); + * }); + * ``` + * + * See also: `writable.cork()`. + * @since v0.11.2 + */ + uncork(): void; + /** + * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the writable + * stream has ended and subsequent calls to `write()` or `end()` will result in + * an `ERR_STREAM_DESTROYED` error. + * This is a destructive and immediate way to destroy a stream. Previous calls to`write()` may not have drained, and may trigger an `ERR_STREAM_DESTROYED` error. + * Use `end()` instead of destroy if data should flush before close, or wait for + * the `'drain'` event before destroying the stream. + * + * Once `destroy()` has been called any further calls will be a no-op and no + * further errors except from `_destroy()` may be emitted as `'error'`. + * + * Implementors should not override this method, + * but instead implement `writable._destroy()`. + * @since v8.0.0 + * @param error Optional, an error to emit with `'error'` event. + */ + destroy(error?: Error): this; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. drain + * 3. error + * 4. finish + * 5. pipe + * 6. unpipe + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pipe", listener: (src: Readable) => void): this; + addListener(event: "unpipe", listener: (src: Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "drain"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "pipe", src: Readable): boolean; + emit(event: "unpipe", src: Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pipe", listener: (src: Readable) => void): this; + on(event: "unpipe", listener: (src: Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pipe", listener: (src: Readable) => void): this; + once(event: "unpipe", listener: (src: Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pipe", listener: (src: Readable) => void): this; + prependListener(event: "unpipe", listener: (src: Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => 
void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "drain", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "finish", listener: () => void): this; + removeListener(event: "pipe", listener: (src: Readable) => void): this; + removeListener(event: "unpipe", listener: (src: Readable) => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + namespace internal { + class Stream extends internal { + constructor(opts?: ReadableOptions); + } + interface StreamOptions extends Abortable { + emitClose?: boolean | undefined; + highWaterMark?: number | undefined; + objectMode?: boolean | undefined; + construct?(this: T, callback: (error?: Error | null) => void): void; + destroy?(this: T, error: Error | null, callback: (error?: Error | null) => void): void; + autoDestroy?: boolean | undefined; + } + interface ReadableOptions extends StreamOptions { + encoding?: BufferEncoding | undefined; + read?(this: Readable, size: number): void; + } + /** + * @since v0.9.4 + */ + class Readable extends ReadableBase { + /** + * A utility method for creating a `Readable` from a web `ReadableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb( + readableStream: streamWeb.ReadableStream, + options?: Pick, + ): Readable; + /** + * A utility method for creating a web `ReadableStream` from a `Readable`. + * @since v17.0.0 + * @experimental + */ + static toWeb(streamReadable: Readable): streamWeb.ReadableStream; + } + interface WritableOptions extends StreamOptions { + decodeStrings?: boolean | undefined; + defaultEncoding?: BufferEncoding | undefined; + write?( + this: Writable, + chunk: any, + encoding: BufferEncoding, + callback: (error?: Error | null) => void, + ): void; + writev?( + this: Writable, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + final?(this: Writable, callback: (error?: Error | null) => void): void; + } + /** + * @since v0.9.4 + */ + class Writable extends WritableBase { + /** + * A utility method for creating a `Writable` from a web `WritableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb( + writableStream: streamWeb.WritableStream, + options?: Pick, + ): Writable; + /** + * A utility method for creating a web `WritableStream` from a `Writable`. 
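+ *
+ * A minimal illustrative sketch (not from the upstream docs; `output.txt` is an assumed path):
+ *
+ * ```js
+ * const fs = require('node:fs');
+ * const { Writable } = require('node:stream');
+ *
+ * // Wrap a Node.js Writable as a WHATWG WritableStream and write through its writer.
+ * const webWritable = Writable.toWeb(fs.createWriteStream('output.txt'));
+ * const writer = webWritable.getWriter();
+ * writer.write('hello').then(() => writer.close());
+ * ```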
+ * @since v17.0.0 + * @experimental + */ + static toWeb(streamWritable: Writable): streamWeb.WritableStream; + } + interface DuplexOptions extends ReadableOptions, WritableOptions { + allowHalfOpen?: boolean | undefined; + readableObjectMode?: boolean | undefined; + writableObjectMode?: boolean | undefined; + readableHighWaterMark?: number | undefined; + writableHighWaterMark?: number | undefined; + writableCorked?: number | undefined; + construct?(this: Duplex, callback: (error?: Error | null) => void): void; + read?(this: Duplex, size: number): void; + write?(this: Duplex, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + writev?( + this: Duplex, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + final?(this: Duplex, callback: (error?: Error | null) => void): void; + destroy?(this: Duplex, error: Error | null, callback: (error?: Error | null) => void): void; + } + /** + * Duplex streams are streams that implement both the `Readable` and `Writable` interfaces. + * + * Examples of `Duplex` streams include: + * + * * `TCP sockets` + * * `zlib streams` + * * `crypto streams` + * @since v0.9.4 + */ + class Duplex extends ReadableBase implements WritableBase { + readonly writable: boolean; + readonly writableEnded: boolean; + readonly writableFinished: boolean; + readonly writableHighWaterMark: number; + readonly writableLength: number; + readonly writableObjectMode: boolean; + readonly writableCorked: number; + readonly writableNeedDrain: boolean; + readonly closed: boolean; + readonly errored: Error | null; + /** + * If `false` then the stream will automatically end the writable side when the + * readable side ends. Set initially by the `allowHalfOpen` constructor option, + * which defaults to `true`. + * + * This can be changed manually to change the half-open behavior of an existing`Duplex` stream instance, but must be changed before the `'end'` event is + * emitted. + * @since v0.9.4 + */ + allowHalfOpen: boolean; + constructor(opts?: DuplexOptions); + /** + * A utility method for creating duplex streams. + * + * - `Stream` converts writable stream into writable `Duplex` and readable stream + * to `Duplex`. + * - `Blob` converts into readable `Duplex`. + * - `string` converts into readable `Duplex`. + * - `ArrayBuffer` converts into readable `Duplex`. + * - `AsyncIterable` converts into a readable `Duplex`. Cannot yield `null`. + * - `AsyncGeneratorFunction` converts into a readable/writable transform + * `Duplex`. Must take a source `AsyncIterable` as first parameter. Cannot yield + * `null`. + * - `AsyncFunction` converts into a writable `Duplex`. Must return + * either `null` or `undefined` + * - `Object ({ writable, readable })` converts `readable` and + * `writable` into `Stream` and then combines them into `Duplex` where the + * `Duplex` will write to the `writable` and read from the `readable`. + * - `Promise` converts into readable `Duplex`. Value `null` is ignored. 
+ * + * @since v16.8.0 + */ + static from( + src: + | Stream + | NodeBlob + | ArrayBuffer + | string + | Iterable + | AsyncIterable + | AsyncGeneratorFunction + | Promise + | Object, + ): Duplex; + _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + _writev?( + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + _final(callback: (error?: Error | null) => void): void; + write(chunk: any, encoding?: BufferEncoding, cb?: (error: Error | null | undefined) => void): boolean; + write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean; + setDefaultEncoding(encoding: BufferEncoding): this; + end(cb?: () => void): this; + end(chunk: any, cb?: () => void): this; + end(chunk: any, encoding?: BufferEncoding, cb?: () => void): this; + cork(): void; + uncork(): void; + /** + * A utility method for creating a web `ReadableStream` and `WritableStream` from a `Duplex`. + * @since v17.0.0 + * @experimental + */ + static toWeb(streamDuplex: Duplex): { + readable: streamWeb.ReadableStream; + writable: streamWeb.WritableStream; + }; + /** + * A utility method for creating a `Duplex` from a web `ReadableStream` and `WritableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb( + duplexStream: { + readable: streamWeb.ReadableStream; + writable: streamWeb.WritableStream; + }, + options?: Pick< + DuplexOptions, + "allowHalfOpen" | "decodeStrings" | "encoding" | "highWaterMark" | "objectMode" | "signal" + >, + ): Duplex; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. data + * 3. drain + * 4. end + * 5. error + * 6. finish + * 7. pause + * 8. pipe + * 9. readable + * 10. resume + * 11. 
unpipe + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: any) => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "pipe", listener: (src: Readable) => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: "unpipe", listener: (src: Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "data", chunk: any): boolean; + emit(event: "drain"): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "pause"): boolean; + emit(event: "pipe", src: Readable): boolean; + emit(event: "readable"): boolean; + emit(event: "resume"): boolean; + emit(event: "unpipe", src: Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: any) => void): this; + on(event: "drain", listener: () => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pause", listener: () => void): this; + on(event: "pipe", listener: (src: Readable) => void): this; + on(event: "readable", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: "unpipe", listener: (src: Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: any) => void): this; + once(event: "drain", listener: () => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pause", listener: () => void): this; + once(event: "pipe", listener: (src: Readable) => void): this; + once(event: "readable", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: "unpipe", listener: (src: Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: any) => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "pipe", listener: (src: Readable) => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: "unpipe", listener: (src: Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: 
"data", listener: (chunk: any) => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: Readable) => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: "unpipe", listener: (src: Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "data", listener: (chunk: any) => void): this; + removeListener(event: "drain", listener: () => void): this; + removeListener(event: "end", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "finish", listener: () => void): this; + removeListener(event: "pause", listener: () => void): this; + removeListener(event: "pipe", listener: (src: Readable) => void): this; + removeListener(event: "readable", listener: () => void): this; + removeListener(event: "resume", listener: () => void): this; + removeListener(event: "unpipe", listener: (src: Readable) => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + type TransformCallback = (error?: Error | null, data?: any) => void; + interface TransformOptions extends DuplexOptions { + construct?(this: Transform, callback: (error?: Error | null) => void): void; + read?(this: Transform, size: number): void; + write?( + this: Transform, + chunk: any, + encoding: BufferEncoding, + callback: (error?: Error | null) => void, + ): void; + writev?( + this: Transform, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + final?(this: Transform, callback: (error?: Error | null) => void): void; + destroy?(this: Transform, error: Error | null, callback: (error?: Error | null) => void): void; + transform?(this: Transform, chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; + flush?(this: Transform, callback: TransformCallback): void; + } + /** + * Transform streams are `Duplex` streams where the output is in some way + * related to the input. Like all `Duplex` streams, `Transform` streams + * implement both the `Readable` and `Writable` interfaces. + * + * Examples of `Transform` streams include: + * + * * `zlib streams` + * * `crypto streams` + * @since v0.9.4 + */ + class Transform extends Duplex { + constructor(opts?: TransformOptions); + _transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; + _flush(callback: TransformCallback): void; + } + /** + * The `stream.PassThrough` class is a trivial implementation of a `Transform` stream that simply passes the input bytes across to the output. Its purpose is + * primarily for examples and testing, but there are some use cases where`stream.PassThrough` is useful as a building block for novel sorts of streams. + */ + class PassThrough extends Transform {} + /** + * A stream to attach a signal to. + * + * Attaches an AbortSignal to a readable or writeable stream. 
This lets code + * control stream destruction using an `AbortController`. + * + * Calling `abort` on the `AbortController` corresponding to the passed`AbortSignal` will behave the same way as calling `.destroy(new AbortError())`on the stream, and `controller.error(new + * AbortError())` for webstreams. + * + * ```js + * const fs = require('node:fs'); + * + * const controller = new AbortController(); + * const read = addAbortSignal( + * controller.signal, + * fs.createReadStream(('object.json')), + * ); + * // Later, abort the operation closing the stream + * controller.abort(); + * ``` + * + * Or using an `AbortSignal` with a readable stream as an async iterable: + * + * ```js + * const controller = new AbortController(); + * setTimeout(() => controller.abort(), 10_000); // set a timeout + * const stream = addAbortSignal( + * controller.signal, + * fs.createReadStream(('object.json')), + * ); + * (async () => { + * try { + * for await (const chunk of stream) { + * await process(chunk); + * } + * } catch (e) { + * if (e.name === 'AbortError') { + * // The operation was cancelled + * } else { + * throw e; + * } + * } + * })(); + * ``` + * + * Or using an `AbortSignal` with a ReadableStream: + * + * ```js + * const controller = new AbortController(); + * const rs = new ReadableStream({ + * start(controller) { + * controller.enqueue('hello'); + * controller.enqueue('world'); + * controller.close(); + * }, + * }); + * + * addAbortSignal(controller.signal, rs); + * + * finished(rs, (err) => { + * if (err) { + * if (err.name === 'AbortError') { + * // The operation was cancelled + * } + * } + * }); + * + * const reader = rs.getReader(); + * + * reader.read().then(({ value, done }) => { + * console.log(value); // hello + * console.log(done); // false + * controller.abort(); + * }); + * ``` + * @since v15.4.0 + * @param signal A signal representing possible cancellation + * @param stream a stream to attach a signal to + */ + function addAbortSignal(signal: AbortSignal, stream: T): T; + /** + * Returns the default highWaterMark used by streams. + * Defaults to `16384` (16 KiB), or `16` for `objectMode`. + * @since v19.9.0 + * @param objectMode + */ + function getDefaultHighWaterMark(objectMode: boolean): number; + /** + * Sets the default highWaterMark used by streams. + * @since v19.9.0 + * @param objectMode + * @param value highWaterMark value + */ + function setDefaultHighWaterMark(objectMode: boolean, value: number): void; + interface FinishedOptions extends Abortable { + error?: boolean | undefined; + readable?: boolean | undefined; + writable?: boolean | undefined; + } + /** + * A readable and/or writable stream/webstream. + * + * A function to get notified when a stream is no longer readable, writable + * or has experienced an error or a premature close event. + * + * ```js + * const { finished } = require('node:stream'); + * const fs = require('node:fs'); + * + * const rs = fs.createReadStream('archive.tar'); + * + * finished(rs, (err) => { + * if (err) { + * console.error('Stream failed.', err); + * } else { + * console.log('Stream is done reading.'); + * } + * }); + * + * rs.resume(); // Drain the stream. + * ``` + * + * Especially useful in error handling scenarios where a stream is destroyed + * prematurely (like an aborted HTTP request), and will not emit `'end'`or `'finish'`. + * + * The `finished` API provides `promise version`. + * + * `stream.finished()` leaves dangling event listeners (in particular`'error'`, `'end'`, `'finish'` and `'close'`) after `callback` has been + * invoked. 
The reason for this is so that unexpected `'error'` events (due to + * incorrect stream implementations) do not cause unexpected crashes. + * If this is unwanted behavior then the returned cleanup function needs to be + * invoked in the callback: + * + * ```js + * const cleanup = finished(rs, (err) => { + * cleanup(); + * // ... + * }); + * ``` + * @since v10.0.0 + * @param stream A readable and/or writable stream. + * @param callback A callback function that takes an optional error argument. + * @return A cleanup function which removes all registered listeners. + */ + function finished( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + options: FinishedOptions, + callback: (err?: NodeJS.ErrnoException | null) => void, + ): () => void; + function finished( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + callback: (err?: NodeJS.ErrnoException | null) => void, + ): () => void; + namespace finished { + function __promisify__( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + options?: FinishedOptions, + ): Promise; + } + type PipelineSourceFunction = () => Iterable | AsyncIterable; + type PipelineSource = Iterable | AsyncIterable | NodeJS.ReadableStream | PipelineSourceFunction; + type PipelineTransform, U> = + | NodeJS.ReadWriteStream + | (( + source: S extends (...args: any[]) => Iterable | AsyncIterable ? AsyncIterable + : S, + ) => AsyncIterable); + type PipelineTransformSource = PipelineSource | PipelineTransform; + type PipelineDestinationIterableFunction = (source: AsyncIterable) => AsyncIterable; + type PipelineDestinationPromiseFunction = (source: AsyncIterable) => Promise
<P>;
+ type PipelineDestination<S extends PipelineTransformSource<any>, P> = S extends
+ PipelineTransformSource<infer ST> ?
+ | NodeJS.WritableStream
+ | PipelineDestinationIterableFunction<ST>
+ | PipelineDestinationPromiseFunction<ST, P>
+ : never;
+ type PipelineCallback<S extends PipelineDestination<any, any>> = S extends
+ PipelineDestinationPromiseFunction<any, infer P> ? (err: NodeJS.ErrnoException | null, value: P) => void
+ : (err: NodeJS.ErrnoException | null) => void;
+ type PipelinePromise<S extends PipelineDestination<any, any>> = S extends
+ PipelineDestinationPromiseFunction<any, infer P> ? Promise<P>
: Promise; + interface PipelineOptions { + signal?: AbortSignal | undefined; + end?: boolean | undefined; + } + /** + * A module method to pipe between streams and generators forwarding errors and + * properly cleaning up and provide a callback when the pipeline is complete. + * + * ```js + * const { pipeline } = require('node:stream'); + * const fs = require('node:fs'); + * const zlib = require('node:zlib'); + * + * // Use the pipeline API to easily pipe a series of streams + * // together and get notified when the pipeline is fully done. + * + * // A pipeline to gzip a potentially huge tar file efficiently: + * + * pipeline( + * fs.createReadStream('archive.tar'), + * zlib.createGzip(), + * fs.createWriteStream('archive.tar.gz'), + * (err) => { + * if (err) { + * console.error('Pipeline failed.', err); + * } else { + * console.log('Pipeline succeeded.'); + * } + * }, + * ); + * ``` + * + * The `pipeline` API provides a `promise version`. + * + * `stream.pipeline()` will call `stream.destroy(err)` on all streams except: + * + * * `Readable` streams which have emitted `'end'` or `'close'`. + * * `Writable` streams which have emitted `'finish'` or `'close'`. + * + * `stream.pipeline()` leaves dangling event listeners on the streams + * after the `callback` has been invoked. In the case of reuse of streams after + * failure, this can cause event listener leaks and swallowed errors. If the last + * stream is readable, dangling event listeners will be removed so that the last + * stream can be consumed later. + * + * `stream.pipeline()` closes all the streams when an error is raised. + * The `IncomingRequest` usage with `pipeline` could lead to an unexpected behavior + * once it would destroy the socket without sending the expected response. + * See the example below: + * + * ```js + * const fs = require('node:fs'); + * const http = require('node:http'); + * const { pipeline } = require('node:stream'); + * + * const server = http.createServer((req, res) => { + * const fileStream = fs.createReadStream('./fileNotExist.txt'); + * pipeline(fileStream, res, (err) => { + * if (err) { + * console.log(err); // No such file + * // this message can't be sent once `pipeline` already destroyed the socket + * return res.end('error!!!'); + * } + * }); + * }); + * ``` + * @since v10.0.0 + * @param callback Called when the pipeline is fully done. + */ + function pipeline, B extends PipelineDestination>( + source: A, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? 
B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + transform4: T4, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline( + streams: ReadonlyArray, + callback?: (err: NodeJS.ErrnoException | null) => void, + ): NodeJS.WritableStream; + function pipeline( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array< + NodeJS.ReadWriteStream | NodeJS.WritableStream | ((err: NodeJS.ErrnoException | null) => void) + > + ): NodeJS.WritableStream; + namespace pipeline { + function __promisify__, B extends PipelineDestination>( + source: A, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + transform4: T4, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__( + streams: ReadonlyArray, + options?: PipelineOptions, + ): Promise; + function __promisify__( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array + ): Promise; + } + interface Pipe { + close(): void; + hasRef(): boolean; + ref(): void; + unref(): void; + } + /** + * Returns whether the stream has encountered an error. + * @since v17.3.0, v16.14.0 + * @experimental + */ + function isErrored(stream: Readable | Writable | NodeJS.ReadableStream | NodeJS.WritableStream): boolean; + /** + * Returns whether the stream is readable. 
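+ *
+ * For example (an illustrative sketch only):
+ *
+ * ```js
+ * const { Readable, isReadable } = require('node:stream');
+ *
+ * const stream = Readable.from(['a', 'b']);
+ * console.log(isReadable(stream)); // true
+ * stream.destroy();
+ * console.log(isReadable(stream)); // false once the stream is destroyed
+ * ```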
+ * @since v17.4.0, v16.14.0 + * @experimental + */ + function isReadable(stream: Readable | NodeJS.ReadableStream): boolean; + const promises: typeof streamPromises; + const consumers: typeof streamConsumers; + } + export = internal; +} +declare module "node:stream" { + import stream = require("stream"); + export = stream; +} diff --git a/dist/node_modules/@types/node/stream/consumers.d.ts b/dist/node_modules/@types/node/stream/consumers.d.ts new file mode 100644 index 0000000..5ad9cba --- /dev/null +++ b/dist/node_modules/@types/node/stream/consumers.d.ts @@ -0,0 +1,12 @@ +declare module "stream/consumers" { + import { Blob as NodeBlob } from "node:buffer"; + import { Readable } from "node:stream"; + function buffer(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function text(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function arrayBuffer(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function blob(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function json(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; +} +declare module "node:stream/consumers" { + export * from "stream/consumers"; +} diff --git a/dist/node_modules/@types/node/stream/promises.d.ts b/dist/node_modules/@types/node/stream/promises.d.ts new file mode 100644 index 0000000..6eac5b7 --- /dev/null +++ b/dist/node_modules/@types/node/stream/promises.d.ts @@ -0,0 +1,83 @@ +declare module "stream/promises" { + import { + FinishedOptions, + PipelineDestination, + PipelineOptions, + PipelinePromise, + PipelineSource, + PipelineTransform, + } from "node:stream"; + function finished( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + options?: FinishedOptions, + ): Promise; + function pipeline, B extends PipelineDestination>( + source: A, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + transform4: T4, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline( + streams: ReadonlyArray, + options?: PipelineOptions, + ): Promise; + function pipeline( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array + ): Promise; +} +declare module "node:stream/promises" { + export * from "stream/promises"; +} diff --git a/dist/node_modules/@types/node/stream/web.d.ts b/dist/node_modules/@types/node/stream/web.d.ts new file 
mode 100644 index 0000000..361594d --- /dev/null +++ b/dist/node_modules/@types/node/stream/web.d.ts @@ -0,0 +1,366 @@ +declare module "stream/web" { + // stub module, pending copy&paste from .d.ts or manual impl + // copy from lib.dom.d.ts + interface ReadableWritablePair { + readable: ReadableStream; + /** + * Provides a convenient, chainable way of piping this readable stream + * through a transform stream (or any other { writable, readable } + * pair). It simply pipes the stream into the writable side of the + * supplied pair, and returns the readable side for further use. + * + * Piping a stream will lock it for the duration of the pipe, preventing + * any other consumer from acquiring a reader. + */ + writable: WritableStream; + } + interface StreamPipeOptions { + preventAbort?: boolean; + preventCancel?: boolean; + /** + * Pipes this readable stream to a given writable stream destination. + * The way in which the piping process behaves under various error + * conditions can be customized with a number of passed options. It + * returns a promise that fulfills when the piping process completes + * successfully, or rejects if any errors were encountered. + * + * Piping a stream will lock it for the duration of the pipe, preventing + * any other consumer from acquiring a reader. + * + * Errors and closures of the source and destination streams propagate + * as follows: + * + * An error in this source readable stream will abort destination, + * unless preventAbort is truthy. The returned promise will be rejected + * with the source's error, or with any error that occurs during + * aborting the destination. + * + * An error in destination will cancel this source readable stream, + * unless preventCancel is truthy. The returned promise will be rejected + * with the destination's error, or with any error that occurs during + * canceling the source. + * + * When this source readable stream closes, destination will be closed, + * unless preventClose is truthy. The returned promise will be fulfilled + * once this process completes, unless an error is encountered while + * closing the destination, in which case it will be rejected with that + * error. + * + * If destination starts out closed or closing, this source readable + * stream will be canceled, unless preventCancel is true. The returned + * promise will be rejected with an error indicating piping to a closed + * stream failed, or with any error that occurs during canceling the + * source. + * + * The signal option can be set to an AbortSignal to allow aborting an + * ongoing pipe operation via the corresponding AbortController. In this + * case, this source readable stream will be canceled, and destination + * aborted, unless the respective options preventCancel or preventAbort + * are set. 
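+ *
+ * A small illustrative sketch of an abortable pipe (not from the upstream docs):
+ *
+ * ```js
+ * const { ReadableStream, WritableStream } = require('node:stream/web');
+ *
+ * const controller = new AbortController();
+ * const rs = new ReadableStream({
+ *   start(c) {
+ *     c.enqueue('hello');
+ *     c.close();
+ *   },
+ * });
+ * const ws = new WritableStream({
+ *   write(chunk) {
+ *     console.log(chunk);
+ *   },
+ * });
+ *
+ * // Aborting the controller cancels the source and aborts the destination.
+ * rs.pipeTo(ws, { signal: controller.signal }).catch((err) => console.error(err));
+ * ```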
+ */ + preventClose?: boolean; + signal?: AbortSignal; + } + interface ReadableStreamGenericReader { + readonly closed: Promise; + cancel(reason?: any): Promise; + } + interface ReadableStreamDefaultReadValueResult { + done: false; + value: T; + } + interface ReadableStreamDefaultReadDoneResult { + done: true; + value?: undefined; + } + type ReadableStreamController = ReadableStreamDefaultController; + type ReadableStreamDefaultReadResult = + | ReadableStreamDefaultReadValueResult + | ReadableStreamDefaultReadDoneResult; + interface ReadableStreamReadValueResult { + done: false; + value: T; + } + interface ReadableStreamReadDoneResult { + done: true; + value?: T; + } + type ReadableStreamReadResult = ReadableStreamReadValueResult | ReadableStreamReadDoneResult; + interface ReadableByteStreamControllerCallback { + (controller: ReadableByteStreamController): void | PromiseLike; + } + interface UnderlyingSinkAbortCallback { + (reason?: any): void | PromiseLike; + } + interface UnderlyingSinkCloseCallback { + (): void | PromiseLike; + } + interface UnderlyingSinkStartCallback { + (controller: WritableStreamDefaultController): any; + } + interface UnderlyingSinkWriteCallback { + (chunk: W, controller: WritableStreamDefaultController): void | PromiseLike; + } + interface UnderlyingSourceCancelCallback { + (reason?: any): void | PromiseLike; + } + interface UnderlyingSourcePullCallback { + (controller: ReadableStreamController): void | PromiseLike; + } + interface UnderlyingSourceStartCallback { + (controller: ReadableStreamController): any; + } + interface TransformerFlushCallback { + (controller: TransformStreamDefaultController): void | PromiseLike; + } + interface TransformerStartCallback { + (controller: TransformStreamDefaultController): any; + } + interface TransformerTransformCallback { + (chunk: I, controller: TransformStreamDefaultController): void | PromiseLike; + } + interface UnderlyingByteSource { + autoAllocateChunkSize?: number; + cancel?: ReadableStreamErrorCallback; + pull?: ReadableByteStreamControllerCallback; + start?: ReadableByteStreamControllerCallback; + type: "bytes"; + } + interface UnderlyingSource { + cancel?: UnderlyingSourceCancelCallback; + pull?: UnderlyingSourcePullCallback; + start?: UnderlyingSourceStartCallback; + type?: undefined; + } + interface UnderlyingSink { + abort?: UnderlyingSinkAbortCallback; + close?: UnderlyingSinkCloseCallback; + start?: UnderlyingSinkStartCallback; + type?: undefined; + write?: UnderlyingSinkWriteCallback; + } + interface ReadableStreamErrorCallback { + (reason: any): void | PromiseLike; + } + /** This Streams API interface represents a readable stream of byte data. 
*/ + interface ReadableStream { + readonly locked: boolean; + cancel(reason?: any): Promise; + getReader(): ReadableStreamDefaultReader; + getReader(options: { mode: "byob" }): ReadableStreamBYOBReader; + pipeThrough(transform: ReadableWritablePair, options?: StreamPipeOptions): ReadableStream; + pipeTo(destination: WritableStream, options?: StreamPipeOptions): Promise; + tee(): [ReadableStream, ReadableStream]; + values(options?: { preventCancel?: boolean }): AsyncIterableIterator; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + const ReadableStream: { + prototype: ReadableStream; + new(underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy): ReadableStream; + new(underlyingSource?: UnderlyingSource, strategy?: QueuingStrategy): ReadableStream; + }; + interface ReadableStreamDefaultReader extends ReadableStreamGenericReader { + read(): Promise>; + releaseLock(): void; + } + interface ReadableStreamBYOBReader extends ReadableStreamGenericReader { + read(view: T): Promise>; + releaseLock(): void; + } + const ReadableStreamDefaultReader: { + prototype: ReadableStreamDefaultReader; + new(stream: ReadableStream): ReadableStreamDefaultReader; + }; + const ReadableStreamBYOBReader: any; + const ReadableStreamBYOBRequest: any; + interface ReadableByteStreamController { + readonly byobRequest: undefined; + readonly desiredSize: number | null; + close(): void; + enqueue(chunk: ArrayBufferView): void; + error(error?: any): void; + } + const ReadableByteStreamController: { + prototype: ReadableByteStreamController; + new(): ReadableByteStreamController; + }; + interface ReadableStreamDefaultController { + readonly desiredSize: number | null; + close(): void; + enqueue(chunk?: R): void; + error(e?: any): void; + } + const ReadableStreamDefaultController: { + prototype: ReadableStreamDefaultController; + new(): ReadableStreamDefaultController; + }; + interface Transformer { + flush?: TransformerFlushCallback; + readableType?: undefined; + start?: TransformerStartCallback; + transform?: TransformerTransformCallback; + writableType?: undefined; + } + interface TransformStream { + readonly readable: ReadableStream; + readonly writable: WritableStream; + } + const TransformStream: { + prototype: TransformStream; + new( + transformer?: Transformer, + writableStrategy?: QueuingStrategy, + readableStrategy?: QueuingStrategy, + ): TransformStream; + }; + interface TransformStreamDefaultController { + readonly desiredSize: number | null; + enqueue(chunk?: O): void; + error(reason?: any): void; + terminate(): void; + } + const TransformStreamDefaultController: { + prototype: TransformStreamDefaultController; + new(): TransformStreamDefaultController; + }; + /** + * This Streams API interface provides a standard abstraction for writing + * streaming data to a destination, known as a sink. This object comes with + * built-in back pressure and queuing. + */ + interface WritableStream { + readonly locked: boolean; + abort(reason?: any): Promise; + close(): Promise; + getWriter(): WritableStreamDefaultWriter; + } + const WritableStream: { + prototype: WritableStream; + new(underlyingSink?: UnderlyingSink, strategy?: QueuingStrategy): WritableStream; + }; + /** + * This Streams API interface is the object returned by + * WritableStream.getWriter() and once created locks the < writer to the + * WritableStream ensuring that no other streams can write to the underlying + * sink. 
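+ *
+ * For example (an illustrative sketch only):
+ *
+ * ```js
+ * const { WritableStream } = require('node:stream/web');
+ *
+ * const ws = new WritableStream({
+ *   write(chunk) {
+ *     console.log(chunk);
+ *   },
+ * });
+ *
+ * // getWriter() locks the stream; releaseLock() would free it for another writer.
+ * const writer = ws.getWriter();
+ * writer.write('hello').then(() => writer.close());
+ * ```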
+ */ + interface WritableStreamDefaultWriter { + readonly closed: Promise; + readonly desiredSize: number | null; + readonly ready: Promise; + abort(reason?: any): Promise; + close(): Promise; + releaseLock(): void; + write(chunk?: W): Promise; + } + const WritableStreamDefaultWriter: { + prototype: WritableStreamDefaultWriter; + new(stream: WritableStream): WritableStreamDefaultWriter; + }; + /** + * This Streams API interface represents a controller allowing control of a + * WritableStream's state. When constructing a WritableStream, the + * underlying sink is given a corresponding WritableStreamDefaultController + * instance to manipulate. + */ + interface WritableStreamDefaultController { + error(e?: any): void; + } + const WritableStreamDefaultController: { + prototype: WritableStreamDefaultController; + new(): WritableStreamDefaultController; + }; + interface QueuingStrategy { + highWaterMark?: number; + size?: QueuingStrategySize; + } + interface QueuingStrategySize { + (chunk?: T): number; + } + interface QueuingStrategyInit { + /** + * Creates a new ByteLengthQueuingStrategy with the provided high water + * mark. + * + * Note that the provided high water mark will not be validated ahead of + * time. Instead, if it is negative, NaN, or not a number, the resulting + * ByteLengthQueuingStrategy will cause the corresponding stream + * constructor to throw. + */ + highWaterMark: number; + } + /** + * This Streams API interface provides a built-in byte length queuing + * strategy that can be used when constructing streams. + */ + interface ByteLengthQueuingStrategy extends QueuingStrategy { + readonly highWaterMark: number; + readonly size: QueuingStrategySize; + } + const ByteLengthQueuingStrategy: { + prototype: ByteLengthQueuingStrategy; + new(init: QueuingStrategyInit): ByteLengthQueuingStrategy; + }; + /** + * This Streams API interface provides a built-in byte length queuing + * strategy that can be used when constructing streams. + */ + interface CountQueuingStrategy extends QueuingStrategy { + readonly highWaterMark: number; + readonly size: QueuingStrategySize; + } + const CountQueuingStrategy: { + prototype: CountQueuingStrategy; + new(init: QueuingStrategyInit): CountQueuingStrategy; + }; + interface TextEncoderStream { + /** Returns "utf-8". */ + readonly encoding: "utf-8"; + readonly readable: ReadableStream; + readonly writable: WritableStream; + readonly [Symbol.toStringTag]: string; + } + const TextEncoderStream: { + prototype: TextEncoderStream; + new(): TextEncoderStream; + }; + interface TextDecoderOptions { + fatal?: boolean; + ignoreBOM?: boolean; + } + type BufferSource = ArrayBufferView | ArrayBuffer; + interface TextDecoderStream { + /** Returns encoding's name, lower cased. */ + readonly encoding: string; + /** Returns `true` if error mode is "fatal", and `false` otherwise. */ + readonly fatal: boolean; + /** Returns `true` if ignore BOM flag is set, and `false` otherwise. 
*/ + readonly ignoreBOM: boolean; + readonly readable: ReadableStream; + readonly writable: WritableStream; + readonly [Symbol.toStringTag]: string; + } + const TextDecoderStream: { + prototype: TextDecoderStream; + new(encoding?: string, options?: TextDecoderOptions): TextDecoderStream; + }; + interface CompressionStream { + readonly readable: ReadableStream; + readonly writable: WritableStream; + } + const CompressionStream: { + prototype: CompressionStream; + new(format: string): CompressionStream; + }; + interface DecompressionStream { + readonly readable: ReadableStream; + readonly writable: WritableStream; + } + const DecompressionStream: { + prototype: DecompressionStream; + new(format: string): DecompressionStream; + }; +} +declare module "node:stream/web" { + export * from "stream/web"; +} diff --git a/dist/node_modules/@types/node/string_decoder.d.ts b/dist/node_modules/@types/node/string_decoder.d.ts new file mode 100644 index 0000000..cd76bf8 --- /dev/null +++ b/dist/node_modules/@types/node/string_decoder.d.ts @@ -0,0 +1,67 @@ +/** + * The `node:string_decoder` module provides an API for decoding `Buffer` objects + * into strings in a manner that preserves encoded multi-byte UTF-8 and UTF-16 + * characters. It can be accessed using: + * + * ```js + * const { StringDecoder } = require('node:string_decoder'); + * ``` + * + * The following example shows the basic use of the `StringDecoder` class. + * + * ```js + * const { StringDecoder } = require('node:string_decoder'); + * const decoder = new StringDecoder('utf8'); + * + * const cent = Buffer.from([0xC2, 0xA2]); + * console.log(decoder.write(cent)); // Prints: ¢ + * + * const euro = Buffer.from([0xE2, 0x82, 0xAC]); + * console.log(decoder.write(euro)); // Prints: € + * ``` + * + * When a `Buffer` instance is written to the `StringDecoder` instance, an + * internal buffer is used to ensure that the decoded string does not contain + * any incomplete multibyte characters. These are held in the buffer until the + * next call to `stringDecoder.write()` or until `stringDecoder.end()` is called. + * + * In the following example, the three UTF-8 encoded bytes of the European Euro + * symbol (`€`) are written over three separate operations: + * + * ```js + * const { StringDecoder } = require('node:string_decoder'); + * const decoder = new StringDecoder('utf8'); + * + * decoder.write(Buffer.from([0xE2])); + * decoder.write(Buffer.from([0x82])); + * console.log(decoder.end(Buffer.from([0xAC]))); // Prints: € + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/string_decoder.js) + */ +declare module "string_decoder" { + class StringDecoder { + constructor(encoding?: BufferEncoding); + /** + * Returns a decoded string, ensuring that any incomplete multibyte characters at + * the end of the `Buffer`, or `TypedArray`, or `DataView` are omitted from the + * returned string and stored in an internal buffer for the next call to`stringDecoder.write()` or `stringDecoder.end()`. + * @since v0.1.99 + * @param buffer The bytes to decode. + */ + write(buffer: string | Buffer | NodeJS.ArrayBufferView): string; + /** + * Returns any remaining input stored in the internal buffer as a string. Bytes + * representing incomplete UTF-8 and UTF-16 characters will be replaced with + * substitution characters appropriate for the character encoding. + * + * If the `buffer` argument is provided, one final call to `stringDecoder.write()`is performed before returning the remaining input. 
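+ *
+ * For example (an illustrative sketch only):
+ *
+ * ```js
+ * const { StringDecoder } = require('node:string_decoder');
+ * const decoder = new StringDecoder('utf8');
+ *
+ * // Only the first byte of the two-byte ¢ symbol is written, so end()
+ * // flushes it as the substitution character rather than holding it.
+ * decoder.write(Buffer.from([0xC2]));
+ * console.log(decoder.end()); // Prints: �
+ * ```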
+ * After `end()` is called, the `stringDecoder` object can be reused for new input. + * @since v0.9.3 + * @param buffer The bytes to decode. + */ + end(buffer?: string | Buffer | NodeJS.ArrayBufferView): string; + } +} +declare module "node:string_decoder" { + export * from "string_decoder"; +} diff --git a/dist/node_modules/@types/node/test.d.ts b/dist/node_modules/@types/node/test.d.ts new file mode 100644 index 0000000..149c9e0 --- /dev/null +++ b/dist/node_modules/@types/node/test.d.ts @@ -0,0 +1,1465 @@ +/** + * The `node:test` module facilitates the creation of JavaScript tests. + * To access it: + * + * ```js + * import test from 'node:test'; + * ``` + * + * This module is only available under the `node:` scheme. The following will not + * work: + * + * ```js + * import test from 'test'; + * ``` + * + * Tests created via the `test` module consist of a single function that is + * processed in one of three ways: + * + * 1. A synchronous function that is considered failing if it throws an exception, + * and is considered passing otherwise. + * 2. A function that returns a `Promise` that is considered failing if the`Promise` rejects, and is considered passing if the `Promise` fulfills. + * 3. A function that receives a callback function. If the callback receives any + * truthy value as its first argument, the test is considered failing. If a + * falsy value is passed as the first argument to the callback, the test is + * considered passing. If the test function receives a callback function and + * also returns a `Promise`, the test will fail. + * + * The following example illustrates how tests are written using the`test` module. + * + * ```js + * test('synchronous passing test', (t) => { + * // This test passes because it does not throw an exception. + * assert.strictEqual(1, 1); + * }); + * + * test('synchronous failing test', (t) => { + * // This test fails because it throws an exception. + * assert.strictEqual(1, 2); + * }); + * + * test('asynchronous passing test', async (t) => { + * // This test passes because the Promise returned by the async + * // function is settled and not rejected. + * assert.strictEqual(1, 1); + * }); + * + * test('asynchronous failing test', async (t) => { + * // This test fails because the Promise returned by the async + * // function is rejected. + * assert.strictEqual(1, 2); + * }); + * + * test('failing test using Promises', (t) => { + * // Promises can be used directly as well. + * return new Promise((resolve, reject) => { + * setImmediate(() => { + * reject(new Error('this will cause the test to fail')); + * }); + * }); + * }); + * + * test('callback passing test', (t, done) => { + * // done() is the callback function. When the setImmediate() runs, it invokes + * // done() with no arguments. + * setImmediate(done); + * }); + * + * test('callback failing test', (t, done) => { + * // When the setImmediate() runs, done() is invoked with an Error object and + * // the test fails. + * setImmediate(() => { + * done(new Error('callback failure')); + * }); + * }); + * ``` + * + * If any tests fail, the process exit code is set to `1`. + * @since v18.0.0, v16.17.0 + * @see [source](https://github.com/nodejs/node/blob/v20.4.0/lib/test.js) + */ +declare module "node:test" { + import { Readable } from "node:stream"; + import { AsyncResource } from "node:async_hooks"; + /** + * **Note:**`shard` is used to horizontally parallelize test running across + * machines or processes, ideal for large-scale executions across varied + * environments. 
It's incompatible with `watch` mode, tailored for rapid + * code iteration by automatically rerunning tests on file changes. + * + * ```js + * import { tap } from 'node:test/reporters'; + * import { run } from 'node:test'; + * import process from 'node:process'; + * import path from 'node:path'; + * + * run({ files: [path.resolve('./tests/test.js')] }) + * .compose(tap) + * .pipe(process.stdout); + * ``` + * @since v18.9.0, v16.19.0 + * @param options Configuration options for running tests. The following properties are supported: + */ + function run(options?: RunOptions): TestsStream; + /** + * The `test()` function is the value imported from the `test` module. Each + * invocation of this function results in reporting the test to the `TestsStream`. + * + * The `TestContext` object passed to the `fn` argument can be used to perform + * actions related to the current test. Examples include skipping the test, adding + * additional diagnostic information, or creating subtests. + * + * `test()` returns a `Promise` that fulfills once the test completes. + * if `test()` is called within a `describe()` block, it fulfills immediately. + * The return value can usually be discarded for top level tests. + * However, the return value from subtests should be used to prevent the parent + * test from finishing first and cancelling the subtest + * as shown in the following example. + * + * ```js + * test('top level test', async (t) => { + * // The setTimeout() in the following subtest would cause it to outlive its + * // parent test if 'await' is removed on the next line. Once the parent test + * // completes, it will cancel any outstanding subtests. + * await t.test('longer running subtest', async (t) => { + * return new Promise((resolve, reject) => { + * setTimeout(resolve, 1000); + * }); + * }); + * }); + * ``` + * + * The `timeout` option can be used to fail the test if it takes longer than`timeout` milliseconds to complete. However, it is not a reliable mechanism for + * canceling tests because a running test might block the application thread and + * thus prevent the scheduled cancellation. + * @since v18.0.0, v16.17.0 + * @param [name='The name'] The name of the test, which is displayed when reporting test results. + * @param options Configuration options for the test. The following properties are supported: + * @param [fn='A no-op function'] The function under test. The first argument to this function is a {@link TestContext} object. If the test uses callbacks, the callback function is passed as the + * second argument. + * @return Fulfilled with `undefined` once the test completes, or immediately if the test runs within {@link describe}. + */ + function test(name?: string, fn?: TestFn): Promise; + function test(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function test(options?: TestOptions, fn?: TestFn): Promise; + function test(fn?: TestFn): Promise; + namespace test { + export { after, afterEach, before, beforeEach, describe, it, mock, only, run, skip, test, todo }; + } + /** + * The `describe()` function imported from the `node:test` module. Each + * invocation of this function results in the creation of a Subtest. + * After invocation of top level `describe` functions, + * all top level tests and suites will execute. + * @param [name='The name'] The name of the suite, which is displayed when reporting test results. + * @param options Configuration options for the suite. supports the same options as `test([name][, options][, fn])`. 
+ * @param [fn='A no-op function'] The function under suite declaring all subtests and subsuites. The first argument to this function is a {@link SuiteContext} object. + * @return Immediately fulfilled with `undefined`. + */ + function describe(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function describe(name?: string, fn?: SuiteFn): Promise; + function describe(options?: TestOptions, fn?: SuiteFn): Promise; + function describe(fn?: SuiteFn): Promise; + namespace describe { + /** + * Shorthand for skipping a suite, same as `describe([name], { skip: true }[, fn])`. + */ + function skip(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function skip(name?: string, fn?: SuiteFn): Promise; + function skip(options?: TestOptions, fn?: SuiteFn): Promise; + function skip(fn?: SuiteFn): Promise; + /** + * Shorthand for marking a suite as `TODO`, same as `describe([name], { todo: true }[, fn])`. + */ + function todo(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function todo(name?: string, fn?: SuiteFn): Promise; + function todo(options?: TestOptions, fn?: SuiteFn): Promise; + function todo(fn?: SuiteFn): Promise; + /** + * Shorthand for marking a suite as `only`, same as `describe([name], { only: true }[, fn])`. + * @since v18.15.0 + */ + function only(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function only(name?: string, fn?: SuiteFn): Promise; + function only(options?: TestOptions, fn?: SuiteFn): Promise; + function only(fn?: SuiteFn): Promise; + } + /** + * Shorthand for `test()`. + * + * The `it()` function is imported from the `node:test` module. + * @since v18.6.0, v16.17.0 + */ + function it(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function it(name?: string, fn?: TestFn): Promise; + function it(options?: TestOptions, fn?: TestFn): Promise; + function it(fn?: TestFn): Promise; + namespace it { + /** + * Shorthand for skipping a test, same as `it([name], { skip: true }[, fn])`. + */ + function skip(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function skip(name?: string, fn?: TestFn): Promise; + function skip(options?: TestOptions, fn?: TestFn): Promise; + function skip(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `TODO`, same as `it([name], { todo: true }[, fn])`. + */ + function todo(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function todo(name?: string, fn?: TestFn): Promise; + function todo(options?: TestOptions, fn?: TestFn): Promise; + function todo(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `only`, same as `it([name], { only: true }[, fn])`. + * @since v18.15.0 + */ + function only(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function only(name?: string, fn?: TestFn): Promise; + function only(options?: TestOptions, fn?: TestFn): Promise; + function only(fn?: TestFn): Promise; + } + /** + * Shorthand for skipping a test, same as `test([name], { skip: true }[, fn])`. + * @since v20.2.0 + */ + function skip(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function skip(name?: string, fn?: TestFn): Promise; + function skip(options?: TestOptions, fn?: TestFn): Promise; + function skip(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `TODO`, same as `test([name], { todo: true }[, fn])`. 
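+ *
+ * For example (an illustrative sketch only):
+ *
+ * ```js
+ * import test from 'node:test';
+ *
+ * // Reported as TODO in the test results; the implementation can come later.
+ * test.todo('parse nested configuration files');
+ * ```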
+ * @since v20.2.0 + */ + function todo(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function todo(name?: string, fn?: TestFn): Promise; + function todo(options?: TestOptions, fn?: TestFn): Promise; + function todo(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `only`, same as `test([name], { only: true }[, fn])`. + * @since v20.2.0 + */ + function only(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function only(name?: string, fn?: TestFn): Promise; + function only(options?: TestOptions, fn?: TestFn): Promise; + function only(fn?: TestFn): Promise; + /** + * The type of a function under test. The first argument to this function is a + * {@link TestContext} object. If the test uses callbacks, the callback function is passed as + * the second argument. + */ + type TestFn = (t: TestContext, done: (result?: any) => void) => void | Promise; + /** + * The type of a function under Suite. + */ + type SuiteFn = (s: SuiteContext) => void | Promise; + interface TestShard { + /** + * A positive integer between 1 and `` that specifies the index of the shard to run. + */ + index: number; + /** + * A positive integer that specifies the total number of shards to split the test files to. + */ + total: number; + } + interface RunOptions { + /** + * If a number is provided, then that many files would run in parallel. + * If truthy, it would run (number of cpu cores - 1) files in parallel. + * If falsy, it would only run one file at a time. + * If unspecified, subtests inherit this value from their parent. + * @default true + */ + concurrency?: number | boolean | undefined; + /** + * An array containing the list of files to run. + * If unspecified, the test runner execution model will be used. + */ + files?: readonly string[] | undefined; + /** + * Allows aborting an in-progress test execution. + * @default undefined + */ + signal?: AbortSignal | undefined; + /** + * A number of milliseconds the test will fail after. + * If unspecified, subtests inherit this value from their parent. + * @default Infinity + */ + timeout?: number | undefined; + /** + * Sets inspector port of test child process. + * If a nullish value is provided, each process gets its own port, + * incremented from the primary's `process.debugPort`. + */ + inspectPort?: number | (() => number) | undefined; + /** + * That can be used to only run tests whose name matches the provided pattern. + * Test name patterns are interpreted as JavaScript regular expressions. + * For each test that is executed, any corresponding test hooks, such as `beforeEach()`, are also run. + */ + testNamePatterns?: string | RegExp | string[] | RegExp[]; + /** + * If truthy, the test context will only run tests that have the `only` option set + */ + only?: boolean; + /** + * A function that accepts the TestsStream instance and can be used to setup listeners before any tests are run. + */ + setup?: (root: Test) => void | Promise; + /** + * Whether to run in watch mode or not. + * @default false + */ + watch?: boolean | undefined; + /** + * Running tests in a specific shard. 
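+ *
+ * A minimal illustrative sketch (the file paths are assumed):
+ *
+ * ```js
+ * import { run } from 'node:test';
+ * import { tap } from 'node:test/reporters';
+ * import process from 'node:process';
+ *
+ * // Run only the second of four shards of the listed test files.
+ * run({
+ *   files: ['./tests/a.test.js', './tests/b.test.js'],
+ *   shard: { index: 2, total: 4 },
+ * })
+ *   .compose(tap)
+ *   .pipe(process.stdout);
+ * ```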
+ * @default undefined + */ + shard?: TestShard | undefined; + } + class Test extends AsyncResource { + concurrency: number; + nesting: number; + only: boolean; + reporter: TestsStream; + runOnlySubtests: boolean; + testNumber: number; + timeout: number | null; + } + /** + * A successful call to `run()` method will return a new `TestsStream` object, streaming a series of events representing the execution of the tests.`TestsStream` will emit events, in the + * order of the tests definition + * @since v18.9.0, v16.19.0 + */ + class TestsStream extends Readable implements NodeJS.ReadableStream { + addListener(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + addListener(event: "test:fail", listener: (data: TestFail) => void): this; + addListener(event: "test:pass", listener: (data: TestPass) => void): this; + addListener(event: "test:plan", listener: (data: TestPlan) => void): this; + addListener(event: "test:start", listener: (data: TestStart) => void): this; + addListener(event: "test:stderr", listener: (data: TestStderr) => void): this; + addListener(event: "test:stdout", listener: (data: TestStdout) => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + emit(event: "test:diagnostic", data: DiagnosticData): boolean; + emit(event: "test:fail", data: TestFail): boolean; + emit(event: "test:pass", data: TestPass): boolean; + emit(event: "test:plan", data: TestPlan): boolean; + emit(event: "test:start", data: TestStart): boolean; + emit(event: "test:stderr", data: TestStderr): boolean; + emit(event: "test:stdout", data: TestStdout): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + on(event: "test:fail", listener: (data: TestFail) => void): this; + on(event: "test:pass", listener: (data: TestPass) => void): this; + on(event: "test:plan", listener: (data: TestPlan) => void): this; + on(event: "test:start", listener: (data: TestStart) => void): this; + on(event: "test:stderr", listener: (data: TestStderr) => void): this; + on(event: "test:stdout", listener: (data: TestStdout) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + once(event: "test:fail", listener: (data: TestFail) => void): this; + once(event: "test:pass", listener: (data: TestPass) => void): this; + once(event: "test:plan", listener: (data: TestPlan) => void): this; + once(event: "test:start", listener: (data: TestStart) => void): this; + once(event: "test:stderr", listener: (data: TestStderr) => void): this; + once(event: "test:stdout", listener: (data: TestStdout) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + prependListener(event: "test:fail", listener: (data: TestFail) => void): this; + prependListener(event: "test:pass", listener: (data: TestPass) => void): this; + prependListener(event: "test:plan", listener: (data: TestPlan) => void): this; + prependListener(event: "test:start", listener: (data: TestStart) => void): this; + prependListener(event: "test:stderr", listener: (data: TestStderr) => void): this; + prependListener(event: "test:stdout", listener: (data: TestStdout) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "test:diagnostic", listener: (data: 
DiagnosticData) => void): this; + prependOnceListener(event: "test:fail", listener: (data: TestFail) => void): this; + prependOnceListener(event: "test:pass", listener: (data: TestPass) => void): this; + prependOnceListener(event: "test:plan", listener: (data: TestPlan) => void): this; + prependOnceListener(event: "test:start", listener: (data: TestStart) => void): this; + prependOnceListener(event: "test:stderr", listener: (data: TestStderr) => void): this; + prependOnceListener(event: "test:stdout", listener: (data: TestStdout) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + } + /** + * An instance of `TestContext` is passed to each test function in order to + * interact with the test runner. However, the `TestContext` constructor is not + * exposed as part of the API. + * @since v18.0.0, v16.17.0 + */ + class TestContext { + /** + * This function is used to create a hook running before subtest of the current test. + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as + * the second argument. Default: A no-op function. + * @param options Configuration options for the hook. + * @since v20.1.0 + */ + before: typeof before; + /** + * This function is used to create a hook running before each subtest of the current test. + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as + * the second argument. Default: A no-op function. + * @param options Configuration options for the hook. + * @since v18.8.0 + */ + beforeEach: typeof beforeEach; + /** + * This function is used to create a hook that runs after the current test finishes. + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as + * the second argument. Default: A no-op function. + * @param options Configuration options for the hook. + * @since v18.13.0 + */ + after: typeof after; + /** + * This function is used to create a hook running after each subtest of the current test. + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as + * the second argument. Default: A no-op function. + * @param options Configuration options for the hook. + * @since v18.8.0 + */ + afterEach: typeof afterEach; + /** + * This function is used to write diagnostics to the output. Any diagnostic + * information is included at the end of the test's results. This function does + * not return a value. + * + * ```js + * test('top level test', (t) => { + * t.diagnostic('A diagnostic message'); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param message Message to be reported. + */ + diagnostic(message: string): void; + /** + * The name of the test. + * @since v18.8.0, v16.18.0 + */ + readonly name: string; + /** + * If `shouldRunOnlyTests` is truthy, the test context will only run tests that + * have the `only` option set. Otherwise, all tests are run. If Node.js was not + * started with the `--test-only` command-line option, this function is a + * no-op. + * + * ```js + * test('top level test', (t) => { + * // The test context can be set to run subtests with the 'only' option. + * t.runOnly(true); + * return Promise.all([ + * t.test('this subtest is now skipped'), + * t.test('this subtest is run', { only: true }), + * ]); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param shouldRunOnlyTests Whether or not to run `only` tests. 
+ */ + runOnly(shouldRunOnlyTests: boolean): void; + /** + * ```js + * test('top level test', async (t) => { + * await fetch('some/uri', { signal: t.signal }); + * }); + * ``` + * @since v18.7.0, v16.17.0 + */ + readonly signal: AbortSignal; + /** + * This function causes the test's output to indicate the test as skipped. If`message` is provided, it is included in the output. Calling `skip()` does + * not terminate execution of the test function. This function does not return a + * value. + * + * ```js + * test('top level test', (t) => { + * // Make sure to return here as well if the test contains additional logic. + * t.skip('this is skipped'); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param message Optional skip message. + */ + skip(message?: string): void; + /** + * This function adds a `TODO` directive to the test's output. If `message` is + * provided, it is included in the output. Calling `todo()` does not terminate + * execution of the test function. This function does not return a value. + * + * ```js + * test('top level test', (t) => { + * // This test is marked as `TODO` + * t.todo('this is a todo'); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param message Optional `TODO` message. + */ + todo(message?: string): void; + /** + * This function is used to create subtests under the current test. This function behaves in + * the same fashion as the top level {@link test} function. + * @since v18.0.0 + * @param name The name of the test, which is displayed when reporting test results. + * Default: The `name` property of fn, or `''` if `fn` does not have a name. + * @param options Configuration options for the test + * @param fn The function under test. This first argument to this function is a + * {@link TestContext} object. If the test uses callbacks, the callback function is + * passed as the second argument. Default: A no-op function. + * @returns A {@link Promise} resolved with `undefined` once the test completes. + */ + test: typeof test; + /** + * Each test provides its own MockTracker instance. + */ + readonly mock: MockTracker; + } + /** + * An instance of `SuiteContext` is passed to each suite function in order to + * interact with the test runner. However, the `SuiteContext` constructor is not + * exposed as part of the API. + * @since v18.7.0, v16.17.0 + */ + class SuiteContext { + /** + * The name of the suite. + * @since v18.8.0, v16.18.0 + */ + readonly name: string; + /** + * Can be used to abort test subtasks when the test has been aborted. + * @since v18.7.0, v16.17.0 + */ + readonly signal: AbortSignal; + } + interface TestOptions { + /** + * If a number is provided, then that many tests would run in parallel. + * If truthy, it would run (number of cpu cores - 1) tests in parallel. + * For subtests, it will be `Infinity` tests in parallel. + * If falsy, it would only run one test at a time. + * If unspecified, subtests inherit this value from their parent. + * @default false + */ + concurrency?: number | boolean | undefined; + /** + * If truthy, and the test context is configured to run `only` tests, then this test will be + * run. Otherwise, the test is skipped. + * @default false + */ + only?: boolean | undefined; + /** + * Allows aborting an in-progress test. + * @since v18.8.0 + */ + signal?: AbortSignal | undefined; + /** + * If truthy, the test is skipped. If a string is provided, that string is displayed in the + * test results as the reason for skipping the test. 
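+         *
+         * For example, a test skipped with a reason string (illustrative only):
+         *
+         * ```js
+         * import test from 'node:test';
+         *
+         * // The reason appears in the test results.
+         * test('behaviour that needs Linux', { skip: 'requires Linux' }, () => {});
+         * ```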
+ * @default false + */ + skip?: boolean | string | undefined; + /** + * A number of milliseconds the test will fail after. If unspecified, subtests inherit this + * value from their parent. + * @default Infinity + * @since v18.7.0 + */ + timeout?: number | undefined; + /** + * If truthy, the test marked as `TODO`. If a string is provided, that string is displayed in + * the test results as the reason why the test is `TODO`. + * @default false + */ + todo?: boolean | string | undefined; + } + /** + * This function is used to create a hook running before running a suite. + * + * ```js + * describe('tests', async () => { + * before(() => console.log('about to run some test')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param [fn='A no-op function'] The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. The following properties are supported: + */ + function before(fn?: HookFn, options?: HookOptions): void; + /** + * This function is used to create a hook running after running a suite. + * + * ```js + * describe('tests', async () => { + * after(() => console.log('finished running tests')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param [fn='A no-op function'] The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. The following properties are supported: + */ + function after(fn?: HookFn, options?: HookOptions): void; + /** + * This function is used to create a hook running + * before each subtest of the current suite. + * + * ```js + * describe('tests', async () => { + * beforeEach(() => console.log('about to run a test')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param [fn='A no-op function'] The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. The following properties are supported: + */ + function beforeEach(fn?: HookFn, options?: HookOptions): void; + /** + * This function is used to create a hook running + * after each subtest of the current test. + * + * ```js + * describe('tests', async () => { + * afterEach(() => console.log('finished running a test')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param [fn='A no-op function'] The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. The following properties are supported: + */ + function afterEach(fn?: HookFn, options?: HookOptions): void; + /** + * The hook function. If the hook uses callbacks, the callback function is passed as the + * second argument. + */ + type HookFn = (s: SuiteContext, done: (result?: any) => void) => any; + /** + * Configuration options for hooks. + * @since v18.8.0 + */ + interface HookOptions { + /** + * Allows aborting an in-progress hook. + */ + signal?: AbortSignal | undefined; + /** + * A number of milliseconds the hook will fail after. If unspecified, subtests inherit this + * value from their parent. 
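+         *
+         * For example, failing a hook that takes longer than one second (the value is illustrative):
+         *
+         * ```js
+         * import { before } from 'node:test';
+         *
+         * before(async () => {
+         *   // slow asynchronous setup
+         * }, { timeout: 1000 });
+         * ```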
+ * @default Infinity + */ + timeout?: number | undefined; + } + interface MockFunctionOptions { + /** + * The number of times that the mock will use the behavior of `implementation`. + * Once the mock function has been called `times` times, + * it will automatically restore the behavior of `original`. + * This value must be an integer greater than zero. + * @default Infinity + */ + times?: number | undefined; + } + interface MockMethodOptions extends MockFunctionOptions { + /** + * If `true`, `object[methodName]` is treated as a getter. + * This option cannot be used with the `setter` option. + */ + getter?: boolean | undefined; + /** + * If `true`, `object[methodName]` is treated as a setter. + * This option cannot be used with the `getter` option. + */ + setter?: boolean | undefined; + } + type Mock = F & { + mock: MockFunctionContext; + }; + type NoOpFunction = (...args: any[]) => undefined; + type FunctionPropertyNames = { + [K in keyof T]: T[K] extends Function ? K : never; + }[keyof T]; + /** + * The `MockTracker` class is used to manage mocking functionality. The test runner + * module provides a top level `mock` export which is a `MockTracker` instance. + * Each test also provides its own `MockTracker` instance via the test context's`mock` property. + * @since v19.1.0, v18.13.0 + */ + class MockTracker { + /** + * This function is used to create a mock function. + * + * The following example creates a mock function that increments a counter by one + * on each invocation. The `times` option is used to modify the mock behavior such + * that the first two invocations add two to the counter instead of one. + * + * ```js + * test('mocks a counting function', (t) => { + * let cnt = 0; + * + * function addOne() { + * cnt++; + * return cnt; + * } + * + * function addTwo() { + * cnt += 2; + * return cnt; + * } + * + * const fn = t.mock.fn(addOne, addTwo, { times: 2 }); + * + * assert.strictEqual(fn(), 2); + * assert.strictEqual(fn(), 4); + * assert.strictEqual(fn(), 5); + * assert.strictEqual(fn(), 6); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param [original='A no-op function'] An optional function to create a mock on. + * @param implementation An optional function used as the mock implementation for `original`. This is useful for creating mocks that exhibit one behavior for a specified number of calls and + * then restore the behavior of `original`. + * @param options Optional configuration options for the mock function. The following properties are supported: + * @return The mocked function. The mocked function contains a special `mock` property, which is an instance of {@link MockFunctionContext}, and can be used for inspecting and changing the + * behavior of the mocked function. + */ + fn(original?: F, options?: MockFunctionOptions): Mock; + fn( + original?: F, + implementation?: Implementation, + options?: MockFunctionOptions, + ): Mock; + /** + * This function is used to create a mock on an existing object method. The + * following example demonstrates how a mock is created on an existing object + * method. 
+ * + * ```js + * test('spies on an object method', (t) => { + * const number = { + * value: 5, + * subtract(a) { + * return this.value - a; + * }, + * }; + * + * t.mock.method(number, 'subtract'); + * assert.strictEqual(number.subtract.mock.calls.length, 0); + * assert.strictEqual(number.subtract(3), 2); + * assert.strictEqual(number.subtract.mock.calls.length, 1); + * + * const call = number.subtract.mock.calls[0]; + * + * assert.deepStrictEqual(call.arguments, [3]); + * assert.strictEqual(call.result, 2); + * assert.strictEqual(call.error, undefined); + * assert.strictEqual(call.target, undefined); + * assert.strictEqual(call.this, number); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param object The object whose method is being mocked. + * @param methodName The identifier of the method on `object` to mock. If `object[methodName]` is not a function, an error is thrown. + * @param implementation An optional function used as the mock implementation for `object[methodName]`. + * @param options Optional configuration options for the mock method. The following properties are supported: + * @return The mocked method. The mocked method contains a special `mock` property, which is an instance of {@link MockFunctionContext}, and can be used for inspecting and changing the + * behavior of the mocked method. + */ + method< + MockedObject extends object, + MethodName extends FunctionPropertyNames, + >( + object: MockedObject, + methodName: MethodName, + options?: MockFunctionOptions, + ): MockedObject[MethodName] extends Function ? Mock + : never; + method< + MockedObject extends object, + MethodName extends FunctionPropertyNames, + Implementation extends Function, + >( + object: MockedObject, + methodName: MethodName, + implementation: Implementation, + options?: MockFunctionOptions, + ): MockedObject[MethodName] extends Function ? Mock + : never; + method( + object: MockedObject, + methodName: keyof MockedObject, + options: MockMethodOptions, + ): Mock; + method( + object: MockedObject, + methodName: keyof MockedObject, + implementation: Function, + options: MockMethodOptions, + ): Mock; + + /** + * This function is syntax sugar for `MockTracker.method` with `options.getter`set to `true`. + * @since v19.3.0, v18.13.0 + */ + getter< + MockedObject extends object, + MethodName extends keyof MockedObject, + >( + object: MockedObject, + methodName: MethodName, + options?: MockFunctionOptions, + ): Mock<() => MockedObject[MethodName]>; + getter< + MockedObject extends object, + MethodName extends keyof MockedObject, + Implementation extends Function, + >( + object: MockedObject, + methodName: MethodName, + implementation?: Implementation, + options?: MockFunctionOptions, + ): Mock<(() => MockedObject[MethodName]) | Implementation>; + /** + * This function is syntax sugar for `MockTracker.method` with `options.setter`set to `true`. 
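+         *
+         * A small sketch of spying on an existing setter (the object shape is illustrative):
+         *
+         * ```js
+         * import assert from 'node:assert';
+         * import { test } from 'node:test';
+         *
+         * test('spies on a setter', (t) => {
+         *   const obj = {
+         *     _value: 0,
+         *     set value(v) { this._value = v; },
+         *   };
+         *
+         *   const mocked = t.mock.setter(obj, 'value');
+         *   obj.value = 42;
+         *   assert.strictEqual(mocked.mock.calls.length, 1);
+         *   assert.deepStrictEqual(mocked.mock.calls[0].arguments, [42]);
+         * });
+         * ```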
+         * @since v19.3.0, v18.13.0
+         */
+        setter<
+            MockedObject extends object,
+            MethodName extends keyof MockedObject,
+        >(
+            object: MockedObject,
+            methodName: MethodName,
+            options?: MockFunctionOptions,
+        ): Mock<(value: MockedObject[MethodName]) => void>;
+        setter<
+            MockedObject extends object,
+            MethodName extends keyof MockedObject,
+            Implementation extends Function,
+        >(
+            object: MockedObject,
+            methodName: MethodName,
+            implementation?: Implementation,
+            options?: MockFunctionOptions,
+        ): Mock<((value: MockedObject[MethodName]) => void) | Implementation>;
+        /**
+         * This function restores the default behavior of all mocks that were previously
+         * created by this `MockTracker` and disassociates the mocks from the `MockTracker` instance.
+         * Once disassociated, the mocks can still be used, but the `MockTracker` instance can no
+         * longer be used to reset their behavior or otherwise interact with them.
+         *
+         * After each test completes, this function is called on the test context's `MockTracker`.
+         * If the global `MockTracker` is used extensively, calling this function manually is recommended.
+         * @since v19.1.0, v18.13.0
+         */
+        reset(): void;
+        /**
+         * This function restores the default behavior of all mocks that were previously
+         * created by this `MockTracker`. Unlike `mock.reset()`, `mock.restoreAll()` does
+         * not disassociate the mocks from the `MockTracker` instance.
+         * @since v19.1.0, v18.13.0
+         */
+        restoreAll(): void;
+        timers: MockTimers;
+    }
+    const mock: MockTracker;
+    interface MockFunctionCall<
+        F extends Function,
+        ReturnType = F extends (...args: any) => infer T ? T
+            : F extends abstract new(...args: any) => infer T ? T
+            : unknown,
+        Args = F extends (...args: infer Y) => any ? Y
+            : F extends abstract new(...args: infer Y) => any ? Y
+            : unknown[],
+    > {
+        /**
+         * An array of the arguments passed to the mock function.
+         */
+        arguments: Args;
+        /**
+         * If the mocked function threw, then this property contains the thrown value.
+         */
+        error: unknown | undefined;
+        /**
+         * The value returned by the mocked function.
+         *
+         * If the mocked function threw, it will be `undefined`.
+         */
+        result: ReturnType | undefined;
+        /**
+         * An `Error` object whose stack can be used to determine the callsite of the mocked function invocation.
+         */
+        stack: Error;
+        /**
+         * If the mocked function is a constructor, this field contains the class being constructed.
+         * Otherwise this will be `undefined`.
+         */
+        target: F extends abstract new(...args: any) => any ? F : undefined;
+        /**
+         * The mocked function's `this` value.
+         */
+        this: unknown;
+    }
+    /**
+     * The `MockFunctionContext` class is used to inspect or manipulate the behavior of
+     * mocks created via the `MockTracker` APIs.
+     * @since v19.1.0, v18.13.0
+     */
+    class MockFunctionContext<F extends Function> {
+        /**
+         * A getter that returns a copy of the internal array used to track calls to the
+         * mock. Each entry in the array is an object with the following properties.
+         * @since v19.1.0, v18.13.0
+         */
+        readonly calls: Array<MockFunctionCall<F>>;
+        /**
+         * This function returns the number of times that this mock has been invoked. This
+         * function is more efficient than checking `ctx.calls.length` because `ctx.calls` is a
+         * getter that creates a copy of the internal call tracking array.
+         * @since v19.1.0, v18.13.0
+         * @return The number of times that this mock has been invoked.
+         */
+        callCount(): number;
+        /**
+         * This function is used to change the behavior of an existing mock.
+ * + * The following example creates a mock function using `t.mock.fn()`, calls the + * mock function, and then changes the mock implementation to a different function. + * + * ```js + * test('changes a mock behavior', (t) => { + * let cnt = 0; + * + * function addOne() { + * cnt++; + * return cnt; + * } + * + * function addTwo() { + * cnt += 2; + * return cnt; + * } + * + * const fn = t.mock.fn(addOne); + * + * assert.strictEqual(fn(), 1); + * fn.mock.mockImplementation(addTwo); + * assert.strictEqual(fn(), 3); + * assert.strictEqual(fn(), 5); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param implementation The function to be used as the mock's new implementation. + */ + mockImplementation(implementation: Function): void; + /** + * This function is used to change the behavior of an existing mock for a single + * invocation. Once invocation `onCall` has occurred, the mock will revert to + * whatever behavior it would have used had `mockImplementationOnce()` not been + * called. + * + * The following example creates a mock function using `t.mock.fn()`, calls the + * mock function, changes the mock implementation to a different function for the + * next invocation, and then resumes its previous behavior. + * + * ```js + * test('changes a mock behavior once', (t) => { + * let cnt = 0; + * + * function addOne() { + * cnt++; + * return cnt; + * } + * + * function addTwo() { + * cnt += 2; + * return cnt; + * } + * + * const fn = t.mock.fn(addOne); + * + * assert.strictEqual(fn(), 1); + * fn.mock.mockImplementationOnce(addTwo); + * assert.strictEqual(fn(), 3); + * assert.strictEqual(fn(), 4); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param implementation The function to be used as the mock's implementation for the invocation number specified by `onCall`. + * @param onCall The invocation number that will use `implementation`. If the specified invocation has already occurred then an exception is thrown. + */ + mockImplementationOnce(implementation: Function, onCall?: number): void; + /** + * Resets the call history of the mock function. + * @since v19.3.0, v18.13.0 + */ + resetCalls(): void; + /** + * Resets the implementation of the mock function to its original behavior. The + * mock can still be used after calling this function. + * @since v19.1.0, v18.13.0 + */ + restore(): void; + } + type Timer = "setInterval" | "setTimeout" | "setImmediate" | "Date"; + + interface MockTimersOptions { + apis: Timer[]; + now?: number | Date; + } + /** + * Mocking timers is a technique commonly used in software testing to simulate and + * control the behavior of timers, such as `setInterval` and `setTimeout`, + * without actually waiting for the specified time intervals. + * + * The MockTimers API also allows for mocking of the `Date` constructor and + * `setImmediate`/`clearImmediate` functions. + * + * The `MockTracker` provides a top-level `timers` export + * which is a `MockTimers` instance. + * @since v20.4.0 + * @experimental + */ + class MockTimers { + /** + * Enables timer mocking for the specified timers. + * + * **Note:** When you enable mocking for a specific timer, its associated + * clear function will also be implicitly mocked. + * + * **Note:** Mocking `Date` will affect the behavior of the mocked timers + * as they use the same internal clock. 
+ * + * Example usage without setting initial time: + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.enable({ apis: ['setInterval', 'Date'], now: 1234 }); + * ``` + * + * The above example enables mocking for the `Date` constructor, `setInterval` timer and + * implicitly mocks the `clearInterval` function. Only the `Date` constructor from `globalThis`, + * `setInterval` and `clearInterval` functions from `node:timers`,`node:timers/promises`, and `globalThis` will be mocked. + * + * Example usage with initial time set + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.enable({ apis: ['Date'], now: 1000 }); + * ``` + * + * Example usage with initial Date object as time set + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.enable({ apis: ['Date'], now: new Date() }); + * ``` + * + * Alternatively, if you call `mock.timers.enable()` without any parameters: + * + * All timers (`'setInterval'`, `'clearInterval'`, `'Date'`, `'setImmediate'`, `'clearImmediate'`, `'setTimeout'`, and `'clearTimeout'`) + * will be mocked. + * + * The `setInterval`, `clearInterval`, `setTimeout`, and `clearTimeout` functions from `node:timers`, `node:timers/promises`, + * and `globalThis` will be mocked. + * The `Date` constructor from `globalThis` will be mocked. + * + * If there is no initial epoch set, the initial date will be based on 0 in the Unix epoch. This is `January 1st, 1970, 00:00:00 UTC`. You can set an initial date by passing a now property to the `.enable()` method. This value will be used as the initial date for the mocked Date object. It can either be a positive integer, or another Date object. + * @since v20.4.0 + */ + enable(options?: MockTimersOptions): void; + /** + * You can use the `.setTime()` method to manually move the mocked date to another time. This method only accepts a positive integer. + * Note: This method will execute any mocked timers that are in the past from the new time. + * In the below example we are setting a new time for the mocked date. + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * test('sets the time of a date object', (context) => { + * // Optionally choose what to mock + * context.mock.timers.enable({ apis: ['Date'], now: 100 }); + * assert.strictEqual(Date.now(), 100); + * // Advance in time will also advance the date + * context.mock.timers.setTime(1000); + * context.mock.timers.tick(200); + * assert.strictEqual(Date.now(), 1200); + * }); + * ``` + */ + setTime(time: number): void; + /** + * This function restores the default behavior of all mocks that were previously + * created by this `MockTimers` instance and disassociates the mocks + * from the `MockTracker` instance. + * + * **Note:** After each test completes, this function is called on + * the test context's `MockTracker`. + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.reset(); + * ``` + * @since v20.4.0 + */ + reset(): void; + /** + * Advances time for all mocked timers. + * + * **Note:** This diverges from how `setTimeout` in Node.js behaves and accepts + * only positive numbers. In Node.js, `setTimeout` with negative numbers is + * only supported for web compatibility reasons. + * + * The following example mocks a `setTimeout` function and + * by using `.tick` advances in + * time triggering all pending timers. 
+ * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('mocks setTimeout to be executed synchronously without having to actually wait for it', (context) => { + * const fn = context.mock.fn(); + * + * context.mock.timers.enable({ apis: ['setTimeout'] }); + * + * setTimeout(fn, 9999); + * + * assert.strictEqual(fn.mock.callCount(), 0); + * + * // Advance in time + * context.mock.timers.tick(9999); + * + * assert.strictEqual(fn.mock.callCount(), 1); + * }); + * ``` + * + * Alternativelly, the `.tick` function can be called many times + * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('mocks setTimeout to be executed synchronously without having to actually wait for it', (context) => { + * const fn = context.mock.fn(); + * context.mock.timers.enable({ apis: ['setTimeout'] }); + * const nineSecs = 9000; + * setTimeout(fn, nineSecs); + * + * const twoSeconds = 3000; + * context.mock.timers.tick(twoSeconds); + * context.mock.timers.tick(twoSeconds); + * context.mock.timers.tick(twoSeconds); + * + * assert.strictEqual(fn.mock.callCount(), 1); + * }); + * ``` + * + * Advancing time using `.tick` will also advance the time for any `Date` object + * created after the mock was enabled (if `Date` was also set to be mocked). + * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('mocks setTimeout to be executed synchronously without having to actually wait for it', (context) => { + * const fn = context.mock.fn(); + * + * context.mock.timers.enable({ apis: ['setTimeout', 'Date'] }); + * setTimeout(fn, 9999); + * + * assert.strictEqual(fn.mock.callCount(), 0); + * assert.strictEqual(Date.now(), 0); + * + * // Advance in time + * context.mock.timers.tick(9999); + * assert.strictEqual(fn.mock.callCount(), 1); + * assert.strictEqual(Date.now(), 9999); + * }); + * ``` + * @since v20.4.0 + */ + tick(milliseconds: number): void; + /** + * Triggers all pending mocked timers immediately. If the `Date` object is also + * mocked, it will also advance the `Date` object to the furthest timer's time. + * + * The example below triggers all pending timers immediately, + * causing them to execute without any delay. + * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('runAll functions following the given order', (context) => { + * context.mock.timers.enable({ apis: ['setTimeout', 'Date'] }); + * const results = []; + * setTimeout(() => results.push(1), 9999); + * + * // Notice that if both timers have the same timeout, + * // the order of execution is guaranteed + * setTimeout(() => results.push(3), 8888); + * setTimeout(() => results.push(2), 8888); + * + * assert.deepStrictEqual(results, []); + * + * context.mock.timers.runAll(); + * assert.deepStrictEqual(results, [3, 2, 1]); + * // The Date object is also advanced to the furthest timer's time + * assert.strictEqual(Date.now(), 9999); + * }); + * ``` + * + * **Note:** The `runAll()` function is specifically designed for + * triggering timers in the context of timer mocking. + * It does not have any effect on real-time system + * clocks or actual timers outside of the mocking environment. + * @since v20.4.0 + */ + runAll(): void; + /** + * Calls {@link MockTimers.reset()}. 
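+         *
+         * For example (a sketch; disposing has the same effect as calling `reset()`):
+         *
+         * ```js
+         * import { mock } from 'node:test';
+         *
+         * mock.timers.enable({ apis: ['setTimeout'] });
+         * // ...
+         * mock.timers[Symbol.dispose](); // same effect as mock.timers.reset()
+         * ```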
+ */ + [Symbol.dispose](): void; + } + export { + after, + afterEach, + before, + beforeEach, + describe, + it, + Mock, + mock, + only, + run, + skip, + test, + test as default, + todo, + }; +} + +interface TestLocationInfo { + /** + * The column number where the test is defined, or + * `undefined` if the test was run through the REPL. + */ + column?: number; + /** + * The path of the test file, `undefined` if test is not ran through a file. + */ + file?: string; + /** + * The line number where the test is defined, or + * `undefined` if the test was run through the REPL. + */ + line?: number; +} +interface DiagnosticData extends TestLocationInfo { + /** + * The diagnostic message. + */ + message: string; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestFail extends TestLocationInfo { + /** + * Additional execution metadata. + */ + details: { + /** + * The duration of the test in milliseconds. + */ + duration_ms: number; + /** + * The error thrown by the test. + */ + error: Error; + /** + * The type of the test, used to denote whether this is a suite. + * @since 20.0.0, 19.9.0, 18.17.0 + */ + type?: "suite"; + }; + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; + /** + * The ordinal number of the test. + */ + testNumber: number; + /** + * Present if `context.todo` is called. + */ + todo?: string | boolean; + /** + * Present if `context.skip` is called. + */ + skip?: string | boolean; +} +interface TestPass extends TestLocationInfo { + /** + * Additional execution metadata. + */ + details: { + /** + * The duration of the test in milliseconds. + */ + duration_ms: number; + /** + * The type of the test, used to denote whether this is a suite. + * @since 20.0.0, 19.9.0, 18.17.0 + */ + type?: "suite"; + }; + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; + /** + * The ordinal number of the test. + */ + testNumber: number; + /** + * Present if `context.todo` is called. + */ + todo?: string | boolean; + /** + * Present if `context.skip` is called. + */ + skip?: string | boolean; +} +interface TestPlan extends TestLocationInfo { + /** + * The nesting level of the test. + */ + nesting: number; + /** + * The number of subtests that have ran. + */ + count: number; +} +interface TestStart extends TestLocationInfo { + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestStderr extends TestLocationInfo { + /** + * The message written to `stderr` + */ + message: string; +} +interface TestStdout extends TestLocationInfo { + /** + * The message written to `stdout` + */ + message: string; +} +interface TestEnqueue extends TestLocationInfo { + /** + * The test name + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestDequeue extends TestLocationInfo { + /** + * The test name + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; +} + +/** + * The `node:test/reporters` module exposes the builtin-reporters for `node:test`. + * To access it: + * + * ```js + * import test from 'node:test/reporters'; + * ``` + * + * This module is only available under the `node:` scheme. 
The following will not + * work: + * + * ```js + * import test from 'test/reporters'; + * ``` + * @since v19.9.0 + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/test/reporters.js) + */ +declare module "node:test/reporters" { + import { Transform, TransformOptions } from "node:stream"; + + type TestEvent = + | { type: "test:diagnostic"; data: DiagnosticData } + | { type: "test:fail"; data: TestFail } + | { type: "test:pass"; data: TestPass } + | { type: "test:plan"; data: TestPlan } + | { type: "test:start"; data: TestStart } + | { type: "test:stderr"; data: TestStderr } + | { type: "test:stdout"; data: TestStdout } + | { type: "test:enqueue"; data: TestEnqueue } + | { type: "test:dequeue"; data: TestDequeue } + | { type: "test:watch:drained" }; + type TestEventGenerator = AsyncGenerator; + + /** + * The `dot` reporter outputs the test results in a compact format, + * where each passing test is represented by a `.`, + * and each failing test is represented by a `X`. + */ + function dot(source: TestEventGenerator): AsyncGenerator<"\n" | "." | "X", void>; + /** + * The `tap` reporter outputs the test results in the [TAP](https://testanything.org/) format. + */ + function tap(source: TestEventGenerator): AsyncGenerator; + /** + * The `spec` reporter outputs the test results in a human-readable format. + */ + class Spec extends Transform { + constructor(); + } + /** + * The `junit` reporter outputs test results in a jUnit XML format + */ + function junit(source: TestEventGenerator): AsyncGenerator; + class Lcov extends Transform { + constructor(opts?: TransformOptions); + } + export { dot, junit, Lcov as lcov, Spec as spec, tap, TestEvent }; +} diff --git a/dist/node_modules/@types/node/timers.d.ts b/dist/node_modules/@types/node/timers.d.ts new file mode 100644 index 0000000..039f31f --- /dev/null +++ b/dist/node_modules/@types/node/timers.d.ts @@ -0,0 +1,240 @@ +/** + * The `timer` module exposes a global API for scheduling functions to + * be called at some future period of time. Because the timer functions are + * globals, there is no need to call `require('node:timers')` to use the API. + * + * The timer functions within Node.js implement a similar API as the timers API + * provided by Web Browsers but use a different internal implementation that is + * built around the Node.js [Event Loop](https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick/#setimmediate-vs-settimeout). + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/timers.js) + */ +declare module "timers" { + import { Abortable } from "node:events"; + import { + setImmediate as setImmediatePromise, + setInterval as setIntervalPromise, + setTimeout as setTimeoutPromise, + } from "node:timers/promises"; + interface TimerOptions extends Abortable { + /** + * Set to `false` to indicate that the scheduled `Timeout` + * should not require the Node.js event loop to remain active. + * @default true + */ + ref?: boolean | undefined; + } + let setTimeout: typeof global.setTimeout; + let clearTimeout: typeof global.clearTimeout; + let setInterval: typeof global.setInterval; + let clearInterval: typeof global.clearInterval; + let setImmediate: typeof global.setImmediate; + let clearImmediate: typeof global.clearImmediate; + global { + namespace NodeJS { + // compatibility with older typings + interface Timer extends RefCounted { + hasRef(): boolean; + refresh(): this; + [Symbol.toPrimitive](): number; + } + /** + * This object is created internally and is returned from `setImmediate()`. 
It + * can be passed to `clearImmediate()` in order to cancel the scheduled + * actions. + * + * By default, when an immediate is scheduled, the Node.js event loop will continue + * running as long as the immediate is active. The `Immediate` object returned by `setImmediate()` exports both `immediate.ref()` and `immediate.unref()`functions that can be used to + * control this default behavior. + */ + class Immediate implements RefCounted { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the`Immediate` is active. Calling `immediate.ref()` multiple times will have no + * effect. + * + * By default, all `Immediate` objects are "ref'ed", making it normally unnecessary + * to call `immediate.ref()` unless `immediate.unref()` had been called previously. + * @since v9.7.0 + * @return a reference to `immediate` + */ + ref(): this; + /** + * When called, the active `Immediate` object will not require the Node.js event + * loop to remain active. If there is no other activity keeping the event loop + * running, the process may exit before the `Immediate` object's callback is + * invoked. Calling `immediate.unref()` multiple times will have no effect. + * @since v9.7.0 + * @return a reference to `immediate` + */ + unref(): this; + /** + * If true, the `Immediate` object will keep the Node.js event loop active. + * @since v11.0.0 + */ + hasRef(): boolean; + _onImmediate: Function; // to distinguish it from the Timeout class + /** + * Cancels the immediate. This is similar to calling `clearImmediate()`. + * @since v20.5.0 + */ + [Symbol.dispose](): void; + } + /** + * This object is created internally and is returned from `setTimeout()` and `setInterval()`. It can be passed to either `clearTimeout()` or `clearInterval()` in order to cancel the + * scheduled actions. + * + * By default, when a timer is scheduled using either `setTimeout()` or `setInterval()`, the Node.js event loop will continue running as long as the + * timer is active. Each of the `Timeout` objects returned by these functions + * export both `timeout.ref()` and `timeout.unref()` functions that can be used to + * control this default behavior. + */ + class Timeout implements Timer { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the`Timeout` is active. Calling `timeout.ref()` multiple times will have no effect. + * + * By default, all `Timeout` objects are "ref'ed", making it normally unnecessary + * to call `timeout.ref()` unless `timeout.unref()` had been called previously. + * @since v0.9.1 + * @return a reference to `timeout` + */ + ref(): this; + /** + * When called, the active `Timeout` object will not require the Node.js event loop + * to remain active. If there is no other activity keeping the event loop running, + * the process may exit before the `Timeout` object's callback is invoked. Calling`timeout.unref()` multiple times will have no effect. + * @since v0.9.1 + * @return a reference to `timeout` + */ + unref(): this; + /** + * If true, the `Timeout` object will keep the Node.js event loop active. + * @since v11.0.0 + */ + hasRef(): boolean; + /** + * Sets the timer's start time to the current time, and reschedules the timer to + * call its callback at the previously specified duration adjusted to the current + * time. This is useful for refreshing a timer without allocating a new + * JavaScript object. + * + * Using this on a timer that has already called its callback will reactivate the + * timer. 
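+             *
+             * For example (a minimal sketch):
+             *
+             * ```js
+             * const timer = setTimeout(() => console.log('fired'), 1000);
+             *
+             * // Restart the countdown from now without allocating a new Timeout.
+             * timer.refresh();
+             * ```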
+ * @since v10.2.0 + * @return a reference to `timeout` + */ + refresh(): this; + [Symbol.toPrimitive](): number; + /** + * Cancels the timeout. + * @since v20.5.0 + */ + [Symbol.dispose](): void; + } + } + /** + * Schedules execution of a one-time `callback` after `delay` milliseconds. + * + * The `callback` will likely not be invoked in precisely `delay` milliseconds. + * Node.js makes no guarantees about the exact timing of when callbacks will fire, + * nor of their ordering. The callback will be called as close as possible to the + * time specified. + * + * When `delay` is larger than `2147483647` or less than `1`, the `delay`will be set to `1`. Non-integer delays are truncated to an integer. + * + * If `callback` is not a function, a `TypeError` will be thrown. + * + * This method has a custom variant for promises that is available using `timersPromises.setTimeout()`. + * @since v0.0.1 + * @param callback The function to call when the timer elapses. + * @param [delay=1] The number of milliseconds to wait before calling the `callback`. + * @param args Optional arguments to pass when the `callback` is called. + * @return for use with {@link clearTimeout} + */ + function setTimeout( + callback: (...args: TArgs) => void, + ms?: number, + ...args: TArgs + ): NodeJS.Timeout; + // util.promisify no rest args compability + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + function setTimeout(callback: (args: void) => void, ms?: number): NodeJS.Timeout; + namespace setTimeout { + const __promisify__: typeof setTimeoutPromise; + } + /** + * Cancels a `Timeout` object created by `setTimeout()`. + * @since v0.0.1 + * @param timeout A `Timeout` object as returned by {@link setTimeout} or the `primitive` of the `Timeout` object as a string or a number. + */ + function clearTimeout(timeoutId: NodeJS.Timeout | string | number | undefined): void; + /** + * Schedules repeated execution of `callback` every `delay` milliseconds. + * + * When `delay` is larger than `2147483647` or less than `1`, the `delay` will be + * set to `1`. Non-integer delays are truncated to an integer. + * + * If `callback` is not a function, a `TypeError` will be thrown. + * + * This method has a custom variant for promises that is available using `timersPromises.setInterval()`. + * @since v0.0.1 + * @param callback The function to call when the timer elapses. + * @param [delay=1] The number of milliseconds to wait before calling the `callback`. + * @param args Optional arguments to pass when the `callback` is called. + * @return for use with {@link clearInterval} + */ + function setInterval( + callback: (...args: TArgs) => void, + ms?: number, + ...args: TArgs + ): NodeJS.Timeout; + // util.promisify no rest args compability + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + function setInterval(callback: (args: void) => void, ms?: number): NodeJS.Timeout; + namespace setInterval { + const __promisify__: typeof setIntervalPromise; + } + /** + * Cancels a `Timeout` object created by `setInterval()`. + * @since v0.0.1 + * @param timeout A `Timeout` object as returned by {@link setInterval} or the `primitive` of the `Timeout` object as a string or a number. + */ + function clearInterval(intervalId: NodeJS.Timeout | string | number | undefined): void; + /** + * Schedules the "immediate" execution of the `callback` after I/O events' + * callbacks. 
+ * + * When multiple calls to `setImmediate()` are made, the `callback` functions are + * queued for execution in the order in which they are created. The entire callback + * queue is processed every event loop iteration. If an immediate timer is queued + * from inside an executing callback, that timer will not be triggered until the + * next event loop iteration. + * + * If `callback` is not a function, a `TypeError` will be thrown. + * + * This method has a custom variant for promises that is available using `timersPromises.setImmediate()`. + * @since v0.9.1 + * @param callback The function to call at the end of this turn of the Node.js `Event Loop` + * @param args Optional arguments to pass when the `callback` is called. + * @return for use with {@link clearImmediate} + */ + function setImmediate( + callback: (...args: TArgs) => void, + ...args: TArgs + ): NodeJS.Immediate; + // util.promisify no rest args compability + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + function setImmediate(callback: (args: void) => void): NodeJS.Immediate; + namespace setImmediate { + const __promisify__: typeof setImmediatePromise; + } + /** + * Cancels an `Immediate` object created by `setImmediate()`. + * @since v0.9.1 + * @param immediate An `Immediate` object as returned by {@link setImmediate}. + */ + function clearImmediate(immediateId: NodeJS.Immediate | undefined): void; + function queueMicrotask(callback: () => void): void; + } +} +declare module "node:timers" { + export * from "timers"; +} diff --git a/dist/node_modules/@types/node/timers/promises.d.ts b/dist/node_modules/@types/node/timers/promises.d.ts new file mode 100644 index 0000000..5a54dc7 --- /dev/null +++ b/dist/node_modules/@types/node/timers/promises.d.ts @@ -0,0 +1,93 @@ +/** + * The `timers/promises` API provides an alternative set of timer functions + * that return `Promise` objects. The API is accessible via`require('node:timers/promises')`. + * + * ```js + * import { + * setTimeout, + * setImmediate, + * setInterval, + * } from 'timers/promises'; + * ``` + * @since v15.0.0 + */ +declare module "timers/promises" { + import { TimerOptions } from "node:timers"; + /** + * ```js + * import { + * setTimeout, + * } from 'timers/promises'; + * + * const res = await setTimeout(100, 'result'); + * + * console.log(res); // Prints 'result' + * ``` + * @since v15.0.0 + * @param [delay=1] The number of milliseconds to wait before fulfilling the promise. + * @param value A value with which the promise is fulfilled. + */ + function setTimeout(delay?: number, value?: T, options?: TimerOptions): Promise; + /** + * ```js + * import { + * setImmediate, + * } from 'timers/promises'; + * + * const res = await setImmediate('result'); + * + * console.log(res); // Prints 'result' + * ``` + * @since v15.0.0 + * @param value A value with which the promise is fulfilled. + */ + function setImmediate(value?: T, options?: TimerOptions): Promise; + /** + * Returns an async iterator that generates values in an interval of `delay` ms. + * If `ref` is `true`, you need to call `next()` of async iterator explicitly + * or implicitly to keep the event loop alive. 
+ * + * ```js + * import { + * setInterval, + * } from 'timers/promises'; + * + * const interval = 100; + * for await (const startTime of setInterval(interval, Date.now())) { + * const now = Date.now(); + * console.log(now); + * if ((now - startTime) > 1000) + * break; + * } + * console.log(Date.now()); + * ``` + * @since v15.9.0 + */ + function setInterval(delay?: number, value?: T, options?: TimerOptions): AsyncIterable; + interface Scheduler { + /** + * ```js + * import { scheduler } from 'node:timers/promises'; + * + * await scheduler.wait(1000); // Wait one second before continuing + * ``` + * An experimental API defined by the Scheduling APIs draft specification being developed as a standard Web Platform API. + * Calling timersPromises.scheduler.wait(delay, options) is roughly equivalent to calling timersPromises.setTimeout(delay, undefined, options) except that the ref option is not supported. + * @since v16.14.0 + * @experimental + * @param [delay=1] The number of milliseconds to wait before fulfilling the promise. + */ + wait: (delay?: number, options?: TimerOptions) => Promise; + /** + * An experimental API defined by the Scheduling APIs draft specification being developed as a standard Web Platform API. + * Calling timersPromises.scheduler.yield() is equivalent to calling timersPromises.setImmediate() with no arguments. + * @since v16.14.0 + * @experimental + */ + yield: () => Promise; + } + const scheduler: Scheduler; +} +declare module "node:timers/promises" { + export * from "timers/promises"; +} diff --git a/dist/node_modules/@types/node/tls.d.ts b/dist/node_modules/@types/node/tls.d.ts new file mode 100644 index 0000000..9f6d6be --- /dev/null +++ b/dist/node_modules/@types/node/tls.d.ts @@ -0,0 +1,1210 @@ +/** + * The `node:tls` module provides an implementation of the Transport Layer Security + * (TLS) and Secure Socket Layer (SSL) protocols that is built on top of OpenSSL. + * The module can be accessed using: + * + * ```js + * const tls = require('node:tls'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/tls.js) + */ +declare module "tls" { + import { X509Certificate } from "node:crypto"; + import * as net from "node:net"; + import * as stream from "stream"; + const CLIENT_RENEG_LIMIT: number; + const CLIENT_RENEG_WINDOW: number; + interface Certificate { + /** + * Country code. + */ + C: string; + /** + * Street. + */ + ST: string; + /** + * Locality. + */ + L: string; + /** + * Organization. + */ + O: string; + /** + * Organizational unit. + */ + OU: string; + /** + * Common name. + */ + CN: string; + } + interface PeerCertificate { + /** + * `true` if a Certificate Authority (CA), `false` otherwise. + * @since v18.13.0 + */ + ca: boolean; + /** + * The DER encoded X.509 certificate data. + */ + raw: Buffer; + /** + * The certificate subject. + */ + subject: Certificate; + /** + * The certificate issuer, described in the same terms as the `subject`. + */ + issuer: Certificate; + /** + * The date-time the certificate is valid from. + */ + valid_from: string; + /** + * The date-time the certificate is valid to. + */ + valid_to: string; + /** + * The certificate serial number, as a hex string. + */ + serialNumber: string; + /** + * The SHA-1 digest of the DER encoded certificate. + * It is returned as a `:` separated hexadecimal string. + */ + fingerprint: string; + /** + * The SHA-256 digest of the DER encoded certificate. + * It is returned as a `:` separated hexadecimal string. 
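+         *
+         * A sketch of comparing it against a pinned value (the fingerprint shown is a placeholder):
+         *
+         * ```js
+         * const tls = require('node:tls');
+         *
+         * const socket = tls.connect(443, 'example.org', () => {
+         *   const pinned = 'AA:BB:CC:...'; // expected SHA-256 fingerprint
+         *   if (socket.getPeerCertificate().fingerprint256 !== pinned) {
+         *     socket.destroy(new Error('certificate fingerprint mismatch'));
+         *   }
+         * });
+         * ```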
+ */ + fingerprint256: string; + /** + * The SHA-512 digest of the DER encoded certificate. + * It is returned as a `:` separated hexadecimal string. + */ + fingerprint512: string; + /** + * The extended key usage, a set of OIDs. + */ + ext_key_usage?: string[]; + /** + * A string containing concatenated names for the subject, + * an alternative to the `subject` names. + */ + subjectaltname?: string; + /** + * An array describing the AuthorityInfoAccess, used with OCSP. + */ + infoAccess?: NodeJS.Dict; + /** + * For RSA keys: The RSA bit size. + * + * For EC keys: The key size in bits. + */ + bits?: number; + /** + * The RSA exponent, as a string in hexadecimal number notation. + */ + exponent?: string; + /** + * The RSA modulus, as a hexadecimal string. + */ + modulus?: string; + /** + * The public key. + */ + pubkey?: Buffer; + /** + * The ASN.1 name of the OID of the elliptic curve. + * Well-known curves are identified by an OID. + * While it is unusual, it is possible that the curve + * is identified by its mathematical properties, + * in which case it will not have an OID. + */ + asn1Curve?: string; + /** + * The NIST name for the elliptic curve,if it has one + * (not all well-known curves have been assigned names by NIST). + */ + nistCurve?: string; + } + interface DetailedPeerCertificate extends PeerCertificate { + /** + * The issuer certificate object. + * For self-signed certificates, this may be a circular reference. + */ + issuerCertificate: DetailedPeerCertificate; + } + interface CipherNameAndProtocol { + /** + * The cipher name. + */ + name: string; + /** + * SSL/TLS protocol version. + */ + version: string; + /** + * IETF name for the cipher suite. + */ + standardName: string; + } + interface EphemeralKeyInfo { + /** + * The supported types are 'DH' and 'ECDH'. + */ + type: string; + /** + * The name property is available only when type is 'ECDH'. + */ + name?: string | undefined; + /** + * The size of parameter of an ephemeral key exchange. + */ + size: number; + } + interface KeyObject { + /** + * Private keys in PEM format. + */ + pem: string | Buffer; + /** + * Optional passphrase. + */ + passphrase?: string | undefined; + } + interface PxfObject { + /** + * PFX or PKCS12 encoded private key and certificate chain. + */ + buf: string | Buffer; + /** + * Optional passphrase. + */ + passphrase?: string | undefined; + } + interface TLSSocketOptions extends SecureContextOptions, CommonConnectionOptions { + /** + * If true the TLS socket will be instantiated in server-mode. + * Defaults to false. + */ + isServer?: boolean | undefined; + /** + * An optional net.Server instance. + */ + server?: net.Server | undefined; + /** + * An optional Buffer instance containing a TLS session. + */ + session?: Buffer | undefined; + /** + * If true, specifies that the OCSP status request extension will be + * added to the client hello and an 'OCSPResponse' event will be + * emitted on the socket before establishing a secure communication + */ + requestOCSP?: boolean | undefined; + } + /** + * Performs transparent encryption of written data and all required TLS + * negotiation. + * + * Instances of `tls.TLSSocket` implement the duplex `Stream` interface. + * + * Methods that return TLS connection metadata (e.g.{@link TLSSocket.getPeerCertificate}) will only return data while the + * connection is open. + * @since v0.11.4 + */ + class TLSSocket extends net.Socket { + /** + * Construct a new tls.TLSSocket object from an existing TCP socket. 
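+         *
+         * A sketch of upgrading an existing TCP connection (host and port are placeholders);
+         * `tls.connect({ socket })` builds a `TLSSocket` over the raw socket internally:
+         *
+         * ```js
+         * const net = require('node:net');
+         * const tls = require('node:tls');
+         *
+         * const raw = net.connect(443, 'example.org', () => {
+         *   const secure = tls.connect({ socket: raw, servername: 'example.org' }, () => {
+         *     console.log('negotiated', secure.getProtocol());
+         *     secure.end();
+         *   });
+         * });
+         * ```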
+ */ + constructor(socket: net.Socket | stream.Duplex, options?: TLSSocketOptions); + /** + * This property is `true` if the peer certificate was signed by one of the CAs + * specified when creating the `tls.TLSSocket` instance, otherwise `false`. + * @since v0.11.4 + */ + authorized: boolean; + /** + * Returns the reason why the peer's certificate was not been verified. This + * property is set only when `tlsSocket.authorized === false`. + * @since v0.11.4 + */ + authorizationError: Error; + /** + * Always returns `true`. This may be used to distinguish TLS sockets from regular`net.Socket` instances. + * @since v0.11.4 + */ + encrypted: true; + /** + * String containing the selected ALPN protocol. + * Before a handshake has completed, this value is always null. + * When a handshake is completed but not ALPN protocol was selected, tlsSocket.alpnProtocol equals false. + */ + alpnProtocol: string | false | null; + /** + * Returns an object representing the local certificate. The returned object has + * some properties corresponding to the fields of the certificate. + * + * See {@link TLSSocket.getPeerCertificate} for an example of the certificate + * structure. + * + * If there is no local certificate, an empty object will be returned. If the + * socket has been destroyed, `null` will be returned. + * @since v11.2.0 + */ + getCertificate(): PeerCertificate | object | null; + /** + * Returns an object containing information on the negotiated cipher suite. + * + * For example, a TLSv1.2 protocol with AES256-SHA cipher: + * + * ```json + * { + * "name": "AES256-SHA", + * "standardName": "TLS_RSA_WITH_AES_256_CBC_SHA", + * "version": "SSLv3" + * } + * ``` + * + * See [SSL\_CIPHER\_get\_name](https://www.openssl.org/docs/man1.1.1/man3/SSL_CIPHER_get_name.html) for more information. + * @since v0.11.4 + */ + getCipher(): CipherNameAndProtocol; + /** + * Returns an object representing the type, name, and size of parameter of + * an ephemeral key exchange in `perfect forward secrecy` on a client + * connection. It returns an empty object when the key exchange is not + * ephemeral. As this is only supported on a client socket; `null` is returned + * if called on a server socket. The supported types are `'DH'` and `'ECDH'`. The`name` property is available only when type is `'ECDH'`. + * + * For example: `{ type: 'ECDH', name: 'prime256v1', size: 256 }`. + * @since v5.0.0 + */ + getEphemeralKeyInfo(): EphemeralKeyInfo | object | null; + /** + * As the `Finished` messages are message digests of the complete handshake + * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can + * be used for external authentication procedures when the authentication + * provided by SSL/TLS is not desired or is not enough. + * + * Corresponds to the `SSL_get_finished` routine in OpenSSL and may be used + * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929). + * @since v9.9.0 + * @return The latest `Finished` message that has been sent to the socket as part of a SSL/TLS handshake, or `undefined` if no `Finished` message has been sent yet. + */ + getFinished(): Buffer | undefined; + /** + * Returns an object representing the peer's certificate. If the peer does not + * provide a certificate, an empty object will be returned. If the socket has been + * destroyed, `null` will be returned. + * + * If the full certificate chain was requested, each certificate will include an`issuerCertificate` property containing an object representing its issuer's + * certificate. 
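+ *
+ * An illustrative sketch (not from the upstream docs) that prints the
+ * certificate chain of a connection; host and port are placeholders.
+ *
+ * ```js
+ * const tls = require('node:tls');
+ *
+ * const socket = tls.connect(443, { host: 'example.com', servername: 'example.com' }, () => {
+ *   const cert = socket.getPeerCertificate(true);
+ *   console.log(cert.subject.CN, 'valid until', cert.valid_to);
+ *
+ *   // Walk the issuer chain; the root certificate refers to itself.
+ *   let issuer = cert.issuerCertificate;
+ *   while (issuer) {
+ *     console.log('issued by', issuer.subject.CN);
+ *     if (issuer === issuer.issuerCertificate) break;
+ *     issuer = issuer.issuerCertificate;
+ *   }
+ *   socket.end();
+ * });
+ * ```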
+ * @since v0.11.4 + * @param detailed Include the full certificate chain if `true`, otherwise include just the peer's certificate. + * @return A certificate object. + */ + getPeerCertificate(detailed: true): DetailedPeerCertificate; + getPeerCertificate(detailed?: false): PeerCertificate; + getPeerCertificate(detailed?: boolean): PeerCertificate | DetailedPeerCertificate; + /** + * As the `Finished` messages are message digests of the complete handshake + * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can + * be used for external authentication procedures when the authentication + * provided by SSL/TLS is not desired or is not enough. + * + * Corresponds to the `SSL_get_peer_finished` routine in OpenSSL and may be used + * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929). + * @since v9.9.0 + * @return The latest `Finished` message that is expected or has actually been received from the socket as part of a SSL/TLS handshake, or `undefined` if there is no `Finished` message so + * far. + */ + getPeerFinished(): Buffer | undefined; + /** + * Returns a string containing the negotiated SSL/TLS protocol version of the + * current connection. The value `'unknown'` will be returned for connected + * sockets that have not completed the handshaking process. The value `null` will + * be returned for server sockets or disconnected client sockets. + * + * Protocol versions are: + * + * * `'SSLv3'` + * * `'TLSv1'` + * * `'TLSv1.1'` + * * `'TLSv1.2'` + * * `'TLSv1.3'` + * + * See the OpenSSL [`SSL_get_version`](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_version.html) documentation for more information. + * @since v5.7.0 + */ + getProtocol(): string | null; + /** + * Returns the TLS session data or `undefined` if no session was + * negotiated. On the client, the data can be provided to the `session` option of {@link connect} to resume the connection. On the server, it may be useful + * for debugging. + * + * See `Session Resumption` for more information. + * + * Note: `getSession()` works only for TLSv1.2 and below. For TLSv1.3, applications + * must use the `'session'` event (it also works for TLSv1.2 and below). + * @since v0.11.4 + */ + getSession(): Buffer | undefined; + /** + * See [SSL\_get\_shared\_sigalgs](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_shared_sigalgs.html) for more information. + * @since v12.11.0 + * @return List of signature algorithms shared between the server and the client in the order of decreasing preference. + */ + getSharedSigalgs(): string[]; + /** + * For a client, returns the TLS session ticket if one is available, or`undefined`. For a server, always returns `undefined`. + * + * It may be useful for debugging. + * + * See `Session Resumption` for more information. + * @since v0.11.4 + */ + getTLSTicket(): Buffer | undefined; + /** + * See `Session Resumption` for more information. + * @since v0.5.6 + * @return `true` if the session was reused, `false` otherwise. + */ + isSessionReused(): boolean; + /** + * The `tlsSocket.renegotiate()` method initiates a TLS renegotiation process. + * Upon completion, the `callback` function will be passed a single argument + * that is either an `Error` (if the request failed) or `null`. + * + * This method can be used to request a peer's certificate after the secure + * connection has been established. + * + * When running as the server, the socket will be destroyed with an error after`handshakeTimeout` timeout. 
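+ *
+ * A hedged server-side sketch (not from the upstream docs): the client is
+ * asked for a certificate after the connection is already established. The
+ * `.pem` file names and the port are hypothetical placeholders.
+ *
+ * ```js
+ * const fs = require('node:fs');
+ * const tls = require('node:tls');
+ *
+ * const server = tls.createServer({
+ *   key: fs.readFileSync('server-key.pem'),   // hypothetical paths
+ *   cert: fs.readFileSync('server-cert.pem'),
+ *   maxVersion: 'TLSv1.2',                    // renegotiation is TLSv1.2 and below
+ * }, (tlsSocket) => {
+ *   const started = tlsSocket.renegotiate(
+ *     { requestCert: true, rejectUnauthorized: false },
+ *     (err) => {
+ *       if (err) return console.error(err);
+ *       console.log(tlsSocket.getPeerCertificate().subject);
+ *     },
+ *   );
+ *   if (!started) console.log('renegotiation could not be initiated');
+ * });
+ * server.listen(8443);
+ * ```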
+ * + * For TLSv1.3, renegotiation cannot be initiated, it is not supported by the + * protocol. + * @since v0.11.8 + * @param callback If `renegotiate()` returned `true`, callback is attached once to the `'secure'` event. If `renegotiate()` returned `false`, `callback` will be called in the next tick with + * an error, unless the `tlsSocket` has been destroyed, in which case `callback` will not be called at all. + * @return `true` if renegotiation was initiated, `false` otherwise. + */ + renegotiate( + options: { + rejectUnauthorized?: boolean | undefined; + requestCert?: boolean | undefined; + }, + callback: (err: Error | null) => void, + ): undefined | boolean; + /** + * The `tlsSocket.setMaxSendFragment()` method sets the maximum TLS fragment size. + * Returns `true` if setting the limit succeeded; `false` otherwise. + * + * Smaller fragment sizes decrease the buffering latency on the client: larger + * fragments are buffered by the TLS layer until the entire fragment is received + * and its integrity is verified; large fragments can span multiple roundtrips + * and their processing can be delayed due to packet loss or reordering. However, + * smaller fragments add extra TLS framing bytes and CPU overhead, which may + * decrease overall server throughput. + * @since v0.11.11 + * @param [size=16384] The maximum TLS fragment size. The maximum value is `16384`. + */ + setMaxSendFragment(size: number): boolean; + /** + * Disables TLS renegotiation for this `TLSSocket` instance. Once called, attempts + * to renegotiate will trigger an `'error'` event on the `TLSSocket`. + * @since v8.4.0 + */ + disableRenegotiation(): void; + /** + * When enabled, TLS packet trace information is written to `stderr`. This can be + * used to debug TLS connection problems. + * + * The format of the output is identical to the output of`openssl s_client -trace` or `openssl s_server -trace`. While it is produced by + * OpenSSL's `SSL_trace()` function, the format is undocumented, can change + * without notice, and should not be relied on. + * @since v12.2.0 + */ + enableTrace(): void; + /** + * Returns the peer certificate as an `X509Certificate` object. + * + * If there is no peer certificate, or the socket has been destroyed,`undefined` will be returned. + * @since v15.9.0 + */ + getPeerX509Certificate(): X509Certificate | undefined; + /** + * Returns the local certificate as an `X509Certificate` object. + * + * If there is no local certificate, or the socket has been destroyed,`undefined` will be returned. + * @since v15.9.0 + */ + getX509Certificate(): X509Certificate | undefined; + /** + * Keying material is used for validations to prevent different kind of attacks in + * network protocols, for example in the specifications of IEEE 802.1X. + * + * Example + * + * ```js + * const keyingMaterial = tlsSocket.exportKeyingMaterial( + * 128, + * 'client finished'); + * + * /* + * Example return value of keyingMaterial: + * + * + * ``` + * + * See the OpenSSL [`SSL_export_keying_material`](https://www.openssl.org/docs/man1.1.1/man3/SSL_export_keying_material.html) documentation for more + * information. + * @since v13.10.0, v12.17.0 + * @param length number of bytes to retrieve from keying material + * @param label an application specific label, typically this will be a value from the [IANA Exporter Label + * Registry](https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#exporter-labels). + * @param context Optionally provide a context. 
+ * @return requested bytes of the keying material + */ + exportKeyingMaterial(length: number, label: string, context: Buffer): Buffer; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; + addListener(event: "secureConnect", listener: () => void): this; + addListener(event: "session", listener: (session: Buffer) => void): this; + addListener(event: "keylog", listener: (line: Buffer) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "OCSPResponse", response: Buffer): boolean; + emit(event: "secureConnect"): boolean; + emit(event: "session", session: Buffer): boolean; + emit(event: "keylog", line: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "OCSPResponse", listener: (response: Buffer) => void): this; + on(event: "secureConnect", listener: () => void): this; + on(event: "session", listener: (session: Buffer) => void): this; + on(event: "keylog", listener: (line: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "OCSPResponse", listener: (response: Buffer) => void): this; + once(event: "secureConnect", listener: () => void): this; + once(event: "session", listener: (session: Buffer) => void): this; + once(event: "keylog", listener: (line: Buffer) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; + prependListener(event: "secureConnect", listener: () => void): this; + prependListener(event: "session", listener: (session: Buffer) => void): this; + prependListener(event: "keylog", listener: (line: Buffer) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; + prependOnceListener(event: "secureConnect", listener: () => void): this; + prependOnceListener(event: "session", listener: (session: Buffer) => void): this; + prependOnceListener(event: "keylog", listener: (line: Buffer) => void): this; + } + interface CommonConnectionOptions { + /** + * An optional TLS context object from tls.createSecureContext() + */ + secureContext?: SecureContext | undefined; + /** + * When enabled, TLS packet trace information is written to `stderr`. This can be + * used to debug TLS connection problems. + * @default false + */ + enableTrace?: boolean | undefined; + /** + * If true the server will request a certificate from clients that + * connect and attempt to verify that certificate. Defaults to + * false. + */ + requestCert?: boolean | undefined; + /** + * An array of strings or a Buffer naming possible ALPN protocols. + * (Protocols should be ordered by their priority.) + */ + ALPNProtocols?: string[] | Uint8Array[] | Uint8Array | undefined; + /** + * SNICallback(servername, cb) A function that will be + * called if the client supports SNI TLS extension. Two arguments + * will be passed when called: servername and cb. SNICallback should + * invoke cb(null, ctx), where ctx is a SecureContext instance. + * (tls.createSecureContext(...) can be used to get a proper + * SecureContext.) If SNICallback wasn't provided the default callback + * with high-level API will be used (see below). 
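+ *
+ * A minimal sketch (not from the upstream docs) that serves a different
+ * certificate per SNI name; the host name, `.pem` paths and port are
+ * hypothetical placeholders.
+ *
+ * ```js
+ * const fs = require('node:fs');
+ * const tls = require('node:tls');
+ *
+ * // Hypothetical per-hostname secure contexts.
+ * const contexts = {
+ *   'example.org': tls.createSecureContext({
+ *     key: fs.readFileSync('example.org-key.pem'),
+ *     cert: fs.readFileSync('example.org-cert.pem'),
+ *   }),
+ * };
+ *
+ * const server = tls.createServer({
+ *   SNICallback: (servername, cb) => {
+ *     const ctx = contexts[servername];
+ *     if (ctx) cb(null, ctx);
+ *     else cb(new Error(`unknown servername: ${servername}`));
+ *   },
+ * });
+ * server.listen(8443);
+ * ```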
+ */ + SNICallback?: ((servername: string, cb: (err: Error | null, ctx?: SecureContext) => void) => void) | undefined; + /** + * If true the server will reject any connection which is not + * authorized with the list of supplied CAs. This option only has an + * effect if requestCert is true. + * @default true + */ + rejectUnauthorized?: boolean | undefined; + } + interface TlsOptions extends SecureContextOptions, CommonConnectionOptions, net.ServerOpts { + /** + * Abort the connection if the SSL/TLS handshake does not finish in the + * specified number of milliseconds. A 'tlsClientError' is emitted on + * the tls.Server object whenever a handshake times out. Default: + * 120000 (120 seconds). + */ + handshakeTimeout?: number | undefined; + /** + * The number of seconds after which a TLS session created by the + * server will no longer be resumable. See Session Resumption for more + * information. Default: 300. + */ + sessionTimeout?: number | undefined; + /** + * 48-bytes of cryptographically strong pseudo-random data. + */ + ticketKeys?: Buffer | undefined; + /** + * @param socket + * @param identity identity parameter sent from the client. + * @return pre-shared key that must either be + * a buffer or `null` to stop the negotiation process. Returned PSK must be + * compatible with the selected cipher's digest. + * + * When negotiating TLS-PSK (pre-shared keys), this function is called + * with the identity provided by the client. + * If the return value is `null` the negotiation process will stop and an + * "unknown_psk_identity" alert message will be sent to the other party. + * If the server wishes to hide the fact that the PSK identity was not known, + * the callback must provide some random data as `psk` to make the connection + * fail with "decrypt_error" before negotiation is finished. + * PSK ciphers are disabled by default, and using TLS-PSK thus + * requires explicitly specifying a cipher suite with the `ciphers` option. + * More information can be found in the RFC 4279. + */ + pskCallback?(socket: TLSSocket, identity: string): DataView | NodeJS.TypedArray | null; + /** + * hint to send to a client to help + * with selecting the identity during TLS-PSK negotiation. Will be ignored + * in TLS 1.3. Upon failing to set pskIdentityHint `tlsClientError` will be + * emitted with `ERR_TLS_PSK_SET_IDENTIY_HINT_FAILED` code. + */ + pskIdentityHint?: string | undefined; + } + interface PSKCallbackNegotation { + psk: DataView | NodeJS.TypedArray; + identity: string; + } + interface ConnectionOptions extends SecureContextOptions, CommonConnectionOptions { + host?: string | undefined; + port?: number | undefined; + path?: string | undefined; // Creates unix socket connection to path. If this option is specified, `host` and `port` are ignored. + socket?: stream.Duplex | undefined; // Establish secure connection on a given socket rather than creating a new socket + checkServerIdentity?: typeof checkServerIdentity | undefined; + servername?: string | undefined; // SNI TLS Extension + session?: Buffer | undefined; + minDHSize?: number | undefined; + lookup?: net.LookupFunction | undefined; + timeout?: number | undefined; + /** + * When negotiating TLS-PSK (pre-shared keys), this function is called + * with optional identity `hint` provided by the server or `null` + * in case of TLS 1.3 where `hint` was removed. 
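+ *
+ * A client-side sketch (not from the upstream docs); the identity, key, port
+ * and cipher choice are hypothetical, and a custom `checkServerIdentity` is
+ * supplied for the reason explained next.
+ *
+ * ```js
+ * const tls = require('node:tls');
+ *
+ * const socket = tls.connect(8443, {
+ *   ciphers: 'PSK',                        // PSK suites are disabled by default
+ *   checkServerIdentity: () => undefined,  // no certificate to check with PSK
+ *   pskCallback: (hint) => {
+ *     // `hint` is null for TLS 1.3.
+ *     return {
+ *       identity: 'client1',                          // hypothetical identity
+ *       psk: Buffer.from('0123456789abcdef', 'hex'),  // hypothetical key
+ *     };
+ *   },
+ * });
+ * ```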
+ * It will be necessary to provide a custom `tls.checkServerIdentity()` + * for the connection as the default one will try to check hostname/IP + * of the server against the certificate but that's not applicable for PSK + * because there won't be a certificate present. + * More information can be found in the RFC 4279. + * + * @param hint message sent from the server to help client + * decide which identity to use during negotiation. + * Always `null` if TLS 1.3 is used. + * @returns Return `null` to stop the negotiation process. `psk` must be + * compatible with the selected cipher's digest. + * `identity` must use UTF-8 encoding. + */ + pskCallback?(hint: string | null): PSKCallbackNegotation | null; + } + /** + * Accepts encrypted connections using TLS or SSL. + * @since v0.3.2 + */ + class Server extends net.Server { + constructor(secureConnectionListener?: (socket: TLSSocket) => void); + constructor(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void); + /** + * The `server.addContext()` method adds a secure context that will be used if + * the client request's SNI name matches the supplied `hostname` (or wildcard). + * + * When there are multiple matching contexts, the most recently added one is + * used. + * @since v0.5.3 + * @param hostname A SNI host name or wildcard (e.g. `'*'`) + * @param context An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc), or a TLS context object created + * with {@link createSecureContext} itself. + */ + addContext(hostname: string, context: SecureContextOptions): void; + /** + * Returns the session ticket keys. + * + * See `Session Resumption` for more information. + * @since v3.0.0 + * @return A 48-byte buffer containing the session ticket keys. + */ + getTicketKeys(): Buffer; + /** + * The `server.setSecureContext()` method replaces the secure context of an + * existing server. Existing connections to the server are not interrupted. + * @since v11.0.0 + * @param options An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc). + */ + setSecureContext(options: SecureContextOptions): void; + /** + * Sets the session ticket keys. + * + * Changes to the ticket keys are effective only for future server connections. + * Existing or currently pending server connections will use the previous keys. + * + * See `Session Resumption` for more information. + * @since v3.0.0 + * @param keys A 48-byte buffer containing the session ticket keys. + */ + setTicketKeys(keys: Buffer): void; + /** + * events.EventEmitter + * 1. tlsClientError + * 2. newSession + * 3. OCSPRequest + * 4. resumeSession + * 5. secureConnection + * 6. 
keylog + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + addListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + addListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + addListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + addListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + addListener(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "tlsClientError", err: Error, tlsSocket: TLSSocket): boolean; + emit(event: "newSession", sessionId: Buffer, sessionData: Buffer, callback: () => void): boolean; + emit( + event: "OCSPRequest", + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ): boolean; + emit( + event: "resumeSession", + sessionId: Buffer, + callback: (err: Error | null, sessionData: Buffer | null) => void, + ): boolean; + emit(event: "secureConnection", tlsSocket: TLSSocket): boolean; + emit(event: "keylog", line: Buffer, tlsSocket: TLSSocket): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + on(event: "newSession", listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + on( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + on( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + on(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + on(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + once( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + once( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + once( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + once(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + once(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + prependListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + prependListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependListener( + event: "resumeSession", + 
listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + prependListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + prependListener(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + prependOnceListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + prependOnceListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependOnceListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + prependOnceListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + prependOnceListener(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + } + /** + * @deprecated since v0.11.3 Use `tls.TLSSocket` instead. + */ + interface SecurePair { + encrypted: TLSSocket; + cleartext: TLSSocket; + } + type SecureVersion = "TLSv1.3" | "TLSv1.2" | "TLSv1.1" | "TLSv1"; + interface SecureContextOptions { + /** + * If set, this will be called when a client opens a connection using the ALPN extension. + * One argument will be passed to the callback: an object containing `servername` and `protocols` fields, + * respectively containing the server name from the SNI extension (if any) and an array of + * ALPN protocol name strings. The callback must return either one of the strings listed in `protocols`, + * which will be returned to the client as the selected ALPN protocol, or `undefined`, + * to reject the connection with a fatal alert. If a string is returned that does not match one of + * the client's ALPN protocols, an error will be thrown. + * This option cannot be used with the `ALPNProtocols` option, and setting both options will throw an error. + */ + ALPNCallback?: ((arg: { servername: string; protocols: string[] }) => string | undefined) | undefined; + /** + * Optionally override the trusted CA certificates. Default is to trust + * the well-known CAs curated by Mozilla. Mozilla's CAs are completely + * replaced when CAs are explicitly specified using this option. + */ + ca?: string | Buffer | Array | undefined; + /** + * Cert chains in PEM format. One cert chain should be provided per + * private key. Each cert chain should consist of the PEM formatted + * certificate for a provided private key, followed by the PEM + * formatted intermediate certificates (if any), in order, and not + * including the root CA (the root CA must be pre-known to the peer, + * see ca). When providing multiple cert chains, they do not have to + * be in the same order as their private keys in key. If the + * intermediate certificates are not provided, the peer will not be + * able to validate the certificate, and the handshake will fail. + */ + cert?: string | Buffer | Array | undefined; + /** + * Colon-separated list of supported signature algorithms. The list + * can contain digest algorithms (SHA256, MD5 etc.), public key + * algorithms (RSA-PSS, ECDSA etc.), combination of both (e.g + * 'RSA+SHA384') or TLS v1.3 scheme names (e.g. rsa_pss_pss_sha512). 
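+ *
+ * For example (illustrative values only, not from the upstream docs; the
+ * `.pem` paths are hypothetical):
+ *
+ * ```js
+ * const fs = require('node:fs');
+ * const tls = require('node:tls');
+ *
+ * const server = tls.createServer({
+ *   key: fs.readFileSync('server-key.pem'),   // hypothetical paths
+ *   cert: fs.readFileSync('server-cert.pem'),
+ *   // Colon-separated list mixing TLSv1.3 scheme names and
+ *   // algorithm+digest combinations.
+ *   sigalgs: 'rsa_pss_pss_sha512:ECDSA+SHA256:RSA+SHA384',
+ * });
+ * ```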
+ */ + sigalgs?: string | undefined; + /** + * Cipher suite specification, replacing the default. For more + * information, see modifying the default cipher suite. Permitted + * ciphers can be obtained via tls.getCiphers(). Cipher names must be + * uppercased in order for OpenSSL to accept them. + */ + ciphers?: string | undefined; + /** + * Name of an OpenSSL engine which can provide the client certificate. + */ + clientCertEngine?: string | undefined; + /** + * PEM formatted CRLs (Certificate Revocation Lists). + */ + crl?: string | Buffer | Array | undefined; + /** + * `'auto'` or custom Diffie-Hellman parameters, required for non-ECDHE perfect forward secrecy. + * If omitted or invalid, the parameters are silently discarded and DHE ciphers will not be available. + * ECDHE-based perfect forward secrecy will still be available. + */ + dhparam?: string | Buffer | undefined; + /** + * A string describing a named curve or a colon separated list of curve + * NIDs or names, for example P-521:P-384:P-256, to use for ECDH key + * agreement. Set to auto to select the curve automatically. Use + * crypto.getCurves() to obtain a list of available curve names. On + * recent releases, openssl ecparam -list_curves will also display the + * name and description of each available elliptic curve. Default: + * tls.DEFAULT_ECDH_CURVE. + */ + ecdhCurve?: string | undefined; + /** + * Attempt to use the server's cipher suite preferences instead of the + * client's. When true, causes SSL_OP_CIPHER_SERVER_PREFERENCE to be + * set in secureOptions + */ + honorCipherOrder?: boolean | undefined; + /** + * Private keys in PEM format. PEM allows the option of private keys + * being encrypted. Encrypted keys will be decrypted with + * options.passphrase. Multiple keys using different algorithms can be + * provided either as an array of unencrypted key strings or buffers, + * or an array of objects in the form {pem: [, + * passphrase: ]}. The object form can only occur in an array. + * object.passphrase is optional. Encrypted keys will be decrypted with + * object.passphrase if provided, or options.passphrase if it is not. + */ + key?: string | Buffer | Array | undefined; + /** + * Name of an OpenSSL engine to get private key from. Should be used + * together with privateKeyIdentifier. + */ + privateKeyEngine?: string | undefined; + /** + * Identifier of a private key managed by an OpenSSL engine. Should be + * used together with privateKeyEngine. Should not be set together with + * key, because both options define a private key in different ways. + */ + privateKeyIdentifier?: string | undefined; + /** + * Optionally set the maximum TLS version to allow. One + * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the + * `secureProtocol` option, use one or the other. + * **Default:** `'TLSv1.3'`, unless changed using CLI options. Using + * `--tls-max-v1.2` sets the default to `'TLSv1.2'`. Using `--tls-max-v1.3` sets the default to + * `'TLSv1.3'`. If multiple of the options are provided, the highest maximum is used. + */ + maxVersion?: SecureVersion | undefined; + /** + * Optionally set the minimum TLS version to allow. One + * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the + * `secureProtocol` option, use one or the other. It is not recommended to use + * less than TLSv1.2, but it may be required for interoperability. + * **Default:** `'TLSv1.2'`, unless changed using CLI options. Using + * `--tls-v1.0` sets the default to `'TLSv1'`. 
Using `--tls-v1.1` sets the default to + * `'TLSv1.1'`. Using `--tls-min-v1.3` sets the default to + * 'TLSv1.3'. If multiple of the options are provided, the lowest minimum is used. + */ + minVersion?: SecureVersion | undefined; + /** + * Shared passphrase used for a single private key and/or a PFX. + */ + passphrase?: string | undefined; + /** + * PFX or PKCS12 encoded private key and certificate chain. pfx is an + * alternative to providing key and cert individually. PFX is usually + * encrypted, if it is, passphrase will be used to decrypt it. Multiple + * PFX can be provided either as an array of unencrypted PFX buffers, + * or an array of objects in the form {buf: [, + * passphrase: ]}. The object form can only occur in an array. + * object.passphrase is optional. Encrypted PFX will be decrypted with + * object.passphrase if provided, or options.passphrase if it is not. + */ + pfx?: string | Buffer | Array | undefined; + /** + * Optionally affect the OpenSSL protocol behavior, which is not + * usually necessary. This should be used carefully if at all! Value is + * a numeric bitmask of the SSL_OP_* options from OpenSSL Options + */ + secureOptions?: number | undefined; // Value is a numeric bitmask of the `SSL_OP_*` options + /** + * Legacy mechanism to select the TLS protocol version to use, it does + * not support independent control of the minimum and maximum version, + * and does not support limiting the protocol to TLSv1.3. Use + * minVersion and maxVersion instead. The possible values are listed as + * SSL_METHODS, use the function names as strings. For example, use + * 'TLSv1_1_method' to force TLS version 1.1, or 'TLS_method' to allow + * any TLS protocol version up to TLSv1.3. It is not recommended to use + * TLS versions less than 1.2, but it may be required for + * interoperability. Default: none, see minVersion. + */ + secureProtocol?: string | undefined; + /** + * Opaque identifier used by servers to ensure session state is not + * shared between applications. Unused by clients. + */ + sessionIdContext?: string | undefined; + /** + * 48-bytes of cryptographically strong pseudo-random data. + * See Session Resumption for more information. + */ + ticketKeys?: Buffer | undefined; + /** + * The number of seconds after which a TLS session created by the + * server will no longer be resumable. See Session Resumption for more + * information. Default: 300. + */ + sessionTimeout?: number | undefined; + } + interface SecureContext { + context: any; + } + /** + * Verifies the certificate `cert` is issued to `hostname`. + * + * Returns [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error) object, populating it with `reason`, `host`, and `cert` on + * failure. On success, returns [undefined](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Undefined_type). + * + * This function is intended to be used in combination with the`checkServerIdentity` option that can be passed to {@link connect} and as + * such operates on a `certificate object`. For other purposes, consider using `x509.checkHost()` instead. + * + * This function can be overwritten by providing an alternative function as the`options.checkServerIdentity` option that is passed to `tls.connect()`. The + * overwriting function can call `tls.checkServerIdentity()` of course, to augment + * the checks done with additional verification. 
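+ *
+ * A hedged sketch of such an overriding function (not from the upstream
+ * docs); the host and the organisation check are hypothetical.
+ *
+ * ```js
+ * const tls = require('node:tls');
+ *
+ * const socket = tls.connect(443, {
+ *   host: 'example.com',
+ *   servername: 'example.com',
+ *   checkServerIdentity: (hostname, cert) => {
+ *     // Run the built-in host name check first, then layer an extra
+ *     // (hypothetical) organisation check on top of it.
+ *     const err = tls.checkServerIdentity(hostname, cert);
+ *     if (err) return err;
+ *     if (cert.subject.O !== 'Example Org') {
+ *       return new Error(`unexpected certificate owner: ${cert.subject.O}`);
+ *     }
+ *     return undefined;
+ *   },
+ * });
+ * ```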
+ * + * This function is only called if the certificate passed all other checks, such as + * being issued by trusted CA (`options.ca`). + * + * Earlier versions of Node.js incorrectly accepted certificates for a given`hostname` if a matching `uniformResourceIdentifier` subject alternative name + * was present (see [CVE-2021-44531](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44531)). Applications that wish to accept`uniformResourceIdentifier` subject alternative names can use + * a custom`options.checkServerIdentity` function that implements the desired behavior. + * @since v0.8.4 + * @param hostname The host name or IP address to verify the certificate against. + * @param cert A `certificate object` representing the peer's certificate. + */ + function checkServerIdentity(hostname: string, cert: PeerCertificate): Error | undefined; + /** + * Creates a new {@link Server}. The `secureConnectionListener`, if provided, is + * automatically set as a listener for the `'secureConnection'` event. + * + * The `ticketKeys` options is automatically shared between `node:cluster` module + * workers. + * + * The following illustrates a simple echo server: + * + * ```js + * const tls = require('node:tls'); + * const fs = require('node:fs'); + * + * const options = { + * key: fs.readFileSync('server-key.pem'), + * cert: fs.readFileSync('server-cert.pem'), + * + * // This is necessary only if using client certificate authentication. + * requestCert: true, + * + * // This is necessary only if the client uses a self-signed certificate. + * ca: [ fs.readFileSync('client-cert.pem') ], + * }; + * + * const server = tls.createServer(options, (socket) => { + * console.log('server connected', + * socket.authorized ? 'authorized' : 'unauthorized'); + * socket.write('welcome!\n'); + * socket.setEncoding('utf8'); + * socket.pipe(socket); + * }); + * server.listen(8000, () => { + * console.log('server bound'); + * }); + * ``` + * + * The server can be tested by connecting to it using the example client from {@link connect}. + * @since v0.3.2 + */ + function createServer(secureConnectionListener?: (socket: TLSSocket) => void): Server; + function createServer(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void): Server; + /** + * The `callback` function, if specified, will be added as a listener for the `'secureConnect'` event. + * + * `tls.connect()` returns a {@link TLSSocket} object. + * + * Unlike the `https` API, `tls.connect()` does not enable the + * SNI (Server Name Indication) extension by default, which may cause some + * servers to return an incorrect certificate or reject the connection + * altogether. To enable SNI, set the `servername` option in addition + * to `host`. + * + * The following illustrates a client for the echo server example from {@link createServer}: + * + * ```js + * // Assumes an echo server that is listening on port 8000. + * const tls = require('node:tls'); + * const fs = require('node:fs'); + * + * const options = { + * // Necessary only if the server requires client certificate authentication. + * key: fs.readFileSync('client-key.pem'), + * cert: fs.readFileSync('client-cert.pem'), + * + * // Necessary only if the server uses a self-signed certificate. + * ca: [ fs.readFileSync('server-cert.pem') ], + * + * // Necessary only if the server's cert isn't for "localhost". + * checkServerIdentity: () => { return null; }, + * }; + * + * const socket = tls.connect(8000, options, () => { + * console.log('client connected', + * socket.authorized ? 
'authorized' : 'unauthorized'); + * process.stdin.pipe(socket); + * process.stdin.resume(); + * }); + * socket.setEncoding('utf8'); + * socket.on('data', (data) => { + * console.log(data); + * }); + * socket.on('end', () => { + * console.log('server ends connection'); + * }); + * ``` + * @since v0.11.3 + */ + function connect(options: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + function connect( + port: number, + host?: string, + options?: ConnectionOptions, + secureConnectListener?: () => void, + ): TLSSocket; + function connect(port: number, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + /** + * Creates a new secure pair object with two streams, one of which reads and writes + * the encrypted data and the other of which reads and writes the cleartext data. + * Generally, the encrypted stream is piped to/from an incoming encrypted data + * stream and the cleartext one is used as a replacement for the initial encrypted + * stream. + * + * `tls.createSecurePair()` returns a `tls.SecurePair` object with `cleartext` and`encrypted` stream properties. + * + * Using `cleartext` has the same API as {@link TLSSocket}. + * + * The `tls.createSecurePair()` method is now deprecated in favor of`tls.TLSSocket()`. For example, the code: + * + * ```js + * pair = tls.createSecurePair(// ... ); + * pair.encrypted.pipe(socket); + * socket.pipe(pair.encrypted); + * ``` + * + * can be replaced by: + * + * ```js + * secureSocket = tls.TLSSocket(socket, options); + * ``` + * + * where `secureSocket` has the same API as `pair.cleartext`. + * @since v0.3.2 + * @deprecated Since v0.11.3 - Use {@link TLSSocket} instead. + * @param context A secure context object as returned by `tls.createSecureContext()` + * @param isServer `true` to specify that this TLS connection should be opened as a server. + * @param requestCert `true` to specify whether a server should request a certificate from a connecting client. Only applies when `isServer` is `true`. + * @param rejectUnauthorized If not `false` a server automatically reject clients with invalid certificates. Only applies when `isServer` is `true`. + */ + function createSecurePair( + context?: SecureContext, + isServer?: boolean, + requestCert?: boolean, + rejectUnauthorized?: boolean, + ): SecurePair; + /** + * {@link createServer} sets the default value of the `honorCipherOrder` option + * to `true`, other APIs that create secure contexts leave it unset. + * + * {@link createServer} uses a 128 bit truncated SHA1 hash value generated + * from `process.argv` as the default value of the `sessionIdContext` option, other + * APIs that create secure contexts have no default value. + * + * The `tls.createSecureContext()` method creates a `SecureContext` object. It is + * usable as an argument to several `tls` APIs, such as `server.addContext()`, + * but has no public methods. The {@link Server} constructor and the {@link createServer} method do not support the `secureContext` option. + * + * A key is _required_ for ciphers that use certificates. Either `key` or`pfx` can be used to provide it. + * + * If the `ca` option is not given, then Node.js will default to using [Mozilla's publicly trusted list of + * CAs](https://hg.mozilla.org/mozilla-central/raw-file/tip/security/nss/lib/ckfw/builtins/certdata.txt). + * + * Custom DHE parameters are discouraged in favor of the new `dhparam: 'auto'`option. When set to `'auto'`, well-known DHE parameters of sufficient strength + * will be selected automatically. 
Otherwise, if necessary, `openssl dhparam` can + * be used to create custom parameters. The key length must be greater than or + * equal to 1024 bits or else an error will be thrown. Although 1024 bits is + * permissible, use 2048 bits or larger for stronger security. + * @since v0.11.13 + */ + function createSecureContext(options?: SecureContextOptions): SecureContext; + /** + * Returns an array with the names of the supported TLS ciphers. The names are + * lower-case for historical reasons, but must be uppercased to be used in + * the `ciphers` option of {@link createSecureContext}. + * + * Not all supported ciphers are enabled by default. See `Modifying the default TLS cipher suite`. + * + * Cipher names that start with `'tls_'` are for TLSv1.3, all the others are for + * TLSv1.2 and below. + * + * ```js + * console.log(tls.getCiphers()); // ['aes128-gcm-sha256', 'aes128-sha', ...] + * ``` + * @since v0.10.2 + */ + function getCiphers(): string[]; + /** + * The default curve name to use for ECDH key agreement in a tls server. + * The default value is 'auto'. See tls.createSecureContext() for further + * information. + */ + let DEFAULT_ECDH_CURVE: string; + /** + * The default value of the maxVersion option of + * tls.createSecureContext(). It can be assigned any of the supported TLS + * protocol versions, 'TLSv1.3', 'TLSv1.2', 'TLSv1.1', or 'TLSv1'. Default: + * 'TLSv1.3', unless changed using CLI options. Using --tls-max-v1.2 sets + * the default to 'TLSv1.2'. Using --tls-max-v1.3 sets the default to + * 'TLSv1.3'. If multiple of the options are provided, the highest maximum + * is used. + */ + let DEFAULT_MAX_VERSION: SecureVersion; + /** + * The default value of the minVersion option of tls.createSecureContext(). + * It can be assigned any of the supported TLS protocol versions, + * 'TLSv1.3', 'TLSv1.2', 'TLSv1.1', or 'TLSv1'. Default: 'TLSv1.2', unless + * changed using CLI options. Using --tls-min-v1.0 sets the default to + * 'TLSv1'. Using --tls-min-v1.1 sets the default to 'TLSv1.1'. Using + * --tls-min-v1.3 sets the default to 'TLSv1.3'. If multiple of the options + * are provided, the lowest minimum is used. + */ + let DEFAULT_MIN_VERSION: SecureVersion; + /** + * The default value of the ciphers option of tls.createSecureContext(). + * It can be assigned any of the supported OpenSSL ciphers. + * Defaults to the content of crypto.constants.defaultCoreCipherList, unless + * changed using CLI options using --tls-default-ciphers. + */ + let DEFAULT_CIPHERS: string; + /** + * An immutable array of strings representing the root certificates (in PEM + * format) used for verifying peer certificates. This is the default value + * of the ca option to tls.createSecureContext(). + */ + const rootCertificates: readonly string[]; +} +declare module "node:tls" { + export * from "tls"; +} diff --git a/dist/node_modules/@types/node/trace_events.d.ts b/dist/node_modules/@types/node/trace_events.d.ts new file mode 100644 index 0000000..3361359 --- /dev/null +++ b/dist/node_modules/@types/node/trace_events.d.ts @@ -0,0 +1,182 @@ +/** + * The `node:trace_events` module provides a mechanism to centralize tracing + * information generated by V8, Node.js core, and userspace code. + * + * Tracing can be enabled with the `--trace-event-categories` command-line flag + * or by using the `node:trace_events` module. The `--trace-event-categories` flag + * accepts a list of comma-separated category names. + * + * The available categories are: + * + * * `node`: An empty placeholder. 
+ * * `node.async_hooks`: Enables capture of detailed `async_hooks` trace data. + * The `async_hooks` events have a unique `asyncId` and a special `triggerId` `triggerAsyncId` property. + * * `node.bootstrap`: Enables capture of Node.js bootstrap milestones. + * * `node.console`: Enables capture of `console.time()` and `console.count()`output. + * * `node.threadpoolwork.sync`: Enables capture of trace data for threadpool + * synchronous operations, such as `blob`, `zlib`, `crypto` and `node_api`. + * * `node.threadpoolwork.async`: Enables capture of trace data for threadpool + * asynchronous operations, such as `blob`, `zlib`, `crypto` and `node_api`. + * * `node.dns.native`: Enables capture of trace data for DNS queries. + * * `node.net.native`: Enables capture of trace data for network. + * * `node.environment`: Enables capture of Node.js Environment milestones. + * * `node.fs.sync`: Enables capture of trace data for file system sync methods. + * * `node.fs_dir.sync`: Enables capture of trace data for file system sync + * directory methods. + * * `node.fs.async`: Enables capture of trace data for file system async methods. + * * `node.fs_dir.async`: Enables capture of trace data for file system async + * directory methods. + * * `node.perf`: Enables capture of `Performance API` measurements. + * * `node.perf.usertiming`: Enables capture of only Performance API User Timing + * measures and marks. + * * `node.perf.timerify`: Enables capture of only Performance API timerify + * measurements. + * * `node.promises.rejections`: Enables capture of trace data tracking the number + * of unhandled Promise rejections and handled-after-rejections. + * * `node.vm.script`: Enables capture of trace data for the `node:vm` module's`runInNewContext()`, `runInContext()`, and `runInThisContext()` methods. + * * `v8`: The `V8` events are GC, compiling, and execution related. + * * `node.http`: Enables capture of trace data for http request / response. + * + * By default the `node`, `node.async_hooks`, and `v8` categories are enabled. + * + * ```bash + * node --trace-event-categories v8,node,node.async_hooks server.js + * ``` + * + * Prior versions of Node.js required the use of the `--trace-events-enabled`flag to enable trace events. This requirement has been removed. However, the`--trace-events-enabled` flag _may_ still be + * used and will enable the`node`, `node.async_hooks`, and `v8` trace event categories by default. + * + * ```bash + * node --trace-events-enabled + * + * # is equivalent to + * + * node --trace-event-categories v8,node,node.async_hooks + * ``` + * + * Alternatively, trace events may be enabled using the `node:trace_events` module: + * + * ```js + * const trace_events = require('node:trace_events'); + * const tracing = trace_events.createTracing({ categories: ['node.perf'] }); + * tracing.enable(); // Enable trace event capture for the 'node.perf' category + * + * // do work + * + * tracing.disable(); // Disable trace event capture for the 'node.perf' category + * ``` + * + * Running Node.js with tracing enabled will produce log files that can be opened + * in the [`chrome://tracing`](https://www.chromium.org/developers/how-tos/trace-event-profiling-tool) tab of Chrome. + * + * The logging file is by default called `node_trace.${rotation}.log`, where`${rotation}` is an incrementing log-rotation id. 
The filepath pattern can + * be specified with `--trace-event-file-pattern` that accepts a template + * string that supports `${rotation}` and `${pid}`: + * + * ```bash + * node --trace-event-categories v8 --trace-event-file-pattern '${pid}-${rotation}.log' server.js + * ``` + * + * To guarantee that the log file is properly generated after signal events like`SIGINT`, `SIGTERM`, or `SIGBREAK`, make sure to have the appropriate handlers + * in your code, such as: + * + * ```js + * process.on('SIGINT', function onSigint() { + * console.info('Received SIGINT.'); + * process.exit(130); // Or applicable exit code depending on OS and signal + * }); + * ``` + * + * The tracing system uses the same time source + * as the one used by `process.hrtime()`. + * However the trace-event timestamps are expressed in microseconds, + * unlike `process.hrtime()` which returns nanoseconds. + * + * The features from this module are not available in `Worker` threads. + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/trace_events.js) + */ +declare module "trace_events" { + /** + * The `Tracing` object is used to enable or disable tracing for sets of + * categories. Instances are created using the + * `trace_events.createTracing()` method. + * + * When created, the `Tracing` object is disabled. Calling the + * `tracing.enable()` method adds the categories to the set of enabled trace + * event categories. Calling `tracing.disable()` will remove the categories + * from the set of enabled trace event categories. + */ + interface Tracing { + /** + * A comma-separated list of the trace event categories covered by this + * `Tracing` object. + */ + readonly categories: string; + /** + * Disables this `Tracing` object. + * + * Only trace event categories _not_ covered by other enabled `Tracing` + * objects and _not_ specified by the `--trace-event-categories` flag + * will be disabled. + */ + disable(): void; + /** + * Enables this `Tracing` object for the set of categories covered by + * the `Tracing` object. + */ + enable(): void; + /** + * `true` only if the `Tracing` object has been enabled. + */ + readonly enabled: boolean; + } + interface CreateTracingOptions { + /** + * An array of trace category names. Values included in the array are + * coerced to a string when possible. An error will be thrown if the + * value cannot be coerced. + */ + categories: string[]; + } + /** + * Creates and returns a `Tracing` object for the given set of `categories`. + * + * ```js + * const trace_events = require('node:trace_events'); + * const categories = ['node.perf', 'node.async_hooks']; + * const tracing = trace_events.createTracing({ categories }); + * tracing.enable(); + * // do stuff + * tracing.disable(); + * ``` + * @since v10.0.0 + * @return . + */ + function createTracing(options: CreateTracingOptions): Tracing; + /** + * Returns a comma-separated list of all currently-enabled trace event + * categories. The current set of enabled trace event categories is determined + * by the _union_ of all currently-enabled `Tracing` objects and any categories + * enabled using the `--trace-event-categories` flag. + * + * Given the file `test.js` below, the command`node --trace-event-categories node.perf test.js` will print`'node.async_hooks,node.perf'` to the console. 
+ * + * ```js + * const trace_events = require('node:trace_events'); + * const t1 = trace_events.createTracing({ categories: ['node.async_hooks'] }); + * const t2 = trace_events.createTracing({ categories: ['node.perf'] }); + * const t3 = trace_events.createTracing({ categories: ['v8'] }); + * + * t1.enable(); + * t2.enable(); + * + * console.log(trace_events.getEnabledCategories()); + * ``` + * @since v10.0.0 + */ + function getEnabledCategories(): string | undefined; +} +declare module "node:trace_events" { + export * from "trace_events"; +} diff --git a/dist/node_modules/@types/node/ts4.8/assert.d.ts b/dist/node_modules/@types/node/ts4.8/assert.d.ts new file mode 100644 index 0000000..cd82143 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/assert.d.ts @@ -0,0 +1,996 @@ +/** + * The `node:assert` module provides a set of assertion functions for verifying + * invariants. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/assert.js) + */ +declare module "assert" { + /** + * An alias of {@link ok}. + * @since v0.5.9 + * @param value The input that is checked for being truthy. + */ + function assert(value: unknown, message?: string | Error): asserts value; + namespace assert { + /** + * Indicates the failure of an assertion. All errors thrown by the `node:assert`module will be instances of the `AssertionError` class. + */ + class AssertionError extends Error { + /** + * Set to the `actual` argument for methods such as {@link assert.strictEqual()}. + */ + actual: unknown; + /** + * Set to the `expected` argument for methods such as {@link assert.strictEqual()}. + */ + expected: unknown; + /** + * Set to the passed in operator value. + */ + operator: string; + /** + * Indicates if the message was auto-generated (`true`) or not. + */ + generatedMessage: boolean; + /** + * Value is always `ERR_ASSERTION` to show that the error is an assertion error. + */ + code: "ERR_ASSERTION"; + constructor(options?: { + /** If provided, the error message is set to this value. */ + message?: string | undefined; + /** The `actual` property on the error instance. */ + actual?: unknown | undefined; + /** The `expected` property on the error instance. */ + expected?: unknown | undefined; + /** The `operator` property on the error instance. */ + operator?: string | undefined; + /** If provided, the generated stack trace omits frames before this function. */ + // eslint-disable-next-line @typescript-eslint/ban-types + stackStartFn?: Function | undefined; + }); + } + /** + * This feature is deprecated and will be removed in a future version. + * Please consider using alternatives such as the `mock` helper function. + * @since v14.2.0, v12.19.0 + * @deprecated Deprecated + */ + class CallTracker { + /** + * The wrapper function is expected to be called exactly `exact` times. If the + * function has not been called exactly `exact` times when `tracker.verify()` is called, then `tracker.verify()` will throw an + * error. + * + * ```js + * import assert from 'node:assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func); + * ``` + * @since v14.2.0, v12.19.0 + * @param [fn='A no-op function'] + * @param [exact=1] + * @return that wraps `fn`. 
+ */ + calls(exact?: number): () => void; + calls any>(fn?: Func, exact?: number): Func; + /** + * Example: + * + * ```js + * import assert from 'node:assert'; + * + * const tracker = new assert.CallTracker(); + * + * function func() {} + * const callsfunc = tracker.calls(func); + * callsfunc(1, 2, 3); + * + * assert.deepStrictEqual(tracker.getCalls(callsfunc), + * [{ thisArg: undefined, arguments: [1, 2, 3] }]); + * ``` + * @since v18.8.0, v16.18.0 + * @param fn + * @return An Array with all the calls to a tracked function. + */ + getCalls(fn: Function): CallTrackerCall[]; + /** + * The arrays contains information about the expected and actual number of calls of + * the functions that have not been called the expected number of times. + * + * ```js + * import assert from 'node:assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func, 2); + * + * // Returns an array containing information on callsfunc() + * console.log(tracker.report()); + * // [ + * // { + * // message: 'Expected the func function to be executed 2 time(s) but was + * // executed 0 time(s).', + * // actual: 0, + * // expected: 2, + * // operator: 'func', + * // stack: stack trace + * // } + * // ] + * ``` + * @since v14.2.0, v12.19.0 + * @return An Array of objects containing information about the wrapper functions returned by `calls`. + */ + report(): CallTrackerReportInformation[]; + /** + * Reset calls of the call tracker. + * If a tracked function is passed as an argument, the calls will be reset for it. + * If no arguments are passed, all tracked functions will be reset. + * + * ```js + * import assert from 'node:assert'; + * + * const tracker = new assert.CallTracker(); + * + * function func() {} + * const callsfunc = tracker.calls(func); + * + * callsfunc(); + * // Tracker was called once + * assert.strictEqual(tracker.getCalls(callsfunc).length, 1); + * + * tracker.reset(callsfunc); + * assert.strictEqual(tracker.getCalls(callsfunc).length, 0); + * ``` + * @since v18.8.0, v16.18.0 + * @param fn a tracked function to reset. + */ + reset(fn?: Function): void; + /** + * Iterates through the list of functions passed to `tracker.calls()` and will throw an error for functions that + * have not been called the expected number of times. + * + * ```js + * import assert from 'node:assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func, 2); + * + * callsfunc(); + * + * // Will throw an error since callsfunc() was only called once. + * tracker.verify(); + * ``` + * @since v14.2.0, v12.19.0 + */ + verify(): void; + } + interface CallTrackerCall { + thisArg: object; + arguments: unknown[]; + } + interface CallTrackerReportInformation { + message: string; + /** The actual number of times the function was called. */ + actual: number; + /** The number of times the function was expected to be called. */ + expected: number; + /** The name of the function that is wrapped. */ + operator: string; + /** A stack trace of the function. 
*/ + stack: object; + } + type AssertPredicate = RegExp | (new() => object) | ((thrown: unknown) => boolean) | object | Error; + /** + * Throws an `AssertionError` with the provided error message or a default + * error message. If the `message` parameter is an instance of an `Error` then + * it will be thrown instead of the `AssertionError`. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.fail(); + * // AssertionError [ERR_ASSERTION]: Failed + * + * assert.fail('boom'); + * // AssertionError [ERR_ASSERTION]: boom + * + * assert.fail(new TypeError('need array')); + * // TypeError: need array + * ``` + * + * Using `assert.fail()` with more than two arguments is possible but deprecated. + * See below for further details. + * @since v0.1.21 + * @param [message='Failed'] + */ + function fail(message?: string | Error): never; + /** @deprecated since v10.0.0 - use fail([message]) or other assert functions instead. */ + function fail( + actual: unknown, + expected: unknown, + message?: string | Error, + operator?: string, + // eslint-disable-next-line @typescript-eslint/ban-types + stackStartFn?: Function, + ): never; + /** + * Tests if `value` is truthy. It is equivalent to`assert.equal(!!value, true, message)`. + * + * If `value` is not truthy, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is `undefined`, a default + * error message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * If no arguments are passed in at all `message` will be set to the string:`` 'No value argument passed to `assert.ok()`' ``. + * + * Be aware that in the `repl` the error message will be different to the one + * thrown in a file! See below for further details. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.ok(true); + * // OK + * assert.ok(1); + * // OK + * + * assert.ok(); + * // AssertionError: No value argument passed to `assert.ok()` + * + * assert.ok(false, 'it\'s false'); + * // AssertionError: it's false + * + * // In the repl: + * assert.ok(typeof 123 === 'string'); + * // AssertionError: false == true + * + * // In a file (e.g. test.js): + * assert.ok(typeof 123 === 'string'); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(typeof 123 === 'string') + * + * assert.ok(false); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(false) + * + * assert.ok(0); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(0) + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * // Using `assert()` works the same: + * assert(0); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert(0) + * ``` + * @since v0.1.21 + */ + function ok(value: unknown, message?: string | Error): asserts value; + /** + * **Strict assertion mode** + * + * An alias of {@link strictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link strictEqual} instead. + * + * Tests shallow, coercive equality between the `actual` and `expected` parameters + * using the [`==` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Equality). `NaN` is specially handled + * and treated as being identical if both sides are `NaN`. 
+ * + * ```js + * import assert from 'node:assert'; + * + * assert.equal(1, 1); + * // OK, 1 == 1 + * assert.equal(1, '1'); + * // OK, 1 == '1' + * assert.equal(NaN, NaN); + * // OK + * + * assert.equal(1, 2); + * // AssertionError: 1 == 2 + * assert.equal({ a: { b: 1 } }, { a: { b: 1 } }); + * // AssertionError: { a: { b: 1 } } == { a: { b: 1 } } + * ``` + * + * If the values are not equal, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is undefined, a default + * error message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * @since v0.1.21 + */ + function equal(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link notStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link notStrictEqual} instead. + * + * Tests shallow, coercive inequality with the [`!=` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Inequality). `NaN` is + * specially handled and treated as being identical if both sides are `NaN`. + * + * ```js + * import assert from 'node:assert'; + * + * assert.notEqual(1, 2); + * // OK + * + * assert.notEqual(1, 1); + * // AssertionError: 1 != 1 + * + * assert.notEqual(1, '1'); + * // AssertionError: 1 != '1' + * ``` + * + * If the values are equal, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is undefined, a default error + * message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * @since v0.1.21 + */ + function notEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link deepStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link deepStrictEqual} instead. + * + * Tests for deep equality between the `actual` and `expected` parameters. Consider + * using {@link deepStrictEqual} instead. {@link deepEqual} can have + * surprising results. + * + * _Deep equality_ means that the enumerable "own" properties of child objects + * are also recursively evaluated by the following rules. + * @since v0.1.21 + */ + function deepEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link notDeepStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link notDeepStrictEqual} instead. + * + * Tests for any deep inequality. Opposite of {@link deepEqual}. + * + * ```js + * import assert from 'node:assert'; + * + * const obj1 = { + * a: { + * b: 1, + * }, + * }; + * const obj2 = { + * a: { + * b: 2, + * }, + * }; + * const obj3 = { + * a: { + * b: 1, + * }, + * }; + * const obj4 = { __proto__: obj1 }; + * + * assert.notDeepEqual(obj1, obj1); + * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } } + * + * assert.notDeepEqual(obj1, obj2); + * // OK + * + * assert.notDeepEqual(obj1, obj3); + * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } } + * + * assert.notDeepEqual(obj1, obj4); + * // OK + * ``` + * + * If the values are deeply equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. 
If the`message` parameter is undefined, a default + * error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function notDeepEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Tests strict equality between the `actual` and `expected` parameters as + * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is). + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.strictEqual(1, 2); + * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal: + * // + * // 1 !== 2 + * + * assert.strictEqual(1, 1); + * // OK + * + * assert.strictEqual('Hello foobar', 'Hello World!'); + * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal: + * // + actual - expected + * // + * // + 'Hello foobar' + * // - 'Hello World!' + * // ^ + * + * const apples = 1; + * const oranges = 2; + * assert.strictEqual(apples, oranges, `apples ${apples} !== oranges ${oranges}`); + * // AssertionError [ERR_ASSERTION]: apples 1 !== oranges 2 + * + * assert.strictEqual(1, '1', new TypeError('Inputs are not identical')); + * // TypeError: Inputs are not identical + * ``` + * + * If the values are not strictly equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. If the`message` parameter is undefined, a + * default error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function strictEqual(actual: unknown, expected: T, message?: string | Error): asserts actual is T; + /** + * Tests strict inequality between the `actual` and `expected` parameters as + * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is). + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.notStrictEqual(1, 2); + * // OK + * + * assert.notStrictEqual(1, 1); + * // AssertionError [ERR_ASSERTION]: Expected "actual" to be strictly unequal to: + * // + * // 1 + * + * assert.notStrictEqual(1, '1'); + * // OK + * ``` + * + * If the values are strictly equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. If the`message` parameter is undefined, a + * default error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function notStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Tests for deep equality between the `actual` and `expected` parameters. + * "Deep" equality means that the enumerable "own" properties of child objects + * are recursively evaluated also by the following rules. + * @since v1.2.0 + */ + function deepStrictEqual(actual: unknown, expected: T, message?: string | Error): asserts actual is T; + /** + * Tests for deep strict inequality. Opposite of {@link deepStrictEqual}. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.notDeepStrictEqual({ a: 1 }, { a: '1' }); + * // OK + * ``` + * + * If the values are deeply and strictly equal, an `AssertionError` is thrown + * with a `message` property set equal to the value of the `message` parameter. 
If + * the `message` parameter is undefined, a default error message is assigned. If + * the `message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v1.2.0 + */ + function notDeepStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Expects the function `fn` to throw an error. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function, + * a validation object where each property will be tested for strict deep equality, + * or an instance of error where each property will be tested for strict deep + * equality including the non-enumerable `message` and `name` properties. When + * using an object, it is also possible to use a regular expression, when + * validating against a string property. See below for examples. + * + * If specified, `message` will be appended to the message provided by the`AssertionError` if the `fn` call fails to throw or in case the error validation + * fails. + * + * Custom validation object/error instance: + * + * ```js + * import assert from 'node:assert/strict'; + * + * const err = new TypeError('Wrong value'); + * err.code = 404; + * err.foo = 'bar'; + * err.info = { + * nested: true, + * baz: 'text', + * }; + * err.reg = /abc/i; + * + * assert.throws( + * () => { + * throw err; + * }, + * { + * name: 'TypeError', + * message: 'Wrong value', + * info: { + * nested: true, + * baz: 'text', + * }, + * // Only properties on the validation object will be tested for. + * // Using nested objects requires all properties to be present. Otherwise + * // the validation is going to fail. + * }, + * ); + * + * // Using regular expressions to validate error properties: + * assert.throws( + * () => { + * throw err; + * }, + * { + * // The `name` and `message` properties are strings and using regular + * // expressions on those will match against the string. If they fail, an + * // error is thrown. + * name: /^TypeError$/, + * message: /Wrong/, + * foo: 'bar', + * info: { + * nested: true, + * // It is not possible to use regular expressions for nested properties! + * baz: 'text', + * }, + * // The `reg` property contains a regular expression and only if the + * // validation object contains an identical regular expression, it is going + * // to pass. + * reg: /abc/i, + * }, + * ); + * + * // Fails due to the different `message` and `name` properties: + * assert.throws( + * () => { + * const otherErr = new Error('Not found'); + * // Copy all enumerable properties from `err` to `otherErr`. + * for (const [key, value] of Object.entries(err)) { + * otherErr[key] = value; + * } + * throw otherErr; + * }, + * // The error's `message` and `name` properties will also be checked when using + * // an error as validation object. + * err, + * ); + * ``` + * + * Validate instanceof using constructor: + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * Error, + * ); + * ``` + * + * Validate error message using [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions): + * + * Using a regular expression runs `.toString` on the error object, and will + * therefore also include the error name. 
+ * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * /^Error: Wrong value$/, + * ); + * ``` + * + * Custom error validation: + * + * The function must return `true` to indicate all internal validations passed. + * It will otherwise fail with an `AssertionError`. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * (err) => { + * assert(err instanceof Error); + * assert(/value/.test(err)); + * // Avoid returning anything from validation functions besides `true`. + * // Otherwise, it's not clear what part of the validation failed. Instead, + * // throw an error about the specific validation that failed (as done in this + * // example) and add as much helpful debugging information to that error as + * // possible. + * return true; + * }, + * 'unexpected error', + * ); + * ``` + * + * `error` cannot be a string. If a string is provided as the second + * argument, then `error` is assumed to be omitted and the string will be used for`message` instead. This can lead to easy-to-miss mistakes. Using the same + * message as the thrown error message is going to result in an`ERR_AMBIGUOUS_ARGUMENT` error. Please read the example below carefully if using + * a string as the second argument gets considered: + * + * ```js + * import assert from 'node:assert/strict'; + * + * function throwingFirst() { + * throw new Error('First'); + * } + * + * function throwingSecond() { + * throw new Error('Second'); + * } + * + * function notThrowing() {} + * + * // The second argument is a string and the input function threw an Error. + * // The first case will not throw as it does not match for the error message + * // thrown by the input function! + * assert.throws(throwingFirst, 'Second'); + * // In the next example the message has no benefit over the message from the + * // error and since it is not clear if the user intended to actually match + * // against the error message, Node.js throws an `ERR_AMBIGUOUS_ARGUMENT` error. + * assert.throws(throwingSecond, 'Second'); + * // TypeError [ERR_AMBIGUOUS_ARGUMENT] + * + * // The string is only used (as message) in case the function does not throw: + * assert.throws(notThrowing, 'Second'); + * // AssertionError [ERR_ASSERTION]: Missing expected exception: Second + * + * // If it was intended to match for the error message do this instead: + * // It does not throw because the error messages match. + * assert.throws(throwingSecond, /Second$/); + * + * // If the error message does not match, an AssertionError is thrown. + * assert.throws(throwingFirst, /Second$/); + * // AssertionError [ERR_ASSERTION] + * ``` + * + * Due to the confusing error-prone notation, avoid a string as the second + * argument. + * @since v0.1.21 + */ + function throws(block: () => unknown, message?: string | Error): void; + function throws(block: () => unknown, error: AssertPredicate, message?: string | Error): void; + /** + * Asserts that the function `fn` does not throw an error. + * + * Using `assert.doesNotThrow()` is actually not useful because there + * is no benefit in catching an error and then rethrowing it. Instead, consider + * adding a comment next to the specific code path that should not throw and keep + * error messages as expressive as possible. + * + * When `assert.doesNotThrow()` is called, it will immediately call the `fn`function. 
+ * + * If an error is thrown and it is the same type as that specified by the `error`parameter, then an `AssertionError` is thrown. If the error is of a + * different type, or if the `error` parameter is undefined, the error is + * propagated back to the caller. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), or a validation + * function. See {@link throws} for more details. + * + * The following, for instance, will throw the `TypeError` because there is no + * matching error type in the assertion: + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * SyntaxError, + * ); + * ``` + * + * However, the following will result in an `AssertionError` with the message + * 'Got unwanted exception...': + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * TypeError, + * ); + * ``` + * + * If an `AssertionError` is thrown and a value is provided for the `message`parameter, the value of `message` will be appended to the `AssertionError` message: + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * /Wrong value/, + * 'Whoops', + * ); + * // Throws: AssertionError: Got unwanted exception: Whoops + * ``` + * @since v0.1.21 + */ + function doesNotThrow(block: () => unknown, message?: string | Error): void; + function doesNotThrow(block: () => unknown, error: AssertPredicate, message?: string | Error): void; + /** + * Throws `value` if `value` is not `undefined` or `null`. This is useful when + * testing the `error` argument in callbacks. The stack trace contains all frames + * from the error passed to `ifError()` including the potential new frames for`ifError()` itself. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.ifError(null); + * // OK + * assert.ifError(0); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 0 + * assert.ifError('error'); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 'error' + * assert.ifError(new Error()); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: Error + * + * // Create some random error frames. + * let err; + * (function errorFrame() { + * err = new Error('test error'); + * })(); + * + * (function ifErrorFrame() { + * assert.ifError(err); + * })(); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: test error + * // at ifErrorFrame + * // at errorFrame + * ``` + * @since v0.1.97 + */ + function ifError(value: unknown): asserts value is null | undefined; + /** + * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately + * calls the function and awaits the returned promise to complete. It will then + * check that the promise is rejected. + * + * If `asyncFn` is a function and it throws an error synchronously,`assert.rejects()` will return a rejected `Promise` with that error. If the + * function does not return a promise, `assert.rejects()` will return a rejected`Promise` with an `ERR_INVALID_RETURN_VALUE` error. In both cases the error + * handler is skipped. + * + * Besides the async nature to await the completion behaves identically to {@link throws}. 
+ * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function, + * an object where each property will be tested for, or an instance of error where + * each property will be tested for including the non-enumerable `message` and`name` properties. + * + * If specified, `message` will be the message provided by the `AssertionError` if the `asyncFn` fails to reject. + * + * ```js + * import assert from 'node:assert/strict'; + * + * await assert.rejects( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * { + * name: 'TypeError', + * message: 'Wrong value', + * }, + * ); + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * await assert.rejects( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * (err) => { + * assert.strictEqual(err.name, 'TypeError'); + * assert.strictEqual(err.message, 'Wrong value'); + * return true; + * }, + * ); + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.rejects( + * Promise.reject(new Error('Wrong value')), + * Error, + * ).then(() => { + * // ... + * }); + * ``` + * + * `error` cannot be a string. If a string is provided as the second + * argument, then `error` is assumed to be omitted and the string will be used for`message` instead. This can lead to easy-to-miss mistakes. Please read the + * example in {@link throws} carefully if using a string as the second + * argument gets considered. + * @since v10.0.0 + */ + function rejects(block: (() => Promise) | Promise, message?: string | Error): Promise; + function rejects( + block: (() => Promise) | Promise, + error: AssertPredicate, + message?: string | Error, + ): Promise; + /** + * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately + * calls the function and awaits the returned promise to complete. It will then + * check that the promise is not rejected. + * + * If `asyncFn` is a function and it throws an error synchronously,`assert.doesNotReject()` will return a rejected `Promise` with that error. If + * the function does not return a promise, `assert.doesNotReject()` will return a + * rejected `Promise` with an `ERR_INVALID_RETURN_VALUE` error. In both cases + * the error handler is skipped. + * + * Using `assert.doesNotReject()` is actually not useful because there is little + * benefit in catching a rejection and then rejecting it again. Instead, consider + * adding a comment next to the specific code path that should not reject and keep + * error messages as expressive as possible. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), or a validation + * function. See {@link throws} for more details. + * + * Besides the async nature to await the completion behaves identically to {@link doesNotThrow}. + * + * ```js + * import assert from 'node:assert/strict'; + * + * await assert.doesNotReject( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * SyntaxError, + * ); + * ``` + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotReject(Promise.reject(new TypeError('Wrong value'))) + * .then(() => { + * // ... 
+ * }); + * ``` + * @since v10.0.0 + */ + function doesNotReject( + block: (() => Promise) | Promise, + message?: string | Error, + ): Promise; + function doesNotReject( + block: (() => Promise) | Promise, + error: AssertPredicate, + message?: string | Error, + ): Promise; + /** + * Expects the `string` input to match the regular expression. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.match('I will fail', /pass/); + * // AssertionError [ERR_ASSERTION]: The input did not match the regular ... + * + * assert.match(123, /pass/); + * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string. + * + * assert.match('I will pass', /pass/); + * // OK + * ``` + * + * If the values do not match, or if the `string` argument is of another type than`string`, an `AssertionError` is thrown with a `message` property set equal + * to the value of the `message` parameter. If the `message` parameter is + * undefined, a default error message is assigned. If the `message` parameter is an + * instance of an `Error` then it will be thrown instead of the `AssertionError`. + * @since v13.6.0, v12.16.0 + */ + function match(value: string, regExp: RegExp, message?: string | Error): void; + /** + * Expects the `string` input not to match the regular expression. + * + * ```js + * import assert from 'node:assert/strict'; + * + * assert.doesNotMatch('I will fail', /fail/); + * // AssertionError [ERR_ASSERTION]: The input was expected to not match the ... + * + * assert.doesNotMatch(123, /pass/); + * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string. + * + * assert.doesNotMatch('I will pass', /different/); + * // OK + * ``` + * + * If the values do match, or if the `string` argument is of another type than`string`, an `AssertionError` is thrown with a `message` property set equal + * to the value of the `message` parameter. If the `message` parameter is + * undefined, a default error message is assigned. If the `message` parameter is an + * instance of an `Error` then it will be thrown instead of the `AssertionError`. + * @since v13.6.0, v12.16.0 + */ + function doesNotMatch(value: string, regExp: RegExp, message?: string | Error): void; + const strict: + & Omit< + typeof assert, + | "equal" + | "notEqual" + | "deepEqual" + | "notDeepEqual" + | "ok" + | "strictEqual" + | "deepStrictEqual" + | "ifError" + | "strict" + > + & { + (value: unknown, message?: string | Error): asserts value; + equal: typeof strictEqual; + notEqual: typeof notStrictEqual; + deepEqual: typeof deepStrictEqual; + notDeepEqual: typeof notDeepStrictEqual; + // Mapped types and assertion functions are incompatible? + // TS2775: Assertions require every name in the call target + // to be declared with an explicit type annotation. 
+ ok: typeof ok; + strictEqual: typeof strictEqual; + deepStrictEqual: typeof deepStrictEqual; + ifError: typeof ifError; + strict: typeof strict; + }; + } + export = assert; +} +declare module "node:assert" { + import assert = require("assert"); + export = assert; +} diff --git a/dist/node_modules/@types/node/ts4.8/assert/strict.d.ts b/dist/node_modules/@types/node/ts4.8/assert/strict.d.ts new file mode 100644 index 0000000..f333913 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/assert/strict.d.ts @@ -0,0 +1,8 @@ +declare module "assert/strict" { + import { strict } from "node:assert"; + export = strict; +} +declare module "node:assert/strict" { + import { strict } from "node:assert"; + export = strict; +} diff --git a/dist/node_modules/@types/node/ts4.8/async_hooks.d.ts b/dist/node_modules/@types/node/ts4.8/async_hooks.d.ts new file mode 100644 index 0000000..0667a61 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/async_hooks.d.ts @@ -0,0 +1,539 @@ +/** + * We strongly discourage the use of the `async_hooks` API. + * Other APIs that can cover most of its use cases include: + * + * * `AsyncLocalStorage` tracks async context + * * `process.getActiveResourcesInfo()` tracks active resources + * + * The `node:async_hooks` module provides an API to track asynchronous resources. + * It can be accessed using: + * + * ```js + * import async_hooks from 'node:async_hooks'; + * ``` + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/async_hooks.js) + */ +declare module "async_hooks" { + /** + * ```js + * import { executionAsyncId } from 'node:async_hooks'; + * import fs from 'node:fs'; + * + * console.log(executionAsyncId()); // 1 - bootstrap + * const path = '.'; + * fs.open(path, 'r', (err, fd) => { + * console.log(executionAsyncId()); // 6 - open() + * }); + * ``` + * + * The ID returned from `executionAsyncId()` is related to execution timing, not + * causality (which is covered by `triggerAsyncId()`): + * + * ```js + * const server = net.createServer((conn) => { + * // Returns the ID of the server, not of the new connection, because the + * // callback runs in the execution scope of the server's MakeCallback(). + * async_hooks.executionAsyncId(); + * + * }).listen(port, () => { + * // Returns the ID of a TickObject (process.nextTick()) because all + * // callbacks passed to .listen() are wrapped in a nextTick(). + * async_hooks.executionAsyncId(); + * }); + * ``` + * + * Promise contexts may not get precise `executionAsyncIds` by default. + * See the section on `promise execution tracking`. + * @since v8.1.0 + * @return The `asyncId` of the current execution context. Useful to track when something calls. + */ + function executionAsyncId(): number; + /** + * Resource objects returned by `executionAsyncResource()` are most often internal + * Node.js handle objects with undocumented APIs. Using any functions or properties + * on the object is likely to crash your application and should be avoided. + * + * Using `executionAsyncResource()` in the top-level execution context will + * return an empty object as there is no handle or request object to use, + * but having an object representing the top-level can be helpful. 
+ * + * ```js + * import { open } from 'node:fs'; + * import { executionAsyncId, executionAsyncResource } from 'node:async_hooks'; + * + * console.log(executionAsyncId(), executionAsyncResource()); // 1 {} + * open(new URL(import.meta.url), 'r', (err, fd) => { + * console.log(executionAsyncId(), executionAsyncResource()); // 7 FSReqWrap + * }); + * ``` + * + * This can be used to implement continuation local storage without the + * use of a tracking `Map` to store the metadata: + * + * ```js + * import { createServer } from 'node:http'; + * import { + * executionAsyncId, + * executionAsyncResource, + * createHook, + * } from 'async_hooks'; + * const sym = Symbol('state'); // Private symbol to avoid pollution + * + * createHook({ + * init(asyncId, type, triggerAsyncId, resource) { + * const cr = executionAsyncResource(); + * if (cr) { + * resource[sym] = cr[sym]; + * } + * }, + * }).enable(); + * + * const server = createServer((req, res) => { + * executionAsyncResource()[sym] = { state: req.url }; + * setTimeout(function() { + * res.end(JSON.stringify(executionAsyncResource()[sym])); + * }, 100); + * }).listen(3000); + * ``` + * @since v13.9.0, v12.17.0 + * @return The resource representing the current execution. Useful to store data within the resource. + */ + function executionAsyncResource(): object; + /** + * ```js + * const server = net.createServer((conn) => { + * // The resource that caused (or triggered) this callback to be called + * // was that of the new connection. Thus the return value of triggerAsyncId() + * // is the asyncId of "conn". + * async_hooks.triggerAsyncId(); + * + * }).listen(port, () => { + * // Even though all callbacks passed to .listen() are wrapped in a nextTick() + * // the callback itself exists because the call to the server's .listen() + * // was made. So the return value would be the ID of the server. + * async_hooks.triggerAsyncId(); + * }); + * ``` + * + * Promise contexts may not get valid `triggerAsyncId`s by default. See + * the section on `promise execution tracking`. + * @return The ID of the resource responsible for calling the callback that is currently being executed. + */ + function triggerAsyncId(): number; + interface HookCallbacks { + /** + * Called when a class is constructed that has the possibility to emit an asynchronous event. + * @param asyncId a unique ID for the async resource + * @param type the type of the async resource + * @param triggerAsyncId the unique ID of the async resource in whose execution context this async resource was created + * @param resource reference to the resource representing the async operation, needs to be released during destroy + */ + init?(asyncId: number, type: string, triggerAsyncId: number, resource: object): void; + /** + * When an asynchronous operation is initiated or completes a callback is called to notify the user. + * The before callback is called just before said callback is executed. + * @param asyncId the unique identifier assigned to the resource about to execute the callback. + */ + before?(asyncId: number): void; + /** + * Called immediately after the callback specified in before is completed. + * @param asyncId the unique identifier assigned to the resource which has executed the callback. + */ + after?(asyncId: number): void; + /** + * Called when a promise has resolve() called. This may not be in the same execution id + * as the promise itself. + * @param asyncId the unique id for the promise that was resolve()d. 
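+ *
+ * A rough sketch of observing this callback (the synchronous `writeSync()` call is
+ * one way to log from inside a hook without creating additional async activity, as
+ * an asynchronous `console.log()` would):
+ *
+ * ```js
+ * import { createHook } from 'node:async_hooks';
+ * import { writeSync } from 'node:fs';
+ *
+ * createHook({
+ *   promiseResolve(asyncId) {
+ *     writeSync(process.stdout.fd, `promise ${asyncId} resolved\n`);
+ *   },
+ * }).enable();
+ *
+ * Promise.resolve(42).then(() => {});
+ * ```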
+ */ + promiseResolve?(asyncId: number): void; + /** + * Called after the resource corresponding to asyncId is destroyed + * @param asyncId a unique ID for the async resource + */ + destroy?(asyncId: number): void; + } + interface AsyncHook { + /** + * Enable the callbacks for a given AsyncHook instance. If no callbacks are provided enabling is a noop. + */ + enable(): this; + /** + * Disable the callbacks for a given AsyncHook instance from the global pool of AsyncHook callbacks to be executed. Once a hook has been disabled it will not be called again until enabled. + */ + disable(): this; + } + /** + * Registers functions to be called for different lifetime events of each async + * operation. + * + * The callbacks `init()`/`before()`/`after()`/`destroy()` are called for the + * respective asynchronous event during a resource's lifetime. + * + * All callbacks are optional. For example, if only resource cleanup needs to + * be tracked, then only the `destroy` callback needs to be passed. The + * specifics of all functions that can be passed to `callbacks` is in the `Hook Callbacks` section. + * + * ```js + * import { createHook } from 'node:async_hooks'; + * + * const asyncHook = createHook({ + * init(asyncId, type, triggerAsyncId, resource) { }, + * destroy(asyncId) { }, + * }); + * ``` + * + * The callbacks will be inherited via the prototype chain: + * + * ```js + * class MyAsyncCallbacks { + * init(asyncId, type, triggerAsyncId, resource) { } + * destroy(asyncId) {} + * } + * + * class MyAddedCallbacks extends MyAsyncCallbacks { + * before(asyncId) { } + * after(asyncId) { } + * } + * + * const asyncHook = async_hooks.createHook(new MyAddedCallbacks()); + * ``` + * + * Because promises are asynchronous resources whose lifecycle is tracked + * via the async hooks mechanism, the `init()`, `before()`, `after()`, and`destroy()` callbacks _must not_ be async functions that return promises. + * @since v8.1.0 + * @param callbacks The `Hook Callbacks` to register + * @return Instance used for disabling and enabling hooks + */ + function createHook(callbacks: HookCallbacks): AsyncHook; + interface AsyncResourceOptions { + /** + * The ID of the execution context that created this async event. + * @default executionAsyncId() + */ + triggerAsyncId?: number | undefined; + /** + * Disables automatic `emitDestroy` when the object is garbage collected. + * This usually does not need to be set (even if `emitDestroy` is called + * manually), unless the resource's `asyncId` is retrieved and the + * sensitive API's `emitDestroy` is called with it. + * @default false + */ + requireManualDestroy?: boolean | undefined; + } + /** + * The class `AsyncResource` is designed to be extended by the embedder's async + * resources. Using this, users can easily trigger the lifetime events of their + * own resources. + * + * The `init` hook will trigger when an `AsyncResource` is instantiated. + * + * The following is an overview of the `AsyncResource` API. + * + * ```js + * import { AsyncResource, executionAsyncId } from 'node:async_hooks'; + * + * // AsyncResource() is meant to be extended. Instantiating a + * // new AsyncResource() also triggers init. If triggerAsyncId is omitted then + * // async_hook.executionAsyncId() is used. + * const asyncResource = new AsyncResource( + * type, { triggerAsyncId: executionAsyncId(), requireManualDestroy: false }, + * ); + * + * // Run a function in the execution context of the resource. 
This will + * // * establish the context of the resource + * // * trigger the AsyncHooks before callbacks + * // * call the provided function `fn` with the supplied arguments + * // * trigger the AsyncHooks after callbacks + * // * restore the original execution context + * asyncResource.runInAsyncScope(fn, thisArg, ...args); + * + * // Call AsyncHooks destroy callbacks. + * asyncResource.emitDestroy(); + * + * // Return the unique ID assigned to the AsyncResource instance. + * asyncResource.asyncId(); + * + * // Return the trigger ID for the AsyncResource instance. + * asyncResource.triggerAsyncId(); + * ``` + */ + class AsyncResource { + /** + * AsyncResource() is meant to be extended. Instantiating a + * new AsyncResource() also triggers init. If triggerAsyncId is omitted then + * async_hook.executionAsyncId() is used. + * @param type The type of async event. + * @param triggerAsyncId The ID of the execution context that created + * this async event (default: `executionAsyncId()`), or an + * AsyncResourceOptions object (since v9.3.0) + */ + constructor(type: string, triggerAsyncId?: number | AsyncResourceOptions); + /** + * Binds the given function to the current execution context. + * @since v14.8.0, v12.19.0 + * @param fn The function to bind to the current execution context. + * @param type An optional name to associate with the underlying `AsyncResource`. + */ + static bind any, ThisArg>( + fn: Func, + type?: string, + thisArg?: ThisArg, + ): Func; + /** + * Binds the given function to execute to this `AsyncResource`'s scope. + * @since v14.8.0, v12.19.0 + * @param fn The function to bind to the current `AsyncResource`. + */ + bind any>(fn: Func): Func; + /** + * Call the provided function with the provided arguments in the execution context + * of the async resource. This will establish the context, trigger the AsyncHooks + * before callbacks, call the function, trigger the AsyncHooks after callbacks, and + * then restore the original execution context. + * @since v9.6.0 + * @param fn The function to call in the execution context of this async resource. + * @param thisArg The receiver to be used for the function call. + * @param args Optional arguments to pass to the function. + */ + runInAsyncScope( + fn: (this: This, ...args: any[]) => Result, + thisArg?: This, + ...args: any[] + ): Result; + /** + * Call all `destroy` hooks. This should only ever be called once. An error will + * be thrown if it is called more than once. This **must** be manually called. If + * the resource is left to be collected by the GC then the `destroy` hooks will + * never be called. + * @return A reference to `asyncResource`. + */ + emitDestroy(): this; + /** + * @return The unique `asyncId` assigned to the resource. + */ + asyncId(): number; + /** + * @return The same `triggerAsyncId` that is passed to the `AsyncResource` constructor. + */ + triggerAsyncId(): number; + } + /** + * This class creates stores that stay coherent through asynchronous operations. + * + * While you can create your own implementation on top of the `node:async_hooks`module, `AsyncLocalStorage` should be preferred as it is a performant and memory + * safe implementation that involves significant optimizations that are non-obvious + * to implement. + * + * The following example uses `AsyncLocalStorage` to build a simple logger + * that assigns IDs to incoming HTTP requests and includes them in messages + * logged within each request. 
+ * + * ```js + * import http from 'node:http'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const asyncLocalStorage = new AsyncLocalStorage(); + * + * function logWithId(msg) { + * const id = asyncLocalStorage.getStore(); + * console.log(`${id !== undefined ? id : '-'}:`, msg); + * } + * + * let idSeq = 0; + * http.createServer((req, res) => { + * asyncLocalStorage.run(idSeq++, () => { + * logWithId('start'); + * // Imagine any chain of async operations here + * setImmediate(() => { + * logWithId('finish'); + * res.end(); + * }); + * }); + * }).listen(8080); + * + * http.get('http://localhost:8080'); + * http.get('http://localhost:8080'); + * // Prints: + * // 0: start + * // 1: start + * // 0: finish + * // 1: finish + * ``` + * + * Each instance of `AsyncLocalStorage` maintains an independent storage context. + * Multiple instances can safely exist simultaneously without risk of interfering + * with each other's data. + * @since v13.10.0, v12.17.0 + */ + class AsyncLocalStorage { + /** + * Binds the given function to the current execution context. + * @since v19.8.0 + * @experimental + * @param fn The function to bind to the current execution context. + * @return A new function that calls `fn` within the captured execution context. + */ + static bind any>(fn: Func): Func; + /** + * Captures the current execution context and returns a function that accepts a + * function as an argument. Whenever the returned function is called, it + * calls the function passed to it within the captured context. + * + * ```js + * const asyncLocalStorage = new AsyncLocalStorage(); + * const runInAsyncScope = asyncLocalStorage.run(123, () => AsyncLocalStorage.snapshot()); + * const result = asyncLocalStorage.run(321, () => runInAsyncScope(() => asyncLocalStorage.getStore())); + * console.log(result); // returns 123 + * ``` + * + * AsyncLocalStorage.snapshot() can replace the use of AsyncResource for simple + * async context tracking purposes, for example: + * + * ```js + * class Foo { + * #runInAsyncScope = AsyncLocalStorage.snapshot(); + * + * get() { return this.#runInAsyncScope(() => asyncLocalStorage.getStore()); } + * } + * + * const foo = asyncLocalStorage.run(123, () => new Foo()); + * console.log(asyncLocalStorage.run(321, () => foo.get())); // returns 123 + * ``` + * @since v19.8.0 + * @experimental + * @return A new function with the signature `(fn: (...args) : R, ...args) : R`. + */ + static snapshot(): (fn: (...args: TArgs) => R, ...args: TArgs) => R; + /** + * Disables the instance of `AsyncLocalStorage`. All subsequent calls + * to `asyncLocalStorage.getStore()` will return `undefined` until`asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()` is called again. + * + * When calling `asyncLocalStorage.disable()`, all current contexts linked to the + * instance will be exited. + * + * Calling `asyncLocalStorage.disable()` is required before the`asyncLocalStorage` can be garbage collected. This does not apply to stores + * provided by the `asyncLocalStorage`, as those objects are garbage collected + * along with the corresponding async resources. + * + * Use this method when the `asyncLocalStorage` is not in use anymore + * in the current process. + * @since v13.10.0, v12.17.0 + * @experimental + */ + disable(): void; + /** + * Returns the current store. + * If called outside of an asynchronous context initialized by + * calling `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()`, it + * returns `undefined`. 
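+ *
+ * For example (a minimal sketch; the store value `{ id: 1 }` is arbitrary):
+ *
+ * ```js
+ * import { AsyncLocalStorage } from 'node:async_hooks';
+ *
+ * const asyncLocalStorage = new AsyncLocalStorage();
+ *
+ * asyncLocalStorage.getStore(); // Returns undefined (no context has been entered)
+ *
+ * asyncLocalStorage.run({ id: 1 }, () => {
+ *   asyncLocalStorage.getStore(); // Returns { id: 1 }
+ *   setImmediate(() => {
+ *     asyncLocalStorage.getStore(); // Still returns { id: 1 }
+ *   });
+ * });
+ * ```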
+ * @since v13.10.0, v12.17.0 + */ + getStore(): T | undefined; + /** + * Runs a function synchronously within a context and returns its + * return value. The store is not accessible outside of the callback function. + * The store is accessible to any asynchronous operations created within the + * callback. + * + * The optional `args` are passed to the callback function. + * + * If the callback function throws an error, the error is thrown by `run()` too. + * The stacktrace is not impacted by this call and the context is exited. + * + * Example: + * + * ```js + * const store = { id: 2 }; + * try { + * asyncLocalStorage.run(store, () => { + * asyncLocalStorage.getStore(); // Returns the store object + * setTimeout(() => { + * asyncLocalStorage.getStore(); // Returns the store object + * }, 200); + * throw new Error(); + * }); + * } catch (e) { + * asyncLocalStorage.getStore(); // Returns undefined + * // The error will be caught here + * } + * ``` + * @since v13.10.0, v12.17.0 + */ + run(store: T, callback: () => R): R; + run(store: T, callback: (...args: TArgs) => R, ...args: TArgs): R; + /** + * Runs a function synchronously outside of a context and returns its + * return value. The store is not accessible within the callback function or + * the asynchronous operations created within the callback. Any `getStore()`call done within the callback function will always return `undefined`. + * + * The optional `args` are passed to the callback function. + * + * If the callback function throws an error, the error is thrown by `exit()` too. + * The stacktrace is not impacted by this call and the context is re-entered. + * + * Example: + * + * ```js + * // Within a call to run + * try { + * asyncLocalStorage.getStore(); // Returns the store object or value + * asyncLocalStorage.exit(() => { + * asyncLocalStorage.getStore(); // Returns undefined + * throw new Error(); + * }); + * } catch (e) { + * asyncLocalStorage.getStore(); // Returns the same object or value + * // The error will be caught here + * } + * ``` + * @since v13.10.0, v12.17.0 + * @experimental + */ + exit(callback: (...args: TArgs) => R, ...args: TArgs): R; + /** + * Transitions into the context for the remainder of the current + * synchronous execution and then persists the store through any following + * asynchronous calls. + * + * Example: + * + * ```js + * const store = { id: 1 }; + * // Replaces previous store with the given store object + * asyncLocalStorage.enterWith(store); + * asyncLocalStorage.getStore(); // Returns the store object + * someAsyncOperation(() => { + * asyncLocalStorage.getStore(); // Returns the same object + * }); + * ``` + * + * This transition will continue for the _entire_ synchronous execution. + * This means that if, for example, the context is entered within an event + * handler subsequent event handlers will also run within that context unless + * specifically bound to another context with an `AsyncResource`. That is why`run()` should be preferred over `enterWith()` unless there are strong reasons + * to use the latter method. 
+ * + * ```js + * const store = { id: 1 }; + * + * emitter.on('my-event', () => { + * asyncLocalStorage.enterWith(store); + * }); + * emitter.on('my-event', () => { + * asyncLocalStorage.getStore(); // Returns the same object + * }); + * + * asyncLocalStorage.getStore(); // Returns undefined + * emitter.emit('my-event'); + * asyncLocalStorage.getStore(); // Returns the same object + * ``` + * @since v13.11.0, v12.17.0 + * @experimental + */ + enterWith(store: T): void; + } +} +declare module "node:async_hooks" { + export * from "async_hooks"; +} diff --git a/dist/node_modules/@types/node/ts4.8/buffer.d.ts b/dist/node_modules/@types/node/ts4.8/buffer.d.ts new file mode 100644 index 0000000..255e268 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/buffer.d.ts @@ -0,0 +1,2363 @@ +/** + * `Buffer` objects are used to represent a fixed-length sequence of bytes. Many + * Node.js APIs support `Buffer`s. + * + * The `Buffer` class is a subclass of JavaScript's [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) class and + * extends it with methods that cover additional use cases. Node.js APIs accept + * plain [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) s wherever `Buffer`s are supported as well. + * + * While the `Buffer` class is available within the global scope, it is still + * recommended to explicitly reference it via an import or require statement. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Creates a zero-filled Buffer of length 10. + * const buf1 = Buffer.alloc(10); + * + * // Creates a Buffer of length 10, + * // filled with bytes which all have the value `1`. + * const buf2 = Buffer.alloc(10, 1); + * + * // Creates an uninitialized buffer of length 10. + * // This is faster than calling Buffer.alloc() but the returned + * // Buffer instance might contain old data that needs to be + * // overwritten using fill(), write(), or other functions that fill the Buffer's + * // contents. + * const buf3 = Buffer.allocUnsafe(10); + * + * // Creates a Buffer containing the bytes [1, 2, 3]. + * const buf4 = Buffer.from([1, 2, 3]); + * + * // Creates a Buffer containing the bytes [1, 1, 1, 1] – the entries + * // are all truncated using `(value & 255)` to fit into the range 0–255. + * const buf5 = Buffer.from([257, 257.5, -255, '1']); + * + * // Creates a Buffer containing the UTF-8-encoded bytes for the string 'tést': + * // [0x74, 0xc3, 0xa9, 0x73, 0x74] (in hexadecimal notation) + * // [116, 195, 169, 115, 116] (in decimal notation) + * const buf6 = Buffer.from('tést'); + * + * // Creates a Buffer containing the Latin-1 bytes [0x74, 0xe9, 0x73, 0x74]. + * const buf7 = Buffer.from('tést', 'latin1'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/buffer.js) + */ +declare module "buffer" { + import { BinaryLike } from "node:crypto"; + import { ReadableStream as WebReadableStream } from "node:stream/web"; + /** + * This function returns `true` if `input` contains only valid UTF-8-encoded data, + * including the case in which `input` is empty. + * + * Throws if the `input` is a detached array buffer. + * @since v19.4.0, v18.14.0 + * @param input The input to validate. + */ + export function isUtf8(input: Buffer | ArrayBuffer | NodeJS.TypedArray): boolean; + /** + * This function returns `true` if `input` contains only valid ASCII-encoded data, + * including the case in which `input` is empty. 
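+ *
+ * For example (a small sketch; the sample strings are arbitrary):
+ *
+ * ```js
+ * import { Buffer, isAscii } from 'node:buffer';
+ *
+ * console.log(isAscii(Buffer.from('hello', 'utf8')));
+ * // Prints: true
+ *
+ * console.log(isAscii(Buffer.from('héllo', 'utf8')));
+ * // Prints: false ('é' is encoded as the non-ASCII bytes 0xc3 0xa9)
+ * ```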
+ * + * Throws if the `input` is a detached array buffer. + * @since v19.6.0, v18.15.0 + * @param input The input to validate. + */ + export function isAscii(input: Buffer | ArrayBuffer | NodeJS.TypedArray): boolean; + export const INSPECT_MAX_BYTES: number; + export const kMaxLength: number; + export const kStringMaxLength: number; + export const constants: { + MAX_LENGTH: number; + MAX_STRING_LENGTH: number; + }; + export type TranscodeEncoding = + | "ascii" + | "utf8" + | "utf-8" + | "utf16le" + | "utf-16le" + | "ucs2" + | "ucs-2" + | "latin1" + | "binary"; + /** + * Re-encodes the given `Buffer` or `Uint8Array` instance from one character + * encoding to another. Returns a new `Buffer` instance. + * + * Throws if the `fromEnc` or `toEnc` specify invalid character encodings or if + * conversion from `fromEnc` to `toEnc` is not permitted. + * + * Encodings supported by `buffer.transcode()` are: `'ascii'`, `'utf8'`,`'utf16le'`, `'ucs2'`, `'latin1'`, and `'binary'`. + * + * The transcoding process will use substitution characters if a given byte + * sequence cannot be adequately represented in the target encoding. For instance: + * + * ```js + * import { Buffer, transcode } from 'node:buffer'; + * + * const newBuf = transcode(Buffer.from('€'), 'utf8', 'ascii'); + * console.log(newBuf.toString('ascii')); + * // Prints: '?' + * ``` + * + * Because the Euro (`€`) sign is not representable in US-ASCII, it is replaced + * with `?` in the transcoded `Buffer`. + * @since v7.1.0 + * @param source A `Buffer` or `Uint8Array` instance. + * @param fromEnc The current encoding. + * @param toEnc To target encoding. + */ + export function transcode(source: Uint8Array, fromEnc: TranscodeEncoding, toEnc: TranscodeEncoding): Buffer; + export const SlowBuffer: { + /** @deprecated since v6.0.0, use `Buffer.allocUnsafeSlow()` */ + new(size: number): Buffer; + prototype: Buffer; + }; + /** + * Resolves a `'blob:nodedata:...'` an associated `Blob` object registered using + * a prior call to `URL.createObjectURL()`. + * @since v16.7.0 + * @experimental + * @param id A `'blob:nodedata:...` URL string returned by a prior call to `URL.createObjectURL()`. + */ + export function resolveObjectURL(id: string): Blob | undefined; + export { Buffer }; + /** + * @experimental + */ + export interface BlobOptions { + /** + * One of either `'transparent'` or `'native'`. When set to `'native'`, line endings in string source parts + * will be converted to the platform native line-ending as specified by `require('node:os').EOL`. + */ + endings?: "transparent" | "native"; + /** + * The Blob content-type. The intent is for `type` to convey + * the MIME media type of the data, however no validation of the type format + * is performed. + */ + type?: string | undefined; + } + /** + * A [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) encapsulates immutable, raw data that can be safely shared across + * multiple worker threads. + * @since v15.7.0, v14.18.0 + */ + export class Blob { + /** + * The total size of the `Blob` in bytes. + * @since v15.7.0, v14.18.0 + */ + readonly size: number; + /** + * The content-type of the `Blob`. + * @since v15.7.0, v14.18.0 + */ + readonly type: string; + /** + * Creates a new `Blob` object containing a concatenation of the given sources. + * + * {ArrayBuffer}, {TypedArray}, {DataView}, and {Buffer} sources are copied into + * the 'Blob' and can therefore be safely modified after the 'Blob' is created. + * + * String sources are also copied into the `Blob`. 
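+ *
+ * A short sketch of constructing a `Blob` and reading it back (the parts and the
+ * MIME type shown are arbitrary):
+ *
+ * ```js
+ * import { Blob } from 'node:buffer';
+ *
+ * const blob = new Blob(['hello', ' ', 'world'], { type: 'text/plain' });
+ *
+ * console.log(blob.size); // Prints: 11
+ * console.log(blob.type); // Prints: 'text/plain'
+ *
+ * blob.text().then((text) => {
+ *   console.log(text); // Prints: 'hello world'
+ * });
+ * ```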
+ */ + constructor(sources: Array, options?: BlobOptions); + /** + * Returns a promise that fulfills with an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) containing a copy of + * the `Blob` data. + * @since v15.7.0, v14.18.0 + */ + arrayBuffer(): Promise; + /** + * Creates and returns a new `Blob` containing a subset of this `Blob` objects + * data. The original `Blob` is not altered. + * @since v15.7.0, v14.18.0 + * @param start The starting index. + * @param end The ending index. + * @param type The content-type for the new `Blob` + */ + slice(start?: number, end?: number, type?: string): Blob; + /** + * Returns a promise that fulfills with the contents of the `Blob` decoded as a + * UTF-8 string. + * @since v15.7.0, v14.18.0 + */ + text(): Promise; + /** + * Returns a new `ReadableStream` that allows the content of the `Blob` to be read. + * @since v16.7.0 + */ + stream(): WebReadableStream; + } + export interface FileOptions { + /** + * One of either `'transparent'` or `'native'`. When set to `'native'`, line endings in string source parts will be + * converted to the platform native line-ending as specified by `require('node:os').EOL`. + */ + endings?: "native" | "transparent"; + /** The File content-type. */ + type?: string; + /** The last modified date of the file. `Default`: Date.now(). */ + lastModified?: number; + } + /** + * A [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) provides information about files. + * @since v19.2.0, v18.13.0 + */ + export class File extends Blob { + constructor(sources: Array, fileName: string, options?: FileOptions); + /** + * The name of the `File`. + * @since v19.2.0, v18.13.0 + */ + readonly name: string; + /** + * The last modified date of the `File`. + * @since v19.2.0, v18.13.0 + */ + readonly lastModified: number; + } + export import atob = globalThis.atob; + export import btoa = globalThis.btoa; + import { Blob as NodeBlob } from "buffer"; + // This conditional type will be the existing global Blob in a browser, or + // the copy below in a Node environment. + type __Blob = typeof globalThis extends { onmessage: any; Blob: any } ? {} : NodeBlob; + global { + namespace NodeJS { + export { BufferEncoding }; + } + // Buffer class + type BufferEncoding = + | "ascii" + | "utf8" + | "utf-8" + | "utf16le" + | "utf-16le" + | "ucs2" + | "ucs-2" + | "base64" + | "base64url" + | "latin1" + | "binary" + | "hex"; + type WithImplicitCoercion = + | T + | { + valueOf(): T; + }; + /** + * Raw data is stored in instances of the Buffer class. + * A Buffer is similar to an array of integers but corresponds to a raw memory allocation outside the V8 heap. A Buffer cannot be resized. + * Valid string encodings: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'base64url'|'binary'(deprecated)|'hex' + */ + interface BufferConstructor { + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + * @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead. + */ + new(str: string, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`). + */ + new(size: number): Buffer; + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. 
+ * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead.
+ */
+ new(array: Uint8Array): Buffer;
+ /**
+ * Produces a Buffer backed by the same allocated memory as
+ * the given {ArrayBuffer}/{SharedArrayBuffer}.
+ *
+ * @param arrayBuffer The ArrayBuffer with which to share memory.
+ * @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead.
+ */
+ new(arrayBuffer: ArrayBuffer | SharedArrayBuffer): Buffer;
+ /**
+ * Allocates a new buffer containing the given {array} of octets.
+ *
+ * @param array The octets to store.
+ * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead.
+ */
+ new(array: readonly any[]): Buffer;
+ /**
+ * Copies the passed {buffer} data onto a new {Buffer} instance.
+ *
+ * @param buffer The buffer to copy.
+ * @deprecated since v10.0.0 - Use `Buffer.from(buffer)` instead.
+ */
+ new(buffer: Buffer): Buffer;
+ /**
+ * Allocates a new `Buffer` using an `array` of bytes in the range `0` – `255`.
+ * Array entries outside that range will be truncated to fit into it.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'.
+ * const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]);
+ * ```
+ *
+ * If `array` is an `Array`\-like object (that is, one with a `length` property of
+ * type `number`), it is treated as if it is an array, unless it is a `Buffer` or
+ * a `Uint8Array`. This means all other `TypedArray` variants get treated as an`Array`. To create a `Buffer` from the bytes backing a `TypedArray`, use `Buffer.copyBytesFrom()`.
+ *
+ * A `TypeError` will be thrown if `array` is not an `Array` or another type
+ * appropriate for `Buffer.from()` variants.
+ *
+ * `Buffer.from(array)` and `Buffer.from(string)` may also use the internal`Buffer` pool like `Buffer.allocUnsafe()` does.
+ * @since v5.10.0
+ */
+ from(
+ arrayBuffer: WithImplicitCoercion<ArrayBuffer | SharedArrayBuffer>,
+ byteOffset?: number,
+ length?: number,
+ ): Buffer;
+ /**
+ * Creates a new Buffer using the passed {data}
+ * @param data data to create a new Buffer
+ */
+ from(data: Uint8Array | readonly number[]): Buffer;
+ from(data: WithImplicitCoercion<Uint8Array | readonly number[] | string>): Buffer;
+ /**
+ * Creates a new Buffer containing the given JavaScript string {str}.
+ * If provided, the {encoding} parameter identifies the character encoding.
+ * If not provided, {encoding} defaults to 'utf8'.
+ */
+ from(
+ str:
+ | WithImplicitCoercion<string>
+ | {
+ [Symbol.toPrimitive](hint: "string"): string;
+ },
+ encoding?: BufferEncoding,
+ ): Buffer;
+ /**
+ * Creates a new Buffer using the passed {data}
+ * @param values to create a new Buffer
+ */
+ of(...items: number[]): Buffer;
+ /**
+ * Returns `true` if `obj` is a `Buffer`, `false` otherwise.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * Buffer.isBuffer(Buffer.alloc(10)); // true
+ * Buffer.isBuffer(Buffer.from('foo')); // true
+ * Buffer.isBuffer('a string'); // false
+ * Buffer.isBuffer([]); // false
+ * Buffer.isBuffer(new Uint8Array(1024)); // false
+ * ```
+ * @since v0.1.101
+ */
+ isBuffer(obj: any): obj is Buffer;
+ /**
+ * Returns `true` if `encoding` is the name of a supported character encoding,
+ * or `false` otherwise.
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * console.log(Buffer.isEncoding('utf8')); + * // Prints: true + * + * console.log(Buffer.isEncoding('hex')); + * // Prints: true + * + * console.log(Buffer.isEncoding('utf/8')); + * // Prints: false + * + * console.log(Buffer.isEncoding('')); + * // Prints: false + * ``` + * @since v0.9.1 + * @param encoding A character encoding name to check. + */ + isEncoding(encoding: string): encoding is BufferEncoding; + /** + * Returns the byte length of a string when encoded using `encoding`. + * This is not the same as [`String.prototype.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), which does not account + * for the encoding that is used to convert the string into bytes. + * + * For `'base64'`, `'base64url'`, and `'hex'`, this function assumes valid input. + * For strings that contain non-base64/hex-encoded data (e.g. whitespace), the + * return value might be greater than the length of a `Buffer` created from the + * string. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const str = '\u00bd + \u00bc = \u00be'; + * + * console.log(`${str}: ${str.length} characters, ` + + * `${Buffer.byteLength(str, 'utf8')} bytes`); + * // Prints: ½ + ¼ = ¾: 9 characters, 12 bytes + * ``` + * + * When `string` is a + * `Buffer`/[`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView)/[`TypedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/- + * Reference/Global_Objects/TypedArray)/[`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer)/[`SharedArrayBuffer`](https://develop- + * er.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer), the byte length as reported by `.byteLength`is returned. + * @since v0.1.90 + * @param string A value to calculate the length of. + * @param [encoding='utf8'] If `string` is a string, this is its encoding. + * @return The number of bytes contained within `string`. + */ + byteLength( + string: string | Buffer | NodeJS.ArrayBufferView | ArrayBuffer | SharedArrayBuffer, + encoding?: BufferEncoding, + ): number; + /** + * Returns a new `Buffer` which is the result of concatenating all the `Buffer`instances in the `list` together. + * + * If the list has no items, or if the `totalLength` is 0, then a new zero-length`Buffer` is returned. + * + * If `totalLength` is not provided, it is calculated from the `Buffer` instances + * in `list` by adding their lengths. + * + * If `totalLength` is provided, it is coerced to an unsigned integer. If the + * combined length of the `Buffer`s in `list` exceeds `totalLength`, the result is + * truncated to `totalLength`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a single `Buffer` from a list of three `Buffer` instances. + * + * const buf1 = Buffer.alloc(10); + * const buf2 = Buffer.alloc(14); + * const buf3 = Buffer.alloc(18); + * const totalLength = buf1.length + buf2.length + buf3.length; + * + * console.log(totalLength); + * // Prints: 42 + * + * const bufA = Buffer.concat([buf1, buf2, buf3], totalLength); + * + * console.log(bufA); + * // Prints: + * console.log(bufA.length); + * // Prints: 42 + * ``` + * + * `Buffer.concat()` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does. + * @since v0.7.11 + * @param list List of `Buffer` or {@link Uint8Array} instances to concatenate. 
+ * @param totalLength Total length of the `Buffer` instances in `list` when concatenated. + */ + concat(list: readonly Uint8Array[], totalLength?: number): Buffer; + /** + * Copies the underlying memory of `view` into a new `Buffer`. + * + * ```js + * const u16 = new Uint16Array([0, 0xffff]); + * const buf = Buffer.copyBytesFrom(u16, 1, 1); + * u16[1] = 0; + * console.log(buf.length); // 2 + * console.log(buf[0]); // 255 + * console.log(buf[1]); // 255 + * ``` + * @since v19.8.0 + * @param view The {TypedArray} to copy. + * @param [offset=0] The starting offset within `view`. + * @param [length=view.length - offset] The number of elements from `view` to copy. + */ + copyBytesFrom(view: NodeJS.TypedArray, offset?: number, length?: number): Buffer; + /** + * Compares `buf1` to `buf2`, typically for the purpose of sorting arrays of`Buffer` instances. This is equivalent to calling `buf1.compare(buf2)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('1234'); + * const buf2 = Buffer.from('0123'); + * const arr = [buf1, buf2]; + * + * console.log(arr.sort(Buffer.compare)); + * // Prints: [ , ] + * // (This result is equal to: [buf2, buf1].) + * ``` + * @since v0.11.13 + * @return Either `-1`, `0`, or `1`, depending on the result of the comparison. See `compare` for details. + */ + compare(buf1: Uint8Array, buf2: Uint8Array): -1 | 0 | 1; + /** + * Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the`Buffer` will be zero-filled. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(5); + * + * console.log(buf); + * // Prints: + * ``` + * + * If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. + * + * If `fill` is specified, the allocated `Buffer` will be initialized by calling `buf.fill(fill)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(5, 'a'); + * + * console.log(buf); + * // Prints: + * ``` + * + * If both `fill` and `encoding` are specified, the allocated `Buffer` will be + * initialized by calling `buf.fill(fill, encoding)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); + * + * console.log(buf); + * // Prints: + * ``` + * + * Calling `Buffer.alloc()` can be measurably slower than the alternative `Buffer.allocUnsafe()` but ensures that the newly created `Buffer` instance + * contents will never contain sensitive data from previous allocations, including + * data that might not have been allocated for `Buffer`s. + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + * @param [fill=0] A value to pre-fill the new `Buffer` with. + * @param [encoding='utf8'] If `fill` is a string, this is its encoding. + */ + alloc(size: number, fill?: string | Uint8Array | number, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. + * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `Buffer.alloc()` instead to initialize`Buffer` instances with zeroes. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(10); + * + * console.log(buf); + * // Prints (contents may vary): + * + * buf.fill(0); + * + * console.log(buf); + * // Prints: + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * + * The `Buffer` module pre-allocates an internal `Buffer` instance of + * size `Buffer.poolSize` that is used as a pool for the fast allocation of new `Buffer` instances created using `Buffer.allocUnsafe()`, `Buffer.from(array)`, + * and `Buffer.concat()` only when `size` is less than `Buffer.poolSize >>> 1` (floor of `Buffer.poolSize` divided by two). + * + * Use of this pre-allocated internal memory pool is a key difference between + * calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. + * Specifically, `Buffer.alloc(size, fill)` will _never_ use the internal `Buffer`pool, while `Buffer.allocUnsafe(size).fill(fill)`_will_ use the internal`Buffer` pool if `size` is less + * than or equal to half `Buffer.poolSize`. The + * difference is subtle but can be important when an application requires the + * additional performance that `Buffer.allocUnsafe()` provides. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + */ + allocUnsafe(size: number): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_OUT_OF_RANGE` is thrown. A zero-length `Buffer` is created if + * `size` is 0. + * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `buf.fill(0)` to initialize + * such `Buffer` instances with zeroes. + * + * When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, + * allocations under 4 KiB are sliced from a single pre-allocated `Buffer`. This + * allows applications to avoid the garbage collection overhead of creating many + * individually allocated `Buffer` instances. This approach improves both + * performance and memory usage by eliminating the need to track and clean up as + * many individual `ArrayBuffer` objects. + * + * However, in the case where a developer may need to retain a small chunk of + * memory from a pool for an indeterminate amount of time, it may be appropriate + * to create an un-pooled `Buffer` instance using `Buffer.allocUnsafeSlow()` and + * then copying out the relevant bits. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Need to keep around a few small chunks of memory. + * const store = []; + * + * socket.on('readable', () => { + * let data; + * while (null !== (data = readable.read())) { + * // Allocate for retained data. + * const sb = Buffer.allocUnsafeSlow(10); + * + * // Copy the data into the new allocation. + * data.copy(sb, 0, 0, 10); + * + * store.push(sb); + * } + * }); + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.12.0 + * @param size The desired length of the new `Buffer`. + */ + allocUnsafeSlow(size: number): Buffer; + /** + * This is the size (in bytes) of pre-allocated internal `Buffer` instances used + * for pooling. This value may be modified. + * @since v0.11.3 + */ + poolSize: number; + } + interface Buffer extends Uint8Array { + /** + * Writes `string` to `buf` at `offset` according to the character encoding in`encoding`. The `length` parameter is the number of bytes to write. 
If `buf` did + * not contain enough space to fit the entire string, only part of `string` will be + * written. However, partially encoded characters will not be written. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.alloc(256); + * + * const len = buf.write('\u00bd + \u00bc = \u00be', 0); + * + * console.log(`${len} bytes: ${buf.toString('utf8', 0, len)}`); + * // Prints: 12 bytes: ½ + ¼ = ¾ + * + * const buffer = Buffer.alloc(10); + * + * const length = buffer.write('abcd', 8); + * + * console.log(`${length} bytes: ${buffer.toString('utf8', 8, 10)}`); + * // Prints: 2 bytes : ab + * ``` + * @since v0.1.90 + * @param string String to write to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write `string`. + * @param [length=buf.length - offset] Maximum number of bytes to write (written bytes will not exceed `buf.length - offset`). + * @param [encoding='utf8'] The character encoding of `string`. + * @return Number of bytes written. + */ + write(string: string, encoding?: BufferEncoding): number; + write(string: string, offset: number, encoding?: BufferEncoding): number; + write(string: string, offset: number, length: number, encoding?: BufferEncoding): number; + /** + * Decodes `buf` to a string according to the specified character encoding in`encoding`. `start` and `end` may be passed to decode only a subset of `buf`. + * + * If `encoding` is `'utf8'` and a byte sequence in the input is not valid UTF-8, + * then each invalid byte is replaced with the replacement character `U+FFFD`. + * + * The maximum length of a string instance (in UTF-16 code units) is available + * as {@link constants.MAX_STRING_LENGTH}. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * console.log(buf1.toString('utf8')); + * // Prints: abcdefghijklmnopqrstuvwxyz + * console.log(buf1.toString('utf8', 0, 5)); + * // Prints: abcde + * + * const buf2 = Buffer.from('tést'); + * + * console.log(buf2.toString('hex')); + * // Prints: 74c3a97374 + * console.log(buf2.toString('utf8', 0, 3)); + * // Prints: té + * console.log(buf2.toString(undefined, 0, 3)); + * // Prints: té + * ``` + * @since v0.1.90 + * @param [encoding='utf8'] The character encoding to use. + * @param [start=0] The byte offset to start decoding at. + * @param [end=buf.length] The byte offset to stop decoding at (not inclusive). + */ + toString(encoding?: BufferEncoding, start?: number, end?: number): string; + /** + * Returns a JSON representation of `buf`. [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) implicitly calls + * this function when stringifying a `Buffer` instance. + * + * `Buffer.from()` accepts objects in the format returned from this method. + * In particular, `Buffer.from(buf.toJSON())` works like `Buffer.from(buf)`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]); + * const json = JSON.stringify(buf); + * + * console.log(json); + * // Prints: {"type":"Buffer","data":[1,2,3,4,5]} + * + * const copy = JSON.parse(json, (key, value) => { + * return value && value.type === 'Buffer' ? 
+ * Buffer.from(value) : + * value; + * }); + * + * console.log(copy); + * // Prints: + * ``` + * @since v0.9.2 + */ + toJSON(): { + type: "Buffer"; + data: number[]; + }; + /** + * Returns `true` if both `buf` and `otherBuffer` have exactly the same bytes,`false` otherwise. Equivalent to `buf.compare(otherBuffer) === 0`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('ABC'); + * const buf2 = Buffer.from('414243', 'hex'); + * const buf3 = Buffer.from('ABCD'); + * + * console.log(buf1.equals(buf2)); + * // Prints: true + * console.log(buf1.equals(buf3)); + * // Prints: false + * ``` + * @since v0.11.13 + * @param otherBuffer A `Buffer` or {@link Uint8Array} with which to compare `buf`. + */ + equals(otherBuffer: Uint8Array): boolean; + /** + * Compares `buf` with `target` and returns a number indicating whether `buf`comes before, after, or is the same as `target` in sort order. + * Comparison is based on the actual sequence of bytes in each `Buffer`. + * + * * `0` is returned if `target` is the same as `buf` + * * `1` is returned if `target` should come _before_`buf` when sorted. + * * `-1` is returned if `target` should come _after_`buf` when sorted. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('ABC'); + * const buf2 = Buffer.from('BCD'); + * const buf3 = Buffer.from('ABCD'); + * + * console.log(buf1.compare(buf1)); + * // Prints: 0 + * console.log(buf1.compare(buf2)); + * // Prints: -1 + * console.log(buf1.compare(buf3)); + * // Prints: -1 + * console.log(buf2.compare(buf1)); + * // Prints: 1 + * console.log(buf2.compare(buf3)); + * // Prints: 1 + * console.log([buf1, buf2, buf3].sort(Buffer.compare)); + * // Prints: [ , , ] + * // (This result is equal to: [buf1, buf3, buf2].) + * ``` + * + * The optional `targetStart`, `targetEnd`, `sourceStart`, and `sourceEnd`arguments can be used to limit the comparison to specific ranges within `target`and `buf` respectively. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8, 9]); + * const buf2 = Buffer.from([5, 6, 7, 8, 9, 1, 2, 3, 4]); + * + * console.log(buf1.compare(buf2, 5, 9, 0, 4)); + * // Prints: 0 + * console.log(buf1.compare(buf2, 0, 6, 4)); + * // Prints: -1 + * console.log(buf1.compare(buf2, 5, 6, 5)); + * // Prints: 1 + * ``` + * + * `ERR_OUT_OF_RANGE` is thrown if `targetStart < 0`, `sourceStart < 0`,`targetEnd > target.byteLength`, or `sourceEnd > source.byteLength`. + * @since v0.11.13 + * @param target A `Buffer` or {@link Uint8Array} with which to compare `buf`. + * @param [targetStart=0] The offset within `target` at which to begin comparison. + * @param [targetEnd=target.length] The offset within `target` at which to end comparison (not inclusive). + * @param [sourceStart=0] The offset within `buf` at which to begin comparison. + * @param [sourceEnd=buf.length] The offset within `buf` at which to end comparison (not inclusive). + */ + compare( + target: Uint8Array, + targetStart?: number, + targetEnd?: number, + sourceStart?: number, + sourceEnd?: number, + ): -1 | 0 | 1; + /** + * Copies data from a region of `buf` to a region in `target`, even if the `target`memory region overlaps with `buf`. + * + * [`TypedArray.prototype.set()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/set) performs the same operation, and is available + * for all TypedArrays, including Node.js `Buffer`s, although it takes + * different function arguments. 
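+ *
+ * A minimal sketch of the two call styles side by side:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const src = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]);
+ * const dst = Buffer.alloc(8);
+ *
+ * // `Buffer`-style arguments: (target, targetStart, sourceStart, sourceEnd).
+ * src.copy(dst, 2, 4, 8);
+ * // TypedArray-style equivalent:
+ * // dst.set(src.subarray(4, 8), 2);
+ *
+ * console.log(dst);
+ * // Prints: <Buffer 00 00 05 06 07 08 00 00>
+ * ```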
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create two `Buffer` instances. + * const buf1 = Buffer.allocUnsafe(26); + * const buf2 = Buffer.allocUnsafe(26).fill('!'); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * // Copy `buf1` bytes 16 through 19 into `buf2` starting at byte 8 of `buf2`. + * buf1.copy(buf2, 8, 16, 20); + * // This is equivalent to: + * // buf2.set(buf1.subarray(16, 20), 8); + * + * console.log(buf2.toString('ascii', 0, 25)); + * // Prints: !!!!!!!!qrst!!!!!!!!!!!!! + * ``` + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a `Buffer` and copy data from one region to an overlapping region + * // within the same `Buffer`. + * + * const buf = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf[i] = i + 97; + * } + * + * buf.copy(buf, 0, 4, 10); + * + * console.log(buf.toString()); + * // Prints: efghijghijklmnopqrstuvwxyz + * ``` + * @since v0.1.90 + * @param target A `Buffer` or {@link Uint8Array} to copy into. + * @param [targetStart=0] The offset within `target` at which to begin writing. + * @param [sourceStart=0] The offset within `buf` from which to begin copying. + * @param [sourceEnd=buf.length] The offset within `buf` at which to stop copying (not inclusive). + * @return The number of bytes copied. + */ + copy(target: Uint8Array, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * This method is not compatible with the `Uint8Array.prototype.slice()`, + * which is a superclass of `Buffer`. To copy the slice, use`Uint8Array.prototype.slice()`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * const copiedBuf = Uint8Array.prototype.slice.call(buf); + * copiedBuf[0]++; + * console.log(copiedBuf.toString()); + * // Prints: cuffer + * + * console.log(buf.toString()); + * // Prints: buffer + * + * // With buf.slice(), the original buffer is modified. + * const notReallyCopiedBuf = buf.slice(); + * notReallyCopiedBuf[0]++; + * console.log(notReallyCopiedBuf.toString()); + * // Prints: cuffer + * console.log(buf.toString()); + * // Also prints: cuffer (!) + * ``` + * @since v0.3.0 + * @deprecated Use `subarray` instead. + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). + */ + slice(start?: number, end?: number): Buffer; + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * Specifying `end` greater than `buf.length` will return the same result as + * that of `end` equal to `buf.length`. + * + * This method is inherited from [`TypedArray.prototype.subarray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray). + * + * Modifying the new `Buffer` slice will modify the memory in the original `Buffer`because the allocated memory of the two objects overlap. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Create a `Buffer` with the ASCII alphabet, take a slice, and modify one byte + * // from the original `Buffer`. 
+ * + * const buf1 = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * const buf2 = buf1.subarray(0, 3); + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: abc + * + * buf1[0] = 33; + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: !bc + * ``` + * + * Specifying negative indexes causes the slice to be generated relative to the + * end of `buf` rather than the beginning. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('buffer'); + * + * console.log(buf.subarray(-6, -1).toString()); + * // Prints: buffe + * // (Equivalent to buf.subarray(0, 5).) + * + * console.log(buf.subarray(-6, -2).toString()); + * // Prints: buff + * // (Equivalent to buf.subarray(0, 4).) + * + * console.log(buf.subarray(-5, -2).toString()); + * // Prints: uff + * // (Equivalent to buf.subarray(1, 4).) + * ``` + * @since v3.0.0 + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). + */ + subarray(start?: number, end?: number): Buffer; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigInt64BE(0x0102030405060708n, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigInt64BE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigInt64LE(0x0102030405060708n, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigInt64LE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. + * + * This function is also available under the `writeBigUint64BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigUInt64BE(0xdecafafecacefaden, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. 
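+ *
+ * A minimal round-trip sketch (assuming Node.js v12+ for `BigInt` support):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ * buf.writeBigUInt64BE(0xdecafafecacefaden, 0);
+ * console.log(buf.readBigUInt64BE(0) === 0xdecafafecacefaden);
+ * // Prints: true
+ * ```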
+ */ + writeBigUInt64BE(value: bigint, offset?: number): number; + /** + * @alias Buffer.writeBigUInt64BE + * @since v14.10.0, v12.19.0 + */ + writeBigUint64BE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigUInt64LE(0xdecafafecacefaden, 0); + * + * console.log(buf); + * // Prints: + * ``` + * + * This function is also available under the `writeBigUint64LE` alias. + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigUInt64LE(value: bigint, offset?: number): number; + /** + * @alias Buffer.writeBigUInt64LE + * @since v14.10.0, v12.19.0 + */ + writeBigUint64LE(value: bigint, offset?: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than an unsigned integer. + * + * This function is also available under the `writeUintLE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeUIntLE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeUIntLE(value: number, offset: number, byteLength: number): number; + /** + * @alias Buffer.writeUIntLE + * @since v14.9.0, v12.19.0 + */ + writeUintLE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than an unsigned integer. + * + * This function is also available under the `writeUintBE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeUIntBE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeUIntBE(value: number, offset: number, byteLength: number): number; + /** + * @alias Buffer.writeUIntBE + * @since v14.9.0, v12.19.0 + */ + writeUintBE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than a signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeIntLE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. 
+ * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeIntLE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined when`value` is anything other than a + * signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeIntBE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeIntBE(value: number, offset: number, byteLength: number): number; + /** + * Reads an unsigned, big-endian 64-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readBigUint64BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + * + * console.log(buf.readBigUInt64BE(0)); + * // Prints: 4294967295n + * ``` + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigUInt64BE(offset?: number): bigint; + /** + * @alias Buffer.readBigUInt64BE + * @since v14.10.0, v12.19.0 + */ + readBigUint64BE(offset?: number): bigint; + /** + * Reads an unsigned, little-endian 64-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readBigUint64LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + * + * console.log(buf.readBigUInt64LE(0)); + * // Prints: 18446744069414584320n + * ``` + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigUInt64LE(offset?: number): bigint; + /** + * @alias Buffer.readBigUInt64LE + * @since v14.10.0, v12.19.0 + */ + readBigUint64LE(offset?: number): bigint; + /** + * Reads a signed, big-endian 64-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed + * values. + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigInt64BE(offset?: number): bigint; + /** + * Reads a signed, little-endian 64-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed + * values. + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigInt64LE(offset?: number): bigint; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as an unsigned, little-endian integer supporting + * up to 48 bits of accuracy. 
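+ *
+ * A minimal round-trip sketch paired with `writeUIntLE()`:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.alloc(6);
+ * buf.writeUIntLE(0x1234567890ab, 0, 6);
+ * console.log(buf.readUIntLE(0, 6).toString(16));
+ * // Prints: 1234567890ab
+ * ```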
+ * + * This function is also available under the `readUintLE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readUIntLE(0, 6).toString(16)); + * // Prints: ab9078563412 + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readUIntLE(offset: number, byteLength: number): number; + /** + * @alias Buffer.readUIntLE + * @since v14.9.0, v12.19.0 + */ + readUintLE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as an unsigned big-endian integer supporting + * up to 48 bits of accuracy. + * + * This function is also available under the `readUintBE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readUIntBE(0, 6).toString(16)); + * // Prints: 1234567890ab + * console.log(buf.readUIntBE(1, 6).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readUIntBE(offset: number, byteLength: number): number; + /** + * @alias Buffer.readUIntBE + * @since v14.9.0, v12.19.0 + */ + readUintBE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as a little-endian, two's complement signed value + * supporting up to 48 bits of accuracy. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readIntLE(0, 6).toString(16)); + * // Prints: -546f87a9cbee + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readIntLE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as a big-endian, two's complement signed value + * supporting up to 48 bits of accuracy. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readIntBE(0, 6).toString(16)); + * // Prints: 1234567890ab + * console.log(buf.readIntBE(1, 6).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * console.log(buf.readIntBE(1, 0).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readIntBE(offset: number, byteLength: number): number; + /** + * Reads an unsigned 8-bit integer from `buf` at the specified `offset`. + * + * This function is also available under the `readUint8` alias. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, -2]); + * + * console.log(buf.readUInt8(0)); + * // Prints: 1 + * console.log(buf.readUInt8(1)); + * // Prints: 254 + * console.log(buf.readUInt8(2)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`. + */ + readUInt8(offset?: number): number; + /** + * @alias Buffer.readUInt8 + * @since v14.9.0, v12.19.0 + */ + readUint8(offset?: number): number; + /** + * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint16LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56]); + * + * console.log(buf.readUInt16LE(0).toString(16)); + * // Prints: 3412 + * console.log(buf.readUInt16LE(1).toString(16)); + * // Prints: 5634 + * console.log(buf.readUInt16LE(2).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readUInt16LE(offset?: number): number; + /** + * @alias Buffer.readUInt16LE + * @since v14.9.0, v12.19.0 + */ + readUint16LE(offset?: number): number; + /** + * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint16BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56]); + * + * console.log(buf.readUInt16BE(0).toString(16)); + * // Prints: 1234 + * console.log(buf.readUInt16BE(1).toString(16)); + * // Prints: 3456 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readUInt16BE(offset?: number): number; + /** + * @alias Buffer.readUInt16BE + * @since v14.9.0, v12.19.0 + */ + readUint16BE(offset?: number): number; + /** + * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint32LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + * + * console.log(buf.readUInt32LE(0).toString(16)); + * // Prints: 78563412 + * console.log(buf.readUInt32LE(1).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readUInt32LE(offset?: number): number; + /** + * @alias Buffer.readUInt32LE + * @since v14.9.0, v12.19.0 + */ + readUint32LE(offset?: number): number; + /** + * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint32BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + * + * console.log(buf.readUInt32BE(0).toString(16)); + * // Prints: 12345678 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. 
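+ *
+ * A minimal sketch of the equivalent `DataView` call (the explicit `byteOffset` matters because pooled `Buffer`s may not start at offset 0 of their backing `ArrayBuffer`):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]);
+ * const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
+ *
+ * console.log(view.getUint32(0, false) === buf.readUInt32BE(0));
+ * // Prints: true
+ * ```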
+ */ + readUInt32BE(offset?: number): number; + /** + * @alias Buffer.readUInt32BE + * @since v14.9.0, v12.19.0 + */ + readUint32BE(offset?: number): number; + /** + * Reads a signed 8-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([-1, 5]); + * + * console.log(buf.readInt8(0)); + * // Prints: -1 + * console.log(buf.readInt8(1)); + * // Prints: 5 + * console.log(buf.readInt8(2)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`. + */ + readInt8(offset?: number): number; + /** + * Reads a signed, little-endian 16-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 5]); + * + * console.log(buf.readInt16LE(0)); + * // Prints: 1280 + * console.log(buf.readInt16LE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readInt16LE(offset?: number): number; + /** + * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 5]); + * + * console.log(buf.readInt16BE(0)); + * // Prints: 5 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readInt16BE(offset?: number): number; + /** + * Reads a signed, little-endian 32-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 0, 0, 5]); + * + * console.log(buf.readInt32LE(0)); + * // Prints: 83886080 + * console.log(buf.readInt32LE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readInt32LE(offset?: number): number; + /** + * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([0, 0, 0, 5]); + * + * console.log(buf.readInt32BE(0)); + * // Prints: 5 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readInt32BE(offset?: number): number; + /** + * Reads a 32-bit, little-endian float from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4]); + * + * console.log(buf.readFloatLE(0)); + * // Prints: 1.539989614439558e-36 + * console.log(buf.readFloatLE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. 
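+ *
+ * A minimal round-trip sketch with `writeFloatLE()` (values that are not exactly representable as 32-bit floats come back rounded):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.alloc(4);
+ * buf.writeFloatLE(1.5, 0);
+ * console.log(buf.readFloatLE(0));
+ * // Prints: 1.5
+ * ```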
+ */ + readFloatLE(offset?: number): number; + /** + * Reads a 32-bit, big-endian float from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4]); + * + * console.log(buf.readFloatBE(0)); + * // Prints: 2.387939260590663e-38 + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readFloatBE(offset?: number): number; + /** + * Reads a 64-bit, little-endian double from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + * + * console.log(buf.readDoubleLE(0)); + * // Prints: 5.447603722011605e-270 + * console.log(buf.readDoubleLE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`. + */ + readDoubleLE(offset?: number): number; + /** + * Reads a 64-bit, big-endian double from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + * + * console.log(buf.readDoubleBE(0)); + * // Prints: 8.20788039913184e-304 + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`. + */ + readDoubleBE(offset?: number): number; + reverse(): this; + /** + * Interprets `buf` as an array of unsigned 16-bit integers and swaps the + * byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 2. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap16(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap16(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * + * One convenient use of `buf.swap16()` is to perform a fast in-place conversion + * between UTF-16 little-endian and UTF-16 big-endian: + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('This is little-endian UTF-16', 'utf16le'); + * buf.swap16(); // Convert to big-endian UTF-16 text. + * ``` + * @since v5.10.0 + * @return A reference to `buf`. + */ + swap16(): Buffer; + /** + * Interprets `buf` as an array of unsigned 32-bit integers and swaps the + * byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 4. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap32(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap32(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * @since v5.10.0 + * @return A reference to `buf`. + */ + swap32(): Buffer; + /** + * Interprets `buf` as an array of 64-bit numbers and swaps byte order _in-place_. + * Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 8. 
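+ *
+ * A minimal sketch of using `swap64()` as an endianness conversion (write in one byte order, read back in the other):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.alloc(8);
+ * buf.writeBigUInt64LE(1n, 0);
+ * buf.swap64();
+ * console.log(buf.readBigUInt64BE(0));
+ * // Prints: 1n
+ * ```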
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap64(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap64(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * @since v6.3.0 + * @return A reference to `buf`. + */ + swap64(): Buffer; + /** + * Writes `value` to `buf` at the specified `offset`. `value` must be a + * valid unsigned 8-bit integer. Behavior is undefined when `value` is anything + * other than an unsigned 8-bit integer. + * + * This function is also available under the `writeUint8` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt8(0x3, 0); + * buf.writeUInt8(0x4, 1); + * buf.writeUInt8(0x23, 2); + * buf.writeUInt8(0x42, 3); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`. + * @return `offset` plus the number of bytes written. + */ + writeUInt8(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt8 + * @since v14.9.0, v12.19.0 + */ + writeUint8(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid unsigned 16-bit integer. Behavior is undefined when `value` is + * anything other than an unsigned 16-bit integer. + * + * This function is also available under the `writeUint16LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt16LE(0xdead, 0); + * buf.writeUInt16LE(0xbeef, 2); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeUInt16LE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt16LE + * @since v14.9.0, v12.19.0 + */ + writeUint16LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid unsigned 16-bit integer. Behavior is undefined when `value`is anything other than an + * unsigned 16-bit integer. + * + * This function is also available under the `writeUint16BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt16BE(0xdead, 0); + * buf.writeUInt16BE(0xbeef, 2); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeUInt16BE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt16BE + * @since v14.9.0, v12.19.0 + */ + writeUint16BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid unsigned 32-bit integer. Behavior is undefined when `value` is + * anything other than an unsigned 32-bit integer. 
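+ *
+ * A minimal round-trip sketch paired with `readUInt32LE()`:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.alloc(4);
+ * buf.writeUInt32LE(0xfeedface, 0);
+ * console.log(buf.readUInt32LE(0).toString(16));
+ * // Prints: feedface
+ * ```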
+ * + * This function is also available under the `writeUint32LE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt32LE(0xfeedface, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeUInt32LE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt32LE + * @since v14.9.0, v12.19.0 + */ + writeUint32LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid unsigned 32-bit integer. Behavior is undefined when `value`is anything other than an + * unsigned 32-bit integer. + * + * This function is also available under the `writeUint32BE` alias. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt32BE(0xfeedface, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeUInt32BE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt32BE + * @since v14.9.0, v12.19.0 + */ + writeUint32BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset`. `value` must be a valid + * signed 8-bit integer. Behavior is undefined when `value` is anything other than + * a signed 8-bit integer. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt8(2, 0); + * buf.writeInt8(-2, 1); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`. + * @return `offset` plus the number of bytes written. + */ + writeInt8(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid signed 16-bit integer. Behavior is undefined when `value` is + * anything other than a signed 16-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt16LE(0x0304, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeInt16LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid signed 16-bit integer. Behavior is undefined when `value` is + * anything other than a signed 16-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. 
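+ *
+ * A minimal sketch with a negative value (stored as its two's complement bit pattern):
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.alloc(2);
+ * buf.writeInt16BE(-1, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer ff ff>
+ * console.log(buf.readInt16BE(0));
+ * // Prints: -1
+ * ```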
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(2);
+ *
+ * buf.writeInt16BE(0x0102, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 01 02>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeInt16BE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid signed 32-bit integer. Behavior is undefined when `value` is
+ * anything other than a signed 32-bit integer.
+ *
+ * The `value` is interpreted and written as a two's complement signed integer.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * buf.writeInt32LE(0x05060708, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 08 07 06 05>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeInt32LE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid signed 32-bit integer. Behavior is undefined when `value` is
+ * anything other than a signed 32-bit integer.
+ *
+ * The `value` is interpreted and written as a two's complement signed integer.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * buf.writeInt32BE(0x01020304, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 01 02 03 04>
+ * ```
+ * @since v0.5.5
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeInt32BE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as little-endian. Behavior is
+ * undefined when `value` is anything other than a JavaScript number.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * buf.writeFloatLE(0xcafebabe, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer bb fe 4a 4f>
+ * ```
+ * @since v0.11.15
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeFloatLE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as big-endian. Behavior is
+ * undefined when `value` is anything other than a JavaScript number.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(4);
+ *
+ * buf.writeFloatBE(0xcafebabe, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 4f 4a fe bb>
+ * ```
+ * @since v0.11.15
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeFloatBE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a JavaScript number. Behavior is undefined when `value` is anything
+ * other than a JavaScript number.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ *
+ * buf.writeDoubleLE(123.456, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 77 be 9f 1a 2f dd 5e 40>
+ * ```
+ * @since v0.11.15
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeDoubleLE(value: number, offset?: number): number;
+ /**
+ * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a JavaScript number. Behavior is undefined when `value` is anything
+ * other than a JavaScript number.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ *
+ * buf.writeDoubleBE(123.456, 0);
+ *
+ * console.log(buf);
+ * // Prints: <Buffer 40 5e dd 2f 1a 9f be 77>
+ * ```
+ * @since v0.11.15
+ * @param value Number to be written to `buf`.
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`.
+ * @return `offset` plus the number of bytes written.
+ */
+ writeDoubleBE(value: number, offset?: number): number;
+ /**
+ * Fills `buf` with the specified `value`. If the `offset` and `end` are not given,
+ * the entire `buf` will be filled:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Fill a `Buffer` with the ASCII character 'h'.
+ *
+ * const b = Buffer.allocUnsafe(50).fill('h');
+ *
+ * console.log(b.toString());
+ * // Prints: hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh
+ *
+ * // Fill a buffer with empty string
+ * const c = Buffer.allocUnsafe(5).fill('');
+ *
+ * console.log(c.fill(''));
+ * // Prints: <Buffer 00 00 00 00 00>
+ * ```
+ *
+ * `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or
+ * integer. If the resulting integer is greater than `255` (decimal), `buf` will be
+ * filled with `value & 255`.
+ *
+ * If the final write of a `fill()` operation falls on a multi-byte character,
+ * then only the bytes of that character that fit into `buf` are written:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * // Fill a `Buffer` with character that takes up two bytes in UTF-8.
+ *
+ * console.log(Buffer.allocUnsafe(5).fill('\u0222'));
+ * // Prints: <Buffer c8 a2 c8 a2 c8>
+ * ```
+ *
+ * If `value` contains invalid characters, it is truncated; if no valid
+ * fill data remains, an exception is thrown:
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.allocUnsafe(5);
+ *
+ * console.log(buf.fill('a'));
+ * // Prints: <Buffer 61 61 61 61 61>
+ * console.log(buf.fill('aazz', 'hex'));
+ * // Prints: <Buffer aa aa aa aa aa>
+ * console.log(buf.fill('zz', 'hex'));
+ * // Throws an exception.
+ * ```
+ * @since v0.5.0
+ * @param value The value with which to fill `buf`. Empty value (string, Uint8Array, Buffer) is coerced to `0`.
+ * @param [offset=0] Number of bytes to skip before starting to fill `buf`.
+ * @param [end=buf.length] Where to stop filling `buf` (not inclusive).
+ * @param [encoding='utf8'] The encoding for `value` if `value` is a string.
+ * @return A reference to `buf`.
+ */
+ fill(value: string | Uint8Array | number, offset?: number, end?: number, encoding?: BufferEncoding): this;
+ /**
+ * If `value` is:
+ *
+ * * a string, `value` is interpreted according to the character encoding in`encoding`.
+ * * a `Buffer` or [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array), `value` will be used in its entirety. + * To compare a partial `Buffer`, use `buf.subarray`. + * * a number, `value` will be interpreted as an unsigned 8-bit integer + * value between `0` and `255`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('this is a buffer'); + * + * console.log(buf.indexOf('this')); + * // Prints: 0 + * console.log(buf.indexOf('is')); + * // Prints: 2 + * console.log(buf.indexOf(Buffer.from('a buffer'))); + * // Prints: 8 + * console.log(buf.indexOf(97)); + * // Prints: 8 (97 is the decimal ASCII value for 'a') + * console.log(buf.indexOf(Buffer.from('a buffer example'))); + * // Prints: -1 + * console.log(buf.indexOf(Buffer.from('a buffer example').slice(0, 8))); + * // Prints: 8 + * + * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + * + * console.log(utf16Buffer.indexOf('\u03a3', 0, 'utf16le')); + * // Prints: 4 + * console.log(utf16Buffer.indexOf('\u03a3', -4, 'utf16le')); + * // Prints: 6 + * ``` + * + * If `value` is not a string, number, or `Buffer`, this method will throw a`TypeError`. If `value` is a number, it will be coerced to a valid byte value, + * an integer between 0 and 255. + * + * If `byteOffset` is not a number, it will be coerced to a number. If the result + * of coercion is `NaN` or `0`, then the entire buffer will be searched. This + * behavior matches [`String.prototype.indexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/indexOf). + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const b = Buffer.from('abcdef'); + * + * // Passing a value that's a number, but not a valid byte. + * // Prints: 2, equivalent to searching for 99 or 'c'. + * console.log(b.indexOf(99.9)); + * console.log(b.indexOf(256 + 99)); + * + * // Passing a byteOffset that coerces to NaN or 0. + * // Prints: 1, searching the whole buffer. + * console.log(b.indexOf('b', undefined)); + * console.log(b.indexOf('b', {})); + * console.log(b.indexOf('b', null)); + * console.log(b.indexOf('b', [])); + * ``` + * + * If `value` is an empty string or empty `Buffer` and `byteOffset` is less + * than `buf.length`, `byteOffset` will be returned. If `value` is empty and`byteOffset` is at least `buf.length`, `buf.length` will be returned. + * @since v1.5.0 + * @param value What to search for. + * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`. + * @return The index of the first occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`. + */ + indexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; + /** + * Identical to `buf.indexOf()`, except the last occurrence of `value` is found + * rather than the first occurrence. 
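Since `byteOffset` lets a search resume past a previous match, every occurrence of a value can be collected with a small loop over `indexOf()`. A minimal sketch (the `indexOfAll` helper is illustrative, not part of the API):

```js
import { Buffer } from 'node:buffer';

// Collect every index at which `needle` occurs in `haystack`.
function indexOfAll(haystack, needle) {
  const indices = [];
  let idx = haystack.indexOf(needle);
  while (idx !== -1) {
    indices.push(idx);
    idx = haystack.indexOf(needle, idx + 1);
  }
  return indices;
}

const buf = Buffer.from('this buffer is a buffer');
console.log(indexOfAll(buf, 'buffer')); // [ 5, 17 ]
```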
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const buf = Buffer.from('this buffer is a buffer'); + * + * console.log(buf.lastIndexOf('this')); + * // Prints: 0 + * console.log(buf.lastIndexOf('buffer')); + * // Prints: 17 + * console.log(buf.lastIndexOf(Buffer.from('buffer'))); + * // Prints: 17 + * console.log(buf.lastIndexOf(97)); + * // Prints: 15 (97 is the decimal ASCII value for 'a') + * console.log(buf.lastIndexOf(Buffer.from('yolo'))); + * // Prints: -1 + * console.log(buf.lastIndexOf('buffer', 5)); + * // Prints: 5 + * console.log(buf.lastIndexOf('buffer', 4)); + * // Prints: -1 + * + * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + * + * console.log(utf16Buffer.lastIndexOf('\u03a3', undefined, 'utf16le')); + * // Prints: 6 + * console.log(utf16Buffer.lastIndexOf('\u03a3', -5, 'utf16le')); + * // Prints: 4 + * ``` + * + * If `value` is not a string, number, or `Buffer`, this method will throw a`TypeError`. If `value` is a number, it will be coerced to a valid byte value, + * an integer between 0 and 255. + * + * If `byteOffset` is not a number, it will be coerced to a number. Any arguments + * that coerce to `NaN`, like `{}` or `undefined`, will search the whole buffer. + * This behavior matches [`String.prototype.lastIndexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/lastIndexOf). + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * const b = Buffer.from('abcdef'); + * + * // Passing a value that's a number, but not a valid byte. + * // Prints: 2, equivalent to searching for 99 or 'c'. + * console.log(b.lastIndexOf(99.9)); + * console.log(b.lastIndexOf(256 + 99)); + * + * // Passing a byteOffset that coerces to NaN. + * // Prints: 1, searching the whole buffer. + * console.log(b.lastIndexOf('b', undefined)); + * console.log(b.lastIndexOf('b', {})); + * + * // Passing a byteOffset that coerces to 0. + * // Prints: -1, equivalent to passing 0. + * console.log(b.lastIndexOf('b', null)); + * console.log(b.lastIndexOf('b', [])); + * ``` + * + * If `value` is an empty string or empty `Buffer`, `byteOffset` will be returned. + * @since v6.0.0 + * @param value What to search for. + * @param [byteOffset=buf.length - 1] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`. + * @return The index of the last occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`. + */ + lastIndexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; + /** + * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `[index, byte]` pairs from the contents + * of `buf`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * + * // Log the entire contents of a `Buffer`. + * + * const buf = Buffer.from('buffer'); + * + * for (const pair of buf.entries()) { + * console.log(pair); + * } + * // Prints: + * // [0, 98] + * // [1, 117] + * // [2, 102] + * // [3, 102] + * // [4, 101] + * // [5, 114] + * ``` + * @since v1.1.0 + */ + entries(): IterableIterator<[number, number]>; + /** + * Equivalent to `buf.indexOf() !== -1`. 
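That equivalence can be checked directly; a short sketch comparing `includes()` with the corresponding `indexOf()` test:

```js
import { Buffer } from 'node:buffer';

const buf = Buffer.from('this is a buffer');

// These two expressions are equivalent by definition.
console.log(buf.includes('a buffer', 4)); // true
console.log(buf.indexOf('a buffer', 4) !== -1); // true

// The equivalence also holds for byte values.
console.log(buf.includes(0x61)); // true (0x61 is 'a')
console.log(buf.indexOf(0x61) !== -1); // true
```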
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from('this is a buffer');
+ *
+ * console.log(buf.includes('this'));
+ * // Prints: true
+ * console.log(buf.includes('is'));
+ * // Prints: true
+ * console.log(buf.includes(Buffer.from('a buffer')));
+ * // Prints: true
+ * console.log(buf.includes(97));
+ * // Prints: true (97 is the decimal ASCII value for 'a')
+ * console.log(buf.includes(Buffer.from('a buffer example')));
+ * // Prints: false
+ * console.log(buf.includes(Buffer.from('a buffer example').slice(0, 8)));
+ * // Prints: true
+ * console.log(buf.includes('this', 4));
+ * // Prints: false
+ * ```
+ * @since v5.3.0
+ * @param value What to search for.
+ * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`.
+ * @param [encoding='utf8'] If `value` is a string, this is its encoding.
+ * @return `true` if `value` was found in `buf`, `false` otherwise.
+ */
+ includes(value: string | number | Buffer, byteOffset?: number, encoding?: BufferEncoding): boolean;
+ /**
+ * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `buf` keys (indices).
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from('buffer');
+ *
+ * for (const key of buf.keys()) {
+ * console.log(key);
+ * }
+ * // Prints:
+ * // 0
+ * // 1
+ * // 2
+ * // 3
+ * // 4
+ * // 5
+ * ```
+ * @since v1.1.0
+ */
+ keys(): IterableIterator<number>;
+ /**
+ * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) for `buf` values (bytes). This function is
+ * called automatically when a `Buffer` is used in a `for..of` statement.
+ *
+ * ```js
+ * import { Buffer } from 'node:buffer';
+ *
+ * const buf = Buffer.from('buffer');
+ *
+ * for (const value of buf.values()) {
+ * console.log(value);
+ * }
+ * // Prints:
+ * // 98
+ * // 117
+ * // 102
+ * // 102
+ * // 101
+ * // 114
+ *
+ * for (const value of buf) {
+ * console.log(value);
+ * }
+ * // Prints:
+ * // 98
+ * // 117
+ * // 102
+ * // 102
+ * // 101
+ * // 114
+ * ```
+ * @since v1.1.0
+ */
+ values(): IterableIterator<number>;
+ }
+ var Buffer: BufferConstructor;
+ /**
+ * Decodes a string of Base64-encoded data into bytes, and encodes those bytes
+ * into a string using Latin-1 (ISO-8859-1).
+ *
+ * The `data` may be any JavaScript-value that can be coerced into a string.
+ *
+ * **This function is only provided for compatibility with legacy web platform APIs**
+ * **and should never be used in new code, because they use strings to represent**
+ * **binary data and predate the introduction of typed arrays in JavaScript.**
+ * **For code running using Node.js APIs, converting between base64-encoded strings**
+ * **and binary data should be performed using `Buffer.from(str, 'base64')` and`buf.toString('base64')`.**
+ * @since v15.13.0, v14.17.0
+ * @legacy Use `Buffer.from(data, 'base64')` instead.
+ * @param data The Base64-encoded input string.
+ */
+ function atob(data: string): string;
+ /**
+ * Decodes a string into bytes using Latin-1 (ISO-8859-1), and encodes those bytes
+ * into a string using Base64.
+ *
+ * The `data` may be any JavaScript-value that can be coerced into a string.
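Following the guidance above, new code should prefer `Buffer` for base64 conversion over `atob()`/`btoa()`. A minimal sketch of the recommended round trip:

```js
import { Buffer } from 'node:buffer';

// Encode binary data to base64 and decode it back using Buffer,
// the approach recommended over atob()/btoa() for Node.js code.
const original = Buffer.from([0xde, 0xad, 0xbe, 0xef]);

const encoded = original.toString('base64');
console.log(encoded); // '3q2+7w=='

const decoded = Buffer.from(encoded, 'base64');
console.log(decoded.equals(original)); // true
```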
+ * + * **This function is only provided for compatibility with legacy web platform APIs** + * **and should never be used in new code, because they use strings to represent** + * **binary data and predate the introduction of typed arrays in JavaScript.** + * **For code running using Node.js APIs, converting between base64-encoded strings** + * **and binary data should be performed using `Buffer.from(str, 'base64')` and`buf.toString('base64')`.** + * @since v15.13.0, v14.17.0 + * @legacy Use `buf.toString('base64')` instead. + * @param data An ASCII (Latin1) string. + */ + function btoa(data: string): string; + interface Blob extends __Blob {} + /** + * `Blob` class is a global reference for `require('node:buffer').Blob` + * https://nodejs.org/api/buffer.html#class-blob + * @since v18.0.0 + */ + var Blob: typeof globalThis extends { + onmessage: any; + Blob: infer T; + } ? T + : typeof NodeBlob; + } +} +declare module "node:buffer" { + export * from "buffer"; +} diff --git a/dist/node_modules/@types/node/ts4.8/child_process.d.ts b/dist/node_modules/@types/node/ts4.8/child_process.d.ts new file mode 100644 index 0000000..a97532b --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/child_process.d.ts @@ -0,0 +1,1540 @@ +/** + * The `node:child_process` module provides the ability to spawn subprocesses in + * a manner that is similar, but not identical, to [`popen(3)`](http://man7.org/linux/man-pages/man3/popen.3.html). This capability + * is primarily provided by the {@link spawn} function: + * + * ```js + * const { spawn } = require('node:child_process'); + * const ls = spawn('ls', ['-lh', '/usr']); + * + * ls.stdout.on('data', (data) => { + * console.log(`stdout: ${data}`); + * }); + * + * ls.stderr.on('data', (data) => { + * console.error(`stderr: ${data}`); + * }); + * + * ls.on('close', (code) => { + * console.log(`child process exited with code ${code}`); + * }); + * ``` + * + * By default, pipes for `stdin`, `stdout`, and `stderr` are established between + * the parent Node.js process and the spawned subprocess. These pipes have + * limited (and platform-specific) capacity. If the subprocess writes to + * stdout in excess of that limit without the output being captured, the + * subprocess blocks waiting for the pipe buffer to accept more data. This is + * identical to the behavior of pipes in the shell. Use the `{ stdio: 'ignore' }`option if the output will not be consumed. + * + * The command lookup is performed using the `options.env.PATH` environment + * variable if `env` is in the `options` object. Otherwise, `process.env.PATH` is + * used. If `options.env` is set without `PATH`, lookup on Unix is performed + * on a default search path search of `/usr/bin:/bin` (see your operating system's + * manual for execvpe/execvp), on Windows the current processes environment + * variable `PATH` is used. + * + * On Windows, environment variables are case-insensitive. Node.js + * lexicographically sorts the `env` keys and uses the first one that + * case-insensitively matches. Only first (in lexicographic order) entry will be + * passed to the subprocess. This might lead to issues on Windows when passing + * objects to the `env` option that have multiple variants of the same key, such as`PATH` and `Path`. + * + * The {@link spawn} method spawns the child process asynchronously, + * without blocking the Node.js event loop. 
The {@link spawnSync} function provides equivalent functionality in a synchronous manner that blocks + * the event loop until the spawned process either exits or is terminated. + * + * For convenience, the `node:child_process` module provides a handful of + * synchronous and asynchronous alternatives to {@link spawn} and {@link spawnSync}. Each of these alternatives are implemented on + * top of {@link spawn} or {@link spawnSync}. + * + * * {@link exec}: spawns a shell and runs a command within that + * shell, passing the `stdout` and `stderr` to a callback function when + * complete. + * * {@link execFile}: similar to {@link exec} except + * that it spawns the command directly without first spawning a shell by + * default. + * * {@link fork}: spawns a new Node.js process and invokes a + * specified module with an IPC communication channel established that allows + * sending messages between parent and child. + * * {@link execSync}: a synchronous version of {@link exec} that will block the Node.js event loop. + * * {@link execFileSync}: a synchronous version of {@link execFile} that will block the Node.js event loop. + * + * For certain use cases, such as automating shell scripts, the `synchronous counterparts` may be more convenient. In many cases, however, + * the synchronous methods can have significant impact on performance due to + * stalling the event loop while spawned processes complete. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/child_process.js) + */ +declare module "child_process" { + import { ObjectEncodingOptions } from "node:fs"; + import { Abortable, EventEmitter } from "node:events"; + import * as net from "node:net"; + import { Pipe, Readable, Stream, Writable } from "node:stream"; + import { URL } from "node:url"; + type Serializable = string | object | number | boolean | bigint; + type SendHandle = net.Socket | net.Server; + /** + * Instances of the `ChildProcess` represent spawned child processes. + * + * Instances of `ChildProcess` are not intended to be created directly. Rather, + * use the {@link spawn}, {@link exec},{@link execFile}, or {@link fork} methods to create + * instances of `ChildProcess`. + * @since v2.2.0 + */ + class ChildProcess extends EventEmitter { + /** + * A `Writable Stream` that represents the child process's `stdin`. + * + * If a child process waits to read all of its input, the child will not continue + * until this stream has been closed via `end()`. + * + * If the child was spawned with `stdio[0]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stdin` is an alias for `subprocess.stdio[0]`. Both properties will + * refer to the same value. + * + * The `subprocess.stdin` property can be `null` or `undefined`if the child process could not be successfully spawned. + * @since v0.1.90 + */ + stdin: Writable | null; + /** + * A `Readable Stream` that represents the child process's `stdout`. + * + * If the child was spawned with `stdio[1]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stdout` is an alias for `subprocess.stdio[1]`. Both properties will + * refer to the same value. + * + * ```js + * const { spawn } = require('node:child_process'); + * + * const subprocess = spawn('ls'); + * + * subprocess.stdout.on('data', (data) => { + * console.log(`Received chunk ${data}`); + * }); + * ``` + * + * The `subprocess.stdout` property can be `null` or `undefined`if the child process could not be successfully spawned. 
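A common pattern with `subprocess.stdout` is to accumulate the emitted chunks and act on the full output once the stream ends; a minimal sketch (the `ls` invocation is illustrative):

```js
const { spawn } = require('node:child_process');

const subprocess = spawn('ls', ['-lh', '/usr']);

// Accumulate stdout chunks and use the full output once the stream ends.
let output = '';
subprocess.stdout.setEncoding('utf8');
subprocess.stdout.on('data', (chunk) => {
  output += chunk;
});
subprocess.stdout.on('end', () => {
  console.log(`captured ${output.length} characters of output`);
});
```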
+ * @since v0.1.90 + */ + stdout: Readable | null; + /** + * A `Readable Stream` that represents the child process's `stderr`. + * + * If the child was spawned with `stdio[2]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stderr` is an alias for `subprocess.stdio[2]`. Both properties will + * refer to the same value. + * + * The `subprocess.stderr` property can be `null` or `undefined`if the child process could not be successfully spawned. + * @since v0.1.90 + */ + stderr: Readable | null; + /** + * The `subprocess.channel` property is a reference to the child's IPC channel. If + * no IPC channel exists, this property is `undefined`. + * @since v7.1.0 + */ + readonly channel?: Pipe | null | undefined; + /** + * A sparse array of pipes to the child process, corresponding with positions in + * the `stdio` option passed to {@link spawn} that have been set + * to the value `'pipe'`. `subprocess.stdio[0]`, `subprocess.stdio[1]`, and`subprocess.stdio[2]` are also available as `subprocess.stdin`,`subprocess.stdout`, and `subprocess.stderr`, + * respectively. + * + * In the following example, only the child's fd `1` (stdout) is configured as a + * pipe, so only the parent's `subprocess.stdio[1]` is a stream, all other values + * in the array are `null`. + * + * ```js + * const assert = require('node:assert'); + * const fs = require('node:fs'); + * const child_process = require('node:child_process'); + * + * const subprocess = child_process.spawn('ls', { + * stdio: [ + * 0, // Use parent's stdin for child. + * 'pipe', // Pipe child's stdout to parent. + * fs.openSync('err.out', 'w'), // Direct child's stderr to a file. + * ], + * }); + * + * assert.strictEqual(subprocess.stdio[0], null); + * assert.strictEqual(subprocess.stdio[0], subprocess.stdin); + * + * assert(subprocess.stdout); + * assert.strictEqual(subprocess.stdio[1], subprocess.stdout); + * + * assert.strictEqual(subprocess.stdio[2], null); + * assert.strictEqual(subprocess.stdio[2], subprocess.stderr); + * ``` + * + * The `subprocess.stdio` property can be `undefined` if the child process could + * not be successfully spawned. + * @since v0.7.10 + */ + readonly stdio: [ + Writable | null, + // stdin + Readable | null, + // stdout + Readable | null, + // stderr + Readable | Writable | null | undefined, + // extra + Readable | Writable | null | undefined, // extra + ]; + /** + * The `subprocess.killed` property indicates whether the child process + * successfully received a signal from `subprocess.kill()`. The `killed` property + * does not indicate that the child process has been terminated. + * @since v0.5.10 + */ + readonly killed: boolean; + /** + * Returns the process identifier (PID) of the child process. If the child process + * fails to spawn due to errors, then the value is `undefined` and `error` is + * emitted. + * + * ```js + * const { spawn } = require('node:child_process'); + * const grep = spawn('grep', ['ssh']); + * + * console.log(`Spawned child pid: ${grep.pid}`); + * grep.stdin.end(); + * ``` + * @since v0.1.90 + */ + readonly pid?: number | undefined; + /** + * The `subprocess.connected` property indicates whether it is still possible to + * send and receive messages from a child process. When `subprocess.connected` is`false`, it is no longer possible to send or receive messages. + * @since v0.7.2 + */ + readonly connected: boolean; + /** + * The `subprocess.exitCode` property indicates the exit code of the child process. 
+ * If the child process is still running, the field will be `null`. + */ + readonly exitCode: number | null; + /** + * The `subprocess.signalCode` property indicates the signal received by + * the child process if any, else `null`. + */ + readonly signalCode: NodeJS.Signals | null; + /** + * The `subprocess.spawnargs` property represents the full list of command-line + * arguments the child process was launched with. + */ + readonly spawnargs: string[]; + /** + * The `subprocess.spawnfile` property indicates the executable file name of + * the child process that is launched. + * + * For {@link fork}, its value will be equal to `process.execPath`. + * For {@link spawn}, its value will be the name of + * the executable file. + * For {@link exec}, its value will be the name of the shell + * in which the child process is launched. + */ + readonly spawnfile: string; + /** + * The `subprocess.kill()` method sends a signal to the child process. If no + * argument is given, the process will be sent the `'SIGTERM'` signal. See [`signal(7)`](http://man7.org/linux/man-pages/man7/signal.7.html) for a list of available signals. This function + * returns `true` if [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) succeeds, and `false` otherwise. + * + * ```js + * const { spawn } = require('node:child_process'); + * const grep = spawn('grep', ['ssh']); + * + * grep.on('close', (code, signal) => { + * console.log( + * `child process terminated due to receipt of signal ${signal}`); + * }); + * + * // Send SIGHUP to process. + * grep.kill('SIGHUP'); + * ``` + * + * The `ChildProcess` object may emit an `'error'` event if the signal + * cannot be delivered. Sending a signal to a child process that has already exited + * is not an error but may have unforeseen consequences. Specifically, if the + * process identifier (PID) has been reassigned to another process, the signal will + * be delivered to that process instead which can have unexpected results. + * + * While the function is called `kill`, the signal delivered to the child process + * may not actually terminate the process. + * + * See [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) for reference. + * + * On Windows, where POSIX signals do not exist, the `signal` argument will be + * ignored, and the process will be killed forcefully and abruptly (similar to`'SIGKILL'`). + * See `Signal Events` for more details. + * + * On Linux, child processes of child processes will not be terminated + * when attempting to kill their parent. This is likely to happen when running a + * new process in a shell or with the use of the `shell` option of `ChildProcess`: + * + * ```js + * 'use strict'; + * const { spawn } = require('node:child_process'); + * + * const subprocess = spawn( + * 'sh', + * [ + * '-c', + * `node -e "setInterval(() => { + * console.log(process.pid, 'is alive') + * }, 500);"`, + * ], { + * stdio: ['inherit', 'inherit', 'inherit'], + * }, + * ); + * + * setTimeout(() => { + * subprocess.kill(); // Does not terminate the Node.js process in the shell. + * }, 2000); + * ``` + * @since v0.1.90 + */ + kill(signal?: NodeJS.Signals | number): boolean; + /** + * Calls {@link ChildProcess.kill} with `'SIGTERM'`. + * @since v20.5.0 + */ + [Symbol.dispose](): void; + /** + * When an IPC channel has been established between the parent and child ( + * i.e. when using {@link fork}), the `subprocess.send()` method can + * be used to send messages to the child process. 
When the child process is a + * Node.js instance, these messages can be received via the `'message'` event. + * + * The message goes through serialization and parsing. The resulting + * message might not be the same as what is originally sent. + * + * For example, in the parent script: + * + * ```js + * const cp = require('node:child_process'); + * const n = cp.fork(`${__dirname}/sub.js`); + * + * n.on('message', (m) => { + * console.log('PARENT got message:', m); + * }); + * + * // Causes the child to print: CHILD got message: { hello: 'world' } + * n.send({ hello: 'world' }); + * ``` + * + * And then the child script, `'sub.js'` might look like this: + * + * ```js + * process.on('message', (m) => { + * console.log('CHILD got message:', m); + * }); + * + * // Causes the parent to print: PARENT got message: { foo: 'bar', baz: null } + * process.send({ foo: 'bar', baz: NaN }); + * ``` + * + * Child Node.js processes will have a `process.send()` method of their own + * that allows the child to send messages back to the parent. + * + * There is a special case when sending a `{cmd: 'NODE_foo'}` message. Messages + * containing a `NODE_` prefix in the `cmd` property are reserved for use within + * Node.js core and will not be emitted in the child's `'message'` event. Rather, such messages are emitted using the`'internalMessage'` event and are consumed internally by Node.js. + * Applications should avoid using such messages or listening for`'internalMessage'` events as it is subject to change without notice. + * + * The optional `sendHandle` argument that may be passed to `subprocess.send()` is + * for passing a TCP server or socket object to the child process. The child will + * receive the object as the second argument passed to the callback function + * registered on the `'message'` event. Any data that is received + * and buffered in the socket will not be sent to the child. + * + * The optional `callback` is a function that is invoked after the message is + * sent but before the child may have received it. The function is called with a + * single argument: `null` on success, or an `Error` object on failure. + * + * If no `callback` function is provided and the message cannot be sent, an`'error'` event will be emitted by the `ChildProcess` object. This can + * happen, for instance, when the child process has already exited. + * + * `subprocess.send()` will return `false` if the channel has closed or when the + * backlog of unsent messages exceeds a threshold that makes it unwise to send + * more. Otherwise, the method returns `true`. The `callback` function can be + * used to implement flow control. + * + * #### Example: sending a server object + * + * The `sendHandle` argument can be used, for instance, to pass the handle of + * a TCP server object to the child process as illustrated in the example below: + * + * ```js + * const subprocess = require('node:child_process').fork('subprocess.js'); + * + * // Open up the server object and send the handle. 
+ * const server = require('node:net').createServer(); + * server.on('connection', (socket) => { + * socket.end('handled by parent'); + * }); + * server.listen(1337, () => { + * subprocess.send('server', server); + * }); + * ``` + * + * The child would then receive the server object as: + * + * ```js + * process.on('message', (m, server) => { + * if (m === 'server') { + * server.on('connection', (socket) => { + * socket.end('handled by child'); + * }); + * } + * }); + * ``` + * + * Once the server is now shared between the parent and child, some connections + * can be handled by the parent and some by the child. + * + * While the example above uses a server created using the `node:net` module,`node:dgram` module servers use exactly the same workflow with the exceptions of + * listening on a `'message'` event instead of `'connection'` and using`server.bind()` instead of `server.listen()`. This is, however, only + * supported on Unix platforms. + * + * #### Example: sending a socket object + * + * Similarly, the `sendHandler` argument can be used to pass the handle of a + * socket to the child process. The example below spawns two children that each + * handle connections with "normal" or "special" priority: + * + * ```js + * const { fork } = require('node:child_process'); + * const normal = fork('subprocess.js', ['normal']); + * const special = fork('subprocess.js', ['special']); + * + * // Open up the server and send sockets to child. Use pauseOnConnect to prevent + * // the sockets from being read before they are sent to the child process. + * const server = require('node:net').createServer({ pauseOnConnect: true }); + * server.on('connection', (socket) => { + * + * // If this is special priority... + * if (socket.remoteAddress === '74.125.127.100') { + * special.send('socket', socket); + * return; + * } + * // This is normal priority. + * normal.send('socket', socket); + * }); + * server.listen(1337); + * ``` + * + * The `subprocess.js` would receive the socket handle as the second argument + * passed to the event callback function: + * + * ```js + * process.on('message', (m, socket) => { + * if (m === 'socket') { + * if (socket) { + * // Check that the client socket exists. + * // It is possible for the socket to be closed between the time it is + * // sent and the time it is received in the child process. + * socket.end(`Request handled with ${process.argv[2]} priority`); + * } + * } + * }); + * ``` + * + * Do not use `.maxConnections` on a socket that has been passed to a subprocess. + * The parent cannot track when the socket is destroyed. + * + * Any `'message'` handlers in the subprocess should verify that `socket` exists, + * as the connection may have been closed during the time it takes to send the + * connection to the child. + * @since v0.5.9 + * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. `options` supports the following properties: + */ + send(message: Serializable, callback?: (error: Error | null) => void): boolean; + send(message: Serializable, sendHandle?: SendHandle, callback?: (error: Error | null) => void): boolean; + send( + message: Serializable, + sendHandle?: SendHandle, + options?: MessageOptions, + callback?: (error: Error | null) => void, + ): boolean; + /** + * Closes the IPC channel between parent and child, allowing the child to exit + * gracefully once there are no other connections keeping it alive. 
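The boolean return value and optional `callback` of `subprocess.send()` described above can be used for simple flow control when sending many messages; a rough sketch (the `subprocess.js` script and the message payloads are assumed, not part of the API):

```js
const { fork } = require('node:child_process');

// Assumes a child script 'subprocess.js' that consumes 'message' events.
const subprocess = fork('subprocess.js');

const messages = Array.from({ length: 1000 }, (_, i) => ({ seq: i }));

// Send one message at a time, waiting for each callback before sending the
// next, so the backlog of unsent messages cannot grow without bound.
function sendNext(index) {
  if (index >= messages.length) {
    subprocess.disconnect();
    return;
  }
  subprocess.send(messages[index], (err) => {
    if (err) {
      console.error('send failed:', err);
      return;
    }
    sendNext(index + 1);
  });
}

sendNext(0);
```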
After calling + * this method the `subprocess.connected` and `process.connected` properties in + * both the parent and child (respectively) will be set to `false`, and it will be + * no longer possible to pass messages between the processes. + * + * The `'disconnect'` event will be emitted when there are no messages in the + * process of being received. This will most often be triggered immediately after + * calling `subprocess.disconnect()`. + * + * When the child process is a Node.js instance (e.g. spawned using {@link fork}), the `process.disconnect()` method can be invoked + * within the child process to close the IPC channel as well. + * @since v0.7.2 + */ + disconnect(): void; + /** + * By default, the parent will wait for the detached child to exit. To prevent the + * parent from waiting for a given `subprocess` to exit, use the`subprocess.unref()` method. Doing so will cause the parent's event loop to not + * include the child in its reference count, allowing the parent to exit + * independently of the child, unless there is an established IPC channel between + * the child and the parent. + * + * ```js + * const { spawn } = require('node:child_process'); + * + * const subprocess = spawn(process.argv[0], ['child_program.js'], { + * detached: true, + * stdio: 'ignore', + * }); + * + * subprocess.unref(); + * ``` + * @since v0.7.10 + */ + unref(): void; + /** + * Calling `subprocess.ref()` after making a call to `subprocess.unref()` will + * restore the removed reference count for the child process, forcing the parent + * to wait for the child to exit before exiting itself. + * + * ```js + * const { spawn } = require('node:child_process'); + * + * const subprocess = spawn(process.argv[0], ['child_program.js'], { + * detached: true, + * stdio: 'ignore', + * }); + * + * subprocess.unref(); + * subprocess.ref(); + * ``` + * @since v0.7.10 + */ + ref(): void; + /** + * events.EventEmitter + * 1. close + * 2. disconnect + * 3. error + * 4. exit + * 5. message + * 6. 
spawn + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + addListener(event: "disconnect", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + addListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + addListener(event: "spawn", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close", code: number | null, signal: NodeJS.Signals | null): boolean; + emit(event: "disconnect"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "exit", code: number | null, signal: NodeJS.Signals | null): boolean; + emit(event: "message", message: Serializable, sendHandle: SendHandle): boolean; + emit(event: "spawn", listener: () => void): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + on(event: "disconnect", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + on(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + on(event: "spawn", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + once(event: "disconnect", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + once(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + once(event: "spawn", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependListener(event: "disconnect", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "exit", listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + prependListener(event: "spawn", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "close", + listener: (code: number | null, signal: NodeJS.Signals | null) => void, + ): this; + prependOnceListener(event: "disconnect", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener( + event: "exit", + listener: (code: number | null, signal: NodeJS.Signals | null) => void, + ): this; + prependOnceListener(event: "message", listener: (message: Serializable, sendHandle: SendHandle) => void): this; + prependOnceListener(event: "spawn", listener: () => void): this; + } + // return this object when stdio option is undefined or not specified + interface ChildProcessWithoutNullStreams extends ChildProcess { + stdin: Writable; + stdout: 
Readable;
+ stderr: Readable;
+ readonly stdio: [
+ Writable,
+ Readable,
+ Readable,
+ // stderr
+ Readable | Writable | null | undefined,
+ // extra, no modification
+ Readable | Writable | null | undefined, // extra, no modification
+ ];
+ }
+ // return this object when stdio option is a tuple of 3
+ interface ChildProcessByStdio<I extends null | Writable, O extends null | Readable, E extends null | Readable>
+ extends ChildProcess
+ {
+ stdin: I;
+ stdout: O;
+ stderr: E;
+ readonly stdio: [
+ I,
+ O,
+ E,
+ Readable | Writable | null | undefined,
+ // extra, no modification
+ Readable | Writable | null | undefined, // extra, no modification
+ ];
+ }
+ interface MessageOptions {
+ keepOpen?: boolean | undefined;
+ }
+ type IOType = "overlapped" | "pipe" | "ignore" | "inherit";
+ type StdioOptions = IOType | Array<IOType | "ipc" | Stream | number | null | undefined>;
+ type SerializationType = "json" | "advanced";
+ interface MessagingOptions extends Abortable {
+ /**
+ * Specify the kind of serialization used for sending messages between processes.
+ * @default 'json'
+ */
+ serialization?: SerializationType | undefined;
+ /**
+ * The signal value to be used when the spawned process will be killed by the abort signal.
+ * @default 'SIGTERM'
+ */
+ killSignal?: NodeJS.Signals | number | undefined;
+ /**
+ * In milliseconds the maximum amount of time the process is allowed to run.
+ */
+ timeout?: number | undefined;
+ }
+ interface ProcessEnvOptions {
+ uid?: number | undefined;
+ gid?: number | undefined;
+ cwd?: string | URL | undefined;
+ env?: NodeJS.ProcessEnv | undefined;
+ }
+ interface CommonOptions extends ProcessEnvOptions {
+ /**
+ * @default false
+ */
+ windowsHide?: boolean | undefined;
+ /**
+ * @default 0
+ */
+ timeout?: number | undefined;
+ }
+ interface CommonSpawnOptions extends CommonOptions, MessagingOptions, Abortable {
+ argv0?: string | undefined;
+ /**
+ * Can be set to 'pipe', 'inherit', 'overlapped', or 'ignore', or an array of these strings.
+ * If passed as an array, the first element is used for `stdin`, the second for
+ * `stdout`, and the third for `stderr`. A fourth element can be used to
+ * specify the `stdio` behavior beyond the standard streams. See
+ * {@link ChildProcess.stdio} for more information.
+ *
+ * @default 'pipe'
+ */
+ stdio?: StdioOptions | undefined;
+ shell?: boolean | string | undefined;
+ windowsVerbatimArguments?: boolean | undefined;
+ }
+ interface SpawnOptions extends CommonSpawnOptions {
+ detached?: boolean | undefined;
+ }
+ interface SpawnOptionsWithoutStdio extends SpawnOptions {
+ stdio?: StdioPipeNamed | StdioPipe[] | undefined;
+ }
+ type StdioNull = "inherit" | "ignore" | Stream;
+ type StdioPipeNamed = "pipe" | "overlapped";
+ type StdioPipe = undefined | null | StdioPipeNamed;
+ interface SpawnOptionsWithStdioTuple<
+ Stdin extends StdioNull | StdioPipe,
+ Stdout extends StdioNull | StdioPipe,
+ Stderr extends StdioNull | StdioPipe,
+ > extends SpawnOptions {
+ stdio: [Stdin, Stdout, Stderr];
+ }
+ /**
+ * The `child_process.spawn()` method spawns a new process using the given`command`, with command-line arguments in `args`. If omitted, `args` defaults
+ * to an empty array.
+ *
+ * **If the `shell` option is enabled, do not pass unsanitized user input to this**
+ * **function. Any input containing shell metacharacters may be used to trigger**
+ * **arbitrary command execution.**
+ *
+ * A third argument may be used to specify additional options, with these defaults:
+ *
+ * ```js
+ * const defaults = {
+ * cwd: undefined,
+ * env: process.env,
+ * };
+ * ```
+ *
+ * Use `cwd` to specify the working directory from which the process is spawned.
+ * If not given, the default is to inherit the current working directory. If given, + * but the path does not exist, the child process emits an `ENOENT` error + * and exits immediately. `ENOENT` is also emitted when the command + * does not exist. + * + * Use `env` to specify environment variables that will be visible to the new + * process, the default is `process.env`. + * + * `undefined` values in `env` will be ignored. + * + * Example of running `ls -lh /usr`, capturing `stdout`, `stderr`, and the + * exit code: + * + * ```js + * const { spawn } = require('node:child_process'); + * const ls = spawn('ls', ['-lh', '/usr']); + * + * ls.stdout.on('data', (data) => { + * console.log(`stdout: ${data}`); + * }); + * + * ls.stderr.on('data', (data) => { + * console.error(`stderr: ${data}`); + * }); + * + * ls.on('close', (code) => { + * console.log(`child process exited with code ${code}`); + * }); + * ``` + * + * Example: A very elaborate way to run `ps ax | grep ssh` + * + * ```js + * const { spawn } = require('node:child_process'); + * const ps = spawn('ps', ['ax']); + * const grep = spawn('grep', ['ssh']); + * + * ps.stdout.on('data', (data) => { + * grep.stdin.write(data); + * }); + * + * ps.stderr.on('data', (data) => { + * console.error(`ps stderr: ${data}`); + * }); + * + * ps.on('close', (code) => { + * if (code !== 0) { + * console.log(`ps process exited with code ${code}`); + * } + * grep.stdin.end(); + * }); + * + * grep.stdout.on('data', (data) => { + * console.log(data.toString()); + * }); + * + * grep.stderr.on('data', (data) => { + * console.error(`grep stderr: ${data}`); + * }); + * + * grep.on('close', (code) => { + * if (code !== 0) { + * console.log(`grep process exited with code ${code}`); + * } + * }); + * ``` + * + * Example of checking for failed `spawn`: + * + * ```js + * const { spawn } = require('node:child_process'); + * const subprocess = spawn('bad_command'); + * + * subprocess.on('error', (err) => { + * console.error('Failed to start subprocess.'); + * }); + * ``` + * + * Certain platforms (macOS, Linux) will use the value of `argv[0]` for the process + * title while others (Windows, SunOS) will use `command`. + * + * Node.js overwrites `argv[0]` with `process.execPath` on startup, so`process.argv[0]` in a Node.js child process will not match the `argv0`parameter passed to `spawn` from the parent. Retrieve + * it with the`process.argv0` property instead. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { spawn } = require('node:child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const grep = spawn('grep', ['ssh'], { signal }); + * grep.on('error', (err) => { + * // This will be called with err being an AbortError if the controller aborts + * }); + * controller.abort(); // Stops the child process + * ``` + * @since v0.1.90 + * @param command The command to run. + * @param args List of string arguments. 
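Callers frequently wrap `spawn()` in a `Promise` that settles on the `'close'` or `'error'` event described above; a minimal sketch along those lines (the `run` helper is illustrative):

```js
const { spawn } = require('node:child_process');

// Resolve with the exit code once the child closes; reject if it fails to spawn.
function run(command, args = []) {
  return new Promise((resolve, reject) => {
    const child = spawn(command, args, { stdio: 'inherit' });
    child.on('error', reject);
    child.on('close', (code, signal) => {
      resolve({ code, signal });
    });
  });
}

run('ls', ['-lh', '/usr'])
  .then(({ code }) => console.log(`exited with code ${code}`))
  .catch((err) => console.error('failed to start subprocess:', err));
```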
+ */
+ function spawn(command: string, options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioPipe>,
+ ): ChildProcessByStdio<Writable, Readable, Readable>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioNull>,
+ ): ChildProcessByStdio<Writable, Readable, null>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioPipe>,
+ ): ChildProcessByStdio<Writable, null, Readable>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioPipe>,
+ ): ChildProcessByStdio<null, Readable, Readable>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioNull>,
+ ): ChildProcessByStdio<Writable, null, null>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioNull>,
+ ): ChildProcessByStdio<null, Readable, null>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioPipe>,
+ ): ChildProcessByStdio<null, null, Readable>;
+ function spawn(
+ command: string,
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioNull>,
+ ): ChildProcessByStdio<null, null, null>;
+ function spawn(command: string, options: SpawnOptions): ChildProcess;
+ // overloads of spawn with 'args'
+ function spawn(
+ command: string,
+ args?: readonly string[],
+ options?: SpawnOptionsWithoutStdio,
+ ): ChildProcessWithoutNullStreams;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioPipe>,
+ ): ChildProcessByStdio<Writable, Readable, Readable>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioNull>,
+ ): ChildProcessByStdio<Writable, Readable, null>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioPipe>,
+ ): ChildProcessByStdio<Writable, null, Readable>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioPipe>,
+ ): ChildProcessByStdio<null, Readable, Readable>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioNull>,
+ ): ChildProcessByStdio<Writable, null, null>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioNull>,
+ ): ChildProcessByStdio<null, Readable, null>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioPipe>,
+ ): ChildProcessByStdio<null, null, Readable>;
+ function spawn(
+ command: string,
+ args: readonly string[],
+ options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioNull>,
+ ): ChildProcessByStdio<null, null, null>;
+ function spawn(command: string, args: readonly string[], options: SpawnOptions): ChildProcess;
+ interface ExecOptions extends CommonOptions {
+ shell?: string | undefined;
+ signal?: AbortSignal | undefined;
+ maxBuffer?: number | undefined;
+ killSignal?: NodeJS.Signals | number | undefined;
+ }
+ interface ExecOptionsWithStringEncoding extends ExecOptions {
+ encoding: BufferEncoding;
+ }
+ interface ExecOptionsWithBufferEncoding extends ExecOptions {
+ encoding: BufferEncoding | null; // specify `null`.
+ }
+ interface ExecException extends Error {
+ cmd?: string | undefined;
+ killed?: boolean | undefined;
+ code?: number | undefined;
+ signal?: NodeJS.Signals | undefined;
+ }
+ /**
+ * Spawns a shell then executes the `command` within that shell, buffering any
+ * generated output. The `command` string passed to the exec function is processed
+ * directly by the shell and special characters (vary based on [shell](https://en.wikipedia.org/wiki/List_of_command-line_interpreters))
+ * need to be dealt with accordingly:
+ *
+ * ```js
+ * const { exec } = require('node:child_process');
+ *
+ * exec('"/path/to/test file/test.sh" arg1 arg2');
+ * // Double quotes are used so that the space in the path is not interpreted as
+ * // a delimiter of multiple arguments.
+ * + * exec('echo "The \\$HOME variable is $HOME"'); + * // The $HOME variable is escaped in the first instance, but not in the second. + * ``` + * + * **Never pass unsanitized user input to this function. Any input containing shell** + * **metacharacters may be used to trigger arbitrary command execution.** + * + * If a `callback` function is provided, it is called with the arguments`(error, stdout, stderr)`. On success, `error` will be `null`. On error,`error` will be an instance of `Error`. The + * `error.code` property will be + * the exit code of the process. By convention, any exit code other than `0`indicates an error. `error.signal` will be the signal that terminated the + * process. + * + * The `stdout` and `stderr` arguments passed to the callback will contain the + * stdout and stderr output of the child process. By default, Node.js will decode + * the output as UTF-8 and pass strings to the callback. The `encoding` option + * can be used to specify the character encoding used to decode the stdout and + * stderr output. If `encoding` is `'buffer'`, or an unrecognized character + * encoding, `Buffer` objects will be passed to the callback instead. + * + * ```js + * const { exec } = require('node:child_process'); + * exec('cat *.js missing_file | wc -l', (error, stdout, stderr) => { + * if (error) { + * console.error(`exec error: ${error}`); + * return; + * } + * console.log(`stdout: ${stdout}`); + * console.error(`stderr: ${stderr}`); + * }); + * ``` + * + * If `timeout` is greater than `0`, the parent will send the signal + * identified by the `killSignal` property (the default is `'SIGTERM'`) if the + * child runs longer than `timeout` milliseconds. + * + * Unlike the [`exec(3)`](http://man7.org/linux/man-pages/man3/exec.3.html) POSIX system call, `child_process.exec()` does not replace + * the existing process and uses a shell to execute the command. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned`ChildProcess` instance is attached to the `Promise` as a `child` property. In + * case of an error (including any error resulting in an exit code other than 0), a + * rejected promise is returned, with the same `error` object given in the + * callback, but with two additional properties `stdout` and `stderr`. + * + * ```js + * const util = require('node:util'); + * const exec = util.promisify(require('node:child_process').exec); + * + * async function lsExample() { + * const { stdout, stderr } = await exec('ls'); + * console.log('stdout:', stdout); + * console.error('stderr:', stderr); + * } + * lsExample(); + * ``` + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { exec } = require('node:child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = exec('grep ssh', { signal }, (error) => { + * console.error(error); // an AbortError + * }); + * controller.abort(); + * ``` + * @since v0.1.90 + * @param command The command to run, with space-separated arguments. + * @param callback called with the output when process terminates. 
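When the callback receives an error, its `code` and `signal` properties describe how the child ended, as noted above; a small sketch of inspecting them (the command is illustrative):

```js
const { exec } = require('node:child_process');

// Inspect the error object: `code` is the exit code, `signal` the terminating
// signal (if any), and `stderr` holds the child's diagnostic output.
exec('exit 2', (error, stdout, stderr) => {
  if (error) {
    console.error(`command failed: code=${error.code} signal=${error.signal}`);
    console.error(`stderr: ${stderr}`);
    return;
  }
  console.log(`stdout: ${stdout}`);
});
```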
+ */
+ function exec(
+ command: string,
+ callback?: (error: ExecException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`.
+ function exec(
+ command: string,
+ options: {
+ encoding: "buffer" | null;
+ } & ExecOptions,
+ callback?: (error: ExecException | null, stdout: Buffer, stderr: Buffer) => void,
+ ): ChildProcess;
+ // `options` with well known `encoding` means stdout/stderr are definitely `string`.
+ function exec(
+ command: string,
+ options: {
+ encoding: BufferEncoding;
+ } & ExecOptions,
+ callback?: (error: ExecException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`.
+ // There is no guarantee the `encoding` is a known `BufferEncoding`, as `string` is a superset of `BufferEncoding`.
+ function exec(
+ command: string,
+ options: {
+ encoding: BufferEncoding;
+ } & ExecOptions,
+ callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void,
+ ): ChildProcess;
+ // `options` without an `encoding` means stdout/stderr are definitely `string`.
+ function exec(
+ command: string,
+ options: ExecOptions,
+ callback?: (error: ExecException | null, stdout: string, stderr: string) => void,
+ ): ChildProcess;
+ // fallback if nothing else matches. Worst case is always `string | Buffer`.
+ function exec(
+ command: string,
+ options: (ObjectEncodingOptions & ExecOptions) | undefined | null,
+ callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void,
+ ): ChildProcess;
+ interface PromiseWithChild<T> extends Promise<T> {
+ child: ChildProcess;
+ }
+ namespace exec {
+ function __promisify__(command: string): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ command: string,
+ options: {
+ encoding: "buffer" | null;
+ } & ExecOptions,
+ ): PromiseWithChild<{
+ stdout: Buffer;
+ stderr: Buffer;
+ }>;
+ function __promisify__(
+ command: string,
+ options: {
+ encoding: BufferEncoding;
+ } & ExecOptions,
+ ): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ command: string,
+ options: ExecOptions,
+ ): PromiseWithChild<{
+ stdout: string;
+ stderr: string;
+ }>;
+ function __promisify__(
+ command: string,
+ options?: (ObjectEncodingOptions & ExecOptions) | null,
+ ): PromiseWithChild<{
+ stdout: string | Buffer;
+ stderr: string | Buffer;
+ }>;
+ }
+ interface ExecFileOptions extends CommonOptions, Abortable {
+ maxBuffer?: number | undefined;
+ killSignal?: NodeJS.Signals | number | undefined;
+ windowsVerbatimArguments?: boolean | undefined;
+ shell?: boolean | string | undefined;
+ signal?: AbortSignal | undefined;
+ }
+ interface ExecFileOptionsWithStringEncoding extends ExecFileOptions {
+ encoding: BufferEncoding;
+ }
+ interface ExecFileOptionsWithBufferEncoding extends ExecFileOptions {
+ encoding: "buffer" | null;
+ }
+ interface ExecFileOptionsWithOtherEncoding extends ExecFileOptions {
+ encoding: BufferEncoding;
+ }
+ type ExecFileException =
+ & Omit<ExecException, "code">
+ & Omit<NodeJS.ErrnoException, "code">
+ & { code?: string | number | undefined | null };
+ /**
+ * The `child_process.execFile()` function is similar to {@link exec} except that it does not spawn a shell by default. Rather, the specified
+ * executable `file` is spawned directly as a new process making it slightly more
+ * efficient than {@link exec}.
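Given the `PromiseWithChild` shape above, the promisified `exec()` still exposes the underlying `ChildProcess` through its `child` property while the promise is pending; a minimal sketch:

```js
const util = require('node:util');
const exec = util.promisify(require('node:child_process').exec);

async function main() {
  const promise = exec('echo hello');

  // The pending promise carries the underlying ChildProcess as `child`.
  console.log('child pid:', promise.child.pid);

  const { stdout } = await promise;
  console.log('stdout:', stdout.trim());
}

main();
```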
+ * + * The same options as {@link exec} are supported. Since a shell is + * not spawned, behaviors such as I/O redirection and file globbing are not + * supported. + * + * ```js + * const { execFile } = require('node:child_process'); + * const child = execFile('node', ['--version'], (error, stdout, stderr) => { + * if (error) { + * throw error; + * } + * console.log(stdout); + * }); + * ``` + * + * The `stdout` and `stderr` arguments passed to the callback will contain the + * stdout and stderr output of the child process. By default, Node.js will decode + * the output as UTF-8 and pass strings to the callback. The `encoding` option + * can be used to specify the character encoding used to decode the stdout and + * stderr output. If `encoding` is `'buffer'`, or an unrecognized character + * encoding, `Buffer` objects will be passed to the callback instead. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned`ChildProcess` instance is attached to the `Promise` as a `child` property. In + * case of an error (including any error resulting in an exit code other than 0), a + * rejected promise is returned, with the same `error` object given in the + * callback, but with two additional properties `stdout` and `stderr`. + * + * ```js + * const util = require('node:util'); + * const execFile = util.promisify(require('node:child_process').execFile); + * async function getVersion() { + * const { stdout } = await execFile('node', ['--version']); + * console.log(stdout); + * } + * getVersion(); + * ``` + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { execFile } = require('node:child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = execFile('node', ['--version'], { signal }, (error) => { + * console.error(error); // an AbortError + * }); + * controller.abort(); + * ``` + * @since v0.1.91 + * @param file The name or path of the executable file to run. + * @param args List of string arguments. + * @param callback Called with the output when process terminates. + */ + function execFile(file: string): ChildProcess; + function execFile( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): ChildProcess; + function execFile(file: string, args?: readonly string[] | null): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): ChildProcess; + // no `options` definitely means stdout/stderr are `string`. + function execFile( + file: string, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`. 
+ function execFile( + file: string, + options: ExecFileOptionsWithBufferEncoding, + callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithBufferEncoding, + callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void, + ): ChildProcess; + // `options` with well known `encoding` means stdout/stderr are definitely `string`. + function execFile( + file: string, + options: ExecFileOptionsWithStringEncoding, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithStringEncoding, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`. + // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`. + function execFile( + file: string, + options: ExecFileOptionsWithOtherEncoding, + callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithOtherEncoding, + callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void, + ): ChildProcess; + // `options` without an `encoding` means stdout/stderr are definitely `string`. + function execFile( + file: string, + options: ExecFileOptions, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptions, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void, + ): ChildProcess; + // fallback if nothing else matches. Worst case is always `string | Buffer`. 
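An illustrative aside from the editor, not part of the vendored `@types/node` file: the typed overloads above (and the fallback overloads that follow) resolve the callback's `stdout`/`stderr` types from the `encoding` option. A minimal sketch of how that plays out at a call site:

```js
import { execFile } from "node:child_process";

// No encoding option (or a BufferEncoding such as "utf8"): stdout/stderr are typed as `string`.
execFile("node", ["--version"], (error, stdout, stderr) => {
    if (error) throw error;
    console.log(stdout.trim()); // string methods are available
});

// `encoding: "buffer"` selects the Buffer overload: stdout/stderr are typed as `Buffer`.
execFile("node", ["--version"], { encoding: "buffer" }, (error, stdout, stderr) => {
    if (error) throw error;
    console.log(stdout.byteLength); // Buffer API is available
});
```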
+ function execFile( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + callback: + | ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void) + | undefined + | null, + ): ChildProcess; + function execFile( + file: string, + args: readonly string[] | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + callback: + | ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void) + | undefined + | null, + ): ChildProcess; + namespace execFile { + function __promisify__(file: string): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithBufferEncoding, + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithBufferEncoding, + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithStringEncoding, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithStringEncoding, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithOtherEncoding, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptionsWithOtherEncoding, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + options: ExecFileOptions, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: ExecFileOptions, + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + args: readonly string[] | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + } + interface ForkOptions extends ProcessEnvOptions, MessagingOptions, Abortable { + execPath?: string | undefined; + execArgv?: string[] | undefined; + silent?: boolean | undefined; + /** + * Can be set to 'pipe', 'inherit', 'overlapped', or 'ignore', or an array of these strings. + * If passed as an array, the first element is used for `stdin`, the second for + * `stdout`, and the third for `stderr`. A fourth element can be used to + * specify the `stdio` behavior beyond the standard streams. See + * {@link ChildProcess.stdio} for more information. + * + * @default 'pipe' + */ + stdio?: StdioOptions | undefined; + detached?: boolean | undefined; + windowsVerbatimArguments?: boolean | undefined; + } + /** + * The `child_process.fork()` method is a special case of {@link spawn} used specifically to spawn new Node.js processes. 
+ * Like {@link spawn}, a `ChildProcess` object is returned. The
+ * returned `ChildProcess` will have an additional communication channel
+ * built-in that allows messages to be passed back and forth between the parent and
+ * child. See `subprocess.send()` for details.
+ *
+ * Keep in mind that spawned Node.js child processes are
+ * independent of the parent with exception of the IPC communication channel
+ * that is established between the two. Each process has its own memory, with
+ * their own V8 instances. Because of the additional resource allocations
+ * required, spawning a large number of child Node.js processes is not
+ * recommended.
+ *
+ * By default, `child_process.fork()` will spawn new Node.js instances using the `process.execPath` of the parent process. The `execPath` property in the`options` object allows for an alternative
+ * execution path to be used.
+ *
+ * Node.js processes launched with a custom `execPath` will communicate with the
+ * parent process using the file descriptor (fd) identified using the
+ * environment variable `NODE_CHANNEL_FD` on the child process.
+ *
+ * Unlike the [`fork(2)`](http://man7.org/linux/man-pages/man2/fork.2.html) POSIX system call, `child_process.fork()` does not clone the
+ * current process.
+ *
+ * The `shell` option available in {@link spawn} is not supported by`child_process.fork()` and will be ignored if set.
+ *
+ * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except
+ * the error passed to the callback will be an `AbortError`:
+ *
+ * ```js
+ * if (process.argv[2] === 'child') {
+ * setTimeout(() => {
+ * console.log(`Hello from ${process.argv[2]}!`);
+ * }, 1_000);
+ * } else {
+ * const { fork } = require('node:child_process');
+ * const controller = new AbortController();
+ * const { signal } = controller;
+ * const child = fork(__filename, ['child'], { signal });
+ * child.on('error', (err) => {
+ * // This will be called with err being an AbortError if the controller aborts
+ * });
+ * controller.abort(); // Stops the child process
+ * }
+ * ```
+ * @since v0.5.0
+ * @param modulePath The module to run in the child.
+ * @param args List of string arguments.
+ */
+ function fork(modulePath: string, options?: ForkOptions): ChildProcess;
+ function fork(modulePath: string, args?: readonly string[], options?: ForkOptions): ChildProcess;
+ interface SpawnSyncOptions extends CommonSpawnOptions {
+ input?: string | NodeJS.ArrayBufferView | undefined;
+ maxBuffer?: number | undefined;
+ encoding?: BufferEncoding | "buffer" | null | undefined;
+ }
+ interface SpawnSyncOptionsWithStringEncoding extends SpawnSyncOptions {
+ encoding: BufferEncoding;
+ }
+ interface SpawnSyncOptionsWithBufferEncoding extends SpawnSyncOptions {
+ encoding?: "buffer" | null | undefined;
+ }
+ interface SpawnSyncReturns<T> {
+ pid: number;
+ output: Array<T | null>;
+ stdout: T;
+ stderr: T;
+ status: number | null;
+ signal: NodeJS.Signals | null;
+ error?: Error | undefined;
+ }
+ /**
+ * The `child_process.spawnSync()` method is generally identical to {@link spawn} with the exception that the function will not return
+ * until the child process has fully closed. When a timeout has been encountered
+ * and `killSignal` is sent, the method won't return until the process has
+ * completely exited. If the process intercepts and handles the `SIGTERM` signal
+ * and doesn't exit, the parent process will wait until the child process has
+ * exited.
+ *
+ * **If the `shell` option is enabled, do not pass unsanitized user input to this**
+ * **function. Any input containing shell metacharacters may be used to trigger**
+ * **arbitrary command execution.**
+ * @since v0.11.12
+ * @param command The command to run.
+ * @param args List of string arguments.
+ */
+ function spawnSync(command: string): SpawnSyncReturns<Buffer>;
+ function spawnSync(command: string, options: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns<string>;
+ function spawnSync(command: string, options: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns<Buffer>;
+ function spawnSync(command: string, options?: SpawnSyncOptions): SpawnSyncReturns<string | Buffer>;
+ function spawnSync(command: string, args: readonly string[]): SpawnSyncReturns<Buffer>;
+ function spawnSync(
+ command: string,
+ args: readonly string[],
+ options: SpawnSyncOptionsWithStringEncoding,
+ ): SpawnSyncReturns<string>;
+ function spawnSync(
+ command: string,
+ args: readonly string[],
+ options: SpawnSyncOptionsWithBufferEncoding,
+ ): SpawnSyncReturns<Buffer>;
+ function spawnSync(
+ command: string,
+ args?: readonly string[],
+ options?: SpawnSyncOptions,
+ ): SpawnSyncReturns<string | Buffer>;
+ interface CommonExecOptions extends CommonOptions {
+ input?: string | NodeJS.ArrayBufferView | undefined;
+ /**
+ * Can be set to 'pipe', 'inherit', or 'ignore', or an array of these strings.
+ * If passed as an array, the first element is used for `stdin`, the second for
+ * `stdout`, and the third for `stderr`. A fourth element can be used to
+ * specify the `stdio` behavior beyond the standard streams. See
+ * {@link ChildProcess.stdio} for more information.
+ *
+ * @default 'pipe'
+ */
+ stdio?: StdioOptions | undefined;
+ killSignal?: NodeJS.Signals | number | undefined;
+ maxBuffer?: number | undefined;
+ encoding?: BufferEncoding | "buffer" | null | undefined;
+ }
+ interface ExecSyncOptions extends CommonExecOptions {
+ shell?: string | undefined;
+ }
+ interface ExecSyncOptionsWithStringEncoding extends ExecSyncOptions {
+ encoding: BufferEncoding;
+ }
+ interface ExecSyncOptionsWithBufferEncoding extends ExecSyncOptions {
+ encoding?: "buffer" | null | undefined;
+ }
+ /**
+ * The `child_process.execSync()` method is generally identical to {@link exec} with the exception that the method will not return
+ * until the child process has fully closed. When a timeout has been encountered
+ * and `killSignal` is sent, the method won't return until the process has
+ * completely exited. If the child process intercepts and handles the `SIGTERM` signal and doesn't exit, the parent process will wait until the child process
+ * has exited.
+ *
+ * If the process times out or has a non-zero exit code, this method will throw.
+ * The `Error` object will contain the entire result from {@link spawnSync}.
+ *
+ * **Never pass unsanitized user input to this function. Any input containing shell**
+ * **metacharacters may be used to trigger arbitrary command execution.**
+ * @since v0.11.12
+ * @param command The command to run.
+ * @return The stdout from the command.
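Editor's illustration, not part of the vendored declarations: `execSync` returns the child's stdout directly and throws on a non-zero exit code, while `spawnSync` reports failures through the returned object. A minimal sketch, assuming `node` is on the `PATH`:

```js
import { execSync, spawnSync } from "node:child_process";

// execSync: with a string encoding the return type is `string`; a failure throws.
const version = execSync("node --version", { encoding: "utf8" }).trim();
console.log(version);

// spawnSync: a non-zero exit code does not throw; inspect the result object instead.
const result = spawnSync("node", ["--eval", "process.exit(2)"], { encoding: "utf8" });
console.log(result.status); // 2
console.log(result.stderr); // "" (typed as string because of the encoding option)
```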
+ */ + function execSync(command: string): Buffer; + function execSync(command: string, options: ExecSyncOptionsWithStringEncoding): string; + function execSync(command: string, options: ExecSyncOptionsWithBufferEncoding): Buffer; + function execSync(command: string, options?: ExecSyncOptions): string | Buffer; + interface ExecFileSyncOptions extends CommonExecOptions { + shell?: boolean | string | undefined; + } + interface ExecFileSyncOptionsWithStringEncoding extends ExecFileSyncOptions { + encoding: BufferEncoding; + } + interface ExecFileSyncOptionsWithBufferEncoding extends ExecFileSyncOptions { + encoding?: "buffer" | null; // specify `null`. + } + /** + * The `child_process.execFileSync()` method is generally identical to {@link execFile} with the exception that the method will not + * return until the child process has fully closed. When a timeout has been + * encountered and `killSignal` is sent, the method won't return until the process + * has completely exited. + * + * If the child process intercepts and handles the `SIGTERM` signal and + * does not exit, the parent process will still wait until the child process has + * exited. + * + * If the process times out or has a non-zero exit code, this method will throw an `Error` that will include the full result of the underlying {@link spawnSync}. + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * @since v0.11.12 + * @param file The name or path of the executable file to run. + * @param args List of string arguments. + * @return The stdout from the command. + */ + function execFileSync(file: string): Buffer; + function execFileSync(file: string, options: ExecFileSyncOptionsWithStringEncoding): string; + function execFileSync(file: string, options: ExecFileSyncOptionsWithBufferEncoding): Buffer; + function execFileSync(file: string, options?: ExecFileSyncOptions): string | Buffer; + function execFileSync(file: string, args: readonly string[]): Buffer; + function execFileSync( + file: string, + args: readonly string[], + options: ExecFileSyncOptionsWithStringEncoding, + ): string; + function execFileSync( + file: string, + args: readonly string[], + options: ExecFileSyncOptionsWithBufferEncoding, + ): Buffer; + function execFileSync(file: string, args?: readonly string[], options?: ExecFileSyncOptions): string | Buffer; +} +declare module "node:child_process" { + export * from "child_process"; +} diff --git a/dist/node_modules/@types/node/ts4.8/cluster.d.ts b/dist/node_modules/@types/node/ts4.8/cluster.d.ts new file mode 100644 index 0000000..39cd56a --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/cluster.d.ts @@ -0,0 +1,432 @@ +/** + * Clusters of Node.js processes can be used to run multiple instances of Node.js + * that can distribute workloads among their application threads. When process + * isolation is not needed, use the `worker_threads` module instead, which + * allows running multiple application threads within a single Node.js instance. + * + * The cluster module allows easy creation of child processes that all share + * server ports. 
+ * + * ```js + * import cluster from 'node:cluster'; + * import http from 'node:http'; + * import { availableParallelism } from 'node:os'; + * import process from 'node:process'; + * + * const numCPUs = availableParallelism(); + * + * if (cluster.isPrimary) { + * console.log(`Primary ${process.pid} is running`); + * + * // Fork workers. + * for (let i = 0; i < numCPUs; i++) { + * cluster.fork(); + * } + * + * cluster.on('exit', (worker, code, signal) => { + * console.log(`worker ${worker.process.pid} died`); + * }); + * } else { + * // Workers can share any TCP connection + * // In this case it is an HTTP server + * http.createServer((req, res) => { + * res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * + * console.log(`Worker ${process.pid} started`); + * } + * ``` + * + * Running Node.js will now share port 8000 between the workers: + * + * ```console + * $ node server.js + * Primary 3596 is running + * Worker 4324 started + * Worker 4520 started + * Worker 6056 started + * Worker 5644 started + * ``` + * + * On Windows, it is not yet possible to set up a named pipe server in a worker. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/cluster.js) + */ +declare module "cluster" { + import * as child from "node:child_process"; + import EventEmitter = require("node:events"); + import * as net from "node:net"; + type SerializationType = "json" | "advanced"; + export interface ClusterSettings { + execArgv?: string[] | undefined; // default: process.execArgv + exec?: string | undefined; + args?: string[] | undefined; + silent?: boolean | undefined; + stdio?: any[] | undefined; + uid?: number | undefined; + gid?: number | undefined; + inspectPort?: number | (() => number) | undefined; + serialization?: SerializationType | undefined; + cwd?: string | undefined; + windowsHide?: boolean | undefined; + } + export interface Address { + address: string; + port: number; + addressType: number | "udp4" | "udp6"; // 4, 6, -1, "udp4", "udp6" + } + /** + * A `Worker` object contains all public information and method about a worker. + * In the primary it can be obtained using `cluster.workers`. In a worker + * it can be obtained using `cluster.worker`. + * @since v0.7.0 + */ + export class Worker extends EventEmitter { + /** + * Each new worker is given its own unique id, this id is stored in the`id`. + * + * While a worker is alive, this is the key that indexes it in`cluster.workers`. + * @since v0.8.0 + */ + id: number; + /** + * All workers are created using `child_process.fork()`, the returned object + * from this function is stored as `.process`. In a worker, the global `process`is stored. + * + * See: `Child Process module`. + * + * Workers will call `process.exit(0)` if the `'disconnect'` event occurs + * on `process` and `.exitedAfterDisconnect` is not `true`. This protects against + * accidental disconnection. + * @since v0.7.0 + */ + process: child.ChildProcess; + /** + * Send a message to a worker or primary, optionally with a handle. + * + * In the primary, this sends a message to a specific worker. It is identical to `ChildProcess.send()`. + * + * In a worker, this sends a message to the primary. It is identical to`process.send()`. 
+ * + * This example will echo back all messages from the primary: + * + * ```js + * if (cluster.isPrimary) { + * const worker = cluster.fork(); + * worker.send('hi there'); + * + * } else if (cluster.isWorker) { + * process.on('message', (msg) => { + * process.send(msg); + * }); + * } + * ``` + * @since v0.7.0 + * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. `options` supports the following properties: + */ + send(message: child.Serializable, callback?: (error: Error | null) => void): boolean; + send( + message: child.Serializable, + sendHandle: child.SendHandle, + callback?: (error: Error | null) => void, + ): boolean; + send( + message: child.Serializable, + sendHandle: child.SendHandle, + options?: child.MessageOptions, + callback?: (error: Error | null) => void, + ): boolean; + /** + * This function will kill the worker. In the primary worker, it does this by + * disconnecting the `worker.process`, and once disconnected, killing with`signal`. In the worker, it does it by killing the process with `signal`. + * + * The `kill()` function kills the worker process without waiting for a graceful + * disconnect, it has the same behavior as `worker.process.kill()`. + * + * This method is aliased as `worker.destroy()` for backwards compatibility. + * + * In a worker, `process.kill()` exists, but it is not this function; + * it is `kill()`. + * @since v0.9.12 + * @param [signal='SIGTERM'] Name of the kill signal to send to the worker process. + */ + kill(signal?: string): void; + destroy(signal?: string): void; + /** + * In a worker, this function will close all servers, wait for the `'close'` event + * on those servers, and then disconnect the IPC channel. + * + * In the primary, an internal message is sent to the worker causing it to call`.disconnect()` on itself. + * + * Causes `.exitedAfterDisconnect` to be set. + * + * After a server is closed, it will no longer accept new connections, + * but connections may be accepted by any other listening worker. Existing + * connections will be allowed to close as usual. When no more connections exist, + * see `server.close()`, the IPC channel to the worker will close allowing it + * to die gracefully. + * + * The above applies _only_ to server connections, client connections are not + * automatically closed by workers, and disconnect does not wait for them to close + * before exiting. + * + * In a worker, `process.disconnect` exists, but it is not this function; + * it is `disconnect()`. + * + * Because long living server connections may block workers from disconnecting, it + * may be useful to send a message, so application specific actions may be taken to + * close them. It also may be useful to implement a timeout, killing a worker if + * the `'disconnect'` event has not been emitted after some time. 
+ * + * ```js + * if (cluster.isPrimary) { + * const worker = cluster.fork(); + * let timeout; + * + * worker.on('listening', (address) => { + * worker.send('shutdown'); + * worker.disconnect(); + * timeout = setTimeout(() => { + * worker.kill(); + * }, 2000); + * }); + * + * worker.on('disconnect', () => { + * clearTimeout(timeout); + * }); + * + * } else if (cluster.isWorker) { + * const net = require('node:net'); + * const server = net.createServer((socket) => { + * // Connections never end + * }); + * + * server.listen(8000); + * + * process.on('message', (msg) => { + * if (msg === 'shutdown') { + * // Initiate graceful close of any connections to server + * } + * }); + * } + * ``` + * @since v0.7.7 + * @return A reference to `worker`. + */ + disconnect(): void; + /** + * This function returns `true` if the worker is connected to its primary via its + * IPC channel, `false` otherwise. A worker is connected to its primary after it + * has been created. It is disconnected after the `'disconnect'` event is emitted. + * @since v0.11.14 + */ + isConnected(): boolean; + /** + * This function returns `true` if the worker's process has terminated (either + * because of exiting or being signaled). Otherwise, it returns `false`. + * + * ```js + * import cluster from 'node:cluster'; + * import http from 'node:http'; + * import { availableParallelism } from 'node:os'; + * import process from 'node:process'; + * + * const numCPUs = availableParallelism(); + * + * if (cluster.isPrimary) { + * console.log(`Primary ${process.pid} is running`); + * + * // Fork workers. + * for (let i = 0; i < numCPUs; i++) { + * cluster.fork(); + * } + * + * cluster.on('fork', (worker) => { + * console.log('worker is dead:', worker.isDead()); + * }); + * + * cluster.on('exit', (worker, code, signal) => { + * console.log('worker is dead:', worker.isDead()); + * }); + * } else { + * // Workers can share any TCP connection. In this case, it is an HTTP server. + * http.createServer((req, res) => { + * res.writeHead(200); + * res.end(`Current process\n ${process.pid}`); + * process.kill(process.pid); + * }).listen(8000); + * } + * ``` + * @since v0.11.14 + */ + isDead(): boolean; + /** + * This property is `true` if the worker exited due to `.disconnect()`. + * If the worker exited any other way, it is `false`. If the + * worker has not exited, it is `undefined`. + * + * The boolean `worker.exitedAfterDisconnect` allows distinguishing between + * voluntary and accidental exit, the primary may choose not to respawn a worker + * based on this value. + * + * ```js + * cluster.on('exit', (worker, code, signal) => { + * if (worker.exitedAfterDisconnect === true) { + * console.log('Oh, it was just voluntary – no need to worry'); + * } + * }); + * + * // kill worker + * worker.kill(); + * ``` + * @since v6.0.0 + */ + exitedAfterDisconnect: boolean; + /** + * events.EventEmitter + * 1. disconnect + * 2. error + * 3. exit + * 4. listening + * 5. message + * 6. online + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "disconnect", listener: () => void): this; + addListener(event: "error", listener: (error: Error) => void): this; + addListener(event: "exit", listener: (code: number, signal: string) => void): this; + addListener(event: "listening", listener: (address: Address) => void): this; + addListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. 
+ addListener(event: "online", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "disconnect"): boolean; + emit(event: "error", error: Error): boolean; + emit(event: "exit", code: number, signal: string): boolean; + emit(event: "listening", address: Address): boolean; + emit(event: "message", message: any, handle: net.Socket | net.Server): boolean; + emit(event: "online"): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "disconnect", listener: () => void): this; + on(event: "error", listener: (error: Error) => void): this; + on(event: "exit", listener: (code: number, signal: string) => void): this; + on(event: "listening", listener: (address: Address) => void): this; + on(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + on(event: "online", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "disconnect", listener: () => void): this; + once(event: "error", listener: (error: Error) => void): this; + once(event: "exit", listener: (code: number, signal: string) => void): this; + once(event: "listening", listener: (address: Address) => void): this; + once(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + once(event: "online", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "disconnect", listener: () => void): this; + prependListener(event: "error", listener: (error: Error) => void): this; + prependListener(event: "exit", listener: (code: number, signal: string) => void): this; + prependListener(event: "listening", listener: (address: Address) => void): this; + prependListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + prependListener(event: "online", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "disconnect", listener: () => void): this; + prependOnceListener(event: "error", listener: (error: Error) => void): this; + prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this; + prependOnceListener(event: "listening", listener: (address: Address) => void): this; + prependOnceListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + prependOnceListener(event: "online", listener: () => void): this; + } + export interface Cluster extends EventEmitter { + disconnect(callback?: () => void): void; + fork(env?: any): Worker; + /** @deprecated since v16.0.0 - use isPrimary. */ + readonly isMaster: boolean; + readonly isPrimary: boolean; + readonly isWorker: boolean; + schedulingPolicy: number; + readonly settings: ClusterSettings; + /** @deprecated since v16.0.0 - use setupPrimary. */ + setupMaster(settings?: ClusterSettings): void; + /** + * `setupPrimary` is used to change the default 'fork' behavior. Once called, the settings will be present in cluster.settings. 
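Editor's sketch, not part of the vendored declarations: `setupPrimary()` (documented just above) only affects later `cluster.fork()` calls and never changes workers that are already running; the `worker.js` entry point below is a hypothetical placeholder:

```js
import cluster from "node:cluster";

if (cluster.isPrimary) {
    // Settings apply to every fork() made after this call.
    cluster.setupPrimary({
        exec: "worker.js", // hypothetical worker script
        args: ["--use", "https"],
        silent: true,
    });
    cluster.fork();
}
```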
+ */ + setupPrimary(settings?: ClusterSettings): void; + readonly worker?: Worker | undefined; + readonly workers?: NodeJS.Dict | undefined; + readonly SCHED_NONE: number; + readonly SCHED_RR: number; + /** + * events.EventEmitter + * 1. disconnect + * 2. exit + * 3. fork + * 4. listening + * 5. message + * 6. online + * 7. setup + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "disconnect", listener: (worker: Worker) => void): this; + addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + addListener(event: "fork", listener: (worker: Worker) => void): this; + addListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; + addListener( + event: "message", + listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void, + ): this; // the handle is a net.Socket or net.Server object, or undefined. + addListener(event: "online", listener: (worker: Worker) => void): this; + addListener(event: "setup", listener: (settings: ClusterSettings) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "disconnect", worker: Worker): boolean; + emit(event: "exit", worker: Worker, code: number, signal: string): boolean; + emit(event: "fork", worker: Worker): boolean; + emit(event: "listening", worker: Worker, address: Address): boolean; + emit(event: "message", worker: Worker, message: any, handle: net.Socket | net.Server): boolean; + emit(event: "online", worker: Worker): boolean; + emit(event: "setup", settings: ClusterSettings): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "disconnect", listener: (worker: Worker) => void): this; + on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + on(event: "fork", listener: (worker: Worker) => void): this; + on(event: "listening", listener: (worker: Worker, address: Address) => void): this; + on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + on(event: "online", listener: (worker: Worker) => void): this; + on(event: "setup", listener: (settings: ClusterSettings) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "disconnect", listener: (worker: Worker) => void): this; + once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + once(event: "fork", listener: (worker: Worker) => void): this; + once(event: "listening", listener: (worker: Worker, address: Address) => void): this; + once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + once(event: "online", listener: (worker: Worker) => void): this; + once(event: "setup", listener: (settings: ClusterSettings) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "disconnect", listener: (worker: Worker) => void): this; + prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + prependListener(event: "fork", listener: (worker: Worker) => void): this; + prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; + // the handle is a net.Socket or net.Server object, or undefined. 
+ prependListener( + event: "message", + listener: (worker: Worker, message: any, handle?: net.Socket | net.Server) => void, + ): this; + prependListener(event: "online", listener: (worker: Worker) => void): this; + prependListener(event: "setup", listener: (settings: ClusterSettings) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): this; + prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; + prependOnceListener(event: "fork", listener: (worker: Worker) => void): this; + prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; + // the handle is a net.Socket or net.Server object, or undefined. + prependOnceListener( + event: "message", + listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void, + ): this; + prependOnceListener(event: "online", listener: (worker: Worker) => void): this; + prependOnceListener(event: "setup", listener: (settings: ClusterSettings) => void): this; + } + const cluster: Cluster; + export default cluster; +} +declare module "node:cluster" { + export * from "cluster"; + export { default as default } from "cluster"; +} diff --git a/dist/node_modules/@types/node/ts4.8/console.d.ts b/dist/node_modules/@types/node/ts4.8/console.d.ts new file mode 100644 index 0000000..bcc3450 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/console.d.ts @@ -0,0 +1,415 @@ +/** + * The `node:console` module provides a simple debugging console that is similar to + * the JavaScript console mechanism provided by web browsers. + * + * The module exports two specific components: + * + * * A `Console` class with methods such as `console.log()`, `console.error()`, and`console.warn()` that can be used to write to any Node.js stream. + * * A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling`require('node:console')`. + * + * _**Warning**_: The global console object's methods are neither consistently + * synchronous like the browser APIs they resemble, nor are they consistently + * asynchronous like all other Node.js streams. See the `note on process I/O` for + * more information. + * + * Example using the global `console`: + * + * ```js + * console.log('hello world'); + * // Prints: hello world, to stdout + * console.log('hello %s', 'world'); + * // Prints: hello world, to stdout + * console.error(new Error('Whoops, something bad happened')); + * // Prints error message and stack trace to stderr: + * // Error: Whoops, something bad happened + * // at [eval]:5:15 + * // at Script.runInThisContext (node:vm:132:18) + * // at Object.runInThisContext (node:vm:309:38) + * // at node:internal/process/execution:77:19 + * // at [eval]-wrapper:6:22 + * // at evalScript (node:internal/process/execution:76:60) + * // at node:internal/main/eval_string:23:3 + * + * const name = 'Will Robinson'; + * console.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! 
Danger!, to stderr + * ``` + * + * Example using the `Console` class: + * + * ```js + * const out = getStreamSomehow(); + * const err = getStreamSomehow(); + * const myConsole = new console.Console(out, err); + * + * myConsole.log('hello world'); + * // Prints: hello world, to out + * myConsole.log('hello %s', 'world'); + * // Prints: hello world, to out + * myConsole.error(new Error('Whoops, something bad happened')); + * // Prints: [Error: Whoops, something bad happened], to err + * + * const name = 'Will Robinson'; + * myConsole.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to err + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/console.js) + */ +declare module "console" { + import console = require("node:console"); + export = console; +} +declare module "node:console" { + import { InspectOptions } from "node:util"; + global { + // This needs to be global to avoid TS2403 in case lib.dom.d.ts is present in the same build + interface Console { + Console: console.ConsoleConstructor; + /** + * `console.assert()` writes a message if `value` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy) or omitted. It only + * writes a message and does not otherwise affect execution. The output always + * starts with `"Assertion failed"`. If provided, `message` is formatted using `util.format()`. + * + * If `value` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy), nothing happens. + * + * ```js + * console.assert(true, 'does nothing'); + * + * console.assert(false, 'Whoops %s work', 'didn\'t'); + * // Assertion failed: Whoops didn't work + * + * console.assert(); + * // Assertion failed + * ``` + * @since v0.1.101 + * @param value The value tested for being truthy. + * @param message All arguments besides `value` are used as error message. + */ + assert(value: any, message?: string, ...optionalParams: any[]): void; + /** + * When `stdout` is a TTY, calling `console.clear()` will attempt to clear the + * TTY. When `stdout` is not a TTY, this method does nothing. + * + * The specific operation of `console.clear()` can vary across operating systems + * and terminal types. For most Linux operating systems, `console.clear()`operates similarly to the `clear` shell command. On Windows, `console.clear()`will clear only the output in the + * current terminal viewport for the Node.js + * binary. + * @since v8.3.0 + */ + clear(): void; + /** + * Maintains an internal counter specific to `label` and outputs to `stdout` the + * number of times `console.count()` has been called with the given `label`. + * + * ```js + * > console.count() + * default: 1 + * undefined + * > console.count('default') + * default: 2 + * undefined + * > console.count('abc') + * abc: 1 + * undefined + * > console.count('xyz') + * xyz: 1 + * undefined + * > console.count('abc') + * abc: 2 + * undefined + * > console.count() + * default: 3 + * undefined + * > + * ``` + * @since v8.3.0 + * @param [label='default'] The display label for the counter. + */ + count(label?: string): void; + /** + * Resets the internal counter specific to `label`. + * + * ```js + * > console.count('abc'); + * abc: 1 + * undefined + * > console.countReset('abc'); + * undefined + * > console.count('abc'); + * abc: 1 + * undefined + * > + * ``` + * @since v8.3.0 + * @param [label='default'] The display label for the counter. + */ + countReset(label?: string): void; + /** + * The `console.debug()` function is an alias for {@link log}. 
+ * @since v8.0.0 + */ + debug(message?: any, ...optionalParams: any[]): void; + /** + * Uses `util.inspect()` on `obj` and prints the resulting string to `stdout`. + * This function bypasses any custom `inspect()` function defined on `obj`. + * @since v0.1.101 + */ + dir(obj: any, options?: InspectOptions): void; + /** + * This method calls `console.log()` passing it the arguments received. + * This method does not produce any XML formatting. + * @since v8.0.0 + */ + dirxml(...data: any[]): void; + /** + * Prints to `stderr` with newline. Multiple arguments can be passed, with the + * first used as the primary message and all additional used as substitution + * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) (the arguments are all passed to `util.format()`). + * + * ```js + * const code = 5; + * console.error('error #%d', code); + * // Prints: error #5, to stderr + * console.error('error', code); + * // Prints: error 5, to stderr + * ``` + * + * If formatting elements (e.g. `%d`) are not found in the first string then `util.inspect()` is called on each argument and the resulting string + * values are concatenated. See `util.format()` for more information. + * @since v0.1.100 + */ + error(message?: any, ...optionalParams: any[]): void; + /** + * Increases indentation of subsequent lines by spaces for `groupIndentation`length. + * + * If one or more `label`s are provided, those are printed first without the + * additional indentation. + * @since v8.5.0 + */ + group(...label: any[]): void; + /** + * An alias for {@link group}. + * @since v8.5.0 + */ + groupCollapsed(...label: any[]): void; + /** + * Decreases indentation of subsequent lines by spaces for `groupIndentation`length. + * @since v8.5.0 + */ + groupEnd(): void; + /** + * The `console.info()` function is an alias for {@link log}. + * @since v0.1.100 + */ + info(message?: any, ...optionalParams: any[]): void; + /** + * Prints to `stdout` with newline. Multiple arguments can be passed, with the + * first used as the primary message and all additional used as substitution + * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) (the arguments are all passed to `util.format()`). + * + * ```js + * const count = 5; + * console.log('count: %d', count); + * // Prints: count: 5, to stdout + * console.log('count:', count); + * // Prints: count: 5, to stdout + * ``` + * + * See `util.format()` for more information. + * @since v0.1.100 + */ + log(message?: any, ...optionalParams: any[]): void; + /** + * Try to construct a table with the columns of the properties of `tabularData`(or use `properties`) and rows of `tabularData` and log it. Falls back to just + * logging the argument if it can't be parsed as tabular. + * + * ```js + * // These can't be parsed as tabular data + * console.table(Symbol()); + * // Symbol() + * + * console.table(undefined); + * // undefined + * + * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]); + * // ┌─────────┬─────┬─────┐ + * // │ (index) │ a │ b │ + * // ├─────────┼─────┼─────┤ + * // │ 0 │ 1 │ 'Y' │ + * // │ 1 │ 'Z' │ 2 │ + * // └─────────┴─────┴─────┘ + * + * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']); + * // ┌─────────┬─────┐ + * // │ (index) │ a │ + * // ├─────────┼─────┤ + * // │ 0 │ 1 │ + * // │ 1 │ 'Z' │ + * // └─────────┴─────┘ + * ``` + * @since v10.0.0 + * @param properties Alternate properties for constructing the table. 
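Editor's illustration, not part of the vendored declarations: `group()` and `groupEnd()` (declared above) change only the indentation of subsequent output, by `groupIndentation` spaces (2 by default). A minimal sketch:

```js
console.log("start");
console.group("details"); // prints the label, then indents what follows
console.log("inside the group"); // rendered with two leading spaces
console.groupEnd(); // indentation returns to the previous level
console.log("done");
```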
+ */ + table(tabularData: any, properties?: readonly string[]): void; + /** + * Starts a timer that can be used to compute the duration of an operation. Timers + * are identified by a unique `label`. Use the same `label` when calling {@link timeEnd} to stop the timer and output the elapsed time in + * suitable time units to `stdout`. For example, if the elapsed + * time is 3869ms, `console.timeEnd()` displays "3.869s". + * @since v0.1.104 + * @param [label='default'] + */ + time(label?: string): void; + /** + * Stops a timer that was previously started by calling {@link time} and + * prints the result to `stdout`: + * + * ```js + * console.time('bunch-of-stuff'); + * // Do a bunch of stuff. + * console.timeEnd('bunch-of-stuff'); + * // Prints: bunch-of-stuff: 225.438ms + * ``` + * @since v0.1.104 + * @param [label='default'] + */ + timeEnd(label?: string): void; + /** + * For a timer that was previously started by calling {@link time}, prints + * the elapsed time and other `data` arguments to `stdout`: + * + * ```js + * console.time('process'); + * const value = expensiveProcess1(); // Returns 42 + * console.timeLog('process', value); + * // Prints "process: 365.227ms 42". + * doExpensiveProcess2(value); + * console.timeEnd('process'); + * ``` + * @since v10.7.0 + * @param [label='default'] + */ + timeLog(label?: string, ...data: any[]): void; + /** + * Prints to `stderr` the string `'Trace: '`, followed by the `util.format()` formatted message and stack trace to the current position in the code. + * + * ```js + * console.trace('Show me'); + * // Prints: (stack trace will vary based on where trace is called) + * // Trace: Show me + * // at repl:2:9 + * // at REPLServer.defaultEval (repl.js:248:27) + * // at bound (domain.js:287:14) + * // at REPLServer.runBound [as eval] (domain.js:300:12) + * // at REPLServer. (repl.js:412:12) + * // at emitOne (events.js:82:20) + * // at REPLServer.emit (events.js:169:7) + * // at REPLServer.Interface._onLine (readline.js:210:10) + * // at REPLServer.Interface._line (readline.js:549:8) + * // at REPLServer.Interface._ttyWrite (readline.js:826:14) + * ``` + * @since v0.1.104 + */ + trace(message?: any, ...optionalParams: any[]): void; + /** + * The `console.warn()` function is an alias for {@link error}. + * @since v0.1.100 + */ + warn(message?: any, ...optionalParams: any[]): void; + // --- Inspector mode only --- + /** + * This method does not display anything unless used in the inspector. + * Starts a JavaScript CPU profile with an optional label. + */ + profile(label?: string): void; + /** + * This method does not display anything unless used in the inspector. + * Stops the current JavaScript CPU profiling session if one has been started and prints the report to the Profiles panel of the inspector. + */ + profileEnd(label?: string): void; + /** + * This method does not display anything unless used in the inspector. + * Adds an event with the label `label` to the Timeline panel of the inspector. + */ + timeStamp(label?: string): void; + } + /** + * The `console` module provides a simple debugging console that is similar to the + * JavaScript console mechanism provided by web browsers. + * + * The module exports two specific components: + * + * * A `Console` class with methods such as `console.log()`, `console.error()` and`console.warn()` that can be used to write to any Node.js stream. + * * A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling`require('console')`. 
+ * + * _**Warning**_: The global console object's methods are neither consistently + * synchronous like the browser APIs they resemble, nor are they consistently + * asynchronous like all other Node.js streams. See the `note on process I/O` for + * more information. + * + * Example using the global `console`: + * + * ```js + * console.log('hello world'); + * // Prints: hello world, to stdout + * console.log('hello %s', 'world'); + * // Prints: hello world, to stdout + * console.error(new Error('Whoops, something bad happened')); + * // Prints error message and stack trace to stderr: + * // Error: Whoops, something bad happened + * // at [eval]:5:15 + * // at Script.runInThisContext (node:vm:132:18) + * // at Object.runInThisContext (node:vm:309:38) + * // at node:internal/process/execution:77:19 + * // at [eval]-wrapper:6:22 + * // at evalScript (node:internal/process/execution:76:60) + * // at node:internal/main/eval_string:23:3 + * + * const name = 'Will Robinson'; + * console.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to stderr + * ``` + * + * Example using the `Console` class: + * + * ```js + * const out = getStreamSomehow(); + * const err = getStreamSomehow(); + * const myConsole = new console.Console(out, err); + * + * myConsole.log('hello world'); + * // Prints: hello world, to out + * myConsole.log('hello %s', 'world'); + * // Prints: hello world, to out + * myConsole.error(new Error('Whoops, something bad happened')); + * // Prints: [Error: Whoops, something bad happened], to err + * + * const name = 'Will Robinson'; + * myConsole.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to err + * ``` + * @see [source](https://github.com/nodejs/node/blob/v16.4.2/lib/console.js) + */ + namespace console { + interface ConsoleConstructorOptions { + stdout: NodeJS.WritableStream; + stderr?: NodeJS.WritableStream | undefined; + ignoreErrors?: boolean | undefined; + colorMode?: boolean | "auto" | undefined; + inspectOptions?: InspectOptions | undefined; + /** + * Set group indentation + * @default 2 + */ + groupIndentation?: number | undefined; + } + interface ConsoleConstructor { + prototype: Console; + new(stdout: NodeJS.WritableStream, stderr?: NodeJS.WritableStream, ignoreErrors?: boolean): Console; + new(options: ConsoleConstructorOptions): Console; + } + } + var console: Console; + } + export = globalThis.console; +} diff --git a/dist/node_modules/@types/node/ts4.8/constants.d.ts b/dist/node_modules/@types/node/ts4.8/constants.d.ts new file mode 100644 index 0000000..c3ac2b8 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/constants.d.ts @@ -0,0 +1,19 @@ +/** @deprecated since v6.3.0 - use constants property exposed by the relevant module instead. 
*/ +declare module "constants" { + import { constants as osConstants, SignalConstants } from "node:os"; + import { constants as cryptoConstants } from "node:crypto"; + import { constants as fsConstants } from "node:fs"; + + const exp: + & typeof osConstants.errno + & typeof osConstants.priority + & SignalConstants + & typeof cryptoConstants + & typeof fsConstants; + export = exp; +} + +declare module "node:constants" { + import constants = require("constants"); + export = constants; +} diff --git a/dist/node_modules/@types/node/ts4.8/crypto.d.ts b/dist/node_modules/@types/node/ts4.8/crypto.d.ts new file mode 100644 index 0000000..718697b --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/crypto.d.ts @@ -0,0 +1,4487 @@ +/** + * The `node:crypto` module provides cryptographic functionality that includes a + * set of wrappers for OpenSSL's hash, HMAC, cipher, decipher, sign, and verify + * functions. + * + * ```js + * const { createHmac } = await import('node:crypto'); + * + * const secret = 'abcdefg'; + * const hash = createHmac('sha256', secret) + * .update('I love cupcakes') + * .digest('hex'); + * console.log(hash); + * // Prints: + * // c0fa1bc00531bd78ef38c628449c5102aeabd49b5dc3a2a516ea6ea959d6658e + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/crypto.js) + */ +declare module "crypto" { + import * as stream from "node:stream"; + import { PeerCertificate } from "node:tls"; + /** + * SPKAC is a Certificate Signing Request mechanism originally implemented by + * Netscape and was specified formally as part of HTML5's `keygen` element. + * + * `` is deprecated since [HTML 5.2](https://www.w3.org/TR/html52/changes.html#features-removed) and new projects + * should not use this element anymore. + * + * The `node:crypto` module provides the `Certificate` class for working with SPKAC + * data. The most common usage is handling output generated by the HTML5`` element. Node.js uses [OpenSSL's SPKAC + * implementation](https://www.openssl.org/docs/man3.0/man1/openssl-spkac.html) internally. + * @since v0.11.8 + */ + class Certificate { + /** + * ```js + * const { Certificate } = await import('node:crypto'); + * const spkac = getSpkacSomehow(); + * const challenge = Certificate.exportChallenge(spkac); + * console.log(challenge.toString('utf8')); + * // Prints: the challenge as a UTF8 string + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return The challenge component of the `spkac` data structure, which includes a public key and a challenge. + */ + static exportChallenge(spkac: BinaryLike): Buffer; + /** + * ```js + * const { Certificate } = await import('node:crypto'); + * const spkac = getSpkacSomehow(); + * const publicKey = Certificate.exportPublicKey(spkac); + * console.log(publicKey); + * // Prints: the public key as + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return The public key component of the `spkac` data structure, which includes a public key and a challenge. + */ + static exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer; + /** + * ```js + * import { Buffer } from 'node:buffer'; + * const { Certificate } = await import('node:crypto'); + * + * const spkac = getSpkacSomehow(); + * console.log(Certificate.verifySpkac(Buffer.from(spkac))); + * // Prints: true or false + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return `true` if the given `spkac` data structure is valid, `false` otherwise. 
+ */ + static verifySpkac(spkac: NodeJS.ArrayBufferView): boolean; + /** + * @deprecated + * @param spkac + * @returns The challenge component of the `spkac` data structure, + * which includes a public key and a challenge. + */ + exportChallenge(spkac: BinaryLike): Buffer; + /** + * @deprecated + * @param spkac + * @param encoding The encoding of the spkac string. + * @returns The public key component of the `spkac` data structure, + * which includes a public key and a challenge. + */ + exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer; + /** + * @deprecated + * @param spkac + * @returns `true` if the given `spkac` data structure is valid, + * `false` otherwise. + */ + verifySpkac(spkac: NodeJS.ArrayBufferView): boolean; + } + namespace constants { + // https://nodejs.org/dist/latest-v20.x/docs/api/crypto.html#crypto-constants + const OPENSSL_VERSION_NUMBER: number; + /** Applies multiple bug workarounds within OpenSSL. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html for detail. */ + const SSL_OP_ALL: number; + /** Instructs OpenSSL to allow a non-[EC]DHE-based key exchange mode for TLS v1.3 */ + const SSL_OP_ALLOW_NO_DHE_KEX: number; + /** Allows legacy insecure renegotiation between OpenSSL and unpatched clients or servers. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */ + const SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: number; + /** Attempts to use the server's preferences instead of the client's when selecting a cipher. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */ + const SSL_OP_CIPHER_SERVER_PREFERENCE: number; + /** Instructs OpenSSL to use Cisco's "speshul" version of DTLS_BAD_VER. */ + const SSL_OP_CISCO_ANYCONNECT: number; + /** Instructs OpenSSL to turn on cookie exchange. */ + const SSL_OP_COOKIE_EXCHANGE: number; + /** Instructs OpenSSL to add server-hello extension from an early version of the cryptopro draft. */ + const SSL_OP_CRYPTOPRO_TLSEXT_BUG: number; + /** Instructs OpenSSL to disable a SSL 3.0/TLS 1.0 vulnerability workaround added in OpenSSL 0.9.6d. */ + const SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: number; + /** Allows initial connection to servers that do not support RI. */ + const SSL_OP_LEGACY_SERVER_CONNECT: number; + /** Instructs OpenSSL to disable support for SSL/TLS compression. */ + const SSL_OP_NO_COMPRESSION: number; + /** Instructs OpenSSL to disable encrypt-then-MAC. */ + const SSL_OP_NO_ENCRYPT_THEN_MAC: number; + const SSL_OP_NO_QUERY_MTU: number; + /** Instructs OpenSSL to disable renegotiation. */ + const SSL_OP_NO_RENEGOTIATION: number; + /** Instructs OpenSSL to always start a new session when performing renegotiation. */ + const SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: number; + /** Instructs OpenSSL to turn off SSL v2 */ + const SSL_OP_NO_SSLv2: number; + /** Instructs OpenSSL to turn off SSL v3 */ + const SSL_OP_NO_SSLv3: number; + /** Instructs OpenSSL to disable use of RFC4507bis tickets. */ + const SSL_OP_NO_TICKET: number; + /** Instructs OpenSSL to turn off TLS v1 */ + const SSL_OP_NO_TLSv1: number; + /** Instructs OpenSSL to turn off TLS v1.1 */ + const SSL_OP_NO_TLSv1_1: number; + /** Instructs OpenSSL to turn off TLS v1.2 */ + const SSL_OP_NO_TLSv1_2: number; + /** Instructs OpenSSL to turn off TLS v1.3 */ + const SSL_OP_NO_TLSv1_3: number; + /** Instructs OpenSSL server to prioritize ChaCha20-Poly1305 when the client does. This option has no effect if `SSL_OP_CIPHER_SERVER_PREFERENCE` is not enabled. 
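Editor's aside, not part of the vendored declarations: the `SSL_OP_*` values in this constants block are bit flags intended to be OR-ed together and handed to TLS-based APIs through `secureOptions`. A minimal sketch, with hypothetical certificate paths:

```js
import { constants } from "node:crypto";
import { readFileSync } from "node:fs";
import { createServer } from "node:https";

// Combine flags with bitwise OR; this example disables TLS 1.0 and TLS 1.1.
const secureOptions = constants.SSL_OP_NO_TLSv1 | constants.SSL_OP_NO_TLSv1_1;

const server = createServer({
    key: readFileSync("server-key.pem"), // hypothetical path
    cert: readFileSync("server-cert.pem"), // hypothetical path
    secureOptions,
});
server.listen(8443);
```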
*/ + const SSL_OP_PRIORITIZE_CHACHA: number; + /** Instructs OpenSSL to disable version rollback attack detection. */ + const SSL_OP_TLS_ROLLBACK_BUG: number; + const ENGINE_METHOD_RSA: number; + const ENGINE_METHOD_DSA: number; + const ENGINE_METHOD_DH: number; + const ENGINE_METHOD_RAND: number; + const ENGINE_METHOD_EC: number; + const ENGINE_METHOD_CIPHERS: number; + const ENGINE_METHOD_DIGESTS: number; + const ENGINE_METHOD_PKEY_METHS: number; + const ENGINE_METHOD_PKEY_ASN1_METHS: number; + const ENGINE_METHOD_ALL: number; + const ENGINE_METHOD_NONE: number; + const DH_CHECK_P_NOT_SAFE_PRIME: number; + const DH_CHECK_P_NOT_PRIME: number; + const DH_UNABLE_TO_CHECK_GENERATOR: number; + const DH_NOT_SUITABLE_GENERATOR: number; + const RSA_PKCS1_PADDING: number; + const RSA_SSLV23_PADDING: number; + const RSA_NO_PADDING: number; + const RSA_PKCS1_OAEP_PADDING: number; + const RSA_X931_PADDING: number; + const RSA_PKCS1_PSS_PADDING: number; + /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the digest size when signing or verifying. */ + const RSA_PSS_SALTLEN_DIGEST: number; + /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the maximum permissible value when signing data. */ + const RSA_PSS_SALTLEN_MAX_SIGN: number; + /** Causes the salt length for RSA_PKCS1_PSS_PADDING to be determined automatically when verifying a signature. */ + const RSA_PSS_SALTLEN_AUTO: number; + const POINT_CONVERSION_COMPRESSED: number; + const POINT_CONVERSION_UNCOMPRESSED: number; + const POINT_CONVERSION_HYBRID: number; + /** Specifies the built-in default cipher list used by Node.js (colon-separated values). */ + const defaultCoreCipherList: string; + /** Specifies the active default cipher list used by the current Node.js process (colon-separated values). */ + const defaultCipherList: string; + } + interface HashOptions extends stream.TransformOptions { + /** + * For XOF hash functions such as `shake256`, the + * outputLength option can be used to specify the desired output length in bytes. + */ + outputLength?: number | undefined; + } + /** @deprecated since v10.0.0 */ + const fips: boolean; + /** + * Creates and returns a `Hash` object that can be used to generate hash digests + * using the given `algorithm`. Optional `options` argument controls stream + * behavior. For XOF hash functions such as `'shake256'`, the `outputLength` option + * can be used to specify the desired output length in bytes. + * + * The `algorithm` is dependent on the available algorithms supported by the + * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc. + * On recent releases of OpenSSL, `openssl list -digest-algorithms` will + * display the available digest algorithms. + * + * Example: generating the sha256 sum of a file + * + * ```js + * import { + * createReadStream, + * } from 'node:fs'; + * import { argv } from 'node:process'; + * const { + * createHash, + * } = await import('node:crypto'); + * + * const filename = argv[2]; + * + * const hash = createHash('sha256'); + * + * const input = createReadStream(filename); + * input.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. 
+ * const data = input.read(); + * if (data) + * hash.update(data); + * else { + * console.log(`${hash.digest('hex')} ${filename}`); + * } + * }); + * ``` + * @since v0.1.92 + * @param options `stream.transform` options + */ + function createHash(algorithm: string, options?: HashOptions): Hash; + /** + * Creates and returns an `Hmac` object that uses the given `algorithm` and `key`. + * Optional `options` argument controls stream behavior. + * + * The `algorithm` is dependent on the available algorithms supported by the + * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc. + * On recent releases of OpenSSL, `openssl list -digest-algorithms` will + * display the available digest algorithms. + * + * The `key` is the HMAC key used to generate the cryptographic HMAC hash. If it is + * a `KeyObject`, its type must be `secret`. If it is a string, please consider `caveats when using strings as inputs to cryptographic APIs`. If it was + * obtained from a cryptographically secure source of entropy, such as {@link randomBytes} or {@link generateKey}, its length should not + * exceed the block size of `algorithm` (e.g., 512 bits for SHA-256). + * + * Example: generating the sha256 HMAC of a file + * + * ```js + * import { + * createReadStream, + * } from 'node:fs'; + * import { argv } from 'node:process'; + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const filename = argv[2]; + * + * const hmac = createHmac('sha256', 'a secret'); + * + * const input = createReadStream(filename); + * input.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = input.read(); + * if (data) + * hmac.update(data); + * else { + * console.log(`${hmac.digest('hex')} ${filename}`); + * } + * }); + * ``` + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createHmac(algorithm: string, key: BinaryLike | KeyObject, options?: stream.TransformOptions): Hmac; + // https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings + type BinaryToTextEncoding = "base64" | "base64url" | "hex" | "binary"; + type CharacterEncoding = "utf8" | "utf-8" | "utf16le" | "utf-16le" | "latin1"; + type LegacyCharacterEncoding = "ascii" | "binary" | "ucs2" | "ucs-2"; + type Encoding = BinaryToTextEncoding | CharacterEncoding | LegacyCharacterEncoding; + type ECDHKeyFormat = "compressed" | "uncompressed" | "hybrid"; + /** + * The `Hash` class is a utility for creating hash digests of data. It can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where data is written + * to produce a computed hash digest on the readable side, or + * * Using the `hash.update()` and `hash.digest()` methods to produce the + * computed hash. + * + * The {@link createHash} method is used to create `Hash` instances. `Hash`objects are not to be created directly using the `new` keyword. + * + * Example: Using `Hash` objects as streams: + * + * ```js + * const { + * createHash, + * } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * hash.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. 
+ * const data = hash.read(); + * if (data) { + * console.log(data.toString('hex')); + * // Prints: + * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50 + * } + * }); + * + * hash.write('some data to hash'); + * hash.end(); + * ``` + * + * Example: Using `Hash` and piped streams: + * + * ```js + * import { createReadStream } from 'node:fs'; + * import { stdout } from 'node:process'; + * const { createHash } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * const input = createReadStream('test.js'); + * input.pipe(hash).setEncoding('hex').pipe(stdout); + * ``` + * + * Example: Using the `hash.update()` and `hash.digest()` methods: + * + * ```js + * const { + * createHash, + * } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * hash.update('some data to hash'); + * console.log(hash.digest('hex')); + * // Prints: + * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50 + * ``` + * @since v0.1.92 + */ + class Hash extends stream.Transform { + private constructor(); + /** + * Creates a new `Hash` object that contains a deep copy of the internal state + * of the current `Hash` object. + * + * The optional `options` argument controls stream behavior. For XOF hash + * functions such as `'shake256'`, the `outputLength` option can be used to + * specify the desired output length in bytes. + * + * An error is thrown when an attempt is made to copy the `Hash` object after + * its `hash.digest()` method has been called. + * + * ```js + * // Calculate a rolling hash. + * const { + * createHash, + * } = await import('node:crypto'); + * + * const hash = createHash('sha256'); + * + * hash.update('one'); + * console.log(hash.copy().digest('hex')); + * + * hash.update('two'); + * console.log(hash.copy().digest('hex')); + * + * hash.update('three'); + * console.log(hash.copy().digest('hex')); + * + * // Etc. + * ``` + * @since v13.1.0 + * @param options `stream.transform` options + */ + copy(options?: HashOptions): Hash; + /** + * Updates the hash content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Hash; + update(data: string, inputEncoding: Encoding): Hash; + /** + * Calculates the digest of all of the data passed to be hashed (using the `hash.update()` method). + * If `encoding` is provided a string will be returned; otherwise + * a `Buffer` is returned. + * + * The `Hash` object can not be used again after `hash.digest()` method has been + * called. Multiple calls will cause an error to be thrown. + * @since v0.1.92 + * @param encoding The `encoding` of the return value. + */ + digest(): Buffer; + digest(encoding: BinaryToTextEncoding): string; + } + /** + * The `Hmac` class is a utility for creating cryptographic HMAC digests. It can + * be used in one of two ways: + * + * * As a `stream` that is both readable and writable, where data is written + * to produce a computed HMAC digest on the readable side, or + * * Using the `hmac.update()` and `hmac.digest()` methods to produce the + * computed HMAC digest. + * + * The {@link createHmac} method is used to create `Hmac` instances. 
`Hmac`objects are not to be created directly using the `new` keyword. + * + * Example: Using `Hmac` objects as streams: + * + * ```js + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * hmac.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = hmac.read(); + * if (data) { + * console.log(data.toString('hex')); + * // Prints: + * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e + * } + * }); + * + * hmac.write('some data to hash'); + * hmac.end(); + * ``` + * + * Example: Using `Hmac` and piped streams: + * + * ```js + * import { createReadStream } from 'node:fs'; + * import { stdout } from 'node:process'; + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * const input = createReadStream('test.js'); + * input.pipe(hmac).pipe(stdout); + * ``` + * + * Example: Using the `hmac.update()` and `hmac.digest()` methods: + * + * ```js + * const { + * createHmac, + * } = await import('node:crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * hmac.update('some data to hash'); + * console.log(hmac.digest('hex')); + * // Prints: + * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e + * ``` + * @since v0.1.94 + */ + class Hmac extends stream.Transform { + private constructor(); + /** + * Updates the `Hmac` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Hmac; + update(data: string, inputEncoding: Encoding): Hmac; + /** + * Calculates the HMAC digest of all of the data passed using `hmac.update()`. + * If `encoding` is + * provided a string is returned; otherwise a `Buffer` is returned; + * + * The `Hmac` object can not be used again after `hmac.digest()` has been + * called. Multiple calls to `hmac.digest()` will result in an error being thrown. + * @since v0.1.94 + * @param encoding The `encoding` of the return value. + */ + digest(): Buffer; + digest(encoding: BinaryToTextEncoding): string; + } + type KeyObjectType = "secret" | "public" | "private"; + interface KeyExportOptions { + type: "pkcs1" | "spki" | "pkcs8" | "sec1"; + format: T; + cipher?: string | undefined; + passphrase?: string | Buffer | undefined; + } + interface JwkKeyExportOptions { + format: "jwk"; + } + interface JsonWebKey { + crv?: string | undefined; + d?: string | undefined; + dp?: string | undefined; + dq?: string | undefined; + e?: string | undefined; + k?: string | undefined; + kty?: string | undefined; + n?: string | undefined; + p?: string | undefined; + q?: string | undefined; + qi?: string | undefined; + x?: string | undefined; + y?: string | undefined; + [key: string]: unknown; + } + interface AsymmetricKeyDetails { + /** + * Key size in bits (RSA, DSA). + */ + modulusLength?: number | undefined; + /** + * Public exponent (RSA). + */ + publicExponent?: bigint | undefined; + /** + * Name of the message digest (RSA-PSS). + */ + hashAlgorithm?: string | undefined; + /** + * Name of the message digest used by MGF1 (RSA-PSS). 
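+     *
+     * A small sketch, assuming an RSA key pair generated on the fly, of where these
+     * details surface via `keyObject.asymmetricKeyDetails`:
+     *
+     * ```js
+     * const { generateKeyPairSync } = await import('node:crypto');
+     *
+     * const { publicKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });
+     * console.log(publicKey.asymmetricKeyDetails);
+     * // e.g. { modulusLength: 2048, publicExponent: 65537n }
+     * ```
+     *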
+ */ + mgf1HashAlgorithm?: string | undefined; + /** + * Minimal salt length in bytes (RSA-PSS). + */ + saltLength?: number | undefined; + /** + * Size of q in bits (DSA). + */ + divisorLength?: number | undefined; + /** + * Name of the curve (EC). + */ + namedCurve?: string | undefined; + } + /** + * Node.js uses a `KeyObject` class to represent a symmetric or asymmetric key, + * and each kind of key exposes different functions. The {@link createSecretKey}, {@link createPublicKey} and {@link createPrivateKey} methods are used to create `KeyObject`instances. `KeyObject` + * objects are not to be created directly using the `new`keyword. + * + * Most applications should consider using the new `KeyObject` API instead of + * passing keys as strings or `Buffer`s due to improved security features. + * + * `KeyObject` instances can be passed to other threads via `postMessage()`. + * The receiver obtains a cloned `KeyObject`, and the `KeyObject` does not need to + * be listed in the `transferList` argument. + * @since v11.6.0 + */ + class KeyObject { + private constructor(); + /** + * Example: Converting a `CryptoKey` instance to a `KeyObject`: + * + * ```js + * const { KeyObject } = await import('node:crypto'); + * const { subtle } = globalThis.crypto; + * + * const key = await subtle.generateKey({ + * name: 'HMAC', + * hash: 'SHA-256', + * length: 256, + * }, true, ['sign', 'verify']); + * + * const keyObject = KeyObject.from(key); + * console.log(keyObject.symmetricKeySize); + * // Prints: 32 (symmetric key size in bytes) + * ``` + * @since v15.0.0 + */ + static from(key: webcrypto.CryptoKey): KeyObject; + /** + * For asymmetric keys, this property represents the type of the key. Supported key + * types are: + * + * * `'rsa'` (OID 1.2.840.113549.1.1.1) + * * `'rsa-pss'` (OID 1.2.840.113549.1.1.10) + * * `'dsa'` (OID 1.2.840.10040.4.1) + * * `'ec'` (OID 1.2.840.10045.2.1) + * * `'x25519'` (OID 1.3.101.110) + * * `'x448'` (OID 1.3.101.111) + * * `'ed25519'` (OID 1.3.101.112) + * * `'ed448'` (OID 1.3.101.113) + * * `'dh'` (OID 1.2.840.113549.1.3.1) + * + * This property is `undefined` for unrecognized `KeyObject` types and symmetric + * keys. + * @since v11.6.0 + */ + asymmetricKeyType?: KeyType | undefined; + /** + * For asymmetric keys, this property represents the size of the embedded key in + * bytes. This property is `undefined` for symmetric keys. + */ + asymmetricKeySize?: number | undefined; + /** + * This property exists only on asymmetric keys. Depending on the type of the key, + * this object contains information about the key. None of the information obtained + * through this property can be used to uniquely identify a key or to compromise + * the security of the key. + * + * For RSA-PSS keys, if the key material contains a `RSASSA-PSS-params` sequence, + * the `hashAlgorithm`, `mgf1HashAlgorithm`, and `saltLength` properties will be + * set. + * + * Other key details might be exposed via this API using additional attributes. + * @since v15.7.0 + */ + asymmetricKeyDetails?: AsymmetricKeyDetails | undefined; + /** + * For symmetric keys, the following encoding options can be used: + * + * For public keys, the following encoding options can be used: + * + * For private keys, the following encoding options can be used: + * + * The result type depends on the selected encoding format, when PEM the + * result is a string, when DER it will be a buffer containing the data + * encoded as DER, when [JWK](https://tools.ietf.org/html/rfc7517) it will be an object. 
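+     *
+     * A minimal sketch, assuming a freshly generated EC key pair, of exporting keys
+     * in each of the three result types:
+     *
+     * ```js
+     * const { generateKeyPairSync } = await import('node:crypto');
+     *
+     * const { publicKey, privateKey } = generateKeyPairSync('ec', { namedCurve: 'prime256v1' });
+     *
+     * const spkiPem = publicKey.export({ type: 'spki', format: 'pem' });    // string
+     * const pkcs8Der = privateKey.export({ type: 'pkcs8', format: 'der' }); // Buffer
+     * const jwk = publicKey.export({ format: 'jwk' });                      // object
+     * console.log(typeof spkiPem, pkcs8Der.length, jwk.kty);
+     * ```
+     *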
+ * + * When [JWK](https://tools.ietf.org/html/rfc7517) encoding format was selected, all other encoding options are + * ignored. + * + * PKCS#1, SEC1, and PKCS#8 type keys can be encrypted by using a combination of + * the `cipher` and `format` options. The PKCS#8 `type` can be used with any`format` to encrypt any key algorithm (RSA, EC, or DH) by specifying a`cipher`. PKCS#1 and SEC1 can only be + * encrypted by specifying a `cipher`when the PEM `format` is used. For maximum compatibility, use PKCS#8 for + * encrypted private keys. Since PKCS#8 defines its own + * encryption mechanism, PEM-level encryption is not supported when encrypting + * a PKCS#8 key. See [RFC 5208](https://www.rfc-editor.org/rfc/rfc5208.txt) for PKCS#8 encryption and [RFC 1421](https://www.rfc-editor.org/rfc/rfc1421.txt) for + * PKCS#1 and SEC1 encryption. + * @since v11.6.0 + */ + export(options: KeyExportOptions<"pem">): string | Buffer; + export(options?: KeyExportOptions<"der">): Buffer; + export(options?: JwkKeyExportOptions): JsonWebKey; + /** + * Returns `true` or `false` depending on whether the keys have exactly the same + * type, value, and parameters. This method is not [constant time](https://en.wikipedia.org/wiki/Timing_attack). + * @since v17.7.0, v16.15.0 + * @param otherKeyObject A `KeyObject` with which to compare `keyObject`. + */ + equals(otherKeyObject: KeyObject): boolean; + /** + * For secret keys, this property represents the size of the key in bytes. This + * property is `undefined` for asymmetric keys. + * @since v11.6.0 + */ + symmetricKeySize?: number | undefined; + /** + * Depending on the type of this `KeyObject`, this property is either`'secret'` for secret (symmetric) keys, `'public'` for public (asymmetric) keys + * or `'private'` for private (asymmetric) keys. + * @since v11.6.0 + */ + type: KeyObjectType; + } + type CipherCCMTypes = "aes-128-ccm" | "aes-192-ccm" | "aes-256-ccm" | "chacha20-poly1305"; + type CipherGCMTypes = "aes-128-gcm" | "aes-192-gcm" | "aes-256-gcm"; + type CipherOCBTypes = "aes-128-ocb" | "aes-192-ocb" | "aes-256-ocb"; + type BinaryLike = string | NodeJS.ArrayBufferView; + type CipherKey = BinaryLike | KeyObject; + interface CipherCCMOptions extends stream.TransformOptions { + authTagLength: number; + } + interface CipherGCMOptions extends stream.TransformOptions { + authTagLength?: number | undefined; + } + interface CipherOCBOptions extends stream.TransformOptions { + authTagLength: number; + } + /** + * Creates and returns a `Cipher` object that uses the given `algorithm` and`password`. + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to set the length of the authentication + * tag that will be returned by `getAuthTag()` and defaults to 16 bytes. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `password` is used to derive the cipher key and initialization vector (IV). + * The value must be either a `'latin1'` encoded string, a `Buffer`, a`TypedArray`, or a `DataView`. 
+ * + * **This function is semantically insecure for all** + * **supported ciphers and fatally flawed for ciphers in counter mode (such as CTR,** + * **GCM, or CCM).** + * + * The implementation of `crypto.createCipher()` derives keys using the OpenSSL + * function [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) with the digest algorithm set to MD5, one + * iteration, and no salt. The lack of salt allows dictionary attacks as the same + * password always creates the same key. The low iteration count and + * non-cryptographically secure hash algorithm allow passwords to be tested very + * rapidly. + * + * In line with OpenSSL's recommendation to use a more modern algorithm instead of [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) it is recommended that + * developers derive a key and IV on + * their own using {@link scrypt} and to use {@link createCipheriv} to create the `Cipher` object. Users should not use ciphers with counter mode + * (e.g. CTR, GCM, or CCM) in `crypto.createCipher()`. A warning is emitted when + * they are used in order to avoid the risk of IV reuse that causes + * vulnerabilities. For the case when IV is reused in GCM, see [Nonce-Disrespecting Adversaries](https://github.com/nonce-disrespect/nonce-disrespect) for details. + * @since v0.1.94 + * @deprecated Since v10.0.0 - Use {@link createCipheriv} instead. + * @param options `stream.transform` options + */ + function createCipher(algorithm: CipherCCMTypes, password: BinaryLike, options: CipherCCMOptions): CipherCCM; + /** @deprecated since v10.0.0 use `createCipheriv()` */ + function createCipher(algorithm: CipherGCMTypes, password: BinaryLike, options?: CipherGCMOptions): CipherGCM; + /** @deprecated since v10.0.0 use `createCipheriv()` */ + function createCipher(algorithm: string, password: BinaryLike, options?: stream.TransformOptions): Cipher; + /** + * Creates and returns a `Cipher` object, with the given `algorithm`, `key` and + * initialization vector (`iv`). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to set the length of the authentication + * tag that will be returned by `getAuthTag()` and defaults to 16 bytes. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded + * strings,`Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be + * a `KeyObject` of type `secret`. If the cipher does not need + * an initialization vector, `iv` may be `null`. + * + * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * Initialization vectors should be unpredictable and unique; ideally, they will be + * cryptographically random. They do not have to be secret: IVs are typically just + * added to ciphertext messages unencrypted. 
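+     *
+     * A short sketch of authenticated encryption with a random key and IV, assuming
+     * AES-256-GCM:
+     *
+     * ```js
+     * import { Buffer } from 'node:buffer';
+     * const { createCipheriv, randomBytes } = await import('node:crypto');
+     *
+     * const key = randomBytes(32); // 256-bit key
+     * const iv = randomBytes(12);  // 96-bit IV, a common choice for GCM
+     * const cipher = createCipheriv('aes-256-gcm', key, iv);
+     *
+     * const ciphertext = Buffer.concat([
+     *   cipher.update('some clear text data', 'utf8'),
+     *   cipher.final(),
+     * ]);
+     * // Transmit iv, ciphertext and authTag together with the message.
+     * const authTag = cipher.getAuthTag();
+     * ```
+     *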
It may sound contradictory that + * something has to be unpredictable and unique, but does not have to be secret; + * remember that an attacker must not be able to predict ahead of time what a + * given IV will be. + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createCipheriv( + algorithm: CipherCCMTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherCCMOptions, + ): CipherCCM; + function createCipheriv( + algorithm: CipherOCBTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherOCBOptions, + ): CipherOCB; + function createCipheriv( + algorithm: CipherGCMTypes, + key: CipherKey, + iv: BinaryLike, + options?: CipherGCMOptions, + ): CipherGCM; + function createCipheriv( + algorithm: string, + key: CipherKey, + iv: BinaryLike | null, + options?: stream.TransformOptions, + ): Cipher; + /** + * Instances of the `Cipher` class are used to encrypt data. The class can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where plain unencrypted + * data is written to produce encrypted data on the readable side, or + * * Using the `cipher.update()` and `cipher.final()` methods to produce + * the encrypted data. + * + * The {@link createCipher} or {@link createCipheriv} methods are + * used to create `Cipher` instances. `Cipher` objects are not to be created + * directly using the `new` keyword. + * + * Example: Using `Cipher` objects as streams: + * + * ```js + * const { + * scrypt, + * randomFill, + * createCipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * // Once we have the key and iv, we can create and use the cipher... + * const cipher = createCipheriv(algorithm, key, iv); + * + * let encrypted = ''; + * cipher.setEncoding('hex'); + * + * cipher.on('data', (chunk) => encrypted += chunk); + * cipher.on('end', () => console.log(encrypted)); + * + * cipher.write('some clear text data'); + * cipher.end(); + * }); + * }); + * ``` + * + * Example: Using `Cipher` and piped streams: + * + * ```js + * import { + * createReadStream, + * createWriteStream, + * } from 'node:fs'; + * + * import { + * pipeline, + * } from 'node:stream'; + * + * const { + * scrypt, + * randomFill, + * createCipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). 
+ * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * const cipher = createCipheriv(algorithm, key, iv); + * + * const input = createReadStream('test.js'); + * const output = createWriteStream('test.enc'); + * + * pipeline(input, cipher, output, (err) => { + * if (err) throw err; + * }); + * }); + * }); + * ``` + * + * Example: Using the `cipher.update()` and `cipher.final()` methods: + * + * ```js + * const { + * scrypt, + * randomFill, + * createCipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * const cipher = createCipheriv(algorithm, key, iv); + * + * let encrypted = cipher.update('some clear text data', 'utf8', 'hex'); + * encrypted += cipher.final('hex'); + * console.log(encrypted); + * }); + * }); + * ``` + * @since v0.1.94 + */ + class Cipher extends stream.Transform { + private constructor(); + /** + * Updates the cipher with `data`. If the `inputEncoding` argument is given, + * the `data`argument is a string using the specified encoding. If the `inputEncoding`argument is not given, `data` must be a `Buffer`, `TypedArray`, or`DataView`. If `data` is a `Buffer`, + * `TypedArray`, or `DataView`, then`inputEncoding` is ignored. + * + * The `outputEncoding` specifies the output format of the enciphered + * data. If the `outputEncoding`is specified, a string using the specified encoding is returned. If no`outputEncoding` is provided, a `Buffer` is returned. + * + * The `cipher.update()` method can be called multiple times with new data until `cipher.final()` is called. Calling `cipher.update()` after `cipher.final()` will result in an error being + * thrown. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the data. + * @param outputEncoding The `encoding` of the return value. + */ + update(data: BinaryLike): Buffer; + update(data: string, inputEncoding: Encoding): Buffer; + update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string; + update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string; + /** + * Once the `cipher.final()` method has been called, the `Cipher` object can no + * longer be used to encrypt data. Attempts to call `cipher.final()` more than + * once will result in an error being thrown. + * @since v0.1.94 + * @param outputEncoding The `encoding` of the return value. + * @return Any remaining enciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned. + */ + final(): Buffer; + final(outputEncoding: BufferEncoding): string; + /** + * When using block encryption algorithms, the `Cipher` class will automatically + * add padding to the input data to the appropriate block size. To disable the + * default padding call `cipher.setAutoPadding(false)`. 
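+     *
+     * A brief sketch, assuming input that is already a whole number of 16-byte AES
+     * blocks, so that no padding block is appended:
+     *
+     * ```js
+     * import { Buffer } from 'node:buffer';
+     * const { createCipheriv, randomBytes } = await import('node:crypto');
+     *
+     * const cipher = createCipheriv('aes-128-cbc', randomBytes(16), randomBytes(16));
+     * cipher.setAutoPadding(false);
+     *
+     * const block = Buffer.from('16-byte-payload!', 'utf8'); // exactly one block
+     * const out = Buffer.concat([cipher.update(block), cipher.final()]);
+     * console.log(out.length); // 16
+     * ```
+     *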
+ * + * When `autoPadding` is `false`, the length of the entire input data must be a + * multiple of the cipher's block size or `cipher.final()` will throw an error. + * Disabling automatic padding is useful for non-standard padding, for instance + * using `0x0` instead of PKCS padding. + * + * The `cipher.setAutoPadding()` method must be called before `cipher.final()`. + * @since v0.7.1 + * @param [autoPadding=true] + * @return for method chaining. + */ + setAutoPadding(autoPadding?: boolean): this; + } + interface CipherCCM extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options: { + plaintextLength: number; + }, + ): this; + getAuthTag(): Buffer; + } + interface CipherGCM extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + getAuthTag(): Buffer; + } + interface CipherOCB extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + getAuthTag(): Buffer; + } + /** + * Creates and returns a `Decipher` object that uses the given `algorithm` and`password` (key). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * **This function is semantically insecure for all** + * **supported ciphers and fatally flawed for ciphers in counter mode (such as CTR,** + * **GCM, or CCM).** + * + * The implementation of `crypto.createDecipher()` derives keys using the OpenSSL + * function [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) with the digest algorithm set to MD5, one + * iteration, and no salt. The lack of salt allows dictionary attacks as the same + * password always creates the same key. The low iteration count and + * non-cryptographically secure hash algorithm allow passwords to be tested very + * rapidly. + * + * In line with OpenSSL's recommendation to use a more modern algorithm instead of [`EVP_BytesToKey`](https://www.openssl.org/docs/man3.0/man3/EVP_BytesToKey.html) it is recommended that + * developers derive a key and IV on + * their own using {@link scrypt} and to use {@link createDecipheriv} to create the `Decipher` object. + * @since v0.1.94 + * @deprecated Since v10.0.0 - Use {@link createDecipheriv} instead. + * @param options `stream.transform` options + */ + function createDecipher(algorithm: CipherCCMTypes, password: BinaryLike, options: CipherCCMOptions): DecipherCCM; + /** @deprecated since v10.0.0 use `createDecipheriv()` */ + function createDecipher(algorithm: CipherGCMTypes, password: BinaryLike, options?: CipherGCMOptions): DecipherGCM; + /** @deprecated since v10.0.0 use `createDecipheriv()` */ + function createDecipher(algorithm: string, password: BinaryLike, options?: stream.TransformOptions): Decipher; + /** + * Creates and returns a `Decipher` object that uses the given `algorithm`, `key`and initialization vector (`iv`). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. 
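+     *
+     * A sketch of GCM decryption, assuming the `key`, `iv`, `ciphertext` and
+     * `authTag` values come from the cipher side; the authentication tag must be
+     * supplied via `decipher.setAuthTag()` before `decipher.final()`:
+     *
+     * ```js
+     * import { Buffer } from 'node:buffer';
+     * const { createDecipheriv } = await import('node:crypto');
+     *
+     * function decryptGcm(key, iv, ciphertext, authTag) {
+     *   const decipher = createDecipheriv('aes-256-gcm', key, iv);
+     *   decipher.setAuthTag(authTag);
+     *   return Buffer.concat([decipher.update(ciphertext), decipher.final()]).toString('utf8');
+     * }
+     * ```
+     *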
In GCM mode, the `authTagLength`option is not required but can be used to restrict accepted authentication tags + * to those with the specified length. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded + * strings,`Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be + * a `KeyObject` of type `secret`. If the cipher does not need + * an initialization vector, `iv` may be `null`. + * + * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * Initialization vectors should be unpredictable and unique; ideally, they will be + * cryptographically random. They do not have to be secret: IVs are typically just + * added to ciphertext messages unencrypted. It may sound contradictory that + * something has to be unpredictable and unique, but does not have to be secret; + * remember that an attacker must not be able to predict ahead of time what a given + * IV will be. + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createDecipheriv( + algorithm: CipherCCMTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherCCMOptions, + ): DecipherCCM; + function createDecipheriv( + algorithm: CipherOCBTypes, + key: CipherKey, + iv: BinaryLike, + options: CipherOCBOptions, + ): DecipherOCB; + function createDecipheriv( + algorithm: CipherGCMTypes, + key: CipherKey, + iv: BinaryLike, + options?: CipherGCMOptions, + ): DecipherGCM; + function createDecipheriv( + algorithm: string, + key: CipherKey, + iv: BinaryLike | null, + options?: stream.TransformOptions, + ): Decipher; + /** + * Instances of the `Decipher` class are used to decrypt data. The class can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where plain encrypted + * data is written to produce unencrypted data on the readable side, or + * * Using the `decipher.update()` and `decipher.final()` methods to + * produce the unencrypted data. + * + * The {@link createDecipher} or {@link createDecipheriv} methods are + * used to create `Decipher` instances. `Decipher` objects are not to be created + * directly using the `new` keyword. + * + * Example: Using `Decipher` objects as streams: + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * scryptSync, + * createDecipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Key length is dependent on the algorithm. In this case for aes192, it is + * // 24 bytes (192 bits). + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. 
+ * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * let decrypted = ''; + * decipher.on('readable', () => { + * let chunk; + * while (null !== (chunk = decipher.read())) { + * decrypted += chunk.toString('utf8'); + * } + * }); + * decipher.on('end', () => { + * console.log(decrypted); + * // Prints: some clear text data + * }); + * + * // Encrypted with same algorithm, key and iv. + * const encrypted = + * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa'; + * decipher.write(encrypted, 'hex'); + * decipher.end(); + * ``` + * + * Example: Using `Decipher` and piped streams: + * + * ```js + * import { + * createReadStream, + * createWriteStream, + * } from 'node:fs'; + * import { Buffer } from 'node:buffer'; + * const { + * scryptSync, + * createDecipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * const input = createReadStream('test.enc'); + * const output = createWriteStream('test.js'); + * + * input.pipe(decipher).pipe(output); + * ``` + * + * Example: Using the `decipher.update()` and `decipher.final()` methods: + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * scryptSync, + * createDecipheriv, + * } = await import('node:crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * // Encrypted using same algorithm, key and iv. + * const encrypted = + * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa'; + * let decrypted = decipher.update(encrypted, 'hex', 'utf8'); + * decrypted += decipher.final('utf8'); + * console.log(decrypted); + * // Prints: some clear text data + * ``` + * @since v0.1.94 + */ + class Decipher extends stream.Transform { + private constructor(); + /** + * Updates the decipher with `data`. If the `inputEncoding` argument is given, + * the `data`argument is a string using the specified encoding. If the `inputEncoding`argument is not given, `data` must be a `Buffer`. If `data` is a `Buffer` then `inputEncoding` is + * ignored. + * + * The `outputEncoding` specifies the output format of the enciphered + * data. If the `outputEncoding`is specified, a string using the specified encoding is returned. If no`outputEncoding` is provided, a `Buffer` is returned. + * + * The `decipher.update()` method can be called multiple times with new data until `decipher.final()` is called. Calling `decipher.update()` after `decipher.final()` will result in an error + * being thrown. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the `data` string. + * @param outputEncoding The `encoding` of the return value. 
+ */ + update(data: NodeJS.ArrayBufferView): Buffer; + update(data: string, inputEncoding: Encoding): Buffer; + update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string; + update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string; + /** + * Once the `decipher.final()` method has been called, the `Decipher` object can + * no longer be used to decrypt data. Attempts to call `decipher.final()` more + * than once will result in an error being thrown. + * @since v0.1.94 + * @param outputEncoding The `encoding` of the return value. + * @return Any remaining deciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned. + */ + final(): Buffer; + final(outputEncoding: BufferEncoding): string; + /** + * When data has been encrypted without standard block padding, calling`decipher.setAutoPadding(false)` will disable automatic padding to prevent `decipher.final()` from checking for and + * removing padding. + * + * Turning auto padding off will only work if the input data's length is a + * multiple of the ciphers block size. + * + * The `decipher.setAutoPadding()` method must be called before `decipher.final()`. + * @since v0.7.1 + * @param [autoPadding=true] + * @return for method chaining. + */ + setAutoPadding(auto_padding?: boolean): this; + } + interface DecipherCCM extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options: { + plaintextLength: number; + }, + ): this; + } + interface DecipherGCM extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + } + interface DecipherOCB extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + }, + ): this; + } + interface PrivateKeyInput { + key: string | Buffer; + format?: KeyFormat | undefined; + type?: "pkcs1" | "pkcs8" | "sec1" | undefined; + passphrase?: string | Buffer | undefined; + encoding?: string | undefined; + } + interface PublicKeyInput { + key: string | Buffer; + format?: KeyFormat | undefined; + type?: "pkcs1" | "spki" | undefined; + encoding?: string | undefined; + } + /** + * Asynchronously generates a new random secret key of the given `length`. The`type` will determine which validations will be performed on the `length`. + * + * ```js + * const { + * generateKey, + * } = await import('node:crypto'); + * + * generateKey('hmac', { length: 512 }, (err, key) => { + * if (err) throw err; + * console.log(key.export().toString('hex')); // 46e..........620 + * }); + * ``` + * + * The size of a generated HMAC key should not exceed the block size of the + * underlying hash function. See {@link createHmac} for more information. + * @since v15.0.0 + * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`. + */ + function generateKey( + type: "hmac" | "aes", + options: { + length: number; + }, + callback: (err: Error | null, key: KeyObject) => void, + ): void; + /** + * Synchronously generates a new random secret key of the given `length`. The`type` will determine which validations will be performed on the `length`. 
+ * + * ```js + * const { + * generateKeySync, + * } = await import('node:crypto'); + * + * const key = generateKeySync('hmac', { length: 512 }); + * console.log(key.export().toString('hex')); // e89..........41e + * ``` + * + * The size of a generated HMAC key should not exceed the block size of the + * underlying hash function. See {@link createHmac} for more information. + * @since v15.0.0 + * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`. + */ + function generateKeySync( + type: "hmac" | "aes", + options: { + length: number; + }, + ): KeyObject; + interface JsonWebKeyInput { + key: JsonWebKey; + format: "jwk"; + } + /** + * Creates and returns a new key object containing a private key. If `key` is a + * string or `Buffer`, `format` is assumed to be `'pem'`; otherwise, `key`must be an object with the properties described above. + * + * If the private key is encrypted, a `passphrase` must be specified. The length + * of the passphrase is limited to 1024 bytes. + * @since v11.6.0 + */ + function createPrivateKey(key: PrivateKeyInput | string | Buffer | JsonWebKeyInput): KeyObject; + /** + * Creates and returns a new key object containing a public key. If `key` is a + * string or `Buffer`, `format` is assumed to be `'pem'`; if `key` is a `KeyObject`with type `'private'`, the public key is derived from the given private key; + * otherwise, `key` must be an object with the properties described above. + * + * If the format is `'pem'`, the `'key'` may also be an X.509 certificate. + * + * Because public keys can be derived from private keys, a private key may be + * passed instead of a public key. In that case, this function behaves as if {@link createPrivateKey} had been called, except that the type of the + * returned `KeyObject` will be `'public'` and that the private key cannot be + * extracted from the returned `KeyObject`. Similarly, if a `KeyObject` with type`'private'` is given, a new `KeyObject` with type `'public'` will be returned + * and it will be impossible to extract the private key from the returned object. + * @since v11.6.0 + */ + function createPublicKey(key: PublicKeyInput | string | Buffer | KeyObject | JsonWebKeyInput): KeyObject; + /** + * Creates and returns a new key object containing a secret key for symmetric + * encryption or `Hmac`. + * @since v11.6.0 + * @param encoding The string encoding when `key` is a string. + */ + function createSecretKey(key: NodeJS.ArrayBufferView): KeyObject; + function createSecretKey(key: string, encoding: BufferEncoding): KeyObject; + /** + * Creates and returns a `Sign` object that uses the given `algorithm`. Use {@link getHashes} to obtain the names of the available digest algorithms. + * Optional `options` argument controls the `stream.Writable` behavior. + * + * In some cases, a `Sign` instance can be created using the name of a signature + * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. This will use + * the corresponding digest algorithm. This does not work for all signature + * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest + * algorithm names. 
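+     *
+     * A compact sketch, assuming `privateKeyPem` holds any PEM-encoded private key,
+     * that routes the key through `createPrivateKey()` before signing:
+     *
+     * ```js
+     * const { createPrivateKey, createSign } = await import('node:crypto');
+     *
+     * function signData(privateKeyPem, data) {
+     *   const privateKey = createPrivateKey(privateKeyPem);
+     *   return createSign('sha256').update(data).sign(privateKey, 'hex');
+     * }
+     * ```
+     *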
+ * @since v0.1.92 + * @param options `stream.Writable` options + */ + function createSign(algorithm: string, options?: stream.WritableOptions): Sign; + type DSAEncoding = "der" | "ieee-p1363"; + interface SigningOptions { + /** + * @see crypto.constants.RSA_PKCS1_PADDING + */ + padding?: number | undefined; + saltLength?: number | undefined; + dsaEncoding?: DSAEncoding | undefined; + } + interface SignPrivateKeyInput extends PrivateKeyInput, SigningOptions {} + interface SignKeyObjectInput extends SigningOptions { + key: KeyObject; + } + interface VerifyPublicKeyInput extends PublicKeyInput, SigningOptions {} + interface VerifyKeyObjectInput extends SigningOptions { + key: KeyObject; + } + interface VerifyJsonWebKeyInput extends JsonWebKeyInput, SigningOptions {} + type KeyLike = string | Buffer | KeyObject; + /** + * The `Sign` class is a utility for generating signatures. It can be used in one + * of two ways: + * + * * As a writable `stream`, where data to be signed is written and the `sign.sign()` method is used to generate and return the signature, or + * * Using the `sign.update()` and `sign.sign()` methods to produce the + * signature. + * + * The {@link createSign} method is used to create `Sign` instances. The + * argument is the string name of the hash function to use. `Sign` objects are not + * to be created directly using the `new` keyword. + * + * Example: Using `Sign` and `Verify` objects as streams: + * + * ```js + * const { + * generateKeyPairSync, + * createSign, + * createVerify, + * } = await import('node:crypto'); + * + * const { privateKey, publicKey } = generateKeyPairSync('ec', { + * namedCurve: 'sect239k1', + * }); + * + * const sign = createSign('SHA256'); + * sign.write('some data to sign'); + * sign.end(); + * const signature = sign.sign(privateKey, 'hex'); + * + * const verify = createVerify('SHA256'); + * verify.write('some data to sign'); + * verify.end(); + * console.log(verify.verify(publicKey, signature, 'hex')); + * // Prints: true + * ``` + * + * Example: Using the `sign.update()` and `verify.update()` methods: + * + * ```js + * const { + * generateKeyPairSync, + * createSign, + * createVerify, + * } = await import('node:crypto'); + * + * const { privateKey, publicKey } = generateKeyPairSync('rsa', { + * modulusLength: 2048, + * }); + * + * const sign = createSign('SHA256'); + * sign.update('some data to sign'); + * sign.end(); + * const signature = sign.sign(privateKey); + * + * const verify = createVerify('SHA256'); + * verify.update('some data to sign'); + * verify.end(); + * console.log(verify.verify(publicKey, signature)); + * // Prints: true + * ``` + * @since v0.1.92 + */ + class Sign extends stream.Writable { + private constructor(); + /** + * Updates the `Sign` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): this; + update(data: string, inputEncoding: Encoding): this; + /** + * Calculates the signature on all the data passed through using either `sign.update()` or `sign.write()`. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. 
If it is an + * object, the following additional properties can be passed: + * + * If `outputEncoding` is provided a string is returned; otherwise a `Buffer` is returned. + * + * The `Sign` object can not be again used after `sign.sign()` method has been + * called. Multiple calls to `sign.sign()` will result in an error being thrown. + * @since v0.1.92 + */ + sign(privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput): Buffer; + sign( + privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput, + outputFormat: BinaryToTextEncoding, + ): string; + } + /** + * Creates and returns a `Verify` object that uses the given algorithm. + * Use {@link getHashes} to obtain an array of names of the available + * signing algorithms. Optional `options` argument controls the`stream.Writable` behavior. + * + * In some cases, a `Verify` instance can be created using the name of a signature + * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. This will use + * the corresponding digest algorithm. This does not work for all signature + * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest + * algorithm names. + * @since v0.1.92 + * @param options `stream.Writable` options + */ + function createVerify(algorithm: string, options?: stream.WritableOptions): Verify; + /** + * The `Verify` class is a utility for verifying signatures. It can be used in one + * of two ways: + * + * * As a writable `stream` where written data is used to validate against the + * supplied signature, or + * * Using the `verify.update()` and `verify.verify()` methods to verify + * the signature. + * + * The {@link createVerify} method is used to create `Verify` instances.`Verify` objects are not to be created directly using the `new` keyword. + * + * See `Sign` for examples. + * @since v0.1.92 + */ + class Verify extends stream.Writable { + private constructor(); + /** + * Updates the `Verify` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `inputEncoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Verify; + update(data: string, inputEncoding: Encoding): Verify; + /** + * Verifies the provided data using the given `object` and `signature`. + * + * If `object` is not a `KeyObject`, this function behaves as if`object` had been passed to {@link createPublicKey}. If it is an + * object, the following additional properties can be passed: + * + * The `signature` argument is the previously calculated signature for the data, in + * the `signatureEncoding`. + * If a `signatureEncoding` is specified, the `signature` is expected to be a + * string; otherwise `signature` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * The `verify` object can not be used again after `verify.verify()` has been + * called. Multiple calls to `verify.verify()` will result in an error being + * thrown. + * + * Because public keys can be derived from private keys, a private key may + * be passed instead of a public key. 
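+     *
+     * A sketch, assuming an RSA key, of passing the additional properties described
+     * above when verifying an RSA-PSS style signature (`publicKeyPem`, `data` and
+     * `signatureHex` are illustrative parameters):
+     *
+     * ```js
+     * const { constants, createVerify } = await import('node:crypto');
+     *
+     * function verifyPss(publicKeyPem, data, signatureHex) {
+     *   const verify = createVerify('sha256');
+     *   verify.update(data);
+     *   return verify.verify({
+     *     key: publicKeyPem,
+     *     padding: constants.RSA_PKCS1_PSS_PADDING,
+     *     saltLength: constants.RSA_PSS_SALTLEN_DIGEST,
+     *   }, signatureHex, 'hex');
+     * }
+     * ```
+     *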
+ * @since v0.1.92 + */ + verify( + object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: NodeJS.ArrayBufferView, + ): boolean; + verify( + object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: string, + signature_format?: BinaryToTextEncoding, + ): boolean; + } + /** + * Creates a `DiffieHellman` key exchange object using the supplied `prime` and an + * optional specific `generator`. + * + * The `generator` argument can be a number, string, or `Buffer`. If`generator` is not specified, the value `2` is used. + * + * If `primeEncoding` is specified, `prime` is expected to be a string; otherwise + * a `Buffer`, `TypedArray`, or `DataView` is expected. + * + * If `generatorEncoding` is specified, `generator` is expected to be a string; + * otherwise a number, `Buffer`, `TypedArray`, or `DataView` is expected. + * @since v0.11.12 + * @param primeEncoding The `encoding` of the `prime` string. + * @param [generator=2] + * @param generatorEncoding The `encoding` of the `generator` string. + */ + function createDiffieHellman(primeLength: number, generator?: number): DiffieHellman; + function createDiffieHellman( + prime: ArrayBuffer | NodeJS.ArrayBufferView, + generator?: number | ArrayBuffer | NodeJS.ArrayBufferView, + ): DiffieHellman; + function createDiffieHellman( + prime: ArrayBuffer | NodeJS.ArrayBufferView, + generator: string, + generatorEncoding: BinaryToTextEncoding, + ): DiffieHellman; + function createDiffieHellman( + prime: string, + primeEncoding: BinaryToTextEncoding, + generator?: number | ArrayBuffer | NodeJS.ArrayBufferView, + ): DiffieHellman; + function createDiffieHellman( + prime: string, + primeEncoding: BinaryToTextEncoding, + generator: string, + generatorEncoding: BinaryToTextEncoding, + ): DiffieHellman; + /** + * The `DiffieHellman` class is a utility for creating Diffie-Hellman key + * exchanges. + * + * Instances of the `DiffieHellman` class can be created using the {@link createDiffieHellman} function. + * + * ```js + * import assert from 'node:assert'; + * + * const { + * createDiffieHellman, + * } = await import('node:crypto'); + * + * // Generate Alice's keys... + * const alice = createDiffieHellman(2048); + * const aliceKey = alice.generateKeys(); + * + * // Generate Bob's keys... + * const bob = createDiffieHellman(alice.getPrime(), alice.getGenerator()); + * const bobKey = bob.generateKeys(); + * + * // Exchange and generate the secret... + * const aliceSecret = alice.computeSecret(bobKey); + * const bobSecret = bob.computeSecret(aliceKey); + * + * // OK + * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex')); + * ``` + * @since v0.5.0 + */ + class DiffieHellman { + private constructor(); + /** + * Generates private and public Diffie-Hellman key values unless they have been + * generated or computed already, and returns + * the public key in the specified `encoding`. This key should be + * transferred to the other party. + * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned. + * + * This function is a thin wrapper around [`DH_generate_key()`](https://www.openssl.org/docs/man3.0/man3/DH_generate_key.html). In particular, + * once a private key has been generated or set, calling this function only updates + * the public key but does not generate a new private key. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. 
+ */ + generateKeys(): Buffer; + generateKeys(encoding: BinaryToTextEncoding): string; + /** + * Computes the shared secret using `otherPublicKey` as the other + * party's public key and returns the computed shared secret. The supplied + * key is interpreted using the specified `inputEncoding`, and secret is + * encoded using specified `outputEncoding`. + * If the `inputEncoding` is not + * provided, `otherPublicKey` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * If `outputEncoding` is given a string is returned; otherwise, a `Buffer` is returned. + * @since v0.5.0 + * @param inputEncoding The `encoding` of an `otherPublicKey` string. + * @param outputEncoding The `encoding` of the return value. + */ + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, inputEncoding?: null, outputEncoding?: null): Buffer; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding, outputEncoding?: null): Buffer; + computeSecret( + otherPublicKey: NodeJS.ArrayBufferView, + inputEncoding: null, + outputEncoding: BinaryToTextEncoding, + ): string; + computeSecret( + otherPublicKey: string, + inputEncoding: BinaryToTextEncoding, + outputEncoding: BinaryToTextEncoding, + ): string; + /** + * Returns the Diffie-Hellman prime in the specified `encoding`. + * If `encoding` is provided a string is + * returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPrime(): Buffer; + getPrime(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman generator in the specified `encoding`. + * If `encoding` is provided a string is + * returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getGenerator(): Buffer; + getGenerator(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman public key in the specified `encoding`. + * If `encoding` is provided a + * string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPublicKey(): Buffer; + getPublicKey(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman private key in the specified `encoding`. + * If `encoding` is provided a + * string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPrivateKey(): Buffer; + getPrivateKey(encoding: BinaryToTextEncoding): string; + /** + * Sets the Diffie-Hellman public key. If the `encoding` argument is provided,`publicKey` is expected + * to be a string. If no `encoding` is provided, `publicKey` is expected + * to be a `Buffer`, `TypedArray`, or `DataView`. + * @since v0.5.0 + * @param encoding The `encoding` of the `publicKey` string. + */ + setPublicKey(publicKey: NodeJS.ArrayBufferView): void; + setPublicKey(publicKey: string, encoding: BufferEncoding): void; + /** + * Sets the Diffie-Hellman private key. If the `encoding` argument is provided,`privateKey` is expected + * to be a string. If no `encoding` is provided, `privateKey` is expected + * to be a `Buffer`, `TypedArray`, or `DataView`. + * + * This function does not automatically compute the associated public key. Either `diffieHellman.setPublicKey()` or `diffieHellman.generateKeys()` can be + * used to manually provide the public key or to automatically derive it. + * @since v0.5.0 + * @param encoding The `encoding` of the `privateKey` string. 
+ */ + setPrivateKey(privateKey: NodeJS.ArrayBufferView): void; + setPrivateKey(privateKey: string, encoding: BufferEncoding): void; + /** + * A bit field containing any warnings and/or errors resulting from a check + * performed during initialization of the `DiffieHellman` object. + * + * The following values are valid for this property (as defined in `node:constants` module): + * + * * `DH_CHECK_P_NOT_SAFE_PRIME` + * * `DH_CHECK_P_NOT_PRIME` + * * `DH_UNABLE_TO_CHECK_GENERATOR` + * * `DH_NOT_SUITABLE_GENERATOR` + * @since v0.11.12 + */ + verifyError: number; + } + /** + * The `DiffieHellmanGroup` class takes a well-known modp group as its argument. + * It works the same as `DiffieHellman`, except that it does not allow changing its keys after creation. + * In other words, it does not implement `setPublicKey()` or `setPrivateKey()` methods. + * + * ```js + * const { createDiffieHellmanGroup } = await import('node:crypto'); + * const dh = createDiffieHellmanGroup('modp1'); + * ``` + * The name (e.g. `'modp1'`) is taken from [RFC 2412](https://www.rfc-editor.org/rfc/rfc2412.txt) (modp1 and 2) and [RFC 3526](https://www.rfc-editor.org/rfc/rfc3526.txt): + * ```bash + * $ perl -ne 'print "$1\n" if /"(modp\d+)"/' src/node_crypto_groups.h + * modp1 # 768 bits + * modp2 # 1024 bits + * modp5 # 1536 bits + * modp14 # 2048 bits + * modp15 # etc. + * modp16 + * modp17 + * modp18 + * ``` + * @since v0.7.5 + */ + const DiffieHellmanGroup: DiffieHellmanGroupConstructor; + interface DiffieHellmanGroupConstructor { + new(name: string): DiffieHellmanGroup; + (name: string): DiffieHellmanGroup; + readonly prototype: DiffieHellmanGroup; + } + type DiffieHellmanGroup = Omit; + /** + * Creates a predefined `DiffieHellmanGroup` key exchange object. The + * supported groups are listed in the documentation for `DiffieHellmanGroup`. + * + * The returned object mimics the interface of objects created by {@link createDiffieHellman}, but will not allow changing + * the keys (with `diffieHellman.setPublicKey()`, for example). The + * advantage of using this method is that the parties do not have to + * generate nor exchange a group modulus beforehand, saving both processor + * and communication time. + * + * Example (obtaining a shared secret): + * + * ```js + * const { + * getDiffieHellman, + * } = await import('node:crypto'); + * const alice = getDiffieHellman('modp14'); + * const bob = getDiffieHellman('modp14'); + * + * alice.generateKeys(); + * bob.generateKeys(); + * + * const aliceSecret = alice.computeSecret(bob.getPublicKey(), null, 'hex'); + * const bobSecret = bob.computeSecret(alice.getPublicKey(), null, 'hex'); + * + * // aliceSecret and bobSecret should be the same + * console.log(aliceSecret === bobSecret); + * ``` + * @since v0.7.5 + */ + function getDiffieHellman(groupName: string): DiffieHellmanGroup; + /** + * An alias for {@link getDiffieHellman} + * @since v0.9.3 + */ + function createDiffieHellmanGroup(name: string): DiffieHellmanGroup; + /** + * Provides an asynchronous Password-Based Key Derivation Function 2 (PBKDF2) + * implementation. A selected HMAC digest algorithm specified by `digest` is + * applied to derive a key of the requested byte length (`keylen`) from the`password`, `salt` and `iterations`. + * + * The supplied `callback` function is called with two arguments: `err` and`derivedKey`. If an error occurs while deriving the key, `err` will be set; + * otherwise `err` will be `null`. By default, the successfully generated`derivedKey` will be passed to the callback as a `Buffer`. 
An error will be + * thrown if any of the input arguments specify invalid values or types. + * + * The `iterations` argument must be a number set as high as possible. The + * higher the number of iterations, the more secure the derived key will be, + * but will take a longer amount of time to complete. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * ```js + * const { + * pbkdf2, + * } = await import('node:crypto'); + * + * pbkdf2('secret', 'salt', 100000, 64, 'sha512', (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...08d59ae' + * }); + * ``` + * + * An array of supported digest functions can be retrieved using {@link getHashes}. + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * @since v0.5.5 + */ + function pbkdf2( + password: BinaryLike, + salt: BinaryLike, + iterations: number, + keylen: number, + digest: string, + callback: (err: Error | null, derivedKey: Buffer) => void, + ): void; + /** + * Provides a synchronous Password-Based Key Derivation Function 2 (PBKDF2) + * implementation. A selected HMAC digest algorithm specified by `digest` is + * applied to derive a key of the requested byte length (`keylen`) from the`password`, `salt` and `iterations`. + * + * If an error occurs an `Error` will be thrown, otherwise the derived key will be + * returned as a `Buffer`. + * + * The `iterations` argument must be a number set as high as possible. The + * higher the number of iterations, the more secure the derived key will be, + * but will take a longer amount of time to complete. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * ```js + * const { + * pbkdf2Sync, + * } = await import('node:crypto'); + * + * const key = pbkdf2Sync('secret', 'salt', 100000, 64, 'sha512'); + * console.log(key.toString('hex')); // '3745e48...08d59ae' + * ``` + * + * An array of supported digest functions can be retrieved using {@link getHashes}. + * @since v0.9.3 + */ + function pbkdf2Sync( + password: BinaryLike, + salt: BinaryLike, + iterations: number, + keylen: number, + digest: string, + ): Buffer; + /** + * Generates cryptographically strong pseudorandom data. The `size` argument + * is a number indicating the number of bytes to generate. + * + * If a `callback` function is provided, the bytes are generated asynchronously + * and the `callback` function is invoked with two arguments: `err` and `buf`. + * If an error occurs, `err` will be an `Error` object; otherwise it is `null`. The`buf` argument is a `Buffer` containing the generated bytes. 
+ * + * ```js + * // Asynchronous + * const { + * randomBytes, + * } = await import('node:crypto'); + * + * randomBytes(256, (err, buf) => { + * if (err) throw err; + * console.log(`${buf.length} bytes of random data: ${buf.toString('hex')}`); + * }); + * ``` + * + * If the `callback` function is not provided, the random bytes are generated + * synchronously and returned as a `Buffer`. An error will be thrown if + * there is a problem generating the bytes. + * + * ```js + * // Synchronous + * const { + * randomBytes, + * } = await import('node:crypto'); + * + * const buf = randomBytes(256); + * console.log( + * `${buf.length} bytes of random data: ${buf.toString('hex')}`); + * ``` + * + * The `crypto.randomBytes()` method will not complete until there is + * sufficient entropy available. + * This should normally never take longer than a few milliseconds. The only time + * when generating the random bytes may conceivably block for a longer period of + * time is right after boot, when the whole system is still low on entropy. + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * + * The asynchronous version of `crypto.randomBytes()` is carried out in a single + * threadpool request. To minimize threadpool task length variation, partition + * large `randomBytes` requests when doing so as part of fulfilling a client + * request. + * @since v0.5.8 + * @param size The number of bytes to generate. The `size` must not be larger than `2**31 - 1`. + * @return if the `callback` function is not provided. + */ + function randomBytes(size: number): Buffer; + function randomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; + function pseudoRandomBytes(size: number): Buffer; + function pseudoRandomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; + /** + * Return a random integer `n` such that `min <= n < max`. This + * implementation avoids [modulo bias](https://en.wikipedia.org/wiki/Fisher%E2%80%93Yates_shuffle#Modulo_bias). + * + * The range (`max - min`) must be less than 2**48. `min` and `max` must + * be [safe integers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isSafeInteger). + * + * If the `callback` function is not provided, the random integer is + * generated synchronously. + * + * ```js + * // Asynchronous + * const { + * randomInt, + * } = await import('node:crypto'); + * + * randomInt(3, (err, n) => { + * if (err) throw err; + * console.log(`Random number chosen from (0, 1, 2): ${n}`); + * }); + * ``` + * + * ```js + * // Synchronous + * const { + * randomInt, + * } = await import('node:crypto'); + * + * const n = randomInt(3); + * console.log(`Random number chosen from (0, 1, 2): ${n}`); + * ``` + * + * ```js + * // With `min` argument + * const { + * randomInt, + * } = await import('node:crypto'); + * + * const n = randomInt(1, 7); + * console.log(`The dice rolled: ${n}`); + * ``` + * @since v14.10.0, v12.19.0 + * @param [min=0] Start of random range (inclusive). + * @param max End of random range (exclusive). + * @param callback `function(err, n) {}`. 
+ */ + function randomInt(max: number): number; + function randomInt(min: number, max: number): number; + function randomInt(max: number, callback: (err: Error | null, value: number) => void): void; + function randomInt(min: number, max: number, callback: (err: Error | null, value: number) => void): void; + /** + * Synchronous version of {@link randomFill}. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFillSync } = await import('node:crypto'); + * + * const buf = Buffer.alloc(10); + * console.log(randomFillSync(buf).toString('hex')); + * + * randomFillSync(buf, 5); + * console.log(buf.toString('hex')); + * + * // The above is equivalent to the following: + * randomFillSync(buf, 5, 5); + * console.log(buf.toString('hex')); + * ``` + * + * Any `ArrayBuffer`, `TypedArray` or `DataView` instance may be passed as`buffer`. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFillSync } = await import('node:crypto'); + * + * const a = new Uint32Array(10); + * console.log(Buffer.from(randomFillSync(a).buffer, + * a.byteOffset, a.byteLength).toString('hex')); + * + * const b = new DataView(new ArrayBuffer(10)); + * console.log(Buffer.from(randomFillSync(b).buffer, + * b.byteOffset, b.byteLength).toString('hex')); + * + * const c = new ArrayBuffer(10); + * console.log(Buffer.from(randomFillSync(c)).toString('hex')); + * ``` + * @since v7.10.0, v6.13.0 + * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`. + * @param [offset=0] + * @param [size=buffer.length - offset] + * @return The object passed as `buffer` argument. + */ + function randomFillSync(buffer: T, offset?: number, size?: number): T; + /** + * This function is similar to {@link randomBytes} but requires the first + * argument to be a `Buffer` that will be filled. It also + * requires that a callback is passed in. + * + * If the `callback` function is not provided, an error will be thrown. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFill } = await import('node:crypto'); + * + * const buf = Buffer.alloc(10); + * randomFill(buf, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * + * randomFill(buf, 5, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * + * // The above is equivalent to the following: + * randomFill(buf, 5, 5, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * ``` + * + * Any `ArrayBuffer`, `TypedArray`, or `DataView` instance may be passed as`buffer`. + * + * While this includes instances of `Float32Array` and `Float64Array`, this + * function should not be used to generate random floating-point numbers. The + * result may contain `+Infinity`, `-Infinity`, and `NaN`, and even if the array + * contains finite numbers only, they are not drawn from a uniform random + * distribution and have no meaningful lower or upper bounds. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * const { randomFill } = await import('node:crypto'); + * + * const a = new Uint32Array(10); + * randomFill(a, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength) + * .toString('hex')); + * }); + * + * const b = new DataView(new ArrayBuffer(10)); + * randomFill(b, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength) + * .toString('hex')); + * }); + * + * const c = new ArrayBuffer(10); + * randomFill(c, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf).toString('hex')); + * }); + * ``` + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * + * The asynchronous version of `crypto.randomFill()` is carried out in a single + * threadpool request. To minimize threadpool task length variation, partition + * large `randomFill` requests when doing so as part of fulfilling a client + * request. + * @since v7.10.0, v6.13.0 + * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`. + * @param [offset=0] + * @param [size=buffer.length - offset] + * @param callback `function(err, buf) {}`. + */ + function randomFill( + buffer: T, + callback: (err: Error | null, buf: T) => void, + ): void; + function randomFill( + buffer: T, + offset: number, + callback: (err: Error | null, buf: T) => void, + ): void; + function randomFill( + buffer: T, + offset: number, + size: number, + callback: (err: Error | null, buf: T) => void, + ): void; + interface ScryptOptions { + cost?: number | undefined; + blockSize?: number | undefined; + parallelization?: number | undefined; + N?: number | undefined; + r?: number | undefined; + p?: number | undefined; + maxmem?: number | undefined; + } + /** + * Provides an asynchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based + * key derivation function that is designed to be expensive computationally and + * memory-wise in order to make brute-force attacks unrewarding. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * The `callback` function is called with two arguments: `err` and `derivedKey`.`err` is an exception object when key derivation fails, otherwise `err` is`null`. `derivedKey` is passed to the + * callback as a `Buffer`. + * + * An exception is thrown when any of the input arguments specify invalid values + * or types. + * + * ```js + * const { + * scrypt, + * } = await import('node:crypto'); + * + * // Using the factory defaults. + * scrypt('password', 'salt', 64, (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...08d59ae' + * }); + * // Using a custom N parameter. Must be a power of two. 
+ * scrypt('password', 'salt', 64, { N: 1024 }, (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...aa39b34' + * }); + * ``` + * @since v10.5.0 + */ + function scrypt( + password: BinaryLike, + salt: BinaryLike, + keylen: number, + callback: (err: Error | null, derivedKey: Buffer) => void, + ): void; + function scrypt( + password: BinaryLike, + salt: BinaryLike, + keylen: number, + options: ScryptOptions, + callback: (err: Error | null, derivedKey: Buffer) => void, + ): void; + /** + * Provides a synchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based + * key derivation function that is designed to be expensive computationally and + * memory-wise in order to make brute-force attacks unrewarding. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * An exception is thrown when key derivation fails, otherwise the derived key is + * returned as a `Buffer`. + * + * An exception is thrown when any of the input arguments specify invalid values + * or types. + * + * ```js + * const { + * scryptSync, + * } = await import('node:crypto'); + * // Using the factory defaults. + * + * const key1 = scryptSync('password', 'salt', 64); + * console.log(key1.toString('hex')); // '3745e48...08d59ae' + * // Using a custom N parameter. Must be a power of two. + * const key2 = scryptSync('password', 'salt', 64, { N: 1024 }); + * console.log(key2.toString('hex')); // '3745e48...aa39b34' + * ``` + * @since v10.5.0 + */ + function scryptSync(password: BinaryLike, salt: BinaryLike, keylen: number, options?: ScryptOptions): Buffer; + interface RsaPublicKey { + key: KeyLike; + padding?: number | undefined; + } + interface RsaPrivateKey { + key: KeyLike; + passphrase?: string | undefined; + /** + * @default 'sha1' + */ + oaepHash?: string | undefined; + oaepLabel?: NodeJS.TypedArray | undefined; + padding?: number | undefined; + } + /** + * Encrypts the content of `buffer` with `key` and returns a new `Buffer` with encrypted content. The returned data can be decrypted using + * the corresponding private key, for example using {@link privateDecrypt}. + * + * If `key` is not a `KeyObject`, this function behaves as if`key` had been passed to {@link createPublicKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_OAEP_PADDING`. + * + * Because RSA public keys can be derived from private keys, a private key may + * be passed instead of a public key. + * @since v0.11.14 + */ + function publicEncrypt(key: RsaPublicKey | RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Decrypts `buffer` with `key`.`buffer` was previously encrypted using + * the corresponding private key, for example using {@link privateEncrypt}. + * + * If `key` is not a `KeyObject`, this function behaves as if`key` had been passed to {@link createPublicKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_PADDING`. + * + * Because RSA public keys can be derived from private keys, a private key may + * be passed instead of a public key. 
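+     *
+     * A minimal sketch of the round trip (assuming `privateKey` and `publicKey` hold an
+     * RSA key pair, for example one produced by {@link generateKeyPairSync}):
+     *
+     * ```js
+     * import { Buffer } from 'node:buffer';
+     * const { privateEncrypt, publicDecrypt } = await import('node:crypto');
+     *
+     * const payload = Buffer.from('data signed by the private key holder');
+     * const encrypted = privateEncrypt(privateKey, payload);
+     * const decrypted = publicDecrypt(publicKey, encrypted);
+     * console.log(decrypted.equals(payload)); // true
+     * ```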
+ * @since v1.1.0 + */ + function publicDecrypt(key: RsaPublicKey | RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Decrypts `buffer` with `privateKey`. `buffer` was previously encrypted using + * the corresponding public key, for example using {@link publicEncrypt}. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_OAEP_PADDING`. + * @since v0.11.14 + */ + function privateDecrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Encrypts `buffer` with `privateKey`. The returned data can be decrypted using + * the corresponding public key, for example using {@link publicDecrypt}. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_PADDING`. + * @since v1.1.0 + */ + function privateEncrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * ```js + * const { + * getCiphers, + * } = await import('node:crypto'); + * + * console.log(getCiphers()); // ['aes-128-cbc', 'aes-128-ccm', ...] + * ``` + * @since v0.9.3 + * @return An array with the names of the supported cipher algorithms. + */ + function getCiphers(): string[]; + /** + * ```js + * const { + * getCurves, + * } = await import('node:crypto'); + * + * console.log(getCurves()); // ['Oakley-EC2N-3', 'Oakley-EC2N-4', ...] + * ``` + * @since v2.3.0 + * @return An array with the names of the supported elliptic curves. + */ + function getCurves(): string[]; + /** + * @since v10.0.0 + * @return `1` if and only if a FIPS compliant crypto provider is currently in use, `0` otherwise. A future semver-major release may change the return type of this API to a {boolean}. + */ + function getFips(): 1 | 0; + /** + * Enables the FIPS compliant crypto provider in a FIPS-enabled Node.js build. + * Throws an error if FIPS mode is not available. + * @since v10.0.0 + * @param bool `true` to enable FIPS mode. + */ + function setFips(bool: boolean): void; + /** + * ```js + * const { + * getHashes, + * } = await import('node:crypto'); + * + * console.log(getHashes()); // ['DSA', 'DSA-SHA', 'DSA-SHA1', ...] + * ``` + * @since v0.9.3 + * @return An array of the names of the supported hash algorithms, such as `'RSA-SHA256'`. Hash algorithms are also called "digest" algorithms. + */ + function getHashes(): string[]; + /** + * The `ECDH` class is a utility for creating Elliptic Curve Diffie-Hellman (ECDH) + * key exchanges. + * + * Instances of the `ECDH` class can be created using the {@link createECDH} function. + * + * ```js + * import assert from 'node:assert'; + * + * const { + * createECDH, + * } = await import('node:crypto'); + * + * // Generate Alice's keys... + * const alice = createECDH('secp521r1'); + * const aliceKey = alice.generateKeys(); + * + * // Generate Bob's keys... + * const bob = createECDH('secp521r1'); + * const bobKey = bob.generateKeys(); + * + * // Exchange and generate the secret... 
+ * const aliceSecret = alice.computeSecret(bobKey); + * const bobSecret = bob.computeSecret(aliceKey); + * + * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex')); + * // OK + * ``` + * @since v0.11.14 + */ + class ECDH { + private constructor(); + /** + * Converts the EC Diffie-Hellman public key specified by `key` and `curve` to the + * format specified by `format`. The `format` argument specifies point encoding + * and can be `'compressed'`, `'uncompressed'` or `'hybrid'`. The supplied key is + * interpreted using the specified `inputEncoding`, and the returned key is encoded + * using the specified `outputEncoding`. + * + * Use {@link getCurves} to obtain a list of available curve names. + * On recent OpenSSL releases, `openssl ecparam -list_curves` will also display + * the name and description of each available elliptic curve. + * + * If `format` is not specified the point will be returned in `'uncompressed'`format. + * + * If the `inputEncoding` is not provided, `key` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * Example (uncompressing a key): + * + * ```js + * const { + * createECDH, + * ECDH, + * } = await import('node:crypto'); + * + * const ecdh = createECDH('secp256k1'); + * ecdh.generateKeys(); + * + * const compressedKey = ecdh.getPublicKey('hex', 'compressed'); + * + * const uncompressedKey = ECDH.convertKey(compressedKey, + * 'secp256k1', + * 'hex', + * 'hex', + * 'uncompressed'); + * + * // The converted key and the uncompressed public key should be the same + * console.log(uncompressedKey === ecdh.getPublicKey('hex')); + * ``` + * @since v10.0.0 + * @param inputEncoding The `encoding` of the `key` string. + * @param outputEncoding The `encoding` of the return value. + * @param [format='uncompressed'] + */ + static convertKey( + key: BinaryLike, + curve: string, + inputEncoding?: BinaryToTextEncoding, + outputEncoding?: "latin1" | "hex" | "base64" | "base64url", + format?: "uncompressed" | "compressed" | "hybrid", + ): Buffer | string; + /** + * Generates private and public EC Diffie-Hellman key values, and returns + * the public key in the specified `format` and `encoding`. This key should be + * transferred to the other party. + * + * The `format` argument specifies point encoding and can be `'compressed'` or`'uncompressed'`. If `format` is not specified, the point will be returned in`'uncompressed'` format. + * + * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @param [format='uncompressed'] + */ + generateKeys(): Buffer; + generateKeys(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; + /** + * Computes the shared secret using `otherPublicKey` as the other + * party's public key and returns the computed shared secret. The supplied + * key is interpreted using specified `inputEncoding`, and the returned secret + * is encoded using the specified `outputEncoding`. + * If the `inputEncoding` is not + * provided, `otherPublicKey` is expected to be a `Buffer`, `TypedArray`, or`DataView`. + * + * If `outputEncoding` is given a string will be returned; otherwise a `Buffer` is returned. + * + * `ecdh.computeSecret` will throw an`ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY` error when `otherPublicKey`lies outside of the elliptic curve. Since `otherPublicKey` is + * usually supplied from a remote user over an insecure network, + * be sure to handle this exception accordingly. 
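+         *
+         * For example (a sketch; `ecdh` is an `ECDH` instance and `otherPublicKey` stands
+         * for data received from a remote peer, which may be malformed):
+         *
+         * ```js
+         * let secret;
+         * try {
+         *   secret = ecdh.computeSecret(otherPublicKey);
+         * } catch (err) {
+         *   if (err.code === 'ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY') {
+         *     // Reject the peer's key rather than crashing.
+         *   } else {
+         *     throw err;
+         *   }
+         * }
+         * ```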
+ * @since v0.11.14 + * @param inputEncoding The `encoding` of the `otherPublicKey` string. + * @param outputEncoding The `encoding` of the return value. + */ + computeSecret(otherPublicKey: NodeJS.ArrayBufferView): Buffer; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding): Buffer; + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, outputEncoding: BinaryToTextEncoding): string; + computeSecret( + otherPublicKey: string, + inputEncoding: BinaryToTextEncoding, + outputEncoding: BinaryToTextEncoding, + ): string; + /** + * If `encoding` is specified, a string is returned; otherwise a `Buffer` is + * returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @return The EC Diffie-Hellman in the specified `encoding`. + */ + getPrivateKey(): Buffer; + getPrivateKey(encoding: BinaryToTextEncoding): string; + /** + * The `format` argument specifies point encoding and can be `'compressed'` or`'uncompressed'`. If `format` is not specified the point will be returned in`'uncompressed'` format. + * + * If `encoding` is specified, a string is returned; otherwise a `Buffer` is + * returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @param [format='uncompressed'] + * @return The EC Diffie-Hellman public key in the specified `encoding` and `format`. + */ + getPublicKey(encoding?: null, format?: ECDHKeyFormat): Buffer; + getPublicKey(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; + /** + * Sets the EC Diffie-Hellman private key. + * If `encoding` is provided, `privateKey` is expected + * to be a string; otherwise `privateKey` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * If `privateKey` is not valid for the curve specified when the `ECDH` object was + * created, an error is thrown. Upon setting the private key, the associated + * public point (key) is also generated and set in the `ECDH` object. + * @since v0.11.14 + * @param encoding The `encoding` of the `privateKey` string. + */ + setPrivateKey(privateKey: NodeJS.ArrayBufferView): void; + setPrivateKey(privateKey: string, encoding: BinaryToTextEncoding): void; + } + /** + * Creates an Elliptic Curve Diffie-Hellman (`ECDH`) key exchange object using a + * predefined curve specified by the `curveName` string. Use {@link getCurves} to obtain a list of available curve names. On recent + * OpenSSL releases, `openssl ecparam -list_curves` will also display the name + * and description of each available elliptic curve. + * @since v0.11.14 + */ + function createECDH(curveName: string): ECDH; + /** + * This function compares the underlying bytes that represent the given`ArrayBuffer`, `TypedArray`, or `DataView` instances using a constant-time + * algorithm. + * + * This function does not leak timing information that + * would allow an attacker to guess one of the values. This is suitable for + * comparing HMAC digests or secret values like authentication cookies or [capability urls](https://www.w3.org/TR/capability-urls/). + * + * `a` and `b` must both be `Buffer`s, `TypedArray`s, or `DataView`s, and they + * must have the same byte length. An error is thrown if `a` and `b` have + * different byte lengths. + * + * If at least one of `a` and `b` is a `TypedArray` with more than one byte per + * entry, such as `Uint16Array`, the result will be computed using the platform + * byte order. 
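+     *
+     * A typical use is comparing two HMAC digests of the same length (a sketch; `secret`,
+     * `message`, and `receivedMac` are placeholders for application-supplied values, with
+     * `receivedMac` assumed to be a `Buffer`):
+     *
+     * ```js
+     * const { createHmac, timingSafeEqual } = await import('node:crypto');
+     *
+     * const expectedMac = createHmac('sha256', secret).update(message).digest();
+     * const macsMatch = expectedMac.length === receivedMac.length &&
+     *   timingSafeEqual(expectedMac, receivedMac);
+     * ```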
+     *
+     * **When both of the inputs are `Float32Array`s or `Float64Array`s, this function might return unexpected results due to IEEE 754**
+     * **encoding of floating-point numbers. In particular, neither `x === y` nor `Object.is(x, y)` implies that the byte representations of two floating-point**
+     * **numbers `x` and `y` are equal.**
+     *
+     * Use of `crypto.timingSafeEqual` does not guarantee that the _surrounding_ code
+     * is timing-safe. Care should be taken to ensure that the surrounding code does
+     * not introduce timing vulnerabilities.
+     * @since v6.6.0
+     */
+    function timingSafeEqual(a: NodeJS.ArrayBufferView, b: NodeJS.ArrayBufferView): boolean;
+    type KeyType = "rsa" | "rsa-pss" | "dsa" | "ec" | "ed25519" | "ed448" | "x25519" | "x448";
+    type KeyFormat = "pem" | "der" | "jwk";
+    interface BasePrivateKeyEncodingOptions<T extends KeyFormat> {
+        format: T;
+        cipher?: string | undefined;
+        passphrase?: string | undefined;
+    }
+    interface KeyPairKeyObjectResult {
+        publicKey: KeyObject;
+        privateKey: KeyObject;
+    }
+    interface ED25519KeyPairKeyObjectOptions {}
+    interface ED448KeyPairKeyObjectOptions {}
+    interface X25519KeyPairKeyObjectOptions {}
+    interface X448KeyPairKeyObjectOptions {}
+    interface ECKeyPairKeyObjectOptions {
+        /**
+         * Name of the curve to use
+         */
+        namedCurve: string;
+        /**
+         * Must be `'named'` or `'explicit'`. Default: `'named'`.
+         */
+        paramEncoding?: "explicit" | "named" | undefined;
+    }
+    interface RSAKeyPairKeyObjectOptions {
+        /**
+         * Key size in bits
+         */
+        modulusLength: number;
+        /**
+         * Public exponent
+         * @default 0x10001
+         */
+        publicExponent?: number | undefined;
+    }
+    interface RSAPSSKeyPairKeyObjectOptions {
+        /**
+         * Key size in bits
+         */
+        modulusLength: number;
+        /**
+         * Public exponent
+         * @default 0x10001
+         */
+        publicExponent?: number | undefined;
+        /**
+         * Name of the message digest
+         */
+        hashAlgorithm?: string;
+        /**
+         * Name of the message digest used by MGF1
+         */
+        mgf1HashAlgorithm?: string;
+        /**
+         * Minimal salt length in bytes
+         */
+        saltLength?: string;
+    }
+    interface DSAKeyPairKeyObjectOptions {
+        /**
+         * Key size in bits
+         */
+        modulusLength: number;
+        /**
+         * Size of q in bits
+         */
+        divisorLength: number;
+    }
+    interface RSAKeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        /**
+         * Key size in bits
+         */
+        modulusLength: number;
+        /**
+         * Public exponent
+         * @default 0x10001
+         */
+        publicExponent?: number | undefined;
+        publicKeyEncoding: {
+            type: "pkcs1" | "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs1" | "pkcs8";
+        };
+    }
+    interface RSAPSSKeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        /**
+         * Key size in bits
+         */
+        modulusLength: number;
+        /**
+         * Public exponent
+         * @default 0x10001
+         */
+        publicExponent?: number | undefined;
+        /**
+         * Name of the message digest
+         */
+        hashAlgorithm?: string;
+        /**
+         * Name of the message digest used by MGF1
+         */
+        mgf1HashAlgorithm?: string;
+        /**
+         * Minimal salt length in bytes
+         */
+        saltLength?: string;
+        publicKeyEncoding: {
+            type: "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs8";
+        };
+    }
+    interface DSAKeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        /**
+         * Key size in bits
+         */
+        modulusLength: number;
+        /**
+         * Size of q in bits
+         */
+        divisorLength: number;
+        publicKeyEncoding: {
+            type: "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs8";
+        };
+    }
+    interface ECKeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> extends ECKeyPairKeyObjectOptions {
+        publicKeyEncoding: {
+            type: "pkcs1" | "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "sec1" | "pkcs8";
+        };
+    }
+    interface ED25519KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        publicKeyEncoding: {
+            type: "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs8";
+        };
+    }
+    interface ED448KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        publicKeyEncoding: {
+            type: "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs8";
+        };
+    }
+    interface X25519KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        publicKeyEncoding: {
+            type: "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs8";
+        };
+    }
+    interface X448KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        publicKeyEncoding: {
+            type: "spki";
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: "pkcs8";
+        };
+    }
+    interface KeyPairSyncResult<T1 extends string | Buffer, T2 extends string | Buffer> {
+        publicKey: T1;
+        privateKey: T2;
+    }
+    /**
+     * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC,
+     * Ed25519, Ed448, X25519, X448, and DH are currently supported.
+     *
+     * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function
+     * behaves as if `keyObject.export()` had been called on its result. Otherwise,
+     * the respective part of the key is returned as a `KeyObject`.
+     *
+     * When encoding public keys, it is recommended to use `'spki'`. When encoding
+     * private keys, it is recommended to use `'pkcs8'` with a strong passphrase,
+     * and to keep the passphrase confidential.
+     *
+     * ```js
+     * const {
+     *   generateKeyPairSync,
+     * } = await import('node:crypto');
+     *
+     * const {
+     *   publicKey,
+     *   privateKey,
+     * } = generateKeyPairSync('rsa', {
+     *   modulusLength: 4096,
+     *   publicKeyEncoding: {
+     *     type: 'spki',
+     *     format: 'pem',
+     *   },
+     *   privateKeyEncoding: {
+     *     type: 'pkcs8',
+     *     format: 'pem',
+     *     cipher: 'aes-256-cbc',
+     *     passphrase: 'top secret',
+     *   },
+     * });
+     * ```
+     *
+     * The return value `{ publicKey, privateKey }` represents the generated key pair.
+     * When PEM encoding was selected, the respective key will be a string, otherwise
+     * it will be a buffer containing the data encoded as DER.
+     * @since v10.12.0
+     * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`.
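+     *
+     * When no `publicKeyEncoding` or `privateKeyEncoding` is given, both parts of the key
+     * are returned as `KeyObject`s instead, as in this sketch ('prime256v1' is just one
+     * possible curve name; see {@link getCurves}):
+     *
+     * ```js
+     * const { publicKey, privateKey } = generateKeyPairSync('ec', {
+     *   namedCurve: 'prime256v1',
+     * });
+     * console.log(publicKey.asymmetricKeyType); // 'ec'
+     * ```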
+ */ + function generateKeyPairSync( + type: "rsa", + options: RSAKeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa", + options: RSAKeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa", + options: RSAKeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa", + options: RSAKeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "rsa", options: RSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "rsa-pss", options: RSAPSSKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "dsa", + options: DSAKeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "dsa", + options: DSAKeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "dsa", + options: DSAKeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "dsa", + options: DSAKeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "dsa", options: DSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "ec", + options: ECKeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ec", + options: ECKeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ec", + options: ECKeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ec", + options: ECKeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "ec", options: ECKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "ed25519", options?: ED25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "ed448", + options: ED448KeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed448", + options: ED448KeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed448", + options: ED448KeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "ed448", + options: ED448KeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "ed448", options?: ED448KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "x25519", + options: 
X25519KeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x25519", + options: X25519KeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x25519", + options: X25519KeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x25519", + options: X25519KeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "x25519", options?: X25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync( + type: "x448", + options: X448KeyPairOptions<"pem", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x448", + options: X448KeyPairOptions<"pem", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x448", + options: X448KeyPairOptions<"der", "pem">, + ): KeyPairSyncResult; + function generateKeyPairSync( + type: "x448", + options: X448KeyPairOptions<"der", "der">, + ): KeyPairSyncResult; + function generateKeyPairSync(type: "x448", options?: X448KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + /** + * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC, + * Ed25519, Ed448, X25519, X448, and DH are currently supported. + * + * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function + * behaves as if `keyObject.export()` had been called on its result. Otherwise, + * the respective part of the key is returned as a `KeyObject`. + * + * It is recommended to encode public keys as `'spki'` and private keys as`'pkcs8'` with encryption for long-term storage: + * + * ```js + * const { + * generateKeyPair, + * } = await import('node:crypto'); + * + * generateKeyPair('rsa', { + * modulusLength: 4096, + * publicKeyEncoding: { + * type: 'spki', + * format: 'pem', + * }, + * privateKeyEncoding: { + * type: 'pkcs8', + * format: 'pem', + * cipher: 'aes-256-cbc', + * passphrase: 'top secret', + * }, + * }, (err, publicKey, privateKey) => { + * // Handle errors and use the generated key pair. + * }); + * ``` + * + * On completion, `callback` will be called with `err` set to `undefined` and`publicKey` / `privateKey` representing the generated key pair. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `publicKey` and `privateKey` properties. + * @since v10.12.0 + * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`. 
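+     *
+     * A sketch of the promisified form described above:
+     *
+     * ```js
+     * const { promisify } = await import('node:util');
+     * const { generateKeyPair } = await import('node:crypto');
+     *
+     * const generateKeyPairAsync = promisify(generateKeyPair);
+     * const { publicKey, privateKey } = await generateKeyPairAsync('rsa', {
+     *   modulusLength: 2048,
+     * });
+     * // With no encodings specified, publicKey and privateKey are KeyObjects.
+     * ```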
+ */ + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa", + options: RSAKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "rsa-pss", + options: RSAPSSKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "dsa", + options: DSAKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ec", + options: ECKeyPairKeyObjectOptions, + callback: (err: Error | null, publicKey: KeyObject, 
privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed25519", + options: ED25519KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "ed448", + options: ED448KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "x25519", + options: X25519KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"pem", "pem">, + callback: (err: Error | null, publicKey: string, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"pem", "der">, + callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"der", "pem">, + callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void, + ): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairOptions<"der", "der">, + callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void, + 
): void; + function generateKeyPair( + type: "x448", + options: X448KeyPairKeyObjectOptions | undefined, + callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void, + ): void; + namespace generateKeyPair { + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "rsa", + options: RSAKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "rsa", options: RSAKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__( + type: "rsa-pss", + options: RSAPSSKeyPairKeyObjectOptions, + ): Promise; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "dsa", + options: DSAKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "dsa", options: DSAKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "ec", + options: ECKeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "ec", options: ECKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "ed25519", + options: ED25519KeyPairOptions<"der", "der">, 
+ ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__( + type: "ed25519", + options?: ED25519KeyPairKeyObjectOptions, + ): Promise; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "ed448", + options: ED448KeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "ed448", options?: ED448KeyPairKeyObjectOptions): Promise; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "x25519", + options: X25519KeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__( + type: "x25519", + options?: X25519KeyPairKeyObjectOptions, + ): Promise; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"pem", "pem">, + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"pem", "der">, + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"der", "pem">, + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: "x448", + options: X448KeyPairOptions<"der", "der">, + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: "x448", options?: X448KeyPairKeyObjectOptions): Promise; + } + /** + * Calculates and returns the signature for `data` using the given private key and + * algorithm. If `algorithm` is `null` or `undefined`, then the algorithm is + * dependent upon the key type (especially Ed25519 and Ed448). + * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been + * passed to {@link createPrivateKey}. If it is an object, the following + * additional properties can be passed: + * + * If the `callback` function is provided this function uses libuv's threadpool. + * @since v12.0.0 + */ + function sign( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput, + ): Buffer; + function sign( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput, + callback: (error: Error | null, data: Buffer) => void, + ): void; + /** + * Verifies the given signature for `data` using the given key and algorithm. If`algorithm` is `null` or `undefined`, then the algorithm is dependent upon the + * key type (especially Ed25519 and Ed448). + * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been + * passed to {@link createPublicKey}. 
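+     *
+     * As a brief sketch (assuming `publicKey` and `privateKey` are an Ed25519 key pair,
+     * e.g. from `generateKeyPairSync('ed25519')`, so `algorithm` may be left `undefined`):
+     *
+     * ```js
+     * import { Buffer } from 'node:buffer';
+     * const { sign, verify } = await import('node:crypto');
+     *
+     * const data = Buffer.from('message to authenticate');
+     * const signature = sign(undefined, data, privateKey);
+     * console.log(verify(undefined, data, publicKey, signature)); // true
+     * ```
+     *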
If it is an object, the following + * additional properties can be passed: + * + * The `signature` argument is the previously calculated signature for the `data`. + * + * Because public keys can be derived from private keys, a private key or a public + * key may be passed for `key`. + * + * If the `callback` function is provided this function uses libuv's threadpool. + * @since v12.0.0 + */ + function verify( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: NodeJS.ArrayBufferView, + ): boolean; + function verify( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput | VerifyJsonWebKeyInput, + signature: NodeJS.ArrayBufferView, + callback: (error: Error | null, result: boolean) => void, + ): void; + /** + * Computes the Diffie-Hellman secret based on a `privateKey` and a `publicKey`. + * Both keys must have the same `asymmetricKeyType`, which must be one of `'dh'`(for Diffie-Hellman), `'ec'` (for ECDH), `'x448'`, or `'x25519'` (for ECDH-ES). + * @since v13.9.0, v12.17.0 + */ + function diffieHellman(options: { privateKey: KeyObject; publicKey: KeyObject }): Buffer; + type CipherMode = "cbc" | "ccm" | "cfb" | "ctr" | "ecb" | "gcm" | "ocb" | "ofb" | "stream" | "wrap" | "xts"; + interface CipherInfoOptions { + /** + * A test key length. + */ + keyLength?: number | undefined; + /** + * A test IV length. + */ + ivLength?: number | undefined; + } + interface CipherInfo { + /** + * The name of the cipher. + */ + name: string; + /** + * The nid of the cipher. + */ + nid: number; + /** + * The block size of the cipher in bytes. + * This property is omitted when mode is 'stream'. + */ + blockSize?: number | undefined; + /** + * The expected or default initialization vector length in bytes. + * This property is omitted if the cipher does not use an initialization vector. + */ + ivLength?: number | undefined; + /** + * The expected or default key length in bytes. + */ + keyLength: number; + /** + * The cipher mode. + */ + mode: CipherMode; + } + /** + * Returns information about a given cipher. + * + * Some ciphers accept variable length keys and initialization vectors. By default, + * the `crypto.getCipherInfo()` method will return the default values for these + * ciphers. To test if a given key length or iv length is acceptable for given + * cipher, use the `keyLength` and `ivLength` options. If the given values are + * unacceptable, `undefined` will be returned. + * @since v15.0.0 + * @param nameOrNid The name or nid of the cipher to query. + */ + function getCipherInfo(nameOrNid: string | number, options?: CipherInfoOptions): CipherInfo | undefined; + /** + * HKDF is a simple key derivation function defined in RFC 5869\. The given `ikm`,`salt` and `info` are used with the `digest` to derive a key of `keylen` bytes. + * + * The supplied `callback` function is called with two arguments: `err` and`derivedKey`. If an errors occurs while deriving the key, `err` will be set; + * otherwise `err` will be `null`. The successfully generated `derivedKey` will + * be passed to the callback as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). An error will be thrown if any + * of the input arguments specify invalid values or types. 
+ * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * hkdf, + * } = await import('node:crypto'); + * + * hkdf('sha512', 'key', 'salt', 'info', 64, (err, derivedKey) => { + * if (err) throw err; + * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' + * }); + * ``` + * @since v15.0.0 + * @param digest The digest algorithm to use. + * @param ikm The input keying material. Must be provided but can be zero-length. + * @param salt The salt value. Must be provided but can be zero-length. + * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes. + * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512` + * generates 64-byte hashes, making the maximum HKDF output 16320 bytes). + */ + function hkdf( + digest: string, + irm: BinaryLike | KeyObject, + salt: BinaryLike, + info: BinaryLike, + keylen: number, + callback: (err: Error | null, derivedKey: ArrayBuffer) => void, + ): void; + /** + * Provides a synchronous HKDF key derivation function as defined in RFC 5869\. The + * given `ikm`, `salt` and `info` are used with the `digest` to derive a key of`keylen` bytes. + * + * The successfully generated `derivedKey` will be returned as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). + * + * An error will be thrown if any of the input arguments specify invalid values or + * types, or if the derived key cannot be generated. + * + * ```js + * import { Buffer } from 'node:buffer'; + * const { + * hkdfSync, + * } = await import('node:crypto'); + * + * const derivedKey = hkdfSync('sha512', 'key', 'salt', 'info', 64); + * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' + * ``` + * @since v15.0.0 + * @param digest The digest algorithm to use. + * @param ikm The input keying material. Must be provided but can be zero-length. + * @param salt The salt value. Must be provided but can be zero-length. + * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes. + * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512` + * generates 64-byte hashes, making the maximum HKDF output 16320 bytes). + */ + function hkdfSync( + digest: string, + ikm: BinaryLike | KeyObject, + salt: BinaryLike, + info: BinaryLike, + keylen: number, + ): ArrayBuffer; + interface SecureHeapUsage { + /** + * The total allocated secure heap size as specified using the `--secure-heap=n` command-line flag. + */ + total: number; + /** + * The minimum allocation from the secure heap as specified using the `--secure-heap-min` command-line flag. + */ + min: number; + /** + * The total number of bytes currently allocated from the secure heap. + */ + used: number; + /** + * The calculated ratio of `used` to `total` allocated bytes. + */ + utilization: number; + } + /** + * @since v15.6.0 + */ + function secureHeapUsed(): SecureHeapUsage; + interface RandomUUIDOptions { + /** + * By default, to improve performance, + * Node.js will pre-emptively generate and persistently cache enough + * random data to generate up to 128 random UUIDs. To generate a UUID + * without using the cache, set `disableEntropyCache` to `true`. 
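+         *
+         * A brief usage sketch (illustrative, not from the upstream Node.js docs):
+         *
+         * ```js
+         * const { randomUUID } = await import('node:crypto');
+         *
+         * console.log(randomUUID()); // e.g. '36b8f84d-df4e-4d49-b662-bcde71a8764f'
+         * console.log(randomUUID({ disableEntropyCache: true })); // bypasses the pre-generated cache
+         * ```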
+ * + * @default `false` + */ + disableEntropyCache?: boolean | undefined; + } + type UUID = `${string}-${string}-${string}-${string}-${string}`; + /** + * Generates a random [RFC 4122](https://www.rfc-editor.org/rfc/rfc4122.txt) version 4 UUID. The UUID is generated using a + * cryptographic pseudorandom number generator. + * @since v15.6.0, v14.17.0 + */ + function randomUUID(options?: RandomUUIDOptions): UUID; + interface X509CheckOptions { + /** + * @default 'always' + */ + subject?: "always" | "default" | "never"; + /** + * @default true + */ + wildcards?: boolean; + /** + * @default true + */ + partialWildcards?: boolean; + /** + * @default false + */ + multiLabelWildcards?: boolean; + /** + * @default false + */ + singleLabelSubdomains?: boolean; + } + /** + * Encapsulates an X509 certificate and provides read-only access to + * its information. + * + * ```js + * const { X509Certificate } = await import('node:crypto'); + * + * const x509 = new X509Certificate('{... pem encoded cert ...}'); + * + * console.log(x509.subject); + * ``` + * @since v15.6.0 + */ + class X509Certificate { + /** + * Will be \`true\` if this is a Certificate Authority (CA) certificate. + * @since v15.6.0 + */ + readonly ca: boolean; + /** + * The SHA-1 fingerprint of this certificate. + * + * Because SHA-1 is cryptographically broken and because the security of SHA-1 is + * significantly worse than that of algorithms that are commonly used to sign + * certificates, consider using `x509.fingerprint256` instead. + * @since v15.6.0 + */ + readonly fingerprint: string; + /** + * The SHA-256 fingerprint of this certificate. + * @since v15.6.0 + */ + readonly fingerprint256: string; + /** + * The SHA-512 fingerprint of this certificate. + * + * Because computing the SHA-256 fingerprint is usually faster and because it is + * only half the size of the SHA-512 fingerprint, `x509.fingerprint256` may be + * a better choice. While SHA-512 presumably provides a higher level of security in + * general, the security of SHA-256 matches that of most algorithms that are + * commonly used to sign certificates. + * @since v17.2.0, v16.14.0 + */ + readonly fingerprint512: string; + /** + * The complete subject of this certificate. + * @since v15.6.0 + */ + readonly subject: string; + /** + * The subject alternative name specified for this certificate. + * + * This is a comma-separated list of subject alternative names. Each entry begins + * with a string identifying the kind of the subject alternative name followed by + * a colon and the value associated with the entry. + * + * Earlier versions of Node.js incorrectly assumed that it is safe to split this + * property at the two-character sequence `', '` (see [CVE-2021-44532](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44532)). However, + * both malicious and legitimate certificates can contain subject alternative names + * that include this sequence when represented as a string. + * + * After the prefix denoting the type of the entry, the remainder of each entry + * might be enclosed in quotes to indicate that the value is a JSON string literal. + * For backward compatibility, Node.js only uses JSON string literals within this + * property when necessary to avoid ambiguity. Third-party code should be prepared + * to handle both possible entry formats. + * @since v15.6.0 + */ + readonly subjectAltName: string | undefined; + /** + * A textual representation of the certificate's authority information access + * extension. 
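+         *
+         * An illustrative sketch, where `pem` is assumed to hold PEM-encoded certificate data
+         * (the sample output below is only indicative):
+         *
+         * ```js
+         * const { X509Certificate } = await import('node:crypto');
+         *
+         * const x509 = new X509Certificate(pem); // `pem` is a placeholder
+         * console.log(x509.infoAccess);
+         * // For example (output depends on the certificate):
+         * // OCSP - URI:http://ocsp.example.com
+         * // CA Issuers - URI:http://ca.example.com/ca.cert
+         * ```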
+ * + * This is a line feed separated list of access descriptions. Each line begins with + * the access method and the kind of the access location, followed by a colon and + * the value associated with the access location. + * + * After the prefix denoting the access method and the kind of the access location, + * the remainder of each line might be enclosed in quotes to indicate that the + * value is a JSON string literal. For backward compatibility, Node.js only uses + * JSON string literals within this property when necessary to avoid ambiguity. + * Third-party code should be prepared to handle both possible entry formats. + * @since v15.6.0 + */ + readonly infoAccess: string | undefined; + /** + * An array detailing the key usages for this certificate. + * @since v15.6.0 + */ + readonly keyUsage: string[]; + /** + * The issuer identification included in this certificate. + * @since v15.6.0 + */ + readonly issuer: string; + /** + * The issuer certificate or `undefined` if the issuer certificate is not + * available. + * @since v15.9.0 + */ + readonly issuerCertificate?: X509Certificate | undefined; + /** + * The public key `KeyObject` for this certificate. + * @since v15.6.0 + */ + readonly publicKey: KeyObject; + /** + * A `Buffer` containing the DER encoding of this certificate. + * @since v15.6.0 + */ + readonly raw: Buffer; + /** + * The serial number of this certificate. + * + * Serial numbers are assigned by certificate authorities and do not uniquely + * identify certificates. Consider using `x509.fingerprint256` as a unique + * identifier instead. + * @since v15.6.0 + */ + readonly serialNumber: string; + /** + * The date/time from which this certificate is considered valid. + * @since v15.6.0 + */ + readonly validFrom: string; + /** + * The date/time until which this certificate is considered valid. + * @since v15.6.0 + */ + readonly validTo: string; + constructor(buffer: BinaryLike); + /** + * Checks whether the certificate matches the given email address. + * + * If the `'subject'` option is undefined or set to `'default'`, the certificate + * subject is only considered if the subject alternative name extension either does + * not exist or does not contain any email addresses. + * + * If the `'subject'` option is set to `'always'` and if the subject alternative + * name extension either does not exist or does not contain a matching email + * address, the certificate subject is considered. + * + * If the `'subject'` option is set to `'never'`, the certificate subject is never + * considered, even if the certificate contains no subject alternative names. + * @since v15.6.0 + * @return Returns `email` if the certificate matches, `undefined` if it does not. + */ + checkEmail(email: string, options?: Pick): string | undefined; + /** + * Checks whether the certificate matches the given host name. + * + * If the certificate matches the given host name, the matching subject name is + * returned. The returned name might be an exact match (e.g., `foo.example.com`) + * or it might contain wildcards (e.g., `*.example.com`). Because host name + * comparisons are case-insensitive, the returned subject name might also differ + * from the given `name` in capitalization. + * + * If the `'subject'` option is undefined or set to `'default'`, the certificate + * subject is only considered if the subject alternative name extension either does + * not exist or does not contain any DNS names. This behavior is consistent with [RFC 2818](https://www.rfc-editor.org/rfc/rfc2818.txt) ("HTTP Over TLS"). 
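+         *
+         * For example, assuming `pem` holds a PEM-encoded certificate issued for `www.example.com`
+         * (an illustrative sketch):
+         *
+         * ```js
+         * const { X509Certificate } = await import('node:crypto');
+         *
+         * const x509 = new X509Certificate(pem); // `pem` is a placeholder
+         * console.log(x509.checkHost('www.example.com')); // e.g. 'www.example.com' or '*.example.com'
+         * console.log(x509.checkHost('other.example.net')); // undefined when nothing matches
+         * ```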
+ * + * If the `'subject'` option is set to `'always'` and if the subject alternative + * name extension either does not exist or does not contain a matching DNS name, + * the certificate subject is considered. + * + * If the `'subject'` option is set to `'never'`, the certificate subject is never + * considered, even if the certificate contains no subject alternative names. + * @since v15.6.0 + * @return Returns a subject name that matches `name`, or `undefined` if no subject name matches `name`. + */ + checkHost(name: string, options?: X509CheckOptions): string | undefined; + /** + * Checks whether the certificate matches the given IP address (IPv4 or IPv6). + * + * Only [RFC 5280](https://www.rfc-editor.org/rfc/rfc5280.txt) `iPAddress` subject alternative names are considered, and they + * must match the given `ip` address exactly. Other subject alternative names as + * well as the subject field of the certificate are ignored. + * @since v15.6.0 + * @return Returns `ip` if the certificate matches, `undefined` if it does not. + */ + checkIP(ip: string): string | undefined; + /** + * Checks whether this certificate was issued by the given `otherCert`. + * @since v15.6.0 + */ + checkIssued(otherCert: X509Certificate): boolean; + /** + * Checks whether the public key for this certificate is consistent with + * the given private key. + * @since v15.6.0 + * @param privateKey A private key. + */ + checkPrivateKey(privateKey: KeyObject): boolean; + /** + * There is no standard JSON encoding for X509 certificates. The`toJSON()` method returns a string containing the PEM encoded + * certificate. + * @since v15.6.0 + */ + toJSON(): string; + /** + * Returns information about this certificate using the legacy `certificate object` encoding. + * @since v15.6.0 + */ + toLegacyObject(): PeerCertificate; + /** + * Returns the PEM-encoded certificate. + * @since v15.6.0 + */ + toString(): string; + /** + * Verifies that this certificate was signed by the given public key. + * Does not perform any other validation checks on the certificate. + * @since v15.6.0 + * @param publicKey A public key. + */ + verify(publicKey: KeyObject): boolean; + } + type LargeNumberLike = NodeJS.ArrayBufferView | SharedArrayBuffer | ArrayBuffer | bigint; + interface GeneratePrimeOptions { + add?: LargeNumberLike | undefined; + rem?: LargeNumberLike | undefined; + /** + * @default false + */ + safe?: boolean | undefined; + bigint?: boolean | undefined; + } + interface GeneratePrimeOptionsBigInt extends GeneratePrimeOptions { + bigint: true; + } + interface GeneratePrimeOptionsArrayBuffer extends GeneratePrimeOptions { + bigint?: false | undefined; + } + /** + * Generates a pseudorandom prime of `size` bits. + * + * If `options.safe` is `true`, the prime will be a safe prime -- that is,`(prime - 1) / 2` will also be a prime. + * + * The `options.add` and `options.rem` parameters can be used to enforce additional + * requirements, e.g., for Diffie-Hellman: + * + * * If `options.add` and `options.rem` are both set, the prime will satisfy the + * condition that `prime % add = rem`. + * * If only `options.add` is set and `options.safe` is not `true`, the prime will + * satisfy the condition that `prime % add = 1`. + * * If only `options.add` is set and `options.safe` is set to `true`, the prime + * will instead satisfy the condition that `prime % add = 3`. This is necessary + * because `prime % add = 1` for `options.add > 2` would contradict the condition + * enforced by `options.safe`. 
+ * * `options.rem` is ignored if `options.add` is not given. + * + * Both `options.add` and `options.rem` must be encoded as big-endian sequences + * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or`DataView`. + * + * By default, the prime is encoded as a big-endian sequence of octets + * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided. + * @since v15.8.0 + * @param size The size (in bits) of the prime to generate. + */ + function generatePrime(size: number, callback: (err: Error | null, prime: ArrayBuffer) => void): void; + function generatePrime( + size: number, + options: GeneratePrimeOptionsBigInt, + callback: (err: Error | null, prime: bigint) => void, + ): void; + function generatePrime( + size: number, + options: GeneratePrimeOptionsArrayBuffer, + callback: (err: Error | null, prime: ArrayBuffer) => void, + ): void; + function generatePrime( + size: number, + options: GeneratePrimeOptions, + callback: (err: Error | null, prime: ArrayBuffer | bigint) => void, + ): void; + /** + * Generates a pseudorandom prime of `size` bits. + * + * If `options.safe` is `true`, the prime will be a safe prime -- that is,`(prime - 1) / 2` will also be a prime. + * + * The `options.add` and `options.rem` parameters can be used to enforce additional + * requirements, e.g., for Diffie-Hellman: + * + * * If `options.add` and `options.rem` are both set, the prime will satisfy the + * condition that `prime % add = rem`. + * * If only `options.add` is set and `options.safe` is not `true`, the prime will + * satisfy the condition that `prime % add = 1`. + * * If only `options.add` is set and `options.safe` is set to `true`, the prime + * will instead satisfy the condition that `prime % add = 3`. This is necessary + * because `prime % add = 1` for `options.add > 2` would contradict the condition + * enforced by `options.safe`. + * * `options.rem` is ignored if `options.add` is not given. + * + * Both `options.add` and `options.rem` must be encoded as big-endian sequences + * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or`DataView`. + * + * By default, the prime is encoded as a big-endian sequence of octets + * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided. + * @since v15.8.0 + * @param size The size (in bits) of the prime to generate. + */ + function generatePrimeSync(size: number): ArrayBuffer; + function generatePrimeSync(size: number, options: GeneratePrimeOptionsBigInt): bigint; + function generatePrimeSync(size: number, options: GeneratePrimeOptionsArrayBuffer): ArrayBuffer; + function generatePrimeSync(size: number, options: GeneratePrimeOptions): ArrayBuffer | bigint; + interface CheckPrimeOptions { + /** + * The number of Miller-Rabin probabilistic primality iterations to perform. + * When the value is 0 (zero), a number of checks is used that yields a false positive rate of at most `2**-64` for random input. + * Care must be used when selecting a number of checks. + * Refer to the OpenSSL documentation for the BN_is_prime_ex function nchecks options for more details. 
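+         *
+         * A minimal sketch combining `generatePrimeSync()` and `checkPrimeSync()` (illustrative):
+         *
+         * ```js
+         * const { generatePrimeSync, checkPrimeSync } = await import('node:crypto');
+         *
+         * const prime = generatePrimeSync(256, { bigint: true });
+         * console.log(checkPrimeSync(prime, { checks: 10 })); // true
+         * console.log(checkPrimeSync(4n)); // false
+         * ```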
+ * + * @default 0 + */ + checks?: number | undefined; + } + /** + * Checks the primality of the `candidate`. + * @since v15.8.0 + * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length. + */ + function checkPrime(value: LargeNumberLike, callback: (err: Error | null, result: boolean) => void): void; + function checkPrime( + value: LargeNumberLike, + options: CheckPrimeOptions, + callback: (err: Error | null, result: boolean) => void, + ): void; + /** + * Checks the primality of the `candidate`. + * @since v15.8.0 + * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length. + * @return `true` if the candidate is a prime with an error probability less than `0.25 ** options.checks`. + */ + function checkPrimeSync(candidate: LargeNumberLike, options?: CheckPrimeOptions): boolean; + /** + * Load and set the `engine` for some or all OpenSSL functions (selected by flags). + * + * `engine` could be either an id or a path to the engine's shared library. + * + * The optional `flags` argument uses `ENGINE_METHOD_ALL` by default. The `flags`is a bit field taking one of or a mix of the following flags (defined in`crypto.constants`): + * + * * `crypto.constants.ENGINE_METHOD_RSA` + * * `crypto.constants.ENGINE_METHOD_DSA` + * * `crypto.constants.ENGINE_METHOD_DH` + * * `crypto.constants.ENGINE_METHOD_RAND` + * * `crypto.constants.ENGINE_METHOD_EC` + * * `crypto.constants.ENGINE_METHOD_CIPHERS` + * * `crypto.constants.ENGINE_METHOD_DIGESTS` + * * `crypto.constants.ENGINE_METHOD_PKEY_METHS` + * * `crypto.constants.ENGINE_METHOD_PKEY_ASN1_METHS` + * * `crypto.constants.ENGINE_METHOD_ALL` + * * `crypto.constants.ENGINE_METHOD_NONE` + * @since v0.11.11 + * @param flags + */ + function setEngine(engine: string, flags?: number): void; + /** + * A convenient alias for {@link webcrypto.getRandomValues}. This + * implementation is not compliant with the Web Crypto spec, to write + * web-compatible code use {@link webcrypto.getRandomValues} instead. + * @since v17.4.0 + * @return Returns `typedArray`. + */ + function getRandomValues(typedArray: T): T; + /** + * A convenient alias for `crypto.webcrypto.subtle`. + * @since v17.4.0 + */ + const subtle: webcrypto.SubtleCrypto; + /** + * An implementation of the Web Crypto API standard. + * + * See the {@link https://nodejs.org/docs/latest/api/webcrypto.html Web Crypto API documentation} for details. 
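+     *
+     * A minimal usage sketch (illustrative):
+     *
+     * ```js
+     * import { Buffer } from 'node:buffer';
+     * const { webcrypto } = await import('node:crypto');
+     *
+     * const digest = await webcrypto.subtle.digest('SHA-256', new TextEncoder().encode('hello'));
+     * console.log(Buffer.from(digest).toString('hex'));
+     * ```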
+ * @since v15.0.0 + */ + const webcrypto: webcrypto.Crypto; + namespace webcrypto { + type BufferSource = ArrayBufferView | ArrayBuffer; + type KeyFormat = "jwk" | "pkcs8" | "raw" | "spki"; + type KeyType = "private" | "public" | "secret"; + type KeyUsage = + | "decrypt" + | "deriveBits" + | "deriveKey" + | "encrypt" + | "sign" + | "unwrapKey" + | "verify" + | "wrapKey"; + type AlgorithmIdentifier = Algorithm | string; + type HashAlgorithmIdentifier = AlgorithmIdentifier; + type NamedCurve = string; + type BigInteger = Uint8Array; + interface AesCbcParams extends Algorithm { + iv: BufferSource; + } + interface AesCtrParams extends Algorithm { + counter: BufferSource; + length: number; + } + interface AesDerivedKeyParams extends Algorithm { + length: number; + } + interface AesGcmParams extends Algorithm { + additionalData?: BufferSource; + iv: BufferSource; + tagLength?: number; + } + interface AesKeyAlgorithm extends KeyAlgorithm { + length: number; + } + interface AesKeyGenParams extends Algorithm { + length: number; + } + interface Algorithm { + name: string; + } + interface EcKeyAlgorithm extends KeyAlgorithm { + namedCurve: NamedCurve; + } + interface EcKeyGenParams extends Algorithm { + namedCurve: NamedCurve; + } + interface EcKeyImportParams extends Algorithm { + namedCurve: NamedCurve; + } + interface EcdhKeyDeriveParams extends Algorithm { + public: CryptoKey; + } + interface EcdsaParams extends Algorithm { + hash: HashAlgorithmIdentifier; + } + interface Ed448Params extends Algorithm { + context?: BufferSource; + } + interface HkdfParams extends Algorithm { + hash: HashAlgorithmIdentifier; + info: BufferSource; + salt: BufferSource; + } + interface HmacImportParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; + } + interface HmacKeyAlgorithm extends KeyAlgorithm { + hash: KeyAlgorithm; + length: number; + } + interface HmacKeyGenParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; + } + interface JsonWebKey { + alg?: string; + crv?: string; + d?: string; + dp?: string; + dq?: string; + e?: string; + ext?: boolean; + k?: string; + key_ops?: string[]; + kty?: string; + n?: string; + oth?: RsaOtherPrimesInfo[]; + p?: string; + q?: string; + qi?: string; + use?: string; + x?: string; + y?: string; + } + interface KeyAlgorithm { + name: string; + } + interface Pbkdf2Params extends Algorithm { + hash: HashAlgorithmIdentifier; + iterations: number; + salt: BufferSource; + } + interface RsaHashedImportParams extends Algorithm { + hash: HashAlgorithmIdentifier; + } + interface RsaHashedKeyAlgorithm extends RsaKeyAlgorithm { + hash: KeyAlgorithm; + } + interface RsaHashedKeyGenParams extends RsaKeyGenParams { + hash: HashAlgorithmIdentifier; + } + interface RsaKeyAlgorithm extends KeyAlgorithm { + modulusLength: number; + publicExponent: BigInteger; + } + interface RsaKeyGenParams extends Algorithm { + modulusLength: number; + publicExponent: BigInteger; + } + interface RsaOaepParams extends Algorithm { + label?: BufferSource; + } + interface RsaOtherPrimesInfo { + d?: string; + r?: string; + t?: string; + } + interface RsaPssParams extends Algorithm { + saltLength: number; + } + /** + * Calling `require('node:crypto').webcrypto` returns an instance of the `Crypto` class. + * `Crypto` is a singleton that provides access to the remainder of the crypto API. + * @since v15.0.0 + */ + interface Crypto { + /** + * Provides access to the `SubtleCrypto` API. 
+ * @since v15.0.0 + */ + readonly subtle: SubtleCrypto; + /** + * Generates cryptographically strong random values. + * The given `typedArray` is filled with random values, and a reference to `typedArray` is returned. + * + * The given `typedArray` must be an integer-based instance of {@link NodeJS.TypedArray}, i.e. `Float32Array` and `Float64Array` are not accepted. + * + * An error will be thrown if the given `typedArray` is larger than 65,536 bytes. + * @since v15.0.0 + */ + getRandomValues>(typedArray: T): T; + /** + * Generates a random {@link https://www.rfc-editor.org/rfc/rfc4122.txt RFC 4122} version 4 UUID. + * The UUID is generated using a cryptographic pseudorandom number generator. + * @since v16.7.0 + */ + randomUUID(): UUID; + CryptoKey: CryptoKeyConstructor; + } + // This constructor throws ILLEGAL_CONSTRUCTOR so it should not be newable. + interface CryptoKeyConstructor { + /** Illegal constructor */ + (_: { readonly _: unique symbol }): never; // Allows instanceof to work but not be callable by the user. + readonly length: 0; + readonly name: "CryptoKey"; + readonly prototype: CryptoKey; + } + /** + * @since v15.0.0 + */ + interface CryptoKey { + /** + * An object detailing the algorithm for which the key can be used along with additional algorithm-specific parameters. + * @since v15.0.0 + */ + readonly algorithm: KeyAlgorithm; + /** + * When `true`, the {@link CryptoKey} can be extracted using either `subtleCrypto.exportKey()` or `subtleCrypto.wrapKey()`. + * @since v15.0.0 + */ + readonly extractable: boolean; + /** + * A string identifying whether the key is a symmetric (`'secret'`) or asymmetric (`'private'` or `'public'`) key. + * @since v15.0.0 + */ + readonly type: KeyType; + /** + * An array of strings identifying the operations for which the key may be used. + * + * The possible usages are: + * - `'encrypt'` - The key may be used to encrypt data. + * - `'decrypt'` - The key may be used to decrypt data. + * - `'sign'` - The key may be used to generate digital signatures. + * - `'verify'` - The key may be used to verify digital signatures. + * - `'deriveKey'` - The key may be used to derive a new key. + * - `'deriveBits'` - The key may be used to derive bits. + * - `'wrapKey'` - The key may be used to wrap another key. + * - `'unwrapKey'` - The key may be used to unwrap another key. + * + * Valid key usages depend on the key algorithm (identified by `cryptokey.algorithm.name`). + * @since v15.0.0 + */ + readonly usages: KeyUsage[]; + } + /** + * The `CryptoKeyPair` is a simple dictionary object with `publicKey` and `privateKey` properties, representing an asymmetric key pair. + * @since v15.0.0 + */ + interface CryptoKeyPair { + /** + * A {@link CryptoKey} whose type will be `'private'`. + * @since v15.0.0 + */ + privateKey: CryptoKey; + /** + * A {@link CryptoKey} whose type will be `'public'`. + * @since v15.0.0 + */ + publicKey: CryptoKey; + } + /** + * @since v15.0.0 + */ + interface SubtleCrypto { + /** + * Using the method and parameters specified in `algorithm` and the keying material provided by `key`, + * `subtle.decrypt()` attempts to decipher the provided `data`. If successful, + * the returned promise will be resolved with an `` containing the plaintext result. 
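+             *
+             * An illustrative AES-GCM round trip (a sketch, not from the upstream docs):
+             *
+             * ```js
+             * const { webcrypto } = await import('node:crypto');
+             * const { subtle } = webcrypto;
+             *
+             * const key = await subtle.generateKey({ name: 'AES-GCM', length: 256 }, true, ['encrypt', 'decrypt']);
+             * const iv = webcrypto.getRandomValues(new Uint8Array(12));
+             * const ciphertext = await subtle.encrypt({ name: 'AES-GCM', iv }, key, new TextEncoder().encode('secret'));
+             * const plaintext = await subtle.decrypt({ name: 'AES-GCM', iv }, key, ciphertext);
+             * console.log(new TextDecoder().decode(plaintext)); // 'secret'
+             * ```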
+ * + * The algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * @since v15.0.0 + */ + decrypt( + algorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + key: CryptoKey, + data: BufferSource, + ): Promise; + /** + * Using the method and parameters specified in `algorithm` and the keying material provided by `baseKey`, + * `subtle.deriveBits()` attempts to generate `length` bits. + * The Node.js implementation requires that when `length` is a number it must be multiple of `8`. + * When `length` is `null` the maximum number of bits for a given algorithm is generated. This is allowed + * for the `'ECDH'`, `'X25519'`, and `'X448'` algorithms. + * If successful, the returned promise will be resolved with an `` containing the generated data. + * + * The algorithms currently supported include: + * + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HKDF'` + * - `'PBKDF2'` + * @since v15.0.0 + */ + deriveBits(algorithm: EcdhKeyDeriveParams, baseKey: CryptoKey, length: number | null): Promise; + deriveBits( + algorithm: AlgorithmIdentifier | HkdfParams | Pbkdf2Params, + baseKey: CryptoKey, + length: number, + ): Promise; + /** + * Using the method and parameters specified in `algorithm`, and the keying material provided by `baseKey`, + * `subtle.deriveKey()` attempts to generate a new ` based on the method and parameters in `derivedKeyAlgorithm`. + * + * Calling `subtle.deriveKey()` is equivalent to calling `subtle.deriveBits()` to generate raw keying material, + * then passing the result into the `subtle.importKey()` method using the `deriveKeyAlgorithm`, `extractable`, and `keyUsages` parameters as input. + * + * The algorithms currently supported include: + * + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HKDF'` + * - `'PBKDF2'` + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + deriveKey( + algorithm: AlgorithmIdentifier | EcdhKeyDeriveParams | HkdfParams | Pbkdf2Params, + baseKey: CryptoKey, + derivedKeyAlgorithm: + | AlgorithmIdentifier + | AesDerivedKeyParams + | HmacImportParams + | HkdfParams + | Pbkdf2Params, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + /** + * Using the method identified by `algorithm`, `subtle.digest()` attempts to generate a digest of `data`. + * If successful, the returned promise is resolved with an `` containing the computed digest. + * + * If `algorithm` is provided as a ``, it must be one of: + * + * - `'SHA-1'` + * - `'SHA-256'` + * - `'SHA-384'` + * - `'SHA-512'` + * + * If `algorithm` is provided as an ``, it must have a `name` property whose value is one of the above. + * @since v15.0.0 + */ + digest(algorithm: AlgorithmIdentifier, data: BufferSource): Promise; + /** + * Using the method and parameters specified by `algorithm` and the keying material provided by `key`, + * `subtle.encrypt()` attempts to encipher `data`. If successful, + * the returned promise is resolved with an `` containing the encrypted result. + * + * The algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * @since v15.0.0 + */ + encrypt( + algorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + key: CryptoKey, + data: BufferSource, + ): Promise; + /** + * Exports the given key into the specified format, if supported. 
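+             *
+             * For example, exporting a freshly generated HMAC key as a JWK (an illustrative sketch):
+             *
+             * ```js
+             * const { webcrypto } = await import('node:crypto');
+             * const { subtle } = webcrypto;
+             *
+             * const key = await subtle.generateKey({ name: 'HMAC', hash: 'SHA-256' }, true, ['sign', 'verify']);
+             * const jwk = await subtle.exportKey('jwk', key);
+             * console.log(jwk.kty); // 'oct'
+             * ```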
+ * + * If the `` is not extractable, the returned promise will reject. + * + * When `format` is either `'pkcs8'` or `'spki'` and the export is successful, + * the returned promise will be resolved with an `` containing the exported key data. + * + * When `format` is `'jwk'` and the export is successful, the returned promise will be resolved with a + * JavaScript object conforming to the {@link https://tools.ietf.org/html/rfc7517 JSON Web Key} specification. + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @returns `` containing ``. + * @since v15.0.0 + */ + exportKey(format: "jwk", key: CryptoKey): Promise; + exportKey(format: Exclude, key: CryptoKey): Promise; + /** + * Using the method and parameters provided in `algorithm`, + * `subtle.generateKey()` attempts to generate new keying material. + * Depending the method used, the method may generate either a single `` or a ``. + * + * The `` (public and private key) generating algorithms supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'RSA-OAEP'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * The `` (secret key) generating algorithms supported include: + * + * - `'HMAC'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + generateKey( + algorithm: RsaHashedKeyGenParams | EcKeyGenParams, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + generateKey( + algorithm: AesKeyGenParams | HmacKeyGenParams | Pbkdf2Params, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + generateKey( + algorithm: AlgorithmIdentifier, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + /** + * The `subtle.importKey()` method attempts to interpret the provided `keyData` as the given `format` + * to create a `` instance using the provided `algorithm`, `extractable`, and `keyUsages` arguments. + * If the import is successful, the returned promise will be resolved with the created ``. + * + * If importing a `'PBKDF2'` key, `extractable` must be `false`. + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + importKey( + format: "jwk", + keyData: JsonWebKey, + algorithm: + | AlgorithmIdentifier + | RsaHashedImportParams + | EcKeyImportParams + | HmacImportParams + | AesKeyAlgorithm, + extractable: boolean, + keyUsages: readonly KeyUsage[], + ): Promise; + importKey( + format: Exclude, + keyData: BufferSource, + algorithm: + | AlgorithmIdentifier + | RsaHashedImportParams + | EcKeyImportParams + | HmacImportParams + | AesKeyAlgorithm, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + /** + * Using the method and parameters given by `algorithm` and the keying material provided by `key`, + * `subtle.sign()` attempts to generate a cryptographic signature of `data`. If successful, + * the returned promise is resolved with an `` containing the generated signature. 
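+             *
+             * An illustrative ECDSA (P-256) sign/verify sketch:
+             *
+             * ```js
+             * const { webcrypto } = await import('node:crypto');
+             * const { subtle } = webcrypto;
+             *
+             * const { publicKey, privateKey } = await subtle.generateKey(
+             *     { name: 'ECDSA', namedCurve: 'P-256' }, false, ['sign', 'verify']);
+             * const data = new TextEncoder().encode('message');
+             * const signature = await subtle.sign({ name: 'ECDSA', hash: 'SHA-256' }, privateKey, data);
+             * console.log(await subtle.verify({ name: 'ECDSA', hash: 'SHA-256' }, publicKey, signature, data)); // true
+             * ```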
+ * + * The algorithms currently supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'HMAC'` + * @since v15.0.0 + */ + sign( + algorithm: AlgorithmIdentifier | RsaPssParams | EcdsaParams | Ed448Params, + key: CryptoKey, + data: BufferSource, + ): Promise; + /** + * In cryptography, "wrapping a key" refers to exporting and then encrypting the keying material. + * The `subtle.unwrapKey()` method attempts to decrypt a wrapped key and create a `` instance. + * It is equivalent to calling `subtle.decrypt()` first on the encrypted key data (using the `wrappedKey`, `unwrapAlgo`, and `unwrappingKey` arguments as input) + * then passing the results in to the `subtle.importKey()` method using the `unwrappedKeyAlgo`, `extractable`, and `keyUsages` arguments as inputs. + * If successful, the returned promise is resolved with a `` object. + * + * The wrapping algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * + * The unwrapped key algorithms supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'RSA-OAEP'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HMAC'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + unwrapKey( + format: KeyFormat, + wrappedKey: BufferSource, + unwrappingKey: CryptoKey, + unwrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + unwrappedKeyAlgorithm: + | AlgorithmIdentifier + | RsaHashedImportParams + | EcKeyImportParams + | HmacImportParams + | AesKeyAlgorithm, + extractable: boolean, + keyUsages: KeyUsage[], + ): Promise; + /** + * Using the method and parameters given in `algorithm` and the keying material provided by `key`, + * `subtle.verify()` attempts to verify that `signature` is a valid cryptographic signature of `data`. + * The returned promise is resolved with either `true` or `false`. + * + * The algorithms currently supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'HMAC'` + * @since v15.0.0 + */ + verify( + algorithm: AlgorithmIdentifier | RsaPssParams | EcdsaParams | Ed448Params, + key: CryptoKey, + signature: BufferSource, + data: BufferSource, + ): Promise; + /** + * In cryptography, "wrapping a key" refers to exporting and then encrypting the keying material. + * The `subtle.wrapKey()` method exports the keying material into the format identified by `format`, + * then encrypts it using the method and parameters specified by `wrapAlgo` and the keying material provided by `wrappingKey`. + * It is the equivalent to calling `subtle.exportKey()` using `format` and `key` as the arguments, + * then passing the result to the `subtle.encrypt()` method using `wrappingKey` and `wrapAlgo` as inputs. + * If successful, the returned promise will be resolved with an `` containing the encrypted key data. + * + * The wrapping algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. 
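+             *
+             * A minimal AES-KW sketch that wraps and then unwraps a raw AES-GCM key (illustrative):
+             *
+             * ```js
+             * const { webcrypto } = await import('node:crypto');
+             * const { subtle } = webcrypto;
+             *
+             * const wrappingKey = await subtle.generateKey({ name: 'AES-KW', length: 256 }, false, ['wrapKey', 'unwrapKey']);
+             * const key = await subtle.generateKey({ name: 'AES-GCM', length: 256 }, true, ['encrypt', 'decrypt']);
+             * const wrapped = await subtle.wrapKey('raw', key, wrappingKey, 'AES-KW');
+             * const unwrapped = await subtle.unwrapKey('raw', wrapped, wrappingKey, 'AES-KW', 'AES-GCM', true, ['encrypt', 'decrypt']);
+             * console.log(unwrapped.algorithm.name); // 'AES-GCM'
+             * ```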
+ * @since v15.0.0 + */ + wrapKey( + format: KeyFormat, + key: CryptoKey, + wrappingKey: CryptoKey, + wrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + ): Promise; + } + } + + global { + var crypto: typeof globalThis extends { + crypto: infer T; + onmessage: any; + } ? T + : webcrypto.Crypto; + } +} +declare module "node:crypto" { + export * from "crypto"; +} diff --git a/dist/node_modules/@types/node/ts4.8/dgram.d.ts b/dist/node_modules/@types/node/ts4.8/dgram.d.ts new file mode 100644 index 0000000..0b86ae7 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/dgram.d.ts @@ -0,0 +1,596 @@ +/** + * The `node:dgram` module provides an implementation of UDP datagram sockets. + * + * ```js + * import dgram from 'node:dgram'; + * + * const server = dgram.createSocket('udp4'); + * + * server.on('error', (err) => { + * console.error(`server error:\n${err.stack}`); + * server.close(); + * }); + * + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * + * server.on('listening', () => { + * const address = server.address(); + * console.log(`server listening ${address.address}:${address.port}`); + * }); + * + * server.bind(41234); + * // Prints: server listening 0.0.0.0:41234 + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/dgram.js) + */ +declare module "dgram" { + import { AddressInfo } from "node:net"; + import * as dns from "node:dns"; + import { Abortable, EventEmitter } from "node:events"; + interface RemoteInfo { + address: string; + family: "IPv4" | "IPv6"; + port: number; + size: number; + } + interface BindOptions { + port?: number | undefined; + address?: string | undefined; + exclusive?: boolean | undefined; + fd?: number | undefined; + } + type SocketType = "udp4" | "udp6"; + interface SocketOptions extends Abortable { + type: SocketType; + reuseAddr?: boolean | undefined; + /** + * @default false + */ + ipv6Only?: boolean | undefined; + recvBufferSize?: number | undefined; + sendBufferSize?: number | undefined; + lookup?: + | (( + hostname: string, + options: dns.LookupOneOptions, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ) => void) + | undefined; + } + /** + * Creates a `dgram.Socket` object. Once the socket is created, calling `socket.bind()` will instruct the socket to begin listening for datagram + * messages. When `address` and `port` are not passed to `socket.bind()` the + * method will bind the socket to the "all interfaces" address on a random port + * (it does the right thing for both `udp4` and `udp6` sockets). The bound address + * and port can be retrieved using `socket.address().address` and `socket.address().port`. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.close()` on the socket: + * + * ```js + * const controller = new AbortController(); + * const { signal } = controller; + * const server = dgram.createSocket({ type: 'udp4', signal }); + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * // Later, when you want to close the server. + * controller.abort(); + * ``` + * @since v0.11.13 + * @param options Available options are: + * @param callback Attached as a listener for `'message'` events. Optional. 
+ */ + function createSocket(type: SocketType, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; + function createSocket(options: SocketOptions, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; + /** + * Encapsulates the datagram functionality. + * + * New instances of `dgram.Socket` are created using {@link createSocket}. + * The `new` keyword is not to be used to create `dgram.Socket` instances. + * @since v0.1.99 + */ + class Socket extends EventEmitter { + /** + * Tells the kernel to join a multicast group at the given `multicastAddress` and`multicastInterface` using the `IP_ADD_MEMBERSHIP` socket option. If the`multicastInterface` argument is not + * specified, the operating system will choose + * one interface and will add membership to it. To add membership to every + * available interface, call `addMembership` multiple times, once per interface. + * + * When called on an unbound socket, this method will implicitly bind to a random + * port, listening on all interfaces. + * + * When sharing a UDP socket across multiple `cluster` workers, the`socket.addMembership()` function must be called only once or an`EADDRINUSE` error will occur: + * + * ```js + * import cluster from 'node:cluster'; + * import dgram from 'node:dgram'; + * + * if (cluster.isPrimary) { + * cluster.fork(); // Works ok. + * cluster.fork(); // Fails with EADDRINUSE. + * } else { + * const s = dgram.createSocket('udp4'); + * s.bind(1234, () => { + * s.addMembership('224.0.0.114'); + * }); + * } + * ``` + * @since v0.6.9 + */ + addMembership(multicastAddress: string, multicastInterface?: string): void; + /** + * Returns an object containing the address information for a socket. + * For UDP sockets, this object will contain `address`, `family`, and `port`properties. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.1.99 + */ + address(): AddressInfo; + /** + * For UDP sockets, causes the `dgram.Socket` to listen for datagram + * messages on a named `port` and optional `address`. If `port` is not + * specified or is `0`, the operating system will attempt to bind to a + * random port. If `address` is not specified, the operating system will + * attempt to listen on all addresses. Once binding is complete, a`'listening'` event is emitted and the optional `callback` function is + * called. + * + * Specifying both a `'listening'` event listener and passing a`callback` to the `socket.bind()` method is not harmful but not very + * useful. + * + * A bound datagram socket keeps the Node.js process running to receive + * datagram messages. + * + * If binding fails, an `'error'` event is generated. In rare case (e.g. + * attempting to bind with a closed socket), an `Error` may be thrown. + * + * Example of a UDP server listening on port 41234: + * + * ```js + * import dgram from 'node:dgram'; + * + * const server = dgram.createSocket('udp4'); + * + * server.on('error', (err) => { + * console.error(`server error:\n${err.stack}`); + * server.close(); + * }); + * + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * + * server.on('listening', () => { + * const address = server.address(); + * console.log(`server listening ${address.address}:${address.port}`); + * }); + * + * server.bind(41234); + * // Prints: server listening 0.0.0.0:41234 + * ``` + * @since v0.1.99 + * @param callback with no parameters. Called when binding is complete. 
+ */ + bind(port?: number, address?: string, callback?: () => void): this; + bind(port?: number, callback?: () => void): this; + bind(callback?: () => void): this; + bind(options: BindOptions, callback?: () => void): this; + /** + * Close the underlying socket and stop listening for data on it. If a callback is + * provided, it is added as a listener for the `'close'` event. + * @since v0.1.99 + * @param callback Called when the socket has been closed. + */ + close(callback?: () => void): this; + /** + * Associates the `dgram.Socket` to a remote address and port. Every + * message sent by this handle is automatically sent to that destination. Also, + * the socket will only receive messages from that remote peer. + * Trying to call `connect()` on an already connected socket will result + * in an `ERR_SOCKET_DGRAM_IS_CONNECTED` exception. If `address` is not + * provided, `'127.0.0.1'` (for `udp4` sockets) or `'::1'` (for `udp6` sockets) + * will be used by default. Once the connection is complete, a `'connect'` event + * is emitted and the optional `callback` function is called. In case of failure, + * the `callback` is called or, failing this, an `'error'` event is emitted. + * @since v12.0.0 + * @param callback Called when the connection is completed or on error. + */ + connect(port: number, address?: string, callback?: () => void): void; + connect(port: number, callback: () => void): void; + /** + * A synchronous function that disassociates a connected `dgram.Socket` from + * its remote address. Trying to call `disconnect()` on an unbound or already + * disconnected socket will result in an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception. + * @since v12.0.0 + */ + disconnect(): void; + /** + * Instructs the kernel to leave a multicast group at `multicastAddress` using the`IP_DROP_MEMBERSHIP` socket option. This method is automatically called by the + * kernel when the socket is closed or the process terminates, so most apps will + * never have reason to call this. + * + * If `multicastInterface` is not specified, the operating system will attempt to + * drop membership on all valid interfaces. + * @since v0.6.9 + */ + dropMembership(multicastAddress: string, multicastInterface?: string): void; + /** + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + * @return the `SO_RCVBUF` socket receive buffer size in bytes. + */ + getRecvBufferSize(): number; + /** + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + * @return the `SO_SNDBUF` socket send buffer size in bytes. + */ + getSendBufferSize(): number; + /** + * @since v18.8.0, v16.19.0 + * @return Number of bytes queued for sending. + */ + getSendQueueSize(): number; + /** + * @since v18.8.0, v16.19.0 + * @return Number of send requests currently in the queue awaiting to be processed. + */ + getSendQueueCount(): number; + /** + * By default, binding a socket will cause it to block the Node.js process from + * exiting as long as the socket is open. The `socket.unref()` method can be used + * to exclude the socket from the reference counting that keeps the Node.js + * process active. The `socket.ref()` method adds the socket back to the reference + * counting and restores the default behavior. + * + * Calling `socket.ref()` multiples times will have no additional effect. + * + * The `socket.ref()` method returns a reference to the socket so calls can be + * chained. 
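+         *
+         * A small illustrative sketch pairing `socket.unref()` and `socket.ref()`:
+         *
+         * ```js
+         * import dgram from 'node:dgram';
+         *
+         * const socket = dgram.createSocket('udp4');
+         * socket.bind(41234);
+         * socket.unref(); // the process may now exit even though the socket is listening
+         * socket.ref(); // restore the default behavior
+         * ```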
+ * @since v0.9.1 + */ + ref(): this; + /** + * Returns an object containing the `address`, `family`, and `port` of the remote + * endpoint. This method throws an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception + * if the socket is not connected. + * @since v12.0.0 + */ + remoteAddress(): AddressInfo; + /** + * Broadcasts a datagram on the socket. + * For connectionless sockets, the destination `port` and `address` must be + * specified. Connected sockets, on the other hand, will use their associated + * remote endpoint, so the `port` and `address` arguments must not be set. + * + * The `msg` argument contains the message to be sent. + * Depending on its type, different behavior can apply. If `msg` is a `Buffer`, + * any `TypedArray` or a `DataView`, + * the `offset` and `length` specify the offset within the `Buffer` where the + * message begins and the number of bytes in the message, respectively. + * If `msg` is a `String`, then it is automatically converted to a `Buffer`with `'utf8'` encoding. With messages that + * contain multi-byte characters, `offset` and `length` will be calculated with + * respect to `byte length` and not the character position. + * If `msg` is an array, `offset` and `length` must not be specified. + * + * The `address` argument is a string. If the value of `address` is a host name, + * DNS will be used to resolve the address of the host. If `address` is not + * provided or otherwise nullish, `'127.0.0.1'` (for `udp4` sockets) or `'::1'`(for `udp6` sockets) will be used by default. + * + * If the socket has not been previously bound with a call to `bind`, the socket + * is assigned a random port number and is bound to the "all interfaces" address + * (`'0.0.0.0'` for `udp4` sockets, `'::0'` for `udp6` sockets.) + * + * An optional `callback` function may be specified to as a way of reporting + * DNS errors or for determining when it is safe to reuse the `buf` object. + * DNS lookups delay the time to send for at least one tick of the + * Node.js event loop. + * + * The only way to know for sure that the datagram has been sent is by using a`callback`. If an error occurs and a `callback` is given, the error will be + * passed as the first argument to the `callback`. If a `callback` is not given, + * the error is emitted as an `'error'` event on the `socket` object. + * + * Offset and length are optional but both _must_ be set if either are used. + * They are supported only when the first argument is a `Buffer`, a `TypedArray`, + * or a `DataView`. + * + * This method throws `ERR_SOCKET_BAD_PORT` if called on an unbound socket. + * + * Example of sending a UDP packet to a port on `localhost`; + * + * ```js + * import dgram from 'node:dgram'; + * import { Buffer } from 'node:buffer'; + * + * const message = Buffer.from('Some bytes'); + * const client = dgram.createSocket('udp4'); + * client.send(message, 41234, 'localhost', (err) => { + * client.close(); + * }); + * ``` + * + * Example of sending a UDP packet composed of multiple buffers to a port on`127.0.0.1`; + * + * ```js + * import dgram from 'node:dgram'; + * import { Buffer } from 'node:buffer'; + * + * const buf1 = Buffer.from('Some '); + * const buf2 = Buffer.from('bytes'); + * const client = dgram.createSocket('udp4'); + * client.send([buf1, buf2], 41234, (err) => { + * client.close(); + * }); + * ``` + * + * Sending multiple buffers might be faster or slower depending on the + * application and operating system. Run benchmarks to + * determine the optimal strategy on a case-by-case basis. 
Generally speaking, + * however, sending multiple buffers is faster. + * + * Example of sending a UDP packet using a socket connected to a port on`localhost`: + * + * ```js + * import dgram from 'node:dgram'; + * import { Buffer } from 'node:buffer'; + * + * const message = Buffer.from('Some bytes'); + * const client = dgram.createSocket('udp4'); + * client.connect(41234, 'localhost', (err) => { + * client.send(message, (err) => { + * client.close(); + * }); + * }); + * ``` + * @since v0.1.99 + * @param msg Message to be sent. + * @param offset Offset in the buffer where the message starts. + * @param length Number of bytes in the message. + * @param port Destination port. + * @param address Destination host name or IP address. + * @param callback Called when the message has been sent. + */ + send( + msg: string | Uint8Array | readonly any[], + port?: number, + address?: string, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array | readonly any[], + port?: number, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array | readonly any[], + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array, + offset: number, + length: number, + port?: number, + address?: string, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array, + offset: number, + length: number, + port?: number, + callback?: (error: Error | null, bytes: number) => void, + ): void; + send( + msg: string | Uint8Array, + offset: number, + length: number, + callback?: (error: Error | null, bytes: number) => void, + ): void; + /** + * Sets or clears the `SO_BROADCAST` socket option. When set to `true`, UDP + * packets may be sent to a local interface's broadcast address. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.6.9 + */ + setBroadcast(flag: boolean): void; + /** + * _All references to scope in this section are referring to [IPv6 Zone Indices](https://en.wikipedia.org/wiki/IPv6_address#Scoped_literal_IPv6_addresses), which are defined by [RFC + * 4007](https://tools.ietf.org/html/rfc4007). In string form, an IP_ + * _with a scope index is written as `'IP%scope'` where scope is an interface name_ + * _or interface number._ + * + * Sets the default outgoing multicast interface of the socket to a chosen + * interface or back to system interface selection. The `multicastInterface` must + * be a valid string representation of an IP from the socket's family. + * + * For IPv4 sockets, this should be the IP configured for the desired physical + * interface. All packets sent to multicast on the socket will be sent on the + * interface determined by the most recent successful use of this call. + * + * For IPv6 sockets, `multicastInterface` should include a scope to indicate the + * interface as in the examples that follow. In IPv6, individual `send` calls can + * also use explicit scope in addresses, so only packets sent to a multicast + * address without specifying an explicit scope are affected by the most recent + * successful use of this call. + * + * This method throws `EBADF` if called on an unbound socket. 
+ * + * #### Example: IPv6 outgoing multicast interface + * + * On most systems, where scope format uses the interface name: + * + * ```js + * const socket = dgram.createSocket('udp6'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('::%eth1'); + * }); + * ``` + * + * On Windows, where scope format uses an interface number: + * + * ```js + * const socket = dgram.createSocket('udp6'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('::%2'); + * }); + * ``` + * + * #### Example: IPv4 outgoing multicast interface + * + * All systems use an IP of the host on the desired physical interface: + * + * ```js + * const socket = dgram.createSocket('udp4'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('10.0.0.2'); + * }); + * ``` + * @since v8.6.0 + */ + setMulticastInterface(multicastInterface: string): void; + /** + * Sets or clears the `IP_MULTICAST_LOOP` socket option. When set to `true`, + * multicast packets will also be received on the local interface. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.3.8 + */ + setMulticastLoopback(flag: boolean): boolean; + /** + * Sets the `IP_MULTICAST_TTL` socket option. While TTL generally stands for + * "Time to Live", in this context it specifies the number of IP hops that a + * packet is allowed to travel through, specifically for multicast traffic. Each + * router or gateway that forwards a packet decrements the TTL. If the TTL is + * decremented to 0 by a router, it will not be forwarded. + * + * The `ttl` argument may be between 0 and 255\. The default on most systems is `1`. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.3.8 + */ + setMulticastTTL(ttl: number): number; + /** + * Sets the `SO_RCVBUF` socket option. Sets the maximum socket receive buffer + * in bytes. + * + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + */ + setRecvBufferSize(size: number): void; + /** + * Sets the `SO_SNDBUF` socket option. Sets the maximum socket send buffer + * in bytes. + * + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + */ + setSendBufferSize(size: number): void; + /** + * Sets the `IP_TTL` socket option. While TTL generally stands for "Time to Live", + * in this context it specifies the number of IP hops that a packet is allowed to + * travel through. Each router or gateway that forwards a packet decrements the + * TTL. If the TTL is decremented to 0 by a router, it will not be forwarded. + * Changing TTL values is typically done for network probes or when multicasting. + * + * The `ttl` argument may be between 1 and 255\. The default on most systems + * is 64. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.1.101 + */ + setTTL(ttl: number): number; + /** + * By default, binding a socket will cause it to block the Node.js process from + * exiting as long as the socket is open. The `socket.unref()` method can be used + * to exclude the socket from the reference counting that keeps the Node.js + * process active, allowing the process to exit even if the socket is still + * listening. + * + * Calling `socket.unref()` multiple times will have no additional effect. + * + * The `socket.unref()` method returns a reference to the socket so calls can be + * chained. 
+ * @since v0.9.1 + */ + unref(): this; + /** + * Tells the kernel to join a source-specific multicast channel at the given`sourceAddress` and `groupAddress`, using the `multicastInterface` with the`IP_ADD_SOURCE_MEMBERSHIP` socket + * option. If the `multicastInterface` argument + * is not specified, the operating system will choose one interface and will add + * membership to it. To add membership to every available interface, call`socket.addSourceSpecificMembership()` multiple times, once per interface. + * + * When called on an unbound socket, this method will implicitly bind to a random + * port, listening on all interfaces. + * @since v13.1.0, v12.16.0 + */ + addSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void; + /** + * Instructs the kernel to leave a source-specific multicast channel at the given`sourceAddress` and `groupAddress` using the `IP_DROP_SOURCE_MEMBERSHIP`socket option. This method is + * automatically called by the kernel when the + * socket is closed or the process terminates, so most apps will never have + * reason to call this. + * + * If `multicastInterface` is not specified, the operating system will attempt to + * drop membership on all valid interfaces. + * @since v13.1.0, v12.16.0 + */ + dropSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void; + /** + * events.EventEmitter + * 1. close + * 2. connect + * 3. error + * 4. listening + * 5. message + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connect", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "connect"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit(event: "message", msg: Buffer, rinfo: RemoteInfo): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connect", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connect", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connect", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: 
"connect", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "message", listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + /** + * Calls `socket.close()` and returns a promise that fulfills when the socket has closed. + * @since v20.5.0 + */ + [Symbol.asyncDispose](): Promise; + } +} +declare module "node:dgram" { + export * from "dgram"; +} diff --git a/dist/node_modules/@types/node/ts4.8/diagnostics_channel.d.ts b/dist/node_modules/@types/node/ts4.8/diagnostics_channel.d.ts new file mode 100644 index 0000000..cd4bd71 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/diagnostics_channel.d.ts @@ -0,0 +1,545 @@ +/** + * The `node:diagnostics_channel` module provides an API to create named channels + * to report arbitrary message data for diagnostics purposes. + * + * It can be accessed using: + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * ``` + * + * It is intended that a module writer wanting to report diagnostics messages + * will create one or many top-level channels to report messages through. + * Channels may also be acquired at runtime but it is not encouraged + * due to the additional overhead of doing so. Channels may be exported for + * convenience, but as long as the name is known it can be acquired anywhere. + * + * If you intend for your module to produce diagnostics data for others to + * consume it is recommended that you include documentation of what named + * channels are used along with the shape of the message data. Channel names + * should generally include the module name to avoid collisions with data from + * other modules. + * @since v15.1.0, v14.17.0 + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/diagnostics_channel.js) + */ +declare module "diagnostics_channel" { + import { AsyncLocalStorage } from "node:async_hooks"; + /** + * Check if there are active subscribers to the named channel. This is helpful if + * the message you want to send might be expensive to prepare. + * + * This API is optional but helpful when trying to publish messages from very + * performance-sensitive code. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * if (diagnostics_channel.hasSubscribers('my-channel')) { + * // There are subscribers, prepare and publish message + * } + * ``` + * @since v15.1.0, v14.17.0 + * @param name The channel name + * @return If there are active subscribers + */ + function hasSubscribers(name: string | symbol): boolean; + /** + * This is the primary entry-point for anyone wanting to publish to a named + * channel. It produces a channel object which is optimized to reduce overhead at + * publish time as much as possible. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * ``` + * @since v15.1.0, v14.17.0 + * @param name The channel name + * @return The named channel object + */ + function channel(name: string | symbol): Channel; + type ChannelListener = (message: unknown, name: string | symbol) => void; + /** + * Register a message handler to subscribe to this channel. This message handler + * will be run synchronously whenever a message is published to the channel. Any + * errors thrown in the message handler will trigger an `'uncaughtException'`. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * diagnostics_channel.subscribe('my-channel', (message, name) => { + * // Received data + * }); + * ``` + * @since v18.7.0, v16.17.0 + * @param name The channel name + * @param onMessage The handler to receive channel messages + */ + function subscribe(name: string | symbol, onMessage: ChannelListener): void; + /** + * Remove a message handler previously registered to this channel with {@link subscribe}. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * function onMessage(message, name) { + * // Received data + * } + * + * diagnostics_channel.subscribe('my-channel', onMessage); + * + * diagnostics_channel.unsubscribe('my-channel', onMessage); + * ``` + * @since v18.7.0, v16.17.0 + * @param name The channel name + * @param onMessage The previous subscribed handler to remove + * @return `true` if the handler was found, `false` otherwise. + */ + function unsubscribe(name: string | symbol, onMessage: ChannelListener): boolean; + /** + * Creates a `TracingChannel` wrapper for the given `TracingChannel Channels`. If a name is given, the corresponding tracing + * channels will be created in the form of `tracing:${name}:${eventType}` where`eventType` corresponds to the types of `TracingChannel Channels`. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channelsByName = diagnostics_channel.tracingChannel('my-channel'); + * + * // or... + * + * const channelsByCollection = diagnostics_channel.tracingChannel({ + * start: diagnostics_channel.channel('tracing:my-channel:start'), + * end: diagnostics_channel.channel('tracing:my-channel:end'), + * asyncStart: diagnostics_channel.channel('tracing:my-channel:asyncStart'), + * asyncEnd: diagnostics_channel.channel('tracing:my-channel:asyncEnd'), + * error: diagnostics_channel.channel('tracing:my-channel:error'), + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param nameOrChannels Channel name or object containing all the `TracingChannel Channels` + * @return Collection of channels to trace with + */ + function tracingChannel< + StoreType = unknown, + ContextType extends object = StoreType extends object ? StoreType : object, + >( + nameOrChannels: string | TracingChannelCollection, + ): TracingChannel; + /** + * The class `Channel` represents an individual named channel within the data + * pipeline. It is used to track subscribers and to publish messages when there + * are subscribers present. It exists as a separate object to avoid channel + * lookups at publish time, enabling very fast publish speeds and allowing + * for heavy use while incurring very minimal cost. Channels are created with {@link channel}, constructing a channel directly + * with `new Channel(name)` is not supported. + * @since v15.1.0, v14.17.0 + */ + class Channel { + readonly name: string | symbol; + /** + * Check if there are active subscribers to this channel. This is helpful if + * the message you want to send might be expensive to prepare. + * + * This API is optional but helpful when trying to publish messages from very + * performance-sensitive code. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * if (channel.hasSubscribers) { + * // There are subscribers, prepare and publish message + * } + * ``` + * @since v15.1.0, v14.17.0 + */ + readonly hasSubscribers: boolean; + private constructor(name: string | symbol); + /** + * Publish a message to any subscribers to the channel. This will trigger + * message handlers synchronously so they will execute within the same context. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.publish({ + * some: 'message', + * }); + * ``` + * @since v15.1.0, v14.17.0 + * @param message The message to send to the channel subscribers + */ + publish(message: unknown): void; + /** + * Register a message handler to subscribe to this channel. This message handler + * will be run synchronously whenever a message is published to the channel. Any + * errors thrown in the message handler will trigger an `'uncaughtException'`. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.subscribe((message, name) => { + * // Received data + * }); + * ``` + * @since v15.1.0, v14.17.0 + * @deprecated Since v18.7.0,v16.17.0 - Use {@link subscribe(name, onMessage)} + * @param onMessage The handler to receive channel messages + */ + subscribe(onMessage: ChannelListener): void; + /** + * Remove a message handler previously registered to this channel with `channel.subscribe(onMessage)`. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * function onMessage(message, name) { + * // Received data + * } + * + * channel.subscribe(onMessage); + * + * channel.unsubscribe(onMessage); + * ``` + * @since v15.1.0, v14.17.0 + * @deprecated Since v18.7.0,v16.17.0 - Use {@link unsubscribe(name, onMessage)} + * @param onMessage The previous subscribed handler to remove + * @return `true` if the handler was found, `false` otherwise. + */ + unsubscribe(onMessage: ChannelListener): void; + /** + * When `channel.runStores(context, ...)` is called, the given context data + * will be applied to any store bound to the channel. If the store has already been + * bound the previous `transform` function will be replaced with the new one. + * The `transform` function may be omitted to set the given context data as the + * context directly. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const store = new AsyncLocalStorage(); + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.bindStore(store, (data) => { + * return { data }; + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param store The store to which to bind the context data + * @param transform Transform context data before setting the store context + */ + bindStore(store: AsyncLocalStorage, transform?: (context: ContextType) => StoreType): void; + /** + * Remove a message handler previously registered to this channel with `channel.bindStore(store)`. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const store = new AsyncLocalStorage(); + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.bindStore(store); + * channel.unbindStore(store); + * ``` + * @since v19.9.0 + * @experimental + * @param store The store to unbind from the channel. + * @return `true` if the store was found, `false` otherwise. + */ + unbindStore(store: any): void; + /** + * Applies the given data to any AsyncLocalStorage instances bound to the channel + * for the duration of the given function, then publishes to the channel within + * the scope of that data is applied to the stores. + * + * If a transform function was given to `channel.bindStore(store)` it will be + * applied to transform the message data before it becomes the context value for + * the store. The prior storage context is accessible from within the transform + * function in cases where context linking is required. + * + * The context applied to the store should be accessible in any async code which + * continues from execution which began during the given function, however + * there are some situations in which `context loss` may occur. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const store = new AsyncLocalStorage(); + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.bindStore(store, (message) => { + * const parent = store.getStore(); + * return new Span(message, parent); + * }); + * channel.runStores({ some: 'message' }, () => { + * store.getStore(); // Span({ some: 'message' }) + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param context Message to send to subscribers and bind to stores + * @param fn Handler to run within the entered storage context + * @param thisArg The receiver to be used for the function call. + * @param args Optional arguments to pass to the function. + */ + runStores(): void; + } + interface TracingChannelSubscribers { + start: (message: ContextType) => void; + end: ( + message: ContextType & { + error?: unknown; + result?: unknown; + }, + ) => void; + asyncStart: ( + message: ContextType & { + error?: unknown; + result?: unknown; + }, + ) => void; + asyncEnd: ( + message: ContextType & { + error?: unknown; + result?: unknown; + }, + ) => void; + error: ( + message: ContextType & { + error: unknown; + }, + ) => void; + } + interface TracingChannelCollection { + start: Channel; + end: Channel; + asyncStart: Channel; + asyncEnd: Channel; + error: Channel; + } + /** + * The class `TracingChannel` is a collection of `TracingChannel Channels` which + * together express a single traceable action. It is used to formalize and + * simplify the process of producing events for tracing application flow.{@link tracingChannel} is used to construct a`TracingChannel`. As with `Channel` it is recommended to create and reuse a + * single `TracingChannel` at the top-level of the file rather than creating them + * dynamically. + * @since v19.9.0 + * @experimental + */ + class TracingChannel implements TracingChannelCollection { + start: Channel; + end: Channel; + asyncStart: Channel; + asyncEnd: Channel; + error: Channel; + /** + * Helper to subscribe a collection of functions to the corresponding channels. + * This is the same as calling `channel.subscribe(onMessage)` on each channel + * individually. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.subscribe({ + * start(message) { + * // Handle start message + * }, + * end(message) { + * // Handle end message + * }, + * asyncStart(message) { + * // Handle asyncStart message + * }, + * asyncEnd(message) { + * // Handle asyncEnd message + * }, + * error(message) { + * // Handle error message + * }, + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param subscribers Set of `TracingChannel Channels` subscribers + */ + subscribe(subscribers: TracingChannelSubscribers): void; + /** + * Helper to unsubscribe a collection of functions from the corresponding channels. + * This is the same as calling `channel.unsubscribe(onMessage)` on each channel + * individually. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.unsubscribe({ + * start(message) { + * // Handle start message + * }, + * end(message) { + * // Handle end message + * }, + * asyncStart(message) { + * // Handle asyncStart message + * }, + * asyncEnd(message) { + * // Handle asyncEnd message + * }, + * error(message) { + * // Handle error message + * }, + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param subscribers Set of `TracingChannel Channels` subscribers + * @return `true` if all handlers were successfully unsubscribed, and `false` otherwise. + */ + unsubscribe(subscribers: TracingChannelSubscribers): void; + /** + * Trace a synchronous function call. This will always produce a `start event` and `end event` around the execution and may produce an `error event` if the given function throws an error. + * This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all + * events should have any bound stores set to match this trace context. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.traceSync(() => { + * // Do something + * }, { + * some: 'thing', + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param fn Function to wrap a trace around + * @param context Shared object to correlate events through + * @param thisArg The receiver to be used for the function call + * @param args Optional arguments to pass to the function + * @return The return value of the given function + */ + traceSync( + fn: (this: ThisArg, ...args: Args) => any, + context?: ContextType, + thisArg?: ThisArg, + ...args: Args + ): void; + /** + * Trace a promise-returning function call. This will always produce a `start event` and `end event` around the synchronous portion of the + * function execution, and will produce an `asyncStart event` and `asyncEnd event` when a promise continuation is reached. It may also + * produce an `error event` if the given function throws an error or the + * returned promise rejects. This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all + * events should have any bound stores set to match this trace context. 
+ * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.tracePromise(async () => { + * // Do something + * }, { + * some: 'thing', + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param fn Promise-returning function to wrap a trace around + * @param context Shared object to correlate trace events through + * @param thisArg The receiver to be used for the function call + * @param args Optional arguments to pass to the function + * @return Chained from promise returned by the given function + */ + tracePromise( + fn: (this: ThisArg, ...args: Args) => Promise, + context?: ContextType, + thisArg?: ThisArg, + ...args: Args + ): void; + /** + * Trace a callback-receiving function call. This will always produce a `start event` and `end event` around the synchronous portion of the + * function execution, and will produce a `asyncStart event` and `asyncEnd event` around the callback execution. It may also produce an `error event` if the given function throws an error or + * the returned + * promise rejects. This will run the given function using `channel.runStores(context, ...)` on the `start` channel which ensures all + * events should have any bound stores set to match this trace context. + * + * The `position` will be -1 by default to indicate the final argument should + * be used as the callback. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * + * channels.traceCallback((arg1, callback) => { + * // Do something + * callback(null, 'result'); + * }, 1, { + * some: 'thing', + * }, thisArg, arg1, callback); + * ``` + * + * The callback will also be run with `channel.runStores(context, ...)` which + * enables context loss recovery in some cases. + * + * ```js + * import diagnostics_channel from 'node:diagnostics_channel'; + * import { AsyncLocalStorage } from 'node:async_hooks'; + * + * const channels = diagnostics_channel.tracingChannel('my-channel'); + * const myStore = new AsyncLocalStorage(); + * + * // The start channel sets the initial store data to something + * // and stores that store data value on the trace context object + * channels.start.bindStore(myStore, (data) => { + * const span = new Span(data); + * data.span = span; + * return span; + * }); + * + * // Then asyncStart can restore from that data it stored previously + * channels.asyncStart.bindStore(myStore, (data) => { + * return data.span; + * }); + * ``` + * @since v19.9.0 + * @experimental + * @param fn callback using function to wrap a trace around + * @param position Zero-indexed argument position of expected callback + * @param context Shared object to correlate trace events through + * @param thisArg The receiver to be used for the function call + * @param args Optional arguments to pass to the function + * @return The return value of the given function + */ + traceCallback any>( + fn: Fn, + position: number | undefined, + context: ContextType | undefined, + thisArg: any, + ...args: Parameters + ): void; + } +} +declare module "node:diagnostics_channel" { + export * from "diagnostics_channel"; +} diff --git a/dist/node_modules/@types/node/ts4.8/dns.d.ts b/dist/node_modules/@types/node/ts4.8/dns.d.ts new file mode 100644 index 0000000..380cf7d --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/dns.d.ts @@ -0,0 +1,809 @@ +/** + * The `node:dns` module enables name resolution. 
For example, use it to look up IP + * addresses of host names. + * + * Although named for the [Domain Name System (DNS)](https://en.wikipedia.org/wiki/Domain_Name_System), it does not always use the + * DNS protocol for lookups. {@link lookup} uses the operating system + * facilities to perform name resolution. It may not need to perform any network + * communication. To perform name resolution the way other applications on the same + * system do, use {@link lookup}. + * + * ```js + * const dns = require('node:dns'); + * + * dns.lookup('example.org', (err, address, family) => { + * console.log('address: %j family: IPv%s', address, family); + * }); + * // address: "93.184.216.34" family: IPv4 + * ``` + * + * All other functions in the `node:dns` module connect to an actual DNS server to + * perform name resolution. They will always use the network to perform DNS + * queries. These functions do not use the same set of configuration files used by {@link lookup} (e.g. `/etc/hosts`). Use these functions to always perform + * DNS queries, bypassing other name-resolution facilities. + * + * ```js + * const dns = require('node:dns'); + * + * dns.resolve4('archive.org', (err, addresses) => { + * if (err) throw err; + * + * console.log(`addresses: ${JSON.stringify(addresses)}`); + * + * addresses.forEach((a) => { + * dns.reverse(a, (err, hostnames) => { + * if (err) { + * throw err; + * } + * console.log(`reverse for ${a}: ${JSON.stringify(hostnames)}`); + * }); + * }); + * }); + * ``` + * + * See the `Implementation considerations section` for more information. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/dns.js) + */ +declare module "dns" { + import * as dnsPromises from "node:dns/promises"; + // Supported getaddrinfo flags. + export const ADDRCONFIG: number; + export const V4MAPPED: number; + /** + * If `dns.V4MAPPED` is specified, return resolved IPv6 addresses as + * well as IPv4 mapped IPv6 addresses. + */ + export const ALL: number; + export interface LookupOptions { + family?: number | undefined; + hints?: number | undefined; + all?: boolean | undefined; + /** + * @default true + */ + verbatim?: boolean | undefined; + } + export interface LookupOneOptions extends LookupOptions { + all?: false | undefined; + } + export interface LookupAllOptions extends LookupOptions { + all: true; + } + export interface LookupAddress { + address: string; + family: number; + } + /** + * Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or + * AAAA (IPv6) record. All `option` properties are optional. If `options` is an + * integer, then it must be `4` or `6` – if `options` is `0` or not provided, then + * IPv4 and IPv6 addresses are both returned if found. + * + * With the `all` option set to `true`, the arguments for `callback` change to`(err, addresses)`, with `addresses` being an array of objects with the + * properties `address` and `family`. + * + * On error, `err` is an `Error` object, where `err.code` is the error code. + * Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when + * the host name does not exist but also when the lookup fails in other ways + * such as no available file descriptors. + * + * `dns.lookup()` does not necessarily have anything to do with the DNS protocol. + * The implementation uses an operating system facility that can associate names + * with addresses and vice versa. This implementation can have subtle but + * important consequences on the behavior of any Node.js program. 
Please take some + * time to consult the `Implementation considerations section` before using`dns.lookup()`. + * + * Example usage: + * + * ```js + * const dns = require('node:dns'); + * const options = { + * family: 6, + * hints: dns.ADDRCONFIG | dns.V4MAPPED, + * }; + * dns.lookup('example.com', options, (err, address, family) => + * console.log('address: %j family: IPv%s', address, family)); + * // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6 + * + * // When options.all is true, the result will be an Array. + * options.all = true; + * dns.lookup('example.com', options, (err, addresses) => + * console.log('addresses: %j', addresses)); + * // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}] + * ``` + * + * If this method is invoked as its `util.promisify()` ed version, and `all`is not set to `true`, it returns a `Promise` for an `Object` with `address` and`family` properties. + * @since v0.1.90 + */ + export function lookup( + hostname: string, + family: number, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ): void; + export function lookup( + hostname: string, + options: LookupOneOptions, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ): void; + export function lookup( + hostname: string, + options: LookupAllOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: LookupAddress[]) => void, + ): void; + export function lookup( + hostname: string, + options: LookupOptions, + callback: (err: NodeJS.ErrnoException | null, address: string | LookupAddress[], family: number) => void, + ): void; + export function lookup( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void, + ): void; + export namespace lookup { + function __promisify__(hostname: string, options: LookupAllOptions): Promise; + function __promisify__(hostname: string, options?: LookupOneOptions | number): Promise; + function __promisify__(hostname: string, options: LookupOptions): Promise; + } + /** + * Resolves the given `address` and `port` into a host name and service using + * the operating system's underlying `getnameinfo` implementation. + * + * If `address` is not a valid IP address, a `TypeError` will be thrown. + * The `port` will be coerced to a number. If it is not a legal port, a `TypeError`will be thrown. + * + * On an error, `err` is an `Error` object, where `err.code` is the error code. + * + * ```js + * const dns = require('node:dns'); + * dns.lookupService('127.0.0.1', 22, (err, hostname, service) => { + * console.log(hostname, service); + * // Prints: localhost ssh + * }); + * ``` + * + * If this method is invoked as its `util.promisify()` ed version, it returns a`Promise` for an `Object` with `hostname` and `service` properties. + * @since v0.11.14 + */ + export function lookupService( + address: string, + port: number, + callback: (err: NodeJS.ErrnoException | null, hostname: string, service: string) => void, + ): void; + export namespace lookupService { + function __promisify__( + address: string, + port: number, + ): Promise<{ + hostname: string; + service: string; + }>; + } + export interface ResolveOptions { + ttl: boolean; + } + export interface ResolveWithTtlOptions extends ResolveOptions { + ttl: true; + } + export interface RecordWithTtl { + address: string; + ttl: number; + } + /** @deprecated Use `AnyARecord` or `AnyAaaaRecord` instead. 
*/ + export type AnyRecordWithTtl = AnyARecord | AnyAaaaRecord; + export interface AnyARecord extends RecordWithTtl { + type: "A"; + } + export interface AnyAaaaRecord extends RecordWithTtl { + type: "AAAA"; + } + export interface CaaRecord { + critical: number; + issue?: string | undefined; + issuewild?: string | undefined; + iodef?: string | undefined; + contactemail?: string | undefined; + contactphone?: string | undefined; + } + export interface MxRecord { + priority: number; + exchange: string; + } + export interface AnyMxRecord extends MxRecord { + type: "MX"; + } + export interface NaptrRecord { + flags: string; + service: string; + regexp: string; + replacement: string; + order: number; + preference: number; + } + export interface AnyNaptrRecord extends NaptrRecord { + type: "NAPTR"; + } + export interface SoaRecord { + nsname: string; + hostmaster: string; + serial: number; + refresh: number; + retry: number; + expire: number; + minttl: number; + } + export interface AnySoaRecord extends SoaRecord { + type: "SOA"; + } + export interface SrvRecord { + priority: number; + weight: number; + port: number; + name: string; + } + export interface AnySrvRecord extends SrvRecord { + type: "SRV"; + } + export interface AnyTxtRecord { + type: "TXT"; + entries: string[]; + } + export interface AnyNsRecord { + type: "NS"; + value: string; + } + export interface AnyPtrRecord { + type: "PTR"; + value: string; + } + export interface AnyCnameRecord { + type: "CNAME"; + value: string; + } + export type AnyRecord = + | AnyARecord + | AnyAaaaRecord + | AnyCnameRecord + | AnyMxRecord + | AnyNaptrRecord + | AnyNsRecord + | AnyPtrRecord + | AnySoaRecord + | AnySrvRecord + | AnyTxtRecord; + /** + * Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array + * of the resource records. The `callback` function has arguments`(err, records)`. When successful, `records` will be an array of resource + * records. The type and structure of individual results varies based on `rrtype`: + * + * + * + * On error, `err` is an `Error` object, where `err.code` is one of the `DNS error codes`. + * @since v0.1.27 + * @param hostname Host name to resolve. + * @param [rrtype='A'] Resource record type. 
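+ *
+ * An illustrative sketch (not from the official docs) of passing an explicit
+ * `rrtype`; the host name is a placeholder:
+ *
+ * ```js
+ * const dns = require('node:dns');
+ *
+ * // With rrtype 'MX' the callback receives an array of { priority, exchange } objects.
+ * dns.resolve('example.org', 'MX', (err, records) => {
+ *   if (err) throw err;
+ *   console.log(records);
+ * });
+ * ```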
+ */ + export function resolve( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "A", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "AAAA", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "ANY", + callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "CNAME", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "MX", + callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "NAPTR", + callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "NS", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "PTR", + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "SOA", + callback: (err: NodeJS.ErrnoException | null, addresses: SoaRecord) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "SRV", + callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: "TXT", + callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void, + ): void; + export function resolve( + hostname: string, + rrtype: string, + callback: ( + err: NodeJS.ErrnoException | null, + addresses: string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][] | AnyRecord[], + ) => void, + ): void; + export namespace resolve { + function __promisify__(hostname: string, rrtype?: "A" | "AAAA" | "CNAME" | "NS" | "PTR"): Promise; + function __promisify__(hostname: string, rrtype: "ANY"): Promise; + function __promisify__(hostname: string, rrtype: "MX"): Promise; + function __promisify__(hostname: string, rrtype: "NAPTR"): Promise; + function __promisify__(hostname: string, rrtype: "SOA"): Promise; + function __promisify__(hostname: string, rrtype: "SRV"): Promise; + function __promisify__(hostname: string, rrtype: "TXT"): Promise; + function __promisify__( + hostname: string, + rrtype: string, + ): Promise; + } + /** + * Uses the DNS protocol to resolve a IPv4 addresses (`A` records) for the`hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of IPv4 addresses (e.g.`['74.125.79.104', '74.125.79.105', '74.125.79.106']`). + * @since v0.1.16 + * @param hostname Host name to resolve. 
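+ *
+ * A short sketch (illustrative) of requesting TTL data via the `ttl` option;
+ * the host name is a placeholder:
+ *
+ * ```js
+ * const dns = require('node:dns');
+ *
+ * // With { ttl: true } each entry is an { address, ttl } object instead of a string.
+ * dns.resolve4('example.org', { ttl: true }, (err, records) => {
+ *   if (err) throw err;
+ *   for (const { address, ttl } of records) {
+ *     console.log(`${address} (ttl ${ttl})`);
+ *   }
+ * });
+ * ```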
+ */ + export function resolve4( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve4( + hostname: string, + options: ResolveWithTtlOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void, + ): void; + export function resolve4( + hostname: string, + options: ResolveOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void, + ): void; + export namespace resolve4 { + function __promisify__(hostname: string): Promise; + function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise; + function __promisify__(hostname: string, options?: ResolveOptions): Promise; + } + /** + * Uses the DNS protocol to resolve IPv6 addresses (`AAAA` records) for the`hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of IPv6 addresses. + * @since v0.1.16 + * @param hostname Host name to resolve. + */ + export function resolve6( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export function resolve6( + hostname: string, + options: ResolveWithTtlOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void, + ): void; + export function resolve6( + hostname: string, + options: ResolveOptions, + callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void, + ): void; + export namespace resolve6 { + function __promisify__(hostname: string): Promise; + function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise; + function __promisify__(hostname: string, options?: ResolveOptions): Promise; + } + /** + * Uses the DNS protocol to resolve `CNAME` records for the `hostname`. The`addresses` argument passed to the `callback` function + * will contain an array of canonical name records available for the `hostname`(e.g. `['bar.example.com']`). + * @since v0.3.2 + */ + export function resolveCname( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export namespace resolveCname { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve `CAA` records for the `hostname`. The`addresses` argument passed to the `callback` function + * will contain an array of certification authority authorization records + * available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'}, {critical: 128, issue: 'pki.example.com'}]`). + * @since v15.0.0, v14.17.0 + */ + export function resolveCaa( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, records: CaaRecord[]) => void, + ): void; + export namespace resolveCaa { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve mail exchange records (`MX` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * contain an array of objects containing both a `priority` and `exchange`property (e.g. `[{priority: 10, exchange: 'mx.example.com'}, ...]`). + * @since v0.1.27 + */ + export function resolveMx( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void, + ): void; + export namespace resolveMx { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve regular expression-based records (`NAPTR`records) for the `hostname`. 
The `addresses` argument passed to the `callback`function will contain an array of + * objects with the following properties: + * + * * `flags` + * * `service` + * * `regexp` + * * `replacement` + * * `order` + * * `preference` + * + * ```js + * { + * flags: 's', + * service: 'SIP+D2U', + * regexp: '', + * replacement: '_sip._udp.example.com', + * order: 30, + * preference: 100 + * } + * ``` + * @since v0.9.12 + */ + export function resolveNaptr( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void, + ): void; + export namespace resolveNaptr { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve name server records (`NS` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * contain an array of name server records available for `hostname`(e.g. `['ns1.example.com', 'ns2.example.com']`). + * @since v0.1.90 + */ + export function resolveNs( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export namespace resolveNs { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve pointer records (`PTR` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * be an array of strings containing the reply records. + * @since v6.0.0 + */ + export function resolvePtr( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void, + ): void; + export namespace resolvePtr { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve a start of authority record (`SOA` record) for + * the `hostname`. The `address` argument passed to the `callback` function will + * be an object with the following properties: + * + * * `nsname` + * * `hostmaster` + * * `serial` + * * `refresh` + * * `retry` + * * `expire` + * * `minttl` + * + * ```js + * { + * nsname: 'ns.example.com', + * hostmaster: 'root.example.com', + * serial: 2013101809, + * refresh: 10000, + * retry: 2400, + * expire: 604800, + * minttl: 3600 + * } + * ``` + * @since v0.11.10 + */ + export function resolveSoa( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, address: SoaRecord) => void, + ): void; + export namespace resolveSoa { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve service records (`SRV` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * be an array of objects with the following properties: + * + * * `priority` + * * `weight` + * * `port` + * * `name` + * + * ```js + * { + * priority: 10, + * weight: 5, + * port: 21223, + * name: 'service.example.com' + * } + * ``` + * @since v0.1.27 + */ + export function resolveSrv( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void, + ): void; + export namespace resolveSrv { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve text queries (`TXT` records) for the`hostname`. The `records` argument passed to the `callback` function is a + * two-dimensional array of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of + * one record. Depending on the use case, these could be either joined together or + * treated separately. 
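+ *
+ * For instance (an illustrative sketch, not from the official docs), the chunks
+ * of each record can be joined before use; the host name is a placeholder:
+ *
+ * ```js
+ * const dns = require('node:dns');
+ *
+ * dns.resolveTxt('example.org', (err, records) => {
+ *   if (err) throw err;
+ *   // Each entry is the array of chunks that make up one TXT record.
+ *   const joined = records.map((chunks) => chunks.join(''));
+ *   console.log(joined);
+ * });
+ * ```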
+ * @since v0.1.27 + */ + export function resolveTxt( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void, + ): void; + export namespace resolveTxt { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query). + * The `ret` argument passed to the `callback` function will be an array containing + * various types of records. Each object has a property `type` that indicates the + * type of the current record. And depending on the `type`, additional properties + * will be present on the object: + * + * + * + * Here is an example of the `ret` object passed to the callback: + * + * ```js + * [ { type: 'A', address: '127.0.0.1', ttl: 299 }, + * { type: 'CNAME', value: 'example.com' }, + * { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 }, + * { type: 'NS', value: 'ns1.example.com' }, + * { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] }, + * { type: 'SOA', + * nsname: 'ns1.example.com', + * hostmaster: 'admin.example.com', + * serial: 156696742, + * refresh: 900, + * retry: 900, + * expire: 1800, + * minttl: 60 } ] + * ``` + * + * DNS server operators may choose not to respond to `ANY`queries. It may be better to call individual methods like {@link resolve4},{@link resolveMx}, and so on. For more details, see [RFC + * 8482](https://tools.ietf.org/html/rfc8482). + */ + export function resolveAny( + hostname: string, + callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void, + ): void; + export namespace resolveAny { + function __promisify__(hostname: string): Promise; + } + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an + * array of host names. + * + * On error, `err` is an `Error` object, where `err.code` is + * one of the `DNS error codes`. + * @since v0.1.16 + */ + export function reverse( + ip: string, + callback: (err: NodeJS.ErrnoException | null, hostnames: string[]) => void, + ): void; + /** + * Get the default value for `verbatim` in {@link lookup} and `dnsPromises.lookup()`. The value could be: + * + * * `ipv4first`: for `verbatim` defaulting to `false`. + * * `verbatim`: for `verbatim` defaulting to `true`. + * @since v20.1.0 + */ + export function getDefaultResultOrder(): "ipv4first" | "verbatim"; + /** + * Sets the IP address and port of servers to be used when performing DNS + * resolution. The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted + * addresses. If the port is the IANA default DNS port (53) it can be omitted. + * + * ```js + * dns.setServers([ + * '4.4.4.4', + * '[2001:4860:4860::8888]', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ]); + * ``` + * + * An error will be thrown if an invalid address is provided. + * + * The `dns.setServers()` method must not be called while a DNS query is in + * progress. + * + * The {@link setServers} method affects only {@link resolve},`dns.resolve*()` and {@link reverse} (and specifically _not_ {@link lookup}). + * + * This method works much like [resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html). + * That is, if attempting to resolve with the first server provided results in a`NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with + * subsequent servers provided. Fallback DNS servers will only be used if the + * earlier ones time out or result in some other error. 
+ * @since v0.11.3 + * @param servers array of `RFC 5952` formatted addresses + */ + export function setServers(servers: readonly string[]): void; + /** + * Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6), + * that are currently configured for DNS resolution. A string will include a port + * section if a custom port is used. + * + * ```js + * [ + * '4.4.4.4', + * '2001:4860:4860::8888', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ] + * ``` + * @since v0.11.3 + */ + export function getServers(): string[]; + /** + * Set the default value of `verbatim` in {@link lookup} and `dnsPromises.lookup()`. The value could be: + * + * * `ipv4first`: sets default `verbatim` `false`. + * * `verbatim`: sets default `verbatim` `true`. + * + * The default is `verbatim` and {@link setDefaultResultOrder} have higher + * priority than `--dns-result-order`. When using `worker threads`,{@link setDefaultResultOrder} from the main thread won't affect the default + * dns orders in workers. + * @since v16.4.0, v14.18.0 + * @param order must be `'ipv4first'` or `'verbatim'`. + */ + export function setDefaultResultOrder(order: "ipv4first" | "verbatim"): void; + // Error codes + export const NODATA: string; + export const FORMERR: string; + export const SERVFAIL: string; + export const NOTFOUND: string; + export const NOTIMP: string; + export const REFUSED: string; + export const BADQUERY: string; + export const BADNAME: string; + export const BADFAMILY: string; + export const BADRESP: string; + export const CONNREFUSED: string; + export const TIMEOUT: string; + export const EOF: string; + export const FILE: string; + export const NOMEM: string; + export const DESTRUCTION: string; + export const BADSTR: string; + export const BADFLAGS: string; + export const NONAME: string; + export const BADHINTS: string; + export const NOTINITIALIZED: string; + export const LOADIPHLPAPI: string; + export const ADDRGETNETWORKPARAMS: string; + export const CANCELLED: string; + export interface ResolverOptions { + timeout?: number | undefined; + /** + * @default 4 + */ + tries?: number; + } + /** + * An independent resolver for DNS requests. + * + * Creating a new resolver uses the default server settings. Setting + * the servers used for a resolver using `resolver.setServers()` does not affect + * other resolvers: + * + * ```js + * const { Resolver } = require('node:dns'); + * const resolver = new Resolver(); + * resolver.setServers(['4.4.4.4']); + * + * // This request will use the server at 4.4.4.4, independent of global settings. + * resolver.resolve4('example.org', (err, addresses) => { + * // ... + * }); + * ``` + * + * The following methods from the `node:dns` module are available: + * + * * `resolver.getServers()` + * * `resolver.resolve()` + * * `resolver.resolve4()` + * * `resolver.resolve6()` + * * `resolver.resolveAny()` + * * `resolver.resolveCaa()` + * * `resolver.resolveCname()` + * * `resolver.resolveMx()` + * * `resolver.resolveNaptr()` + * * `resolver.resolveNs()` + * * `resolver.resolvePtr()` + * * `resolver.resolveSoa()` + * * `resolver.resolveSrv()` + * * `resolver.resolveTxt()` + * * `resolver.reverse()` + * * `resolver.setServers()` + * @since v8.3.0 + */ + export class Resolver { + constructor(options?: ResolverOptions); + /** + * Cancel all outstanding DNS queries made by this resolver. The corresponding + * callbacks will be called with an error with code `ECANCELLED`. 
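+ *
+ * A sketch of cancelling in-flight queries (illustrative only; the host name is
+ * a placeholder):
+ *
+ * ```js
+ * const { Resolver } = require('node:dns');
+ *
+ * const resolver = new Resolver();
+ * resolver.resolve4('example.org', (err, addresses) => {
+ *   if (err && err.code === 'ECANCELLED') {
+ *     // The query was cancelled before a response arrived.
+ *     return;
+ *   }
+ *   // ...
+ * });
+ *
+ * // Abort everything still outstanding on this resolver.
+ * resolver.cancel();
+ * ```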
+ * @since v8.3.0 + */ + cancel(): void; + getServers: typeof getServers; + resolve: typeof resolve; + resolve4: typeof resolve4; + resolve6: typeof resolve6; + resolveAny: typeof resolveAny; + resolveCaa: typeof resolveCaa; + resolveCname: typeof resolveCname; + resolveMx: typeof resolveMx; + resolveNaptr: typeof resolveNaptr; + resolveNs: typeof resolveNs; + resolvePtr: typeof resolvePtr; + resolveSoa: typeof resolveSoa; + resolveSrv: typeof resolveSrv; + resolveTxt: typeof resolveTxt; + reverse: typeof reverse; + /** + * The resolver instance will send its requests from the specified IP address. + * This allows programs to specify outbound interfaces when used on multi-homed + * systems. + * + * If a v4 or v6 address is not specified, it is set to the default and the + * operating system will choose a local address automatically. + * + * The resolver will use the v4 local address when making requests to IPv4 DNS + * servers, and the v6 local address when making requests to IPv6 DNS servers. + * The `rrtype` of resolution requests has no impact on the local address used. + * @since v15.1.0, v14.17.0 + * @param [ipv4='0.0.0.0'] A string representation of an IPv4 address. + * @param [ipv6='::0'] A string representation of an IPv6 address. + */ + setLocalAddress(ipv4?: string, ipv6?: string): void; + setServers: typeof setServers; + } + export { dnsPromises as promises }; +} +declare module "node:dns" { + export * from "dns"; +} diff --git a/dist/node_modules/@types/node/ts4.8/dns/promises.d.ts b/dist/node_modules/@types/node/ts4.8/dns/promises.d.ts new file mode 100644 index 0000000..ef9b222 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/dns/promises.d.ts @@ -0,0 +1,425 @@ +/** + * The `dns.promises` API provides an alternative set of asynchronous DNS methods + * that return `Promise` objects rather than using callbacks. The API is accessible + * via `require('node:dns').promises` or `require('node:dns/promises')`. + * @since v10.6.0 + */ +declare module "dns/promises" { + import { + AnyRecord, + CaaRecord, + LookupAddress, + LookupAllOptions, + LookupOneOptions, + LookupOptions, + MxRecord, + NaptrRecord, + RecordWithTtl, + ResolveOptions, + ResolverOptions, + ResolveWithTtlOptions, + SoaRecord, + SrvRecord, + } from "node:dns"; + /** + * Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6), + * that are currently configured for DNS resolution. A string will include a port + * section if a custom port is used. + * + * ```js + * [ + * '4.4.4.4', + * '2001:4860:4860::8888', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ] + * ``` + * @since v10.6.0 + */ + function getServers(): string[]; + /** + * Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or + * AAAA (IPv6) record. All `option` properties are optional. If `options` is an + * integer, then it must be `4` or `6` – if `options` is not provided, then IPv4 + * and IPv6 addresses are both returned if found. + * + * With the `all` option set to `true`, the `Promise` is resolved with `addresses`being an array of objects with the properties `address` and `family`. + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is the error code. + * Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when + * the host name does not exist but also when the lookup fails in other ways + * such as no available file descriptors. 
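+ *
+ * As an illustrative sketch (an assumption, not part of the official docs), the
+ * rejection can be handled explicitly; the host name is a placeholder:
+ *
+ * ```js
+ * const dnsPromises = require('node:dns').promises;
+ *
+ * dnsPromises.lookup('example.org').catch((err) => {
+ *   // 'ENOTFOUND' covers both unknown hosts and other lookup failures.
+ *   if (err.code === 'ENOTFOUND') console.error('lookup failed');
+ * });
+ * ```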
+ * + * `dnsPromises.lookup()` does not necessarily have anything to do with the DNS + * protocol. The implementation uses an operating system facility that can + * associate names with addresses and vice versa. This implementation can have + * subtle but important consequences on the behavior of any Node.js program. Please + * take some time to consult the `Implementation considerations section` before + * using `dnsPromises.lookup()`. + * + * Example usage: + * + * ```js + * const dns = require('node:dns'); + * const dnsPromises = dns.promises; + * const options = { + * family: 6, + * hints: dns.ADDRCONFIG | dns.V4MAPPED, + * }; + * + * dnsPromises.lookup('example.com', options).then((result) => { + * console.log('address: %j family: IPv%s', result.address, result.family); + * // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6 + * }); + * + * // When options.all is true, the result will be an Array. + * options.all = true; + * dnsPromises.lookup('example.com', options).then((result) => { + * console.log('addresses: %j', result); + * // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}] + * }); + * ``` + * @since v10.6.0 + */ + function lookup(hostname: string, family: number): Promise; + function lookup(hostname: string, options: LookupOneOptions): Promise; + function lookup(hostname: string, options: LookupAllOptions): Promise; + function lookup(hostname: string, options: LookupOptions): Promise; + function lookup(hostname: string): Promise; + /** + * Resolves the given `address` and `port` into a host name and service using + * the operating system's underlying `getnameinfo` implementation. + * + * If `address` is not a valid IP address, a `TypeError` will be thrown. + * The `port` will be coerced to a number. If it is not a legal port, a `TypeError`will be thrown. + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is the error code. + * + * ```js + * const dnsPromises = require('node:dns').promises; + * dnsPromises.lookupService('127.0.0.1', 22).then((result) => { + * console.log(result.hostname, result.service); + * // Prints: localhost ssh + * }); + * ``` + * @since v10.6.0 + */ + function lookupService( + address: string, + port: number, + ): Promise<{ + hostname: string; + service: string; + }>; + /** + * Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array + * of the resource records. When successful, the `Promise` is resolved with an + * array of resource records. The type and structure of individual results vary + * based on `rrtype`: + * + * + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is one of the `DNS error codes`. + * @since v10.6.0 + * @param hostname Host name to resolve. + * @param [rrtype='A'] Resource record type. 
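+ *
+ * A brief sketch (illustrative, not from the official docs) of selecting a
+ * record type; the host name is a placeholder:
+ *
+ * ```js
+ * const dnsPromises = require('node:dns').promises;
+ *
+ * // rrtype 'MX' resolves to an array of { priority, exchange } objects.
+ * dnsPromises.resolve('example.org', 'MX').then((records) => {
+ *   console.log(records);
+ * });
+ * ```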
+ */ + function resolve(hostname: string): Promise; + function resolve(hostname: string, rrtype: "A"): Promise; + function resolve(hostname: string, rrtype: "AAAA"): Promise; + function resolve(hostname: string, rrtype: "ANY"): Promise; + function resolve(hostname: string, rrtype: "CAA"): Promise; + function resolve(hostname: string, rrtype: "CNAME"): Promise; + function resolve(hostname: string, rrtype: "MX"): Promise; + function resolve(hostname: string, rrtype: "NAPTR"): Promise; + function resolve(hostname: string, rrtype: "NS"): Promise; + function resolve(hostname: string, rrtype: "PTR"): Promise; + function resolve(hostname: string, rrtype: "SOA"): Promise; + function resolve(hostname: string, rrtype: "SRV"): Promise; + function resolve(hostname: string, rrtype: "TXT"): Promise; + function resolve( + hostname: string, + rrtype: string, + ): Promise; + /** + * Uses the DNS protocol to resolve IPv4 addresses (`A` records) for the`hostname`. On success, the `Promise` is resolved with an array of IPv4 + * addresses (e.g. `['74.125.79.104', '74.125.79.105', '74.125.79.106']`). + * @since v10.6.0 + * @param hostname Host name to resolve. + */ + function resolve4(hostname: string): Promise; + function resolve4(hostname: string, options: ResolveWithTtlOptions): Promise; + function resolve4(hostname: string, options: ResolveOptions): Promise; + /** + * Uses the DNS protocol to resolve IPv6 addresses (`AAAA` records) for the`hostname`. On success, the `Promise` is resolved with an array of IPv6 + * addresses. + * @since v10.6.0 + * @param hostname Host name to resolve. + */ + function resolve6(hostname: string): Promise; + function resolve6(hostname: string, options: ResolveWithTtlOptions): Promise; + function resolve6(hostname: string, options: ResolveOptions): Promise; + /** + * Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query). + * On success, the `Promise` is resolved with an array containing various types of + * records. Each object has a property `type` that indicates the type of the + * current record. And depending on the `type`, additional properties will be + * present on the object: + * + * + * + * Here is an example of the result object: + * + * ```js + * [ { type: 'A', address: '127.0.0.1', ttl: 299 }, + * { type: 'CNAME', value: 'example.com' }, + * { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 }, + * { type: 'NS', value: 'ns1.example.com' }, + * { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] }, + * { type: 'SOA', + * nsname: 'ns1.example.com', + * hostmaster: 'admin.example.com', + * serial: 156696742, + * refresh: 900, + * retry: 900, + * expire: 1800, + * minttl: 60 } ] + * ``` + * @since v10.6.0 + */ + function resolveAny(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve `CAA` records for the `hostname`. On success, + * the `Promise` is resolved with an array of objects containing available + * certification authority authorization records available for the `hostname`(e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'},{critical: 128, issue: 'pki.example.com'}]`). + * @since v15.0.0, v14.17.0 + */ + function resolveCaa(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve `CNAME` records for the `hostname`. On success, + * the `Promise` is resolved with an array of canonical name records available for + * the `hostname` (e.g. `['bar.example.com']`). 
+ * @since v10.6.0 + */ + function resolveCname(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve mail exchange records (`MX` records) for the`hostname`. On success, the `Promise` is resolved with an array of objects + * containing both a `priority` and `exchange` property (e.g.`[{priority: 10, exchange: 'mx.example.com'}, ...]`). + * @since v10.6.0 + */ + function resolveMx(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve regular expression-based records (`NAPTR`records) for the `hostname`. On success, the `Promise` is resolved with an array + * of objects with the following properties: + * + * * `flags` + * * `service` + * * `regexp` + * * `replacement` + * * `order` + * * `preference` + * + * ```js + * { + * flags: 's', + * service: 'SIP+D2U', + * regexp: '', + * replacement: '_sip._udp.example.com', + * order: 30, + * preference: 100 + * } + * ``` + * @since v10.6.0 + */ + function resolveNaptr(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve name server records (`NS` records) for the`hostname`. On success, the `Promise` is resolved with an array of name server + * records available for `hostname` (e.g.`['ns1.example.com', 'ns2.example.com']`). + * @since v10.6.0 + */ + function resolveNs(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve pointer records (`PTR` records) for the`hostname`. On success, the `Promise` is resolved with an array of strings + * containing the reply records. + * @since v10.6.0 + */ + function resolvePtr(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve a start of authority record (`SOA` record) for + * the `hostname`. On success, the `Promise` is resolved with an object with the + * following properties: + * + * * `nsname` + * * `hostmaster` + * * `serial` + * * `refresh` + * * `retry` + * * `expire` + * * `minttl` + * + * ```js + * { + * nsname: 'ns.example.com', + * hostmaster: 'root.example.com', + * serial: 2013101809, + * refresh: 10000, + * retry: 2400, + * expire: 604800, + * minttl: 3600 + * } + * ``` + * @since v10.6.0 + */ + function resolveSoa(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve service records (`SRV` records) for the`hostname`. On success, the `Promise` is resolved with an array of objects with + * the following properties: + * + * * `priority` + * * `weight` + * * `port` + * * `name` + * + * ```js + * { + * priority: 10, + * weight: 5, + * port: 21223, + * name: 'service.example.com' + * } + * ``` + * @since v10.6.0 + */ + function resolveSrv(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve text queries (`TXT` records) for the`hostname`. On success, the `Promise` is resolved with a two-dimensional array + * of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of + * one record. Depending on the use case, these could be either joined together or + * treated separately. + * @since v10.6.0 + */ + function resolveTxt(hostname: string): Promise; + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an + * array of host names. + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is one of the `DNS error codes`. + * @since v10.6.0 + */ + function reverse(ip: string): Promise; + /** + * Get the default value for `verbatim` in {@link lookup} and `dnsPromises.lookup()`. The value could be: + * + * * `ipv4first`: for `verbatim` defaulting to `false`. 
+ * * `verbatim`: for `verbatim` defaulting to `true`. + * @since v20.1.0 + */ + function getDefaultResultOrder(): "ipv4first" | "verbatim"; + /** + * Sets the IP address and port of servers to be used when performing DNS + * resolution. The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted + * addresses. If the port is the IANA default DNS port (53) it can be omitted. + * + * ```js + * dnsPromises.setServers([ + * '4.4.4.4', + * '[2001:4860:4860::8888]', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ]); + * ``` + * + * An error will be thrown if an invalid address is provided. + * + * The `dnsPromises.setServers()` method must not be called while a DNS query is in + * progress. + * + * This method works much like [resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html). + * That is, if attempting to resolve with the first server provided results in a`NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with + * subsequent servers provided. Fallback DNS servers will only be used if the + * earlier ones time out or result in some other error. + * @since v10.6.0 + * @param servers array of `RFC 5952` formatted addresses + */ + function setServers(servers: readonly string[]): void; + /** + * Set the default value of `verbatim` in `dns.lookup()` and `dnsPromises.lookup()`. The value could be: + * + * * `ipv4first`: sets default `verbatim` `false`. + * * `verbatim`: sets default `verbatim` `true`. + * + * The default is `verbatim` and `dnsPromises.setDefaultResultOrder()` have + * higher priority than `--dns-result-order`. When using `worker threads`,`dnsPromises.setDefaultResultOrder()` from the main thread won't affect the + * default dns orders in workers. + * @since v16.4.0, v14.18.0 + * @param order must be `'ipv4first'` or `'verbatim'`. + */ + function setDefaultResultOrder(order: "ipv4first" | "verbatim"): void; + /** + * An independent resolver for DNS requests. + * + * Creating a new resolver uses the default server settings. Setting + * the servers used for a resolver using `resolver.setServers()` does not affect + * other resolvers: + * + * ```js + * const { Resolver } = require('node:dns').promises; + * const resolver = new Resolver(); + * resolver.setServers(['4.4.4.4']); + * + * // This request will use the server at 4.4.4.4, independent of global settings. + * resolver.resolve4('example.org').then((addresses) => { + * // ... + * }); + * + * // Alternatively, the same code can be written using async-await style. 
+ * (async function() { + * const addresses = await resolver.resolve4('example.org'); + * })(); + * ``` + * + * The following methods from the `dnsPromises` API are available: + * + * * `resolver.getServers()` + * * `resolver.resolve()` + * * `resolver.resolve4()` + * * `resolver.resolve6()` + * * `resolver.resolveAny()` + * * `resolver.resolveCaa()` + * * `resolver.resolveCname()` + * * `resolver.resolveMx()` + * * `resolver.resolveNaptr()` + * * `resolver.resolveNs()` + * * `resolver.resolvePtr()` + * * `resolver.resolveSoa()` + * * `resolver.resolveSrv()` + * * `resolver.resolveTxt()` + * * `resolver.reverse()` + * * `resolver.setServers()` + * @since v10.6.0 + */ + class Resolver { + constructor(options?: ResolverOptions); + cancel(): void; + getServers: typeof getServers; + resolve: typeof resolve; + resolve4: typeof resolve4; + resolve6: typeof resolve6; + resolveAny: typeof resolveAny; + resolveCaa: typeof resolveCaa; + resolveCname: typeof resolveCname; + resolveMx: typeof resolveMx; + resolveNaptr: typeof resolveNaptr; + resolveNs: typeof resolveNs; + resolvePtr: typeof resolvePtr; + resolveSoa: typeof resolveSoa; + resolveSrv: typeof resolveSrv; + resolveTxt: typeof resolveTxt; + reverse: typeof reverse; + setLocalAddress(ipv4?: string, ipv6?: string): void; + setServers: typeof setServers; + } +} +declare module "node:dns/promises" { + export * from "dns/promises"; +} diff --git a/dist/node_modules/@types/node/ts4.8/dom-events.d.ts b/dist/node_modules/@types/node/ts4.8/dom-events.d.ts new file mode 100644 index 0000000..147a7b6 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/dom-events.d.ts @@ -0,0 +1,122 @@ +export {}; // Don't export anything! + +//// DOM-like Events +// NB: The Event / EventTarget / EventListener implementations below were copied +// from lib.dom.d.ts, then edited to reflect Node's documentation at +// https://nodejs.org/api/events.html#class-eventtarget. +// Please read that link to understand important implementation differences. + +// This conditional type will be the existing global Event in a browser, or +// the copy below in a Node environment. +type __Event = typeof globalThis extends { onmessage: any; Event: any } ? {} + : { + /** This is not used in Node.js and is provided purely for completeness. */ + readonly bubbles: boolean; + /** Alias for event.stopPropagation(). This is not used in Node.js and is provided purely for completeness. */ + cancelBubble: () => void; + /** True if the event was created with the cancelable option */ + readonly cancelable: boolean; + /** This is not used in Node.js and is provided purely for completeness. */ + readonly composed: boolean; + /** Returns an array containing the current EventTarget as the only entry or empty if the event is not being dispatched. This is not used in Node.js and is provided purely for completeness. */ + composedPath(): [EventTarget?]; + /** Alias for event.target. */ + readonly currentTarget: EventTarget | null; + /** Is true if cancelable is true and event.preventDefault() has been called. */ + readonly defaultPrevented: boolean; + /** This is not used in Node.js and is provided purely for completeness. */ + readonly eventPhase: 0 | 2; + /** The `AbortSignal` "abort" event is emitted with `isTrusted` set to `true`. The value is `false` in all other cases. */ + readonly isTrusted: boolean; + /** Sets the `defaultPrevented` property to `true` if `cancelable` is `true`. */ + preventDefault(): void; + /** This is not used in Node.js and is provided purely for completeness. 
*/ + returnValue: boolean; + /** Alias for event.target. */ + readonly srcElement: EventTarget | null; + /** Stops the invocation of event listeners after the current one completes. */ + stopImmediatePropagation(): void; + /** This is not used in Node.js and is provided purely for completeness. */ + stopPropagation(): void; + /** The `EventTarget` dispatching the event */ + readonly target: EventTarget | null; + /** The millisecond timestamp when the Event object was created. */ + readonly timeStamp: number; + /** Returns the type of event, e.g. "click", "hashchange", or "submit". */ + readonly type: string; + }; + +// See comment above explaining conditional type +type __EventTarget = typeof globalThis extends { onmessage: any; EventTarget: any } ? {} + : { + /** + * Adds a new handler for the `type` event. Any given `listener` is added only once per `type` and per `capture` option value. + * + * If the `once` option is true, the `listener` is removed after the next time a `type` event is dispatched. + * + * The `capture` option is not used by Node.js in any functional way other than tracking registered event listeners per the `EventTarget` specification. + * Specifically, the `capture` option is used as part of the key when registering a `listener`. + * Any individual `listener` may be added once with `capture = false`, and once with `capture = true`. + */ + addEventListener( + type: string, + listener: EventListener | EventListenerObject, + options?: AddEventListenerOptions | boolean, + ): void; + /** Dispatches a synthetic event event to target and returns true if either event's cancelable attribute value is false or its preventDefault() method was not invoked, and false otherwise. */ + dispatchEvent(event: Event): boolean; + /** Removes the event listener in target's event listener list with the same type, callback, and options. */ + removeEventListener( + type: string, + listener: EventListener | EventListenerObject, + options?: EventListenerOptions | boolean, + ): void; + }; + +interface EventInit { + bubbles?: boolean; + cancelable?: boolean; + composed?: boolean; +} + +interface EventListenerOptions { + /** Not directly used by Node.js. Added for API completeness. Default: `false`. */ + capture?: boolean; +} + +interface AddEventListenerOptions extends EventListenerOptions { + /** When `true`, the listener is automatically removed when it is first invoked. Default: `false`. */ + once?: boolean; + /** When `true`, serves as a hint that the listener will not call the `Event` object's `preventDefault()` method. Default: false. */ + passive?: boolean; +} + +interface EventListener { + (evt: Event): void; +} + +interface EventListenerObject { + handleEvent(object: Event): void; +} + +import {} from "events"; // Make this an ambient declaration +declare global { + /** An event which takes place in the DOM. */ + interface Event extends __Event {} + var Event: typeof globalThis extends { onmessage: any; Event: infer T } ? T + : { + prototype: __Event; + new(type: string, eventInitDict?: EventInit): __Event; + }; + + /** + * EventTarget is a DOM interface implemented by objects that can + * receive events and may have listeners for them. + */ + interface EventTarget extends __EventTarget {} + var EventTarget: typeof globalThis extends { onmessage: any; EventTarget: infer T } ? 
T + : { + prototype: __EventTarget; + new(): __EventTarget; + }; +} diff --git a/dist/node_modules/@types/node/ts4.8/domain.d.ts b/dist/node_modules/@types/node/ts4.8/domain.d.ts new file mode 100644 index 0000000..72f17bd --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/domain.d.ts @@ -0,0 +1,170 @@ +/** + * **This module is pending deprecation.** Once a replacement API has been + * finalized, this module will be fully deprecated. Most developers should + * **not** have cause to use this module. Users who absolutely must have + * the functionality that domains provide may rely on it for the time being + * but should expect to have to migrate to a different solution + * in the future. + * + * Domains provide a way to handle multiple different IO operations as a + * single group. If any of the event emitters or callbacks registered to a + * domain emit an `'error'` event, or throw an error, then the domain object + * will be notified, rather than losing the context of the error in the`process.on('uncaughtException')` handler, or causing the program to + * exit immediately with an error code. + * @deprecated Since v1.4.2 - Deprecated + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/domain.js) + */ +declare module "domain" { + import EventEmitter = require("node:events"); + /** + * The `Domain` class encapsulates the functionality of routing errors and + * uncaught exceptions to the active `Domain` object. + * + * To handle the errors that it catches, listen to its `'error'` event. + */ + class Domain extends EventEmitter { + /** + * An array of timers and event emitters that have been explicitly added + * to the domain. + */ + members: Array; + /** + * The `enter()` method is plumbing used by the `run()`, `bind()`, and`intercept()` methods to set the active domain. It sets `domain.active` and`process.domain` to the domain, and implicitly + * pushes the domain onto the domain + * stack managed by the domain module (see {@link exit} for details on the + * domain stack). The call to `enter()` delimits the beginning of a chain of + * asynchronous calls and I/O operations bound to a domain. + * + * Calling `enter()` changes only the active domain, and does not alter the domain + * itself. `enter()` and `exit()` can be called an arbitrary number of times on a + * single domain. + */ + enter(): void; + /** + * The `exit()` method exits the current domain, popping it off the domain stack. + * Any time execution is going to switch to the context of a different chain of + * asynchronous calls, it's important to ensure that the current domain is exited. + * The call to `exit()` delimits either the end of or an interruption to the chain + * of asynchronous calls and I/O operations bound to a domain. + * + * If there are multiple, nested domains bound to the current execution context,`exit()` will exit any domains nested within this domain. + * + * Calling `exit()` changes only the active domain, and does not alter the domain + * itself. `enter()` and `exit()` can be called an arbitrary number of times on a + * single domain. + */ + exit(): void; + /** + * Run the supplied function in the context of the domain, implicitly + * binding all event emitters, timers, and low-level requests that are + * created in that context. Optionally, arguments can be passed to + * the function. + * + * This is the most basic way to use a domain. 
+ * + * ```js + * const domain = require('node:domain'); + * const fs = require('node:fs'); + * const d = domain.create(); + * d.on('error', (er) => { + * console.error('Caught error!', er); + * }); + * d.run(() => { + * process.nextTick(() => { + * setTimeout(() => { // Simulating some various async stuff + * fs.open('non-existent file', 'r', (er, fd) => { + * if (er) throw er; + * // proceed... + * }); + * }, 100); + * }); + * }); + * ``` + * + * In this example, the `d.on('error')` handler will be triggered, rather + * than crashing the program. + */ + run(fn: (...args: any[]) => T, ...args: any[]): T; + /** + * Explicitly adds an emitter to the domain. If any event handlers called by + * the emitter throw an error, or if the emitter emits an `'error'` event, it + * will be routed to the domain's `'error'` event, just like with implicit + * binding. + * + * This also works with timers that are returned from `setInterval()` and `setTimeout()`. If their callback function throws, it will be caught by + * the domain `'error'` handler. + * + * If the Timer or `EventEmitter` was already bound to a domain, it is removed + * from that one, and bound to this one instead. + * @param emitter emitter or timer to be added to the domain + */ + add(emitter: EventEmitter | NodeJS.Timer): void; + /** + * The opposite of {@link add}. Removes domain handling from the + * specified emitter. + * @param emitter emitter or timer to be removed from the domain + */ + remove(emitter: EventEmitter | NodeJS.Timer): void; + /** + * The returned function will be a wrapper around the supplied callback + * function. When the returned function is called, any errors that are + * thrown will be routed to the domain's `'error'` event. + * + * ```js + * const d = domain.create(); + * + * function readSomeFile(filename, cb) { + * fs.readFile(filename, 'utf8', d.bind((er, data) => { + * // If this throws, it will also be passed to the domain. + * return cb(er, data ? JSON.parse(data) : null); + * })); + * } + * + * d.on('error', (er) => { + * // An error occurred somewhere. If we throw it now, it will crash the program + * // with the normal line number and stack message. + * }); + * ``` + * @param callback The callback function + * @return The bound function + */ + bind(callback: T): T; + /** + * This method is almost identical to {@link bind}. However, in + * addition to catching thrown errors, it will also intercept `Error` objects sent as the first argument to the function. + * + * In this way, the common `if (err) return callback(err);` pattern can be replaced + * with a single error handler in a single place. + * + * ```js + * const d = domain.create(); + * + * function readSomeFile(filename, cb) { + * fs.readFile(filename, 'utf8', d.intercept((data) => { + * // Note, the first argument is never passed to the + * // callback since it is assumed to be the 'Error' argument + * // and thus intercepted by the domain. + * + * // If this throws, it will also be passed to the domain + * // so the error-handling logic can be moved to the 'error' + * // event on the domain instead of being repeated throughout + * // the program. + * return cb(null, JSON.parse(data)); + * })); + * } + * + * d.on('error', (er) => { + * // An error occurred somewhere. If we throw it now, it will crash the program + * // with the normal line number and stack message. 
+ * }); + * ``` + * @param callback The callback function + * @return The intercepted function + */ + intercept(callback: T): T; + } + function create(): Domain; +} +declare module "node:domain" { + export * from "domain"; +} diff --git a/dist/node_modules/@types/node/ts4.8/events.d.ts b/dist/node_modules/@types/node/ts4.8/events.d.ts new file mode 100644 index 0000000..87b3ba0 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/events.d.ts @@ -0,0 +1,896 @@ +/** + * Much of the Node.js core API is built around an idiomatic asynchronous + * event-driven architecture in which certain kinds of objects (called "emitters") + * emit named events that cause `Function` objects ("listeners") to be called. + * + * For instance: a `net.Server` object emits an event each time a peer + * connects to it; a `fs.ReadStream` emits an event when the file is opened; + * a `stream` emits an event whenever data is available to be read. + * + * All objects that emit events are instances of the `EventEmitter` class. These + * objects expose an `eventEmitter.on()` function that allows one or more + * functions to be attached to named events emitted by the object. Typically, + * event names are camel-cased strings but any valid JavaScript property key + * can be used. + * + * When the `EventEmitter` object emits an event, all of the functions attached + * to that specific event are called _synchronously_. Any values returned by the + * called listeners are _ignored_ and discarded. + * + * The following example shows a simple `EventEmitter` instance with a single + * listener. The `eventEmitter.on()` method is used to register listeners, while + * the `eventEmitter.emit()` method is used to trigger the event. + * + * ```js + * import { EventEmitter } from 'node:events'; + * + * class MyEmitter extends EventEmitter {} + * + * const myEmitter = new MyEmitter(); + * myEmitter.on('event', () => { + * console.log('an event occurred!'); + * }); + * myEmitter.emit('event'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/events.js) + */ +declare module "events" { + import { AsyncResource, AsyncResourceOptions } from "node:async_hooks"; + // NOTE: This class is in the docs but is **not actually exported** by Node. + // If https://github.com/nodejs/node/issues/39903 gets resolved and Node + // actually starts exporting the class, uncomment below. + // import { EventListener, EventListenerObject } from '__dom-events'; + // /** The NodeEventTarget is a Node.js-specific extension to EventTarget that emulates a subset of the EventEmitter API. */ + // interface NodeEventTarget extends EventTarget { + // /** + // * Node.js-specific extension to the `EventTarget` class that emulates the equivalent `EventEmitter` API. + // * The only difference between `addListener()` and `addEventListener()` is that addListener() will return a reference to the EventTarget. + // */ + // addListener(type: string, listener: EventListener | EventListenerObject, options?: { once: boolean }): this; + // /** Node.js-specific extension to the `EventTarget` class that returns an array of event `type` names for which event listeners are registered. */ + // eventNames(): string[]; + // /** Node.js-specific extension to the `EventTarget` class that returns the number of event listeners registered for the `type`. */ + // listenerCount(type: string): number; + // /** Node.js-specific alias for `eventTarget.removeListener()`. 
*/ + // off(type: string, listener: EventListener | EventListenerObject): this; + // /** Node.js-specific alias for `eventTarget.addListener()`. */ + // on(type: string, listener: EventListener | EventListenerObject, options?: { once: boolean }): this; + // /** Node.js-specific extension to the `EventTarget` class that adds a `once` listener for the given event `type`. This is equivalent to calling `on` with the `once` option set to `true`. */ + // once(type: string, listener: EventListener | EventListenerObject): this; + // /** + // * Node.js-specific extension to the `EventTarget` class. + // * If `type` is specified, removes all registered listeners for `type`, + // * otherwise removes all registered listeners. + // */ + // removeAllListeners(type: string): this; + // /** + // * Node.js-specific extension to the `EventTarget` class that removes the listener for the given `type`. + // * The only difference between `removeListener()` and `removeEventListener()` is that `removeListener()` will return a reference to the `EventTarget`. + // */ + // removeListener(type: string, listener: EventListener | EventListenerObject): this; + // } + interface EventEmitterOptions { + /** + * Enables automatic capturing of promise rejection. + */ + captureRejections?: boolean | undefined; + } + // Any EventTarget with a Node-style `once` function + interface _NodeEventTarget { + once(eventName: string | symbol, listener: (...args: any[]) => void): this; + } + // Any EventTarget with a DOM-style `addEventListener` + interface _DOMEventTarget { + addEventListener( + eventName: string, + listener: (...args: any[]) => void, + opts?: { + once: boolean; + }, + ): any; + } + interface StaticEventEmitterOptions { + signal?: AbortSignal | undefined; + } + interface EventEmitter = DefaultEventMap> extends NodeJS.EventEmitter {} + type EventMap = Record | DefaultEventMap; + type DefaultEventMap = [never]; + type AnyRest = [...args: any[]]; + type Args = T extends DefaultEventMap ? AnyRest : ( + K extends keyof T ? T[K] : never + ); + type Key = T extends DefaultEventMap ? string | symbol : K | keyof T; + type Key2 = T extends DefaultEventMap ? string | symbol : K & keyof T; + type Listener = T extends DefaultEventMap ? F : ( + K extends keyof T ? ( + T[K] extends unknown[] ? (...args: T[K]) => void : never + ) + : never + ); + type Listener1 = Listener void>; + type Listener2 = Listener; + + /** + * The `EventEmitter` class is defined and exposed by the `node:events` module: + * + * ```js + * import { EventEmitter } from 'node:events'; + * ``` + * + * All `EventEmitter`s emit the event `'newListener'` when new listeners are + * added and `'removeListener'` when existing listeners are removed. + * + * It supports the following option: + * @since v0.1.26 + */ + class EventEmitter = DefaultEventMap> { + constructor(options?: EventEmitterOptions); + + [EventEmitter.captureRejectionSymbol]?(error: Error, event: Key, ...args: Args): void; + + /** + * Creates a `Promise` that is fulfilled when the `EventEmitter` emits the given + * event or that is rejected if the `EventEmitter` emits `'error'` while waiting. + * The `Promise` will resolve with an array of all the arguments emitted to the + * given event. + * + * This method is intentionally generic and works with the web platform [EventTarget](https://dom.spec.whatwg.org/#interface-eventtarget) interface, which has no special`'error'` event + * semantics and does not listen to the `'error'` event. 
+ * + * ```js + * import { once, EventEmitter } from 'node:events'; + * import process from 'node:process'; + * + * const ee = new EventEmitter(); + * + * process.nextTick(() => { + * ee.emit('myevent', 42); + * }); + * + * const [value] = await once(ee, 'myevent'); + * console.log(value); + * + * const err = new Error('kaboom'); + * process.nextTick(() => { + * ee.emit('error', err); + * }); + * + * try { + * await once(ee, 'myevent'); + * } catch (err) { + * console.error('error happened', err); + * } + * ``` + * + * The special handling of the `'error'` event is only used when `events.once()`is used to wait for another event. If `events.once()` is used to wait for the + * '`error'` event itself, then it is treated as any other kind of event without + * special handling: + * + * ```js + * import { EventEmitter, once } from 'node:events'; + * + * const ee = new EventEmitter(); + * + * once(ee, 'error') + * .then(([err]) => console.log('ok', err.message)) + * .catch((err) => console.error('error', err.message)); + * + * ee.emit('error', new Error('boom')); + * + * // Prints: ok boom + * ``` + * + * An `AbortSignal` can be used to cancel waiting for the event: + * + * ```js + * import { EventEmitter, once } from 'node:events'; + * + * const ee = new EventEmitter(); + * const ac = new AbortController(); + * + * async function foo(emitter, event, signal) { + * try { + * await once(emitter, event, { signal }); + * console.log('event emitted!'); + * } catch (error) { + * if (error.name === 'AbortError') { + * console.error('Waiting for the event was canceled!'); + * } else { + * console.error('There was an error', error.message); + * } + * } + * } + * + * foo(ee, 'foo', ac.signal); + * ac.abort(); // Abort waiting for the event + * ee.emit('foo'); // Prints: Waiting for the event was canceled! + * ``` + * @since v11.13.0, v10.16.0 + */ + static once( + emitter: _NodeEventTarget, + eventName: string | symbol, + options?: StaticEventEmitterOptions, + ): Promise; + static once(emitter: _DOMEventTarget, eventName: string, options?: StaticEventEmitterOptions): Promise; + /** + * ```js + * import { on, EventEmitter } from 'node:events'; + * import process from 'node:process'; + * + * const ee = new EventEmitter(); + * + * // Emit later on + * process.nextTick(() => { + * ee.emit('foo', 'bar'); + * ee.emit('foo', 42); + * }); + * + * for await (const event of on(ee, 'foo')) { + * // The execution of this inner block is synchronous and it + * // processes one event at a time (even with await). Do not use + * // if concurrent execution is required. + * console.log(event); // prints ['bar'] [42] + * } + * // Unreachable here + * ``` + * + * Returns an `AsyncIterator` that iterates `eventName` events. It will throw + * if the `EventEmitter` emits `'error'`. It removes all listeners when + * exiting the loop. The `value` returned by each iteration is an array + * composed of the emitted event arguments. + * + * An `AbortSignal` can be used to cancel waiting on events: + * + * ```js + * import { on, EventEmitter } from 'node:events'; + * import process from 'node:process'; + * + * const ac = new AbortController(); + * + * (async () => { + * const ee = new EventEmitter(); + * + * // Emit later on + * process.nextTick(() => { + * ee.emit('foo', 'bar'); + * ee.emit('foo', 42); + * }); + * + * for await (const event of on(ee, 'foo', { signal: ac.signal })) { + * // The execution of this inner block is synchronous and it + * // processes one event at a time (even with await). 
Do not use + * // if concurrent execution is required. + * console.log(event); // prints ['bar'] [42] + * } + * // Unreachable here + * })(); + * + * process.nextTick(() => ac.abort()); + * ``` + * @since v13.6.0, v12.16.0 + * @param eventName The name of the event being listened for + * @return that iterates `eventName` events emitted by the `emitter` + */ + static on( + emitter: NodeJS.EventEmitter, + eventName: string, + options?: StaticEventEmitterOptions, + ): AsyncIterableIterator; + /** + * A class method that returns the number of listeners for the given `eventName`registered on the given `emitter`. + * + * ```js + * import { EventEmitter, listenerCount } from 'node:events'; + * + * const myEmitter = new EventEmitter(); + * myEmitter.on('event', () => {}); + * myEmitter.on('event', () => {}); + * console.log(listenerCount(myEmitter, 'event')); + * // Prints: 2 + * ``` + * @since v0.9.12 + * @deprecated Since v3.2.0 - Use `listenerCount` instead. + * @param emitter The emitter to query + * @param eventName The event name + */ + static listenerCount(emitter: NodeJS.EventEmitter, eventName: string | symbol): number; + /** + * Returns a copy of the array of listeners for the event named `eventName`. + * + * For `EventEmitter`s this behaves exactly the same as calling `.listeners` on + * the emitter. + * + * For `EventTarget`s this is the only way to get the event listeners for the + * event target. This is useful for debugging and diagnostic purposes. + * + * ```js + * import { getEventListeners, EventEmitter } from 'node:events'; + * + * { + * const ee = new EventEmitter(); + * const listener = () => console.log('Events are fun'); + * ee.on('foo', listener); + * console.log(getEventListeners(ee, 'foo')); // [ [Function: listener] ] + * } + * { + * const et = new EventTarget(); + * const listener = () => console.log('Events are fun'); + * et.addEventListener('foo', listener); + * console.log(getEventListeners(et, 'foo')); // [ [Function: listener] ] + * } + * ``` + * @since v15.2.0, v14.17.0 + */ + static getEventListeners(emitter: _DOMEventTarget | NodeJS.EventEmitter, name: string | symbol): Function[]; + /** + * Returns the currently set max amount of listeners. + * + * For `EventEmitter`s this behaves exactly the same as calling `.getMaxListeners` on + * the emitter. + * + * For `EventTarget`s this is the only way to get the max event listeners for the + * event target. If the number of event handlers on a single EventTarget exceeds + * the max set, the EventTarget will print a warning. + * + * ```js + * import { getMaxListeners, setMaxListeners, EventEmitter } from 'node:events'; + * + * { + * const ee = new EventEmitter(); + * console.log(getMaxListeners(ee)); // 10 + * setMaxListeners(11, ee); + * console.log(getMaxListeners(ee)); // 11 + * } + * { + * const et = new EventTarget(); + * console.log(getMaxListeners(et)); // 10 + * setMaxListeners(11, et); + * console.log(getMaxListeners(et)); // 11 + * } + * ``` + * @since v19.9.0 + */ + static getMaxListeners(emitter: _DOMEventTarget | NodeJS.EventEmitter): number; + /** + * ```js + * import { setMaxListeners, EventEmitter } from 'node:events'; + * + * const target = new EventTarget(); + * const emitter = new EventEmitter(); + * + * setMaxListeners(5, target, emitter); + * ``` + * @since v15.4.0 + * @param n A non-negative number. The maximum number of listeners per `EventTarget` event. + * @param eventsTargets Zero or more {EventTarget} or {EventEmitter} instances. 
If none are specified, `n` is set as the default max for all newly created {EventTarget} and {EventEmitter} + * objects. + */ + static setMaxListeners(n?: number, ...eventTargets: Array<_DOMEventTarget | NodeJS.EventEmitter>): void; + /** + * Listens once to the `abort` event on the provided `signal`. + * + * Listening to the `abort` event on abort signals is unsafe and may + * lead to resource leaks since another third party with the signal can + * call `e.stopImmediatePropagation()`. Unfortunately Node.js cannot change + * this since it would violate the web standard. Additionally, the original + * API makes it easy to forget to remove listeners. + * + * This API allows safely using `AbortSignal`s in Node.js APIs by solving these + * two issues by listening to the event such that `stopImmediatePropagation` does + * not prevent the listener from running. + * + * Returns a disposable so that it may be unsubscribed from more easily. + * + * ```js + * import { addAbortListener } from 'node:events'; + * + * function example(signal) { + * let disposable; + * try { + * signal.addEventListener('abort', (e) => e.stopImmediatePropagation()); + * disposable = addAbortListener(signal, (e) => { + * // Do something when signal is aborted. + * }); + * } finally { + * disposable?.[Symbol.dispose](); + * } + * } + * ``` + * @since v20.5.0 + * @experimental + * @return Disposable that removes the `abort` listener. + */ + static addAbortListener(signal: AbortSignal, resource: (event: Event) => void): Disposable; + /** + * This symbol shall be used to install a listener for only monitoring `'error'`events. Listeners installed using this symbol are called before the regular`'error'` listeners are called. + * + * Installing a listener using this symbol does not change the behavior once an`'error'` event is emitted. Therefore, the process will still crash if no + * regular `'error'` listener is installed. + * @since v13.6.0, v12.17.0 + */ + static readonly errorMonitor: unique symbol; + /** + * Value: `Symbol.for('nodejs.rejection')` + * + * See how to write a custom `rejection handler`. + * @since v13.4.0, v12.16.0 + */ + static readonly captureRejectionSymbol: unique symbol; + /** + * Value: [boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) + * + * Change the default `captureRejections` option on all new `EventEmitter` objects. + * @since v13.4.0, v12.16.0 + */ + static captureRejections: boolean; + /** + * By default, a maximum of `10` listeners can be registered for any single + * event. This limit can be changed for individual `EventEmitter` instances + * using the `emitter.setMaxListeners(n)` method. To change the default + * for _all_`EventEmitter` instances, the `events.defaultMaxListeners`property can be used. If this value is not a positive number, a `RangeError`is thrown. + * + * Take caution when setting the `events.defaultMaxListeners` because the + * change affects _all_`EventEmitter` instances, including those created before + * the change is made. However, calling `emitter.setMaxListeners(n)` still has + * precedence over `events.defaultMaxListeners`. + * + * This is not a hard limit. The `EventEmitter` instance will allow + * more listeners to be added but will output a trace warning to stderr indicating + * that a "possible EventEmitter memory leak" has been detected. 
For any single`EventEmitter`, the `emitter.getMaxListeners()` and `emitter.setMaxListeners()`methods can be used to + * temporarily avoid this warning: + * + * ```js + * import { EventEmitter } from 'node:events'; + * const emitter = new EventEmitter(); + * emitter.setMaxListeners(emitter.getMaxListeners() + 1); + * emitter.once('event', () => { + * // do stuff + * emitter.setMaxListeners(Math.max(emitter.getMaxListeners() - 1, 0)); + * }); + * ``` + * + * The `--trace-warnings` command-line flag can be used to display the + * stack trace for such warnings. + * + * The emitted warning can be inspected with `process.on('warning')` and will + * have the additional `emitter`, `type`, and `count` properties, referring to + * the event emitter instance, the event's name and the number of attached + * listeners, respectively. + * Its `name` property is set to `'MaxListenersExceededWarning'`. + * @since v0.11.2 + */ + static defaultMaxListeners: number; + } + import internal = require("node:events"); + namespace EventEmitter { + // Should just be `export { EventEmitter }`, but that doesn't work in TypeScript 3.4 + export { internal as EventEmitter }; + export interface Abortable { + /** + * When provided the corresponding `AbortController` can be used to cancel an asynchronous action. + */ + signal?: AbortSignal | undefined; + } + + export interface EventEmitterReferencingAsyncResource extends AsyncResource { + readonly eventEmitter: EventEmitterAsyncResource; + } + + export interface EventEmitterAsyncResourceOptions extends AsyncResourceOptions, EventEmitterOptions { + /** + * The type of async event, this is required when instantiating `EventEmitterAsyncResource` + * directly rather than as a child class. + * @default new.target.name if instantiated as a child class. + */ + name?: string; + } + + /** + * Integrates `EventEmitter` with `AsyncResource` for `EventEmitter`s that + * require manual async tracking. Specifically, all events emitted by instances + * of `events.EventEmitterAsyncResource` will run within its `async context`. + * + * ```js + * import { EventEmitterAsyncResource, EventEmitter } from 'node:events'; + * import { notStrictEqual, strictEqual } from 'node:assert'; + * import { executionAsyncId, triggerAsyncId } from 'node:async_hooks'; + * + * // Async tracking tooling will identify this as 'Q'. + * const ee1 = new EventEmitterAsyncResource({ name: 'Q' }); + * + * // 'foo' listeners will run in the EventEmitters async context. + * ee1.on('foo', () => { + * strictEqual(executionAsyncId(), ee1.asyncId); + * strictEqual(triggerAsyncId(), ee1.triggerAsyncId); + * }); + * + * const ee2 = new EventEmitter(); + * + * // 'foo' listeners on ordinary EventEmitters that do not track async + * // context, however, run in the same async context as the emit(). + * ee2.on('foo', () => { + * notStrictEqual(executionAsyncId(), ee2.asyncId); + * notStrictEqual(triggerAsyncId(), ee2.triggerAsyncId); + * }); + * + * Promise.resolve().then(() => { + * ee1.emit('foo'); + * ee2.emit('foo'); + * }); + * ``` + * + * The `EventEmitterAsyncResource` class has the same methods and takes the + * same options as `EventEmitter` and `AsyncResource` themselves. + * @since v17.4.0, v16.14.0 + */ + export class EventEmitterAsyncResource extends EventEmitter { + /** + * @param options Only optional in child class. + */ + constructor(options?: EventEmitterAsyncResourceOptions); + /** + * Call all `destroy` hooks. This should only ever be called once. An error will + * be thrown if it is called more than once. 
This **must** be manually called. If + * the resource is left to be collected by the GC then the `destroy` hooks will + * never be called. + */ + emitDestroy(): void; + /** + * The unique `asyncId` assigned to the resource. + */ + readonly asyncId: number; + /** + * The same triggerAsyncId that is passed to the AsyncResource constructor. + */ + readonly triggerAsyncId: number; + /** + * The returned `AsyncResource` object has an additional `eventEmitter` property + * that provides a reference to this `EventEmitterAsyncResource`. + */ + readonly asyncResource: EventEmitterReferencingAsyncResource; + } + } + global { + namespace NodeJS { + interface EventEmitter = DefaultEventMap> { + [EventEmitter.captureRejectionSymbol]?(error: Error, event: Key, ...args: Args): void; + /** + * Alias for `emitter.on(eventName, listener)`. + * @since v0.1.26 + */ + addListener(eventName: Key, listener: Listener1): this; + /** + * Adds the `listener` function to the end of the listeners array for the + * event named `eventName`. No checks are made to see if the `listener` has + * already been added. Multiple calls passing the same combination of `eventName`and `listener` will result in the `listener` being added, and called, multiple + * times. + * + * ```js + * server.on('connection', (stream) => { + * console.log('someone connected!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * + * By default, event listeners are invoked in the order they are added. The`emitter.prependListener()` method can be used as an alternative to add the + * event listener to the beginning of the listeners array. + * + * ```js + * import { EventEmitter } from 'node:events'; + * const myEE = new EventEmitter(); + * myEE.on('foo', () => console.log('a')); + * myEE.prependListener('foo', () => console.log('b')); + * myEE.emit('foo'); + * // Prints: + * // b + * // a + * ``` + * @since v0.1.101 + * @param eventName The name of the event. + * @param listener The callback function + */ + on(eventName: Key, listener: Listener1): this; + /** + * Adds a **one-time**`listener` function for the event named `eventName`. The + * next time `eventName` is triggered, this listener is removed and then invoked. + * + * ```js + * server.once('connection', (stream) => { + * console.log('Ah, we have our first user!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * + * By default, event listeners are invoked in the order they are added. The`emitter.prependOnceListener()` method can be used as an alternative to add the + * event listener to the beginning of the listeners array. + * + * ```js + * import { EventEmitter } from 'node:events'; + * const myEE = new EventEmitter(); + * myEE.once('foo', () => console.log('a')); + * myEE.prependOnceListener('foo', () => console.log('b')); + * myEE.emit('foo'); + * // Prints: + * // b + * // a + * ``` + * @since v0.3.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + once(eventName: Key, listener: Listener1): this; + /** + * Removes the specified `listener` from the listener array for the event named`eventName`. + * + * ```js + * const callback = (stream) => { + * console.log('someone connected!'); + * }; + * server.on('connection', callback); + * // ... + * server.removeListener('connection', callback); + * ``` + * + * `removeListener()` will remove, at most, one instance of a listener from the + * listener array. 
If any single listener has been added multiple times to the + * listener array for the specified `eventName`, then `removeListener()` must be + * called multiple times to remove each instance. + * + * Once an event is emitted, all listeners attached to it at the + * time of emitting are called in order. This implies that any`removeListener()` or `removeAllListeners()` calls _after_ emitting and _before_ the last listener finishes execution + * will not remove them from`emit()` in progress. Subsequent events behave as expected. + * + * ```js + * import { EventEmitter } from 'node:events'; + * class MyEmitter extends EventEmitter {} + * const myEmitter = new MyEmitter(); + * + * const callbackA = () => { + * console.log('A'); + * myEmitter.removeListener('event', callbackB); + * }; + * + * const callbackB = () => { + * console.log('B'); + * }; + * + * myEmitter.on('event', callbackA); + * + * myEmitter.on('event', callbackB); + * + * // callbackA removes listener callbackB but it will still be called. + * // Internal listener array at time of emit [callbackA, callbackB] + * myEmitter.emit('event'); + * // Prints: + * // A + * // B + * + * // callbackB is now removed. + * // Internal listener array [callbackA] + * myEmitter.emit('event'); + * // Prints: + * // A + * ``` + * + * Because listeners are managed using an internal array, calling this will + * change the position indices of any listener registered _after_ the listener + * being removed. This will not impact the order in which listeners are called, + * but it means that any copies of the listener array as returned by + * the `emitter.listeners()` method will need to be recreated. + * + * When a single function has been added as a handler multiple times for a single + * event (as in the example below), `removeListener()` will remove the most + * recently added instance. In the example the `once('ping')`listener is removed: + * + * ```js + * import { EventEmitter } from 'node:events'; + * const ee = new EventEmitter(); + * + * function pong() { + * console.log('pong'); + * } + * + * ee.on('ping', pong); + * ee.once('ping', pong); + * ee.removeListener('ping', pong); + * + * ee.emit('ping'); + * ee.emit('ping'); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.1.26 + */ + removeListener(eventName: Key, listener: Listener1): this; + /** + * Alias for `emitter.removeListener()`. + * @since v10.0.0 + */ + off(eventName: Key, listener: Listener1): this; + /** + * Removes all listeners, or those of the specified `eventName`. + * + * It is bad practice to remove listeners added elsewhere in the code, + * particularly when the `EventEmitter` instance was created by some other + * component or module (e.g. sockets or file streams). + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.1.26 + */ + removeAllListeners(event?: Key): this; + /** + * By default `EventEmitter`s will print a warning if more than `10` listeners are + * added for a particular event. This is a useful default that helps finding + * memory leaks. The `emitter.setMaxListeners()` method allows the limit to be + * modified for this specific `EventEmitter` instance. The value can be set to`Infinity` (or `0`) to indicate an unlimited number of listeners. + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. 
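+ *
+ * A small illustrative sketch (not part of the upstream docs; `busEmitter` is
+ * just a placeholder name):
+ *
+ * ```js
+ * import { EventEmitter } from 'node:events';
+ * const busEmitter = new EventEmitter();
+ *
+ * // Allow up to 20 listeners on this particular instance;
+ * // setMaxListeners(0) would mean "unlimited".
+ * busEmitter.setMaxListeners(20);
+ * ```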
+ * @since v0.3.5 + */ + setMaxListeners(n: number): this; + /** + * Returns the current max listener value for the `EventEmitter` which is either + * set by `emitter.setMaxListeners(n)` or defaults to {@link defaultMaxListeners}. + * @since v1.0.0 + */ + getMaxListeners(): number; + /** + * Returns a copy of the array of listeners for the event named `eventName`. + * + * ```js + * server.on('connection', (stream) => { + * console.log('someone connected!'); + * }); + * console.log(util.inspect(server.listeners('connection'))); + * // Prints: [ [Function] ] + * ``` + * @since v0.1.26 + */ + listeners(eventName: Key): Array>; + /** + * Returns a copy of the array of listeners for the event named `eventName`, + * including any wrappers (such as those created by `.once()`). + * + * ```js + * import { EventEmitter } from 'node:events'; + * const emitter = new EventEmitter(); + * emitter.once('log', () => console.log('log once')); + * + * // Returns a new Array with a function `onceWrapper` which has a property + * // `listener` which contains the original listener bound above + * const listeners = emitter.rawListeners('log'); + * const logFnWrapper = listeners[0]; + * + * // Logs "log once" to the console and does not unbind the `once` event + * logFnWrapper.listener(); + * + * // Logs "log once" to the console and removes the listener + * logFnWrapper(); + * + * emitter.on('log', () => console.log('log persistently')); + * // Will return a new Array with a single function bound by `.on()` above + * const newListeners = emitter.rawListeners('log'); + * + * // Logs "log persistently" twice + * newListeners[0](); + * emitter.emit('log'); + * ``` + * @since v9.4.0 + */ + rawListeners(eventName: Key): Array>; + /** + * Synchronously calls each of the listeners registered for the event named`eventName`, in the order they were registered, passing the supplied arguments + * to each. + * + * Returns `true` if the event had listeners, `false` otherwise. + * + * ```js + * import { EventEmitter } from 'node:events'; + * const myEmitter = new EventEmitter(); + * + * // First listener + * myEmitter.on('event', function firstListener() { + * console.log('Helloooo! first listener'); + * }); + * // Second listener + * myEmitter.on('event', function secondListener(arg1, arg2) { + * console.log(`event with parameters ${arg1}, ${arg2} in second listener`); + * }); + * // Third listener + * myEmitter.on('event', function thirdListener(...args) { + * const parameters = args.join(', '); + * console.log(`event with parameters ${parameters} in third listener`); + * }); + * + * console.log(myEmitter.listeners('event')); + * + * myEmitter.emit('event', 1, 2, 3, 4, 5); + * + * // Prints: + * // [ + * // [Function: firstListener], + * // [Function: secondListener], + * // [Function: thirdListener] + * // ] + * // Helloooo! first listener + * // event with parameters 1, 2 in second listener + * // event with parameters 1, 2, 3, 4, 5 in third listener + * ``` + * @since v0.1.26 + */ + emit(eventName: Key, ...args: Args): boolean; + /** + * Returns the number of listeners listening for the event named `eventName`. + * If `listener` is provided, it will return how many times the listener is found + * in the list of the listeners of the event. 
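+ *
+ * For example (an illustrative sketch, not from the upstream docs):
+ *
+ * ```js
+ * import { EventEmitter } from 'node:events';
+ * const ee = new EventEmitter();
+ * const onPing = () => {};
+ *
+ * ee.on('ping', onPing);
+ * ee.on('ping', onPing);
+ * ee.on('ping', () => {});
+ *
+ * console.log(ee.listenerCount('ping'));         // 3 listeners in total
+ * console.log(ee.listenerCount('ping', onPing)); // onPing was added twice: 2
+ * ```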
+ * @since v3.2.0 + * @param eventName The name of the event being listened for + * @param listener The event handler function + */ + listenerCount(eventName: Key, listener?: Listener2): number; + /** + * Adds the `listener` function to the _beginning_ of the listeners array for the + * event named `eventName`. No checks are made to see if the `listener` has + * already been added. Multiple calls passing the same combination of `eventName`and `listener` will result in the `listener` being added, and called, multiple + * times. + * + * ```js + * server.prependListener('connection', (stream) => { + * console.log('someone connected!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v6.0.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + prependListener(eventName: Key, listener: Listener1): this; + /** + * Adds a **one-time**`listener` function for the event named `eventName` to the _beginning_ of the listeners array. The next time `eventName` is triggered, this + * listener is removed, and then invoked. + * + * ```js + * server.prependOnceListener('connection', (stream) => { + * console.log('Ah, we have our first user!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v6.0.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + prependOnceListener(eventName: Key, listener: Listener1): this; + /** + * Returns an array listing the events for which the emitter has registered + * listeners. The values in the array are strings or `Symbol`s. + * + * ```js + * import { EventEmitter } from 'node:events'; + * + * const myEE = new EventEmitter(); + * myEE.on('foo', () => {}); + * myEE.on('bar', () => {}); + * + * const sym = Symbol('symbol'); + * myEE.on(sym, () => {}); + * + * console.log(myEE.eventNames()); + * // Prints: [ 'foo', 'bar', Symbol(symbol) ] + * ``` + * @since v6.0.0 + */ + eventNames(): Array<(string | symbol) & Key2>; + } + } + } + export = EventEmitter; +} +declare module "node:events" { + import events = require("events"); + export = events; +} diff --git a/dist/node_modules/@types/node/ts4.8/fs.d.ts b/dist/node_modules/@types/node/ts4.8/fs.d.ts new file mode 100644 index 0000000..5cbca12 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/fs.d.ts @@ -0,0 +1,4311 @@ +/** + * The `node:fs` module enables interacting with the file system in a + * way modeled on standard POSIX functions. + * + * To use the promise-based APIs: + * + * ```js + * import * as fs from 'node:fs/promises'; + * ``` + * + * To use the callback and sync APIs: + * + * ```js + * import * as fs from 'node:fs'; + * ``` + * + * All file system operations have synchronous, callback, and promise-based + * forms, and are accessible using both CommonJS syntax and ES6 Modules (ESM). + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/fs.js) + */ +declare module "fs" { + import * as stream from "node:stream"; + import { Abortable, EventEmitter } from "node:events"; + import { URL } from "node:url"; + import * as promises from "node:fs/promises"; + export { promises }; + /** + * Valid types for path values in "fs". 
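+ *
+ * For example, each of the following is a valid `PathLike` (an illustrative
+ * sketch, not from the upstream docs; the path itself is a placeholder):
+ *
+ * ```js
+ * import { readFileSync } from 'node:fs';
+ *
+ * readFileSync('/tmp/hello.txt');                 // string
+ * readFileSync(Buffer.from('/tmp/hello.txt'));    // Buffer
+ * readFileSync(new URL('file:///tmp/hello.txt')); // URL using the file: scheme
+ * ```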
+ */ + export type PathLike = string | Buffer | URL; + export type PathOrFileDescriptor = PathLike | number; + export type TimeLike = string | number | Date; + export type NoParamCallback = (err: NodeJS.ErrnoException | null) => void; + export type BufferEncodingOption = + | "buffer" + | { + encoding: "buffer"; + }; + export interface ObjectEncodingOptions { + encoding?: BufferEncoding | null | undefined; + } + export type EncodingOption = ObjectEncodingOptions | BufferEncoding | undefined | null; + export type OpenMode = number | string; + export type Mode = number | string; + export interface StatsBase { + isFile(): boolean; + isDirectory(): boolean; + isBlockDevice(): boolean; + isCharacterDevice(): boolean; + isSymbolicLink(): boolean; + isFIFO(): boolean; + isSocket(): boolean; + dev: T; + ino: T; + mode: T; + nlink: T; + uid: T; + gid: T; + rdev: T; + size: T; + blksize: T; + blocks: T; + atimeMs: T; + mtimeMs: T; + ctimeMs: T; + birthtimeMs: T; + atime: Date; + mtime: Date; + ctime: Date; + birthtime: Date; + } + export interface Stats extends StatsBase {} + /** + * A `fs.Stats` object provides information about a file. + * + * Objects returned from {@link stat}, {@link lstat}, {@link fstat}, and + * their synchronous counterparts are of this type. + * If `bigint` in the `options` passed to those methods is true, the numeric values + * will be `bigint` instead of `number`, and the object will contain additional + * nanosecond-precision properties suffixed with `Ns`. + * + * ```console + * Stats { + * dev: 2114, + * ino: 48064969, + * mode: 33188, + * nlink: 1, + * uid: 85, + * gid: 100, + * rdev: 0, + * size: 527, + * blksize: 4096, + * blocks: 8, + * atimeMs: 1318289051000.1, + * mtimeMs: 1318289051000.1, + * ctimeMs: 1318289051000.1, + * birthtimeMs: 1318289051000.1, + * atime: Mon, 10 Oct 2011 23:24:11 GMT, + * mtime: Mon, 10 Oct 2011 23:24:11 GMT, + * ctime: Mon, 10 Oct 2011 23:24:11 GMT, + * birthtime: Mon, 10 Oct 2011 23:24:11 GMT } + * ``` + * + * `bigint` version: + * + * ```console + * BigIntStats { + * dev: 2114n, + * ino: 48064969n, + * mode: 33188n, + * nlink: 1n, + * uid: 85n, + * gid: 100n, + * rdev: 0n, + * size: 527n, + * blksize: 4096n, + * blocks: 8n, + * atimeMs: 1318289051000n, + * mtimeMs: 1318289051000n, + * ctimeMs: 1318289051000n, + * birthtimeMs: 1318289051000n, + * atimeNs: 1318289051000000000n, + * mtimeNs: 1318289051000000000n, + * ctimeNs: 1318289051000000000n, + * birthtimeNs: 1318289051000000000n, + * atime: Mon, 10 Oct 2011 23:24:11 GMT, + * mtime: Mon, 10 Oct 2011 23:24:11 GMT, + * ctime: Mon, 10 Oct 2011 23:24:11 GMT, + * birthtime: Mon, 10 Oct 2011 23:24:11 GMT } + * ``` + * @since v0.1.21 + */ + export class Stats {} + export interface StatsFsBase { + /** Type of file system. */ + type: T; + /** Optimal transfer block size. */ + bsize: T; + /** Total data blocks in file system. */ + blocks: T; + /** Free blocks in file system. */ + bfree: T; + /** Available blocks for unprivileged users */ + bavail: T; + /** Total file nodes in file system. */ + files: T; + /** Free file nodes in file system. */ + ffree: T; + } + export interface StatsFs extends StatsFsBase {} + /** + * Provides information about a mounted file system. + * + * Objects returned from {@link statfs} and its synchronous counterpart are of + * this type. If `bigint` in the `options` passed to those methods is `true`, the + * numeric values will be `bigint` instead of `number`. 
+ * + * ```console + * StatFs { + * type: 1397114950, + * bsize: 4096, + * blocks: 121938943, + * bfree: 61058895, + * bavail: 61058895, + * files: 999, + * ffree: 1000000 + * } + * ``` + * + * `bigint` version: + * + * ```console + * StatFs { + * type: 1397114950n, + * bsize: 4096n, + * blocks: 121938943n, + * bfree: 61058895n, + * bavail: 61058895n, + * files: 999n, + * ffree: 1000000n + * } + * ``` + * @since v19.6.0, v18.15.0 + */ + export class StatsFs {} + export interface BigIntStatsFs extends StatsFsBase {} + export interface StatFsOptions { + bigint?: boolean | undefined; + } + /** + * A representation of a directory entry, which can be a file or a subdirectory + * within the directory, as returned by reading from an `fs.Dir`. The + * directory entry is a combination of the file name and file type pairs. + * + * Additionally, when {@link readdir} or {@link readdirSync} is called with + * the `withFileTypes` option set to `true`, the resulting array is filled with `fs.Dirent` objects, rather than strings or `Buffer` s. + * @since v10.10.0 + */ + export class Dirent { + /** + * Returns `true` if the `fs.Dirent` object describes a regular file. + * @since v10.10.0 + */ + isFile(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a file system + * directory. + * @since v10.10.0 + */ + isDirectory(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a block device. + * @since v10.10.0 + */ + isBlockDevice(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a character device. + * @since v10.10.0 + */ + isCharacterDevice(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a symbolic link. + * @since v10.10.0 + */ + isSymbolicLink(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a first-in-first-out + * (FIFO) pipe. + * @since v10.10.0 + */ + isFIFO(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a socket. + * @since v10.10.0 + */ + isSocket(): boolean; + /** + * The file name that this `fs.Dirent` object refers to. The type of this + * value is determined by the `options.encoding` passed to {@link readdir} or {@link readdirSync}. + * @since v10.10.0 + */ + name: string; + /** + * The base path that this `fs.Dirent` object refers to. + * @since v20.1.0 + */ + path: string; + } + /** + * A class representing a directory stream. + * + * Created by {@link opendir}, {@link opendirSync}, or `fsPromises.opendir()`. + * + * ```js + * import { opendir } from 'node:fs/promises'; + * + * try { + * const dir = await opendir('./'); + * for await (const dirent of dir) + * console.log(dirent.name); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * When using the async iterator, the `fs.Dir` object will be automatically + * closed after the iterator exits. + * @since v12.12.0 + */ + export class Dir implements AsyncIterable { + /** + * The read-only path of this directory as was provided to {@link opendir},{@link opendirSync}, or `fsPromises.opendir()`. + * @since v12.12.0 + */ + readonly path: string; + /** + * Asynchronously iterates over the directory via `readdir(3)` until all entries have been read. + */ + [Symbol.asyncIterator](): AsyncIterableIterator; + /** + * Asynchronously close the directory's underlying resource handle. + * Subsequent reads will result in errors. + * + * A promise is returned that will be fulfilled after the resource has been + * closed. 
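+ *
+ * A minimal sketch of pairing `read()` with an explicit `close()` when the async
+ * iterator is not used (reading the current directory here is purely illustrative):
+ *
+ * ```js
+ * import { opendir } from 'node:fs/promises';
+ *
+ * const dir = await opendir('./');
+ * try {
+ *   let dirent;
+ *   while ((dirent = await dir.read()) !== null) {
+ *     console.log(dirent.name);
+ *   }
+ * } finally {
+ *   // Release the underlying handle; subsequent reads would now fail.
+ *   await dir.close();
+ * }
+ * ```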
+ * @since v12.12.0 + */ + close(): Promise; + close(cb: NoParamCallback): void; + /** + * Synchronously close the directory's underlying resource handle. + * Subsequent reads will result in errors. + * @since v12.12.0 + */ + closeSync(): void; + /** + * Asynchronously read the next directory entry via [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) as an `fs.Dirent`. + * + * A promise is returned that will be fulfilled with an `fs.Dirent`, or `null`if there are no more directory entries to read. + * + * Directory entries returned by this function are in no particular order as + * provided by the operating system's underlying directory mechanisms. + * Entries added or removed while iterating over the directory might not be + * included in the iteration results. + * @since v12.12.0 + * @return containing {fs.Dirent|null} + */ + read(): Promise; + read(cb: (err: NodeJS.ErrnoException | null, dirEnt: Dirent | null) => void): void; + /** + * Synchronously read the next directory entry as an `fs.Dirent`. See the + * POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more detail. + * + * If there are no more directory entries to read, `null` will be returned. + * + * Directory entries returned by this function are in no particular order as + * provided by the operating system's underlying directory mechanisms. + * Entries added or removed while iterating over the directory might not be + * included in the iteration results. + * @since v12.12.0 + */ + readSync(): Dirent | null; + } + /** + * Class: fs.StatWatcher + * @since v14.3.0, v12.20.0 + * Extends `EventEmitter` + * A successful call to {@link watchFile} method will return a new fs.StatWatcher object. + */ + export interface StatWatcher extends EventEmitter { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the `fs.StatWatcher` is active. Calling `watcher.ref()` multiple times will have + * no effect. + * + * By default, all `fs.StatWatcher` objects are "ref'ed", making it normally + * unnecessary to call `watcher.ref()` unless `watcher.unref()` had been + * called previously. + * @since v14.3.0, v12.20.0 + */ + ref(): this; + /** + * When called, the active `fs.StatWatcher` object will not require the Node.js + * event loop to remain active. If there is no other activity keeping the + * event loop running, the process may exit before the `fs.StatWatcher` object's + * callback is invoked. Calling `watcher.unref()` multiple times will have + * no effect. + * @since v14.3.0, v12.20.0 + */ + unref(): this; + } + export interface FSWatcher extends EventEmitter { + /** + * Stop watching for changes on the given `fs.FSWatcher`. Once stopped, the `fs.FSWatcher` object is no longer usable. + * @since v0.5.8 + */ + close(): void; + /** + * When called, requests that the Node.js event loop _not_ exit so long as the `fs.FSWatcher` is active. Calling `watcher.ref()` multiple times will have + * no effect. + * + * By default, all `fs.FSWatcher` objects are "ref'ed", making it normally + * unnecessary to call `watcher.ref()` unless `watcher.unref()` had been + * called previously. + * @since v14.3.0, v12.20.0 + */ + ref(): this; + /** + * When called, the active `fs.FSWatcher` object will not require the Node.js + * event loop to remain active. If there is no other activity keeping the + * event loop running, the process may exit before the `fs.FSWatcher` object's + * callback is invoked. Calling `watcher.unref()` multiple times will have + * no effect. 
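+ *
+ * A short sketch (the watched path `config.json` is only a placeholder) of a
+ * watcher that does not by itself keep the process alive:
+ *
+ * ```js
+ * import { watch } from 'node:fs';
+ *
+ * const watcher = watch('config.json', (eventType, filename) => {
+ *   console.log(`${eventType}: ${filename}`);
+ * });
+ *
+ * // The event loop may now exit even while the watcher is still active.
+ * watcher.unref();
+ * ```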
+ * @since v14.3.0, v12.20.0 + */ + unref(): this; + /** + * events.EventEmitter + * 1. change + * 2. close + * 3. error + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "error", listener: (error: Error) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + on(event: "close", listener: () => void): this; + on(event: "error", listener: (error: Error) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + once(event: "close", listener: () => void): this; + once(event: "error", listener: (error: Error) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "error", listener: (error: Error) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "error", listener: (error: Error) => void): this; + } + /** + * Instances of `fs.ReadStream` are created and returned using the {@link createReadStream} function. + * @since v0.1.93 + */ + export class ReadStream extends stream.Readable { + close(callback?: (err?: NodeJS.ErrnoException | null) => void): void; + /** + * The number of bytes that have been read so far. + * @since v6.4.0 + */ + bytesRead: number; + /** + * The path to the file the stream is reading from as specified in the first + * argument to `fs.createReadStream()`. If `path` is passed as a string, then`readStream.path` will be a string. If `path` is passed as a `Buffer`, then`readStream.path` will be a + * `Buffer`. If `fd` is specified, then`readStream.path` will be `undefined`. + * @since v0.1.93 + */ + path: string | Buffer; + /** + * This property is `true` if the underlying file has not been opened yet, + * i.e. before the `'ready'` event is emitted. + * @since v11.2.0, v10.16.0 + */ + pending: boolean; + /** + * events.EventEmitter + * 1. open + * 2. close + * 3. 
ready + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "open", listener: (fd: number) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "ready", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "open", listener: (fd: number) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "ready", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "open", listener: (fd: number) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "ready", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "open", listener: (fd: number) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "ready", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "open", listener: (fd: number) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "ready", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * * Extends `stream.Writable` + * + * Instances of `fs.WriteStream` are created and returned using the {@link createWriteStream} function. + * @since v0.1.93 + */ + export class WriteStream extends stream.Writable { + /** + * Closes `writeStream`. 
Optionally accepts a + * callback that will be executed once the `writeStream`is closed. + * @since v0.9.4 + */ + close(callback?: (err?: NodeJS.ErrnoException | null) => void): void; + /** + * The number of bytes written so far. Does not include data that is still queued + * for writing. + * @since v0.4.7 + */ + bytesWritten: number; + /** + * The path to the file the stream is writing to as specified in the first + * argument to {@link createWriteStream}. If `path` is passed as a string, then`writeStream.path` will be a string. If `path` is passed as a `Buffer`, then`writeStream.path` will be a + * `Buffer`. + * @since v0.1.93 + */ + path: string | Buffer; + /** + * This property is `true` if the underlying file has not been opened yet, + * i.e. before the `'ready'` event is emitted. + * @since v11.2.0 + */ + pending: boolean; + /** + * events.EventEmitter + * 1. open + * 2. close + * 3. ready + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "open", listener: (fd: number) => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "ready", listener: () => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "open", listener: (fd: number) => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "ready", listener: () => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "open", listener: (fd: number) => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "ready", listener: () => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "open", listener: (fd: number) => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "ready", listener: () => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + 
prependOnceListener(event: "open", listener: (fd: number) => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "ready", listener: () => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * Asynchronously rename file at `oldPath` to the pathname provided + * as `newPath`. In the case that `newPath` already exists, it will + * be overwritten. If there is a directory at `newPath`, an error will + * be raised instead. No arguments other than a possible exception are + * given to the completion callback. + * + * See also: [`rename(2)`](http://man7.org/linux/man-pages/man2/rename.2.html). + * + * ```js + * import { rename } from 'node:fs'; + * + * rename('oldFile.txt', 'newFile.txt', (err) => { + * if (err) throw err; + * console.log('Rename complete!'); + * }); + * ``` + * @since v0.0.2 + */ + export function rename(oldPath: PathLike, newPath: PathLike, callback: NoParamCallback): void; + export namespace rename { + /** + * Asynchronous rename(2) - Change the name or location of a file or directory. + * @param oldPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(oldPath: PathLike, newPath: PathLike): Promise; + } + /** + * Renames the file from `oldPath` to `newPath`. Returns `undefined`. + * + * See the POSIX [`rename(2)`](http://man7.org/linux/man-pages/man2/rename.2.html) documentation for more details. + * @since v0.1.21 + */ + export function renameSync(oldPath: PathLike, newPath: PathLike): void; + /** + * Truncates the file. No arguments other than a possible exception are + * given to the completion callback. A file descriptor can also be passed as the + * first argument. In this case, `fs.ftruncate()` is called. + * + * ```js + * import { truncate } from 'node:fs'; + * // Assuming that 'path/file.txt' is a regular file. + * truncate('path/file.txt', (err) => { + * if (err) throw err; + * console.log('path/file.txt was truncated'); + * }); + * ``` + * + * Passing a file descriptor is deprecated and may result in an error being thrown + * in the future. + * + * See the POSIX [`truncate(2)`](http://man7.org/linux/man-pages/man2/truncate.2.html) documentation for more details. + * @since v0.8.6 + * @param [len=0] + */ + export function truncate(path: PathLike, len: number | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous truncate(2) - Truncate a file to a specified length. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function truncate(path: PathLike, callback: NoParamCallback): void; + export namespace truncate { + /** + * Asynchronous truncate(2) - Truncate a file to a specified length. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param len If not specified, defaults to `0`. + */ + function __promisify__(path: PathLike, len?: number | null): Promise; + } + /** + * Truncates the file. Returns `undefined`. A file descriptor can also be + * passed as the first argument. In this case, `fs.ftruncateSync()` is called. 
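+ *
+ * A minimal sketch (assuming `path/file.txt` is an existing regular file):
+ *
+ * ```js
+ * import { truncateSync } from 'node:fs';
+ *
+ * // Keep only the first 4 bytes of the file.
+ * truncateSync('path/file.txt', 4);
+ * ```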
+ * + * Passing a file descriptor is deprecated and may result in an error being thrown + * in the future. + * @since v0.8.6 + * @param [len=0] + */ + export function truncateSync(path: PathLike, len?: number | null): void; + /** + * Truncates the file descriptor. No arguments other than a possible exception are + * given to the completion callback. + * + * See the POSIX [`ftruncate(2)`](http://man7.org/linux/man-pages/man2/ftruncate.2.html) documentation for more detail. + * + * If the file referred to by the file descriptor was larger than `len` bytes, only + * the first `len` bytes will be retained in the file. + * + * For example, the following program retains only the first four bytes of the + * file: + * + * ```js + * import { open, close, ftruncate } from 'node:fs'; + * + * function closeFd(fd) { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * + * open('temp.txt', 'r+', (err, fd) => { + * if (err) throw err; + * + * try { + * ftruncate(fd, 4, (err) => { + * closeFd(fd); + * if (err) throw err; + * }); + * } catch (err) { + * closeFd(fd); + * if (err) throw err; + * } + * }); + * ``` + * + * If the file previously was shorter than `len` bytes, it is extended, and the + * extended part is filled with null bytes (`'\0'`): + * + * If `len` is negative then `0` will be used. + * @since v0.8.6 + * @param [len=0] + */ + export function ftruncate(fd: number, len: number | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous ftruncate(2) - Truncate a file to a specified length. + * @param fd A file descriptor. + */ + export function ftruncate(fd: number, callback: NoParamCallback): void; + export namespace ftruncate { + /** + * Asynchronous ftruncate(2) - Truncate a file to a specified length. + * @param fd A file descriptor. + * @param len If not specified, defaults to `0`. + */ + function __promisify__(fd: number, len?: number | null): Promise; + } + /** + * Truncates the file descriptor. Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link ftruncate}. + * @since v0.8.6 + * @param [len=0] + */ + export function ftruncateSync(fd: number, len?: number | null): void; + /** + * Asynchronously changes owner and group of a file. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html) documentation for more detail. + * @since v0.1.97 + */ + export function chown(path: PathLike, uid: number, gid: number, callback: NoParamCallback): void; + export namespace chown { + /** + * Asynchronous chown(2) - Change ownership of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, uid: number, gid: number): Promise; + } + /** + * Synchronously changes owner and group of a file. Returns `undefined`. + * This is the synchronous version of {@link chown}. + * + * See the POSIX [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html) documentation for more detail. + * @since v0.1.97 + */ + export function chownSync(path: PathLike, uid: number, gid: number): void; + /** + * Sets the owner of the file. No arguments other than a possible exception are + * given to the completion callback. + * + * See the POSIX [`fchown(2)`](http://man7.org/linux/man-pages/man2/fchown.2.html) documentation for more detail. 
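+ *
+ * A small sketch (the path and the `uid`/`gid` value `1000` are placeholders):
+ *
+ * ```js
+ * import { open, fchown, close } from 'node:fs';
+ *
+ * open('path/file.txt', 'r+', (err, fd) => {
+ *   if (err) throw err;
+ *   // Change ownership of the already-open file descriptor.
+ *   fchown(fd, 1000, 1000, (chownErr) => {
+ *     close(fd, () => {});
+ *     if (chownErr) throw chownErr;
+ *   });
+ * });
+ * ```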
+ * @since v0.4.7 + */ + export function fchown(fd: number, uid: number, gid: number, callback: NoParamCallback): void; + export namespace fchown { + /** + * Asynchronous fchown(2) - Change ownership of a file. + * @param fd A file descriptor. + */ + function __promisify__(fd: number, uid: number, gid: number): Promise; + } + /** + * Sets the owner of the file. Returns `undefined`. + * + * See the POSIX [`fchown(2)`](http://man7.org/linux/man-pages/man2/fchown.2.html) documentation for more detail. + * @since v0.4.7 + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + */ + export function fchownSync(fd: number, uid: number, gid: number): void; + /** + * Set the owner of the symbolic link. No arguments other than a possible + * exception are given to the completion callback. + * + * See the POSIX [`lchown(2)`](http://man7.org/linux/man-pages/man2/lchown.2.html) documentation for more detail. + */ + export function lchown(path: PathLike, uid: number, gid: number, callback: NoParamCallback): void; + export namespace lchown { + /** + * Asynchronous lchown(2) - Change ownership of a file. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, uid: number, gid: number): Promise; + } + /** + * Set the owner for the path. Returns `undefined`. + * + * See the POSIX [`lchown(2)`](http://man7.org/linux/man-pages/man2/lchown.2.html) documentation for more details. + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + */ + export function lchownSync(path: PathLike, uid: number, gid: number): void; + /** + * Changes the access and modification times of a file in the same way as {@link utimes}, with the difference that if the path refers to a symbolic + * link, then the link is not dereferenced: instead, the timestamps of the + * symbolic link itself are changed. + * + * No arguments other than a possible exception are given to the completion + * callback. + * @since v14.5.0, v12.19.0 + */ + export function lutimes(path: PathLike, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace lutimes { + /** + * Changes the access and modification times of a file in the same way as `fsPromises.utimes()`, + * with the difference that if the path refers to a symbolic link, then the link is not + * dereferenced: instead, the timestamps of the symbolic link itself are changed. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Change the file system timestamps of the symbolic link referenced by `path`. + * Returns `undefined`, or throws an exception when parameters are incorrect or + * the operation fails. This is the synchronous version of {@link lutimes}. + * @since v14.5.0, v12.19.0 + */ + export function lutimesSync(path: PathLike, atime: TimeLike, mtime: TimeLike): void; + /** + * Asynchronously changes the permissions of a file. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html) documentation for more detail. 
+ * + * ```js + * import { chmod } from 'node:fs'; + * + * chmod('my_file.txt', 0o775, (err) => { + * if (err) throw err; + * console.log('The permissions for file "my_file.txt" have been changed!'); + * }); + * ``` + * @since v0.1.30 + */ + export function chmod(path: PathLike, mode: Mode, callback: NoParamCallback): void; + export namespace chmod { + /** + * Asynchronous chmod(2) - Change permissions of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(path: PathLike, mode: Mode): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link chmod}. + * + * See the POSIX [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html) documentation for more detail. + * @since v0.6.7 + */ + export function chmodSync(path: PathLike, mode: Mode): void; + /** + * Sets the permissions on the file. No arguments other than a possible exception + * are given to the completion callback. + * + * See the POSIX [`fchmod(2)`](http://man7.org/linux/man-pages/man2/fchmod.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchmod(fd: number, mode: Mode, callback: NoParamCallback): void; + export namespace fchmod { + /** + * Asynchronous fchmod(2) - Change permissions of a file. + * @param fd A file descriptor. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(fd: number, mode: Mode): Promise; + } + /** + * Sets the permissions on the file. Returns `undefined`. + * + * See the POSIX [`fchmod(2)`](http://man7.org/linux/man-pages/man2/fchmod.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchmodSync(fd: number, mode: Mode): void; + /** + * Changes the permissions on a symbolic link. No arguments other than a possible + * exception are given to the completion callback. + * + * This method is only implemented on macOS. + * + * See the POSIX [`lchmod(2)`](https://www.freebsd.org/cgi/man.cgi?query=lchmod&sektion=2) documentation for more detail. + * @deprecated Since v0.4.7 + */ + export function lchmod(path: PathLike, mode: Mode, callback: NoParamCallback): void; + /** @deprecated */ + export namespace lchmod { + /** + * Asynchronous lchmod(2) - Change permissions of a file. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(path: PathLike, mode: Mode): Promise; + } + /** + * Changes the permissions on a symbolic link. Returns `undefined`. + * + * This method is only implemented on macOS. + * + * See the POSIX [`lchmod(2)`](https://www.freebsd.org/cgi/man.cgi?query=lchmod&sektion=2) documentation for more detail. + * @deprecated Since v0.4.7 + */ + export function lchmodSync(path: PathLike, mode: Mode): void; + /** + * Asynchronous [`stat(2)`](http://man7.org/linux/man-pages/man2/stat.2.html). The callback gets two arguments `(err, stats)` where`stats` is an `fs.Stats` object. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. + * + * {@link stat} follows symbolic links. Use {@link lstat} to look at the + * links themselves. 
+ * + * Using `fs.stat()` to check for the existence of a file before calling`fs.open()`, `fs.readFile()`, or `fs.writeFile()` is not recommended. + * Instead, user code should open/read/write the file directly and handle the + * error raised if the file is not available. + * + * To check if a file exists without manipulating it afterwards, {@link access} is recommended. + * + * For example, given the following directory structure: + * + * ```text + * - txtDir + * -- file.txt + * - app.js + * ``` + * + * The next program will check for the stats of the given paths: + * + * ```js + * import { stat } from 'node:fs'; + * + * const pathsToCheck = ['./txtDir', './txtDir/file.txt']; + * + * for (let i = 0; i < pathsToCheck.length; i++) { + * stat(pathsToCheck[i], (err, stats) => { + * console.log(stats.isDirectory()); + * console.log(stats); + * }); + * } + * ``` + * + * The resulting output will resemble: + * + * ```console + * true + * Stats { + * dev: 16777220, + * mode: 16877, + * nlink: 3, + * uid: 501, + * gid: 20, + * rdev: 0, + * blksize: 4096, + * ino: 14214262, + * size: 96, + * blocks: 0, + * atimeMs: 1561174653071.963, + * mtimeMs: 1561174614583.3518, + * ctimeMs: 1561174626623.5366, + * birthtimeMs: 1561174126937.2893, + * atime: 2019-06-22T03:37:33.072Z, + * mtime: 2019-06-22T03:36:54.583Z, + * ctime: 2019-06-22T03:37:06.624Z, + * birthtime: 2019-06-22T03:28:46.937Z + * } + * false + * Stats { + * dev: 16777220, + * mode: 33188, + * nlink: 1, + * uid: 501, + * gid: 20, + * rdev: 0, + * blksize: 4096, + * ino: 14214074, + * size: 8, + * blocks: 8, + * atimeMs: 1561174616618.8555, + * mtimeMs: 1561174614584, + * ctimeMs: 1561174614583.8145, + * birthtimeMs: 1561174007710.7478, + * atime: 2019-06-22T03:36:56.619Z, + * mtime: 2019-06-22T03:36:54.584Z, + * ctime: 2019-06-22T03:36:54.584Z, + * birthtime: 2019-06-22T03:26:47.711Z + * } + * ``` + * @since v0.0.2 + */ + export function stat(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function stat( + path: PathLike, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void, + ): void; + export function stat( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void, + ): void; + export function stat( + path: PathLike, + options: StatOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void, + ): void; + export namespace stat { + /** + * Asynchronous stat(2) - Get file status. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ */ + function __promisify__( + path: PathLike, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(path: PathLike, options?: StatOptions): Promise; + } + export interface StatSyncFn extends Function { + (path: PathLike, options?: undefined): Stats; + ( + path: PathLike, + options?: StatSyncOptions & { + bigint?: false | undefined; + throwIfNoEntry: false; + }, + ): Stats | undefined; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: true; + throwIfNoEntry: false; + }, + ): BigIntStats | undefined; + ( + path: PathLike, + options?: StatSyncOptions & { + bigint?: false | undefined; + }, + ): Stats; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: true; + }, + ): BigIntStats; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: boolean; + throwIfNoEntry?: false | undefined; + }, + ): Stats | BigIntStats; + (path: PathLike, options?: StatSyncOptions): Stats | BigIntStats | undefined; + } + /** + * Synchronous stat(2) - Get file status. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export const statSync: StatSyncFn; + /** + * Invokes the callback with the `fs.Stats` for the file descriptor. + * + * See the POSIX [`fstat(2)`](http://man7.org/linux/man-pages/man2/fstat.2.html) documentation for more detail. + * @since v0.1.95 + */ + export function fstat(fd: number, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function fstat( + fd: number, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void, + ): void; + export function fstat( + fd: number, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void, + ): void; + export function fstat( + fd: number, + options: StatOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void, + ): void; + export namespace fstat { + /** + * Asynchronous fstat(2) - Get file status. + * @param fd A file descriptor. + */ + function __promisify__( + fd: number, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + fd: number, + options: StatOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(fd: number, options?: StatOptions): Promise; + } + /** + * Retrieves the `fs.Stats` for the file descriptor. + * + * See the POSIX [`fstat(2)`](http://man7.org/linux/man-pages/man2/fstat.2.html) documentation for more detail. + * @since v0.1.95 + */ + export function fstatSync( + fd: number, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Stats; + export function fstatSync( + fd: number, + options: StatOptions & { + bigint: true; + }, + ): BigIntStats; + export function fstatSync(fd: number, options?: StatOptions): Stats | BigIntStats; + /** + * Retrieves the `fs.Stats` for the symbolic link referred to by the path. + * The callback gets two arguments `(err, stats)` where `stats` is a `fs.Stats` object. `lstat()` is identical to `stat()`, except that if `path` is a symbolic + * link, then the link itself is stat-ed, not the file that it refers to. + * + * See the POSIX [`lstat(2)`](http://man7.org/linux/man-pages/man2/lstat.2.html) documentation for more details. 
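+ *
+ * For illustration (assuming `./mewtwo` is an existing symbolic link), `lstat()` reports
+ * on the link itself rather than on its target:
+ *
+ * ```js
+ * import { lstat } from 'node:fs';
+ *
+ * lstat('./mewtwo', (err, stats) => {
+ *   if (err) throw err;
+ *   console.log(stats.isSymbolicLink()); // true
+ * });
+ * ```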
+ * @since v0.1.30 + */ + export function lstat(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function lstat( + path: PathLike, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void, + ): void; + export function lstat( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void, + ): void; + export function lstat( + path: PathLike, + options: StatOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void, + ): void; + export namespace lstat { + /** + * Asynchronous lstat(2) - Get file status. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__( + path: PathLike, + options?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(path: PathLike, options?: StatOptions): Promise; + } + /** + * Asynchronous [`statfs(2)`](http://man7.org/linux/man-pages/man2/statfs.2.html). Returns information about the mounted file system which + * contains `path`. The callback gets two arguments `(err, stats)` where `stats`is an `fs.StatFs` object. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. + * @since v19.6.0, v18.15.0 + * @param path A path to an existing file or directory on the file system to be queried. + */ + export function statfs(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: StatsFs) => void): void; + export function statfs( + path: PathLike, + options: + | (StatFsOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: StatsFs) => void, + ): void; + export function statfs( + path: PathLike, + options: StatFsOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStatsFs) => void, + ): void; + export function statfs( + path: PathLike, + options: StatFsOptions | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: StatsFs | BigIntStatsFs) => void, + ): void; + export namespace statfs { + /** + * Asynchronous statfs(2) - Returns information about the mounted file system which contains path. The callback gets two arguments (err, stats) where stats is an object. + * @param path A path to an existing file or directory on the file system to be queried. + */ + function __promisify__( + path: PathLike, + options?: StatFsOptions & { + bigint?: false | undefined; + }, + ): Promise; + function __promisify__( + path: PathLike, + options: StatFsOptions & { + bigint: true; + }, + ): Promise; + function __promisify__(path: PathLike, options?: StatFsOptions): Promise; + } + /** + * Synchronous [`statfs(2)`](http://man7.org/linux/man-pages/man2/statfs.2.html). Returns information about the mounted file system which + * contains `path`. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. + * @since v19.6.0, v18.15.0 + * @param path A path to an existing file or directory on the file system to be queried. 
+ */ + export function statfsSync( + path: PathLike, + options?: StatFsOptions & { + bigint?: false | undefined; + }, + ): StatsFs; + export function statfsSync( + path: PathLike, + options: StatFsOptions & { + bigint: true; + }, + ): BigIntStatsFs; + export function statfsSync(path: PathLike, options?: StatFsOptions): StatsFs | BigIntStatsFs; + /** + * Synchronous lstat(2) - Get file status. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export const lstatSync: StatSyncFn; + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. No arguments other than + * a possible + * exception are given to the completion callback. + * @since v0.1.31 + */ + export function link(existingPath: PathLike, newPath: PathLike, callback: NoParamCallback): void; + export namespace link { + /** + * Asynchronous link(2) - Create a new link (also known as a hard link) to an existing file. + * @param existingPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(existingPath: PathLike, newPath: PathLike): Promise; + } + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. Returns `undefined`. + * @since v0.1.31 + */ + export function linkSync(existingPath: PathLike, newPath: PathLike): void; + /** + * Creates the link called `path` pointing to `target`. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`symlink(2)`](http://man7.org/linux/man-pages/man2/symlink.2.html) documentation for more details. + * + * The `type` argument is only available on Windows and ignored on other platforms. + * It can be set to `'dir'`, `'file'`, or `'junction'`. If the `type` argument is + * not a string, Node.js will autodetect `target` type and use `'file'` or `'dir'`. + * If the `target` does not exist, `'file'` will be used. Windows junction points + * require the destination path to be absolute. When using `'junction'`, the`target` argument will automatically be normalized to absolute path. Junction + * points on NTFS volumes can only point to directories. + * + * Relative targets are relative to the link's parent directory. + * + * ```js + * import { symlink } from 'node:fs'; + * + * symlink('./mew', './mewtwo', callback); + * ``` + * + * The above example creates a symbolic link `mewtwo` which points to `mew` in the + * same directory: + * + * ```bash + * $ tree . + * . + * ├── mew + * └── mewtwo -> ./mew + * ``` + * @since v0.1.31 + * @param [type='null'] + */ + export function symlink( + target: PathLike, + path: PathLike, + type: symlink.Type | undefined | null, + callback: NoParamCallback, + ): void; + /** + * Asynchronous symlink(2) - Create a new symbolic link to an existing file. + * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. + * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. + */ + export function symlink(target: PathLike, path: PathLike, callback: NoParamCallback): void; + export namespace symlink { + /** + * Asynchronous symlink(2) - Create a new symbolic link to an existing file. 
+ * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. + * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. + * @param type May be set to `'dir'`, `'file'`, or `'junction'` (default is `'file'`) and is only available on Windows (ignored on other platforms). + * When using `'junction'`, the `target` argument will automatically be normalized to an absolute path. + */ + function __promisify__(target: PathLike, path: PathLike, type?: string | null): Promise; + type Type = "dir" | "file" | "junction"; + } + /** + * Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link symlink}. + * @since v0.1.31 + * @param [type='null'] + */ + export function symlinkSync(target: PathLike, path: PathLike, type?: symlink.Type | null): void; + /** + * Reads the contents of the symbolic link referred to by `path`. The callback gets + * two arguments `(err, linkString)`. + * + * See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path passed to the callback. If the `encoding` is set to `'buffer'`, + * the link path returned will be passed as a `Buffer` object. + * @since v0.1.31 + */ + export function readlink( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, linkString: string) => void, + ): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlink( + path: PathLike, + options: BufferEncodingOption, + callback: (err: NodeJS.ErrnoException | null, linkString: Buffer) => void, + ): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlink( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, linkString: string | Buffer) => void, + ): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function readlink( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, linkString: string) => void, + ): void; + export namespace readlink { + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + } + /** + * Returns the symbolic link's string value. + * + * See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path returned. If the `encoding` is set to `'buffer'`, + * the link path returned will be passed as a `Buffer` object. + * @since v0.1.31 + */ + export function readlinkSync(path: PathLike, options?: EncodingOption): string; + /** + * Synchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlinkSync(path: PathLike, options: BufferEncodingOption): Buffer; + /** + * Synchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlinkSync(path: PathLike, options?: EncodingOption): string | Buffer; + /** + * Asynchronously computes the canonical pathname by resolving `.`, `..`, and + * symbolic links. + * + * A canonical pathname is not necessarily unique. Hard links and bind mounts can + * expose a file system entity through many pathnames. + * + * This function behaves like [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html), with some exceptions: + * + * 1. No case conversion is performed on case-insensitive file systems. + * 2. The maximum number of symbolic links is platform-independent and generally + * (much) higher than what the native [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html) implementation supports. + * + * The `callback` gets two arguments `(err, resolvedPath)`. May use `process.cwd`to resolve relative paths. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path passed to the callback. If the `encoding` is set to `'buffer'`, + * the path returned will be passed as a `Buffer` object. + * + * If `path` resolves to a socket or a pipe, the function will return a system + * dependent name for that object. + * @since v0.1.31 + */ + export function realpath( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. 
+ * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpath( + path: PathLike, + options: BufferEncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: Buffer) => void, + ): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpath( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string | Buffer) => void, + ): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function realpath( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + export namespace realpath { + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html). + * + * The `callback` gets two arguments `(err, resolvedPath)`. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path passed to the callback. If the `encoding` is set to `'buffer'`, + * the path returned will be passed as a `Buffer` object. + * + * On Linux, when Node.js is linked against musl libc, the procfs file system must + * be mounted on `/proc` in order for this function to work. Glibc does not have + * this restriction. 
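+ *
+ * A brief usage sketch (the path `/etc/hosts` is only an example):
+ *
+ * ```js
+ * import { realpath } from 'node:fs';
+ *
+ * // Resolve via the underlying libc realpath(3).
+ * realpath.native('/etc/hosts', (err, resolvedPath) => {
+ *   if (err) throw err;
+ *   console.log(resolvedPath);
+ * });
+ * ```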
+ * @since v9.2.0 + */ + function native( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + function native( + path: PathLike, + options: BufferEncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: Buffer) => void, + ): void; + function native( + path: PathLike, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string | Buffer) => void, + ): void; + function native( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void, + ): void; + } + /** + * Returns the resolved pathname. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link realpath}. + * @since v0.1.31 + */ + export function realpathSync(path: PathLike, options?: EncodingOption): string; + /** + * Synchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpathSync(path: PathLike, options: BufferEncodingOption): Buffer; + /** + * Synchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpathSync(path: PathLike, options?: EncodingOption): string | Buffer; + export namespace realpathSync { + function native(path: PathLike, options?: EncodingOption): string; + function native(path: PathLike, options: BufferEncodingOption): Buffer; + function native(path: PathLike, options?: EncodingOption): string | Buffer; + } + /** + * Asynchronously removes a file or symbolic link. No arguments other than a + * possible exception are given to the completion callback. + * + * ```js + * import { unlink } from 'node:fs'; + * // Assuming that 'path/file.txt' is a regular file. + * unlink('path/file.txt', (err) => { + * if (err) throw err; + * console.log('path/file.txt was deleted'); + * }); + * ``` + * + * `fs.unlink()` will not work on a directory, empty or otherwise. To remove a + * directory, use {@link rmdir}. + * + * See the POSIX [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html) documentation for more details. + * @since v0.0.2 + */ + export function unlink(path: PathLike, callback: NoParamCallback): void; + export namespace unlink { + /** + * Asynchronous unlink(2) - delete a name and possibly the file it refers to. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike): Promise; + } + /** + * Synchronous [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html). Returns `undefined`. + * @since v0.1.21 + */ + export function unlinkSync(path: PathLike): void; + export interface RmDirOptions { + /** + * If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or + * `EPERM` error is encountered, Node.js will retry the operation with a linear + * backoff wait of `retryDelay` ms longer on each try. This option represents the + * number of retries. This option is ignored if the `recursive` option is not + * `true`. 
+ * @default 0 + */ + maxRetries?: number | undefined; + /** + * @deprecated since v14.14.0 In future versions of Node.js and will trigger a warning + * `fs.rmdir(path, { recursive: true })` will throw if `path` does not exist or is a file. + * Use `fs.rm(path, { recursive: true, force: true })` instead. + * + * If `true`, perform a recursive directory removal. In + * recursive mode, operations are retried on failure. + * @default false + */ + recursive?: boolean | undefined; + /** + * The amount of time in milliseconds to wait between retries. + * This option is ignored if the `recursive` option is not `true`. + * @default 100 + */ + retryDelay?: number | undefined; + } + /** + * Asynchronous [`rmdir(2)`](http://man7.org/linux/man-pages/man2/rmdir.2.html). No arguments other than a possible exception are given + * to the completion callback. + * + * Using `fs.rmdir()` on a file (not a directory) results in an `ENOENT` error on + * Windows and an `ENOTDIR` error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use {@link rm} with options `{ recursive: true, force: true }`. + * @since v0.0.2 + */ + export function rmdir(path: PathLike, callback: NoParamCallback): void; + export function rmdir(path: PathLike, options: RmDirOptions, callback: NoParamCallback): void; + export namespace rmdir { + /** + * Asynchronous rmdir(2) - delete a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, options?: RmDirOptions): Promise; + } + /** + * Synchronous [`rmdir(2)`](http://man7.org/linux/man-pages/man2/rmdir.2.html). Returns `undefined`. + * + * Using `fs.rmdirSync()` on a file (not a directory) results in an `ENOENT` error + * on Windows and an `ENOTDIR` error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use {@link rmSync} with options `{ recursive: true, force: true }`. + * @since v0.1.21 + */ + export function rmdirSync(path: PathLike, options?: RmDirOptions): void; + export interface RmOptions { + /** + * When `true`, exceptions will be ignored if `path` does not exist. + * @default false + */ + force?: boolean | undefined; + /** + * If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or + * `EPERM` error is encountered, Node.js will retry the operation with a linear + * backoff wait of `retryDelay` ms longer on each try. This option represents the + * number of retries. This option is ignored if the `recursive` option is not + * `true`. + * @default 0 + */ + maxRetries?: number | undefined; + /** + * If `true`, perform a recursive directory removal. In + * recursive mode, operations are retried on failure. + * @default false + */ + recursive?: boolean | undefined; + /** + * The amount of time in milliseconds to wait between retries. + * This option is ignored if the `recursive` option is not `true`. + * @default 100 + */ + retryDelay?: number | undefined; + } + /** + * Asynchronously removes files and directories (modeled on the standard POSIX `rm`utility). No arguments other than a possible exception are given to the + * completion callback. + * @since v14.14.0 + */ + export function rm(path: PathLike, callback: NoParamCallback): void; + export function rm(path: PathLike, options: RmOptions, callback: NoParamCallback): void; + export namespace rm { + /** + * Asynchronously removes files and directories (modeled on the standard POSIX `rm` utility). 
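+ *
+ * A minimal illustrative sketch, not from the upstream docs; 'old-build' is a
+ * placeholder directory name:
+ *
+ * ```js
+ * import { rm } from 'node:fs';
+ *
+ * // Remove a directory tree, ignoring a missing path, similar to `rm -rf`.
+ * rm('old-build', { recursive: true, force: true }, (err) => {
+ *   if (err) throw err;
+ * });
+ * ```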
+ */ + function __promisify__(path: PathLike, options?: RmOptions): Promise; + } + /** + * Synchronously removes files and directories (modeled on the standard POSIX `rm`utility). Returns `undefined`. + * @since v14.14.0 + */ + export function rmSync(path: PathLike, options?: RmOptions): void; + export interface MakeDirectoryOptions { + /** + * Indicates whether parent folders should be created. + * If a folder was created, the path to the first created folder will be returned. + * @default false + */ + recursive?: boolean | undefined; + /** + * A file mode. If a string is passed, it is parsed as an octal integer. If not specified + * @default 0o777 + */ + mode?: Mode | undefined; + } + /** + * Asynchronously creates a directory. + * + * The callback is given a possible exception and, if `recursive` is `true`, the + * first directory path created, `(err[, path])`.`path` can still be `undefined` when `recursive` is `true`, if no directory was + * created (for instance, if it was previously created). + * + * The optional `options` argument can be an integer specifying `mode` (permission + * and sticky bits), or an object with a `mode` property and a `recursive`property indicating whether parent directories should be created. Calling`fs.mkdir()` when `path` is a directory that + * exists results in an error only + * when `recursive` is false. If `recursive` is false and the directory exists, + * an `EEXIST` error occurs. + * + * ```js + * import { mkdir } from 'node:fs'; + * + * // Create ./tmp/a/apple, regardless of whether ./tmp and ./tmp/a exist. + * mkdir('./tmp/a/apple', { recursive: true }, (err) => { + * if (err) throw err; + * }); + * ``` + * + * On Windows, using `fs.mkdir()` on the root directory even with recursion will + * result in an error: + * + * ```js + * import { mkdir } from 'node:fs'; + * + * mkdir('/', { recursive: true }, (err) => { + * // => [Error: EPERM: operation not permitted, mkdir 'C:\'] + * }); + * ``` + * + * See the POSIX [`mkdir(2)`](http://man7.org/linux/man-pages/man2/mkdir.2.html) documentation for more details. + * @since v0.1.8 + */ + export function mkdir( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + callback: (err: NodeJS.ErrnoException | null, path?: string) => void, + ): void; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdir( + path: PathLike, + options: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null + | undefined, + callback: NoParamCallback, + ): void; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdir( + path: PathLike, + options: Mode | MakeDirectoryOptions | null | undefined, + callback: (err: NodeJS.ErrnoException | null, path?: string) => void, + ): void; + /** + * Asynchronous mkdir(2) - create a directory with a mode of `0o777`. + * @param path A path to a file. 
If a URL is provided, it must use the `file:` protocol. + */ + export function mkdir(path: PathLike, callback: NoParamCallback): void; + export namespace mkdir { + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__( + path: PathLike, + options?: Mode | MakeDirectoryOptions | null, + ): Promise; + } + /** + * Synchronously creates a directory. Returns `undefined`, or if `recursive` is`true`, the first directory path created. + * This is the synchronous version of {@link mkdir}. + * + * See the POSIX [`mkdir(2)`](http://man7.org/linux/man-pages/man2/mkdir.2.html) documentation for more details. + * @since v0.1.21 + */ + export function mkdirSync( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + ): string | undefined; + /** + * Synchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdirSync( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null, + ): void; + /** + * Synchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdirSync(path: PathLike, options?: Mode | MakeDirectoryOptions | null): string | undefined; + /** + * Creates a unique temporary directory. + * + * Generates six random characters to be appended behind a required`prefix` to create a unique temporary directory. Due to platform + * inconsistencies, avoid trailing `X` characters in `prefix`. 
Some platforms, + * notably the BSDs, can return more than six random characters, and replace + * trailing `X` characters in `prefix` with random characters. + * + * The created directory path is passed as a string to the callback's second + * parameter. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * + * ```js + * import { mkdtemp } from 'node:fs'; + * import { join } from 'node:path'; + * import { tmpdir } from 'node:os'; + * + * mkdtemp(join(tmpdir(), 'foo-'), (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Prints: /tmp/foo-itXde2 or C:\Users\...\AppData\Local\Temp\foo-itXde2 + * }); + * ``` + * + * The `fs.mkdtemp()` method will append the six randomly selected characters + * directly to the `prefix` string. For instance, given a directory `/tmp`, if the + * intention is to create a temporary directory _within_`/tmp`, the `prefix`must end with a trailing platform-specific path separator + * (`require('node:path').sep`). + * + * ```js + * import { tmpdir } from 'node:os'; + * import { mkdtemp } from 'node:fs'; + * + * // The parent directory for the new temporary directory + * const tmpDir = tmpdir(); + * + * // This method is *INCORRECT*: + * mkdtemp(tmpDir, (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Will print something similar to `/tmpabc123`. + * // A new temporary directory is created at the file system root + * // rather than *within* the /tmp directory. + * }); + * + * // This method is *CORRECT*: + * import { sep } from 'node:path'; + * mkdtemp(`${tmpDir}${sep}`, (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Will print something similar to `/tmp/abc123`. + * // A new temporary directory is created within + * // the /tmp directory. + * }); + * ``` + * @since v5.10.0 + */ + export function mkdtemp( + prefix: string, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, folder: string) => void, + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtemp( + prefix: string, + options: + | "buffer" + | { + encoding: "buffer"; + }, + callback: (err: NodeJS.ErrnoException | null, folder: Buffer) => void, + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtemp( + prefix: string, + options: EncodingOption, + callback: (err: NodeJS.ErrnoException | null, folder: string | Buffer) => void, + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + */ + export function mkdtemp( + prefix: string, + callback: (err: NodeJS.ErrnoException | null, folder: string) => void, + ): void; + export namespace mkdtemp { + /** + * Asynchronously creates a unique temporary directory. 
+ * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options?: EncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options: BufferEncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options?: EncodingOption): Promise; + } + /** + * Returns the created directory path. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link mkdtemp}. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * @since v5.10.0 + */ + export function mkdtempSync(prefix: string, options?: EncodingOption): string; + /** + * Synchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtempSync(prefix: string, options: BufferEncodingOption): Buffer; + /** + * Synchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtempSync(prefix: string, options?: EncodingOption): string | Buffer; + /** + * Reads the contents of a directory. The callback gets two arguments `(err, files)`where `files` is an array of the names of the files in the directory excluding`'.'` and `'..'`. + * + * See the POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames passed to the callback. If the `encoding` is set to `'buffer'`, + * the filenames returned will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the `files` array will contain `fs.Dirent` objects. + * @since v0.1.8 + */ + export function readdir( + path: PathLike, + options: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, files: string[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. 
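+ *
+ * A minimal usage sketch (illustrative only; './logs' is a placeholder):
+ *
+ * ```js
+ * import { readdir } from 'node:fs';
+ *
+ * // List directory entries as an array of filenames.
+ * readdir('./logs', (err, files) => {
+ *   if (err) throw err;
+ *   console.log(files);
+ * });
+ * ```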
+ * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdir( + path: PathLike, + options: + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | "buffer", + callback: (err: NodeJS.ErrnoException | null, files: Buffer[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdir( + path: PathLike, + options: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, files: string[] | Buffer[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function readdir( + path: PathLike, + callback: (err: NodeJS.ErrnoException | null, files: string[]) => void, + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + export function readdir( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + callback: (err: NodeJS.ErrnoException | null, files: Dirent[]) => void, + ): void; + export namespace readdir { + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options?: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options: + | "buffer" + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ * @param options If called with `withFileTypes: true` the result data will be an array of Dirent + */ + function __promisify__( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + ): Promise; + } + /** + * Reads the contents of the directory. + * + * See the POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames returned. If the `encoding` is set to `'buffer'`, + * the filenames returned will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the result will contain `fs.Dirent` objects. + * @since v0.1.21 + */ + export function readdirSync( + path: PathLike, + options?: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | BufferEncoding + | null, + ): string[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdirSync( + path: PathLike, + options: + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | "buffer", + ): Buffer[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdirSync( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): string[] | Buffer[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + export function readdirSync( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + ): Dirent[]; + /** + * Closes the file descriptor. No arguments other than a possible exception are + * given to the completion callback. + * + * Calling `fs.close()` on any file descriptor (`fd`) that is currently in use + * through any other `fs` operation may lead to undefined behavior. + * + * See the POSIX [`close(2)`](http://man7.org/linux/man-pages/man2/close.2.html) documentation for more detail. + * @since v0.0.2 + */ + export function close(fd: number, callback?: NoParamCallback): void; + export namespace close { + /** + * Asynchronous close(2) - close a file descriptor. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Closes the file descriptor. Returns `undefined`. + * + * Calling `fs.closeSync()` on any file descriptor (`fd`) that is currently in use + * through any other `fs` operation may lead to undefined behavior. + * + * See the POSIX [`close(2)`](http://man7.org/linux/man-pages/man2/close.2.html) documentation for more detail. 
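+ *
+ * A minimal illustrative sketch ('notes.txt' is a placeholder filename):
+ *
+ * ```js
+ * import { openSync, closeSync } from 'node:fs';
+ *
+ * // Open a file descriptor and release it once it is no longer needed.
+ * const fd = openSync('notes.txt', 'r');
+ * try {
+ *   // ... work with the file descriptor ...
+ * } finally {
+ *   closeSync(fd);
+ * }
+ * ```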
+ * @since v0.1.21 + */ + export function closeSync(fd: number): void; + /** + * Asynchronous file open. See the POSIX [`open(2)`](http://man7.org/linux/man-pages/man2/open.2.html) documentation for more details. + * + * `mode` sets the file mode (permission and sticky bits), but only if the file was + * created. On Windows, only the write permission can be manipulated; see {@link chmod}. + * + * The callback gets two arguments `(err, fd)`. + * + * Some characters (`< > : " / \ | ? *`) are reserved under Windows as documented + * by [Naming Files, Paths, and Namespaces](https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file). Under NTFS, if the filename contains + * a colon, Node.js will open a file system stream, as described by [this MSDN page](https://docs.microsoft.com/en-us/windows/desktop/FileIO/using-streams). + * + * Functions based on `fs.open()` exhibit this behavior as well:`fs.writeFile()`, `fs.readFile()`, etc. + * @since v0.0.2 + * @param [flags='r'] See `support of file system `flags``. + * @param [mode=0o666] + */ + export function open( + path: PathLike, + flags: OpenMode | undefined, + mode: Mode | undefined | null, + callback: (err: NodeJS.ErrnoException | null, fd: number) => void, + ): void; + /** + * Asynchronous open(2) - open and possibly create a file. If the file is created, its mode will be `0o666`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param [flags='r'] See `support of file system `flags``. + */ + export function open( + path: PathLike, + flags: OpenMode | undefined, + callback: (err: NodeJS.ErrnoException | null, fd: number) => void, + ): void; + /** + * Asynchronous open(2) - open and possibly create a file. If the file is created, its mode will be `0o666`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function open(path: PathLike, callback: (err: NodeJS.ErrnoException | null, fd: number) => void): void; + export namespace open { + /** + * Asynchronous open(2) - open and possibly create a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not supplied, defaults to `0o666`. + */ + function __promisify__(path: PathLike, flags: OpenMode, mode?: Mode | null): Promise; + } + /** + * Returns an integer representing the file descriptor. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link open}. + * @since v0.1.21 + * @param [flags='r'] + * @param [mode=0o666] + */ + export function openSync(path: PathLike, flags: OpenMode, mode?: Mode | null): number; + /** + * Change the file system timestamps of the object referenced by `path`. + * + * The `atime` and `mtime` arguments follow these rules: + * + * * Values can be either numbers representing Unix epoch time in seconds,`Date`s, or a numeric string like `'123456789.0'`. + * * If the value can not be converted to a number, or is `NaN`, `Infinity`, or`-Infinity`, an `Error` will be thrown. + * @since v0.4.2 + */ + export function utimes(path: PathLike, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace utimes { + /** + * Asynchronously change file timestamps of the file referenced by the supplied path. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param atime The last access time. 
If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link utimes}. + * @since v0.4.2 + */ + export function utimesSync(path: PathLike, atime: TimeLike, mtime: TimeLike): void; + /** + * Change the file system timestamps of the object referenced by the supplied file + * descriptor. See {@link utimes}. + * @since v0.4.2 + */ + export function futimes(fd: number, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace futimes { + /** + * Asynchronously change file timestamps of the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(fd: number, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Synchronous version of {@link futimes}. Returns `undefined`. + * @since v0.4.2 + */ + export function futimesSync(fd: number, atime: TimeLike, mtime: TimeLike): void; + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. No arguments other + * than a possible exception are given to the completion callback. + * @since v0.1.96 + */ + export function fsync(fd: number, callback: NoParamCallback): void; + export namespace fsync { + /** + * Asynchronous fsync(2) - synchronize a file's in-core state with the underlying storage device. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. Returns `undefined`. + * @since v0.1.96 + */ + export function fsyncSync(fd: number): void; + /** + * Write `buffer` to the file specified by `fd`. + * + * `offset` determines the part of the buffer to be written, and `length` is + * an integer specifying the number of bytes to write. + * + * `position` refers to the offset from the beginning of the file where this data + * should be written. If `typeof position !== 'number'`, the data will be written + * at the current position. See [`pwrite(2)`](http://man7.org/linux/man-pages/man2/pwrite.2.html). + * + * The callback will be given three arguments `(err, bytesWritten, buffer)` where`bytesWritten` specifies how many _bytes_ were written from `buffer`. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesWritten` and `buffer` properties. + * + * It is unsafe to use `fs.write()` multiple times on the same file without waiting + * for the callback. For this scenario, {@link createWriteStream} is + * recommended. + * + * On Linux, positional writes don't work when the file is opened in append mode. 
+ * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v0.0.2 + * @param [offset=0] + * @param [length=buffer.byteLength - offset] + * @param [position='null'] + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + length: number | undefined | null, + position: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void, + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`. + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + length: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void, + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void, + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + */ + export function write( + fd: number, + buffer: TBuffer, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void, + ): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + export function write( + fd: number, + string: string, + position: number | undefined | null, + encoding: BufferEncoding | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void, + ): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + */ + export function write( + fd: number, + string: string, + position: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void, + ): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + */ + export function write( + fd: number, + string: string, + callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void, + ): void; + export namespace write { + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + * @param length The number of bytes to write. 
If not supplied, defaults to `buffer.length - offset`. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + */ + function __promisify__( + fd: number, + buffer?: TBuffer, + offset?: number, + length?: number, + position?: number | null, + ): Promise<{ + bytesWritten: number; + buffer: TBuffer; + }>; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + function __promisify__( + fd: number, + string: string, + position?: number | null, + encoding?: BufferEncoding | null, + ): Promise<{ + bytesWritten: number; + buffer: string; + }>; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link write}. + * @since v0.1.21 + * @param [offset=0] + * @param [length=buffer.byteLength - offset] + * @param [position='null'] + * @return The number of bytes written. + */ + export function writeSync( + fd: number, + buffer: NodeJS.ArrayBufferView, + offset?: number | null, + length?: number | null, + position?: number | null, + ): number; + /** + * Synchronously writes `string` to the file referenced by the supplied file descriptor, returning the number of bytes written. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + export function writeSync( + fd: number, + string: string, + position?: number | null, + encoding?: BufferEncoding | null, + ): number; + export type ReadPosition = number | bigint; + export interface ReadSyncOptions { + /** + * @default 0 + */ + offset?: number | undefined; + /** + * @default `length of buffer` + */ + length?: number | undefined; + /** + * @default null + */ + position?: ReadPosition | null | undefined; + } + export interface ReadAsyncOptions extends ReadSyncOptions { + buffer?: TBuffer; + } + /** + * Read data from the file specified by `fd`. + * + * The callback is given the three arguments, `(err, bytesRead, buffer)`. + * + * If the file is not modified concurrently, the end-of-file is reached when the + * number of bytes read is zero. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesRead` and `buffer` properties. + * @since v0.0.2 + * @param buffer The buffer that the data will be written to. + * @param offset The position in `buffer` to write the data to. + * @param length The number of bytes to read. + * @param position Specifies where to begin reading from in the file. If `position` is `null` or `-1 `, data will be read from the current file position, and the file position will be updated. If + * `position` is an integer, the file position will be unchanged. + */ + export function read( + fd: number, + buffer: TBuffer, + offset: number, + length: number, + position: ReadPosition | null, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void, + ): void; + /** + * Similar to the above `fs.read` function, this version takes an optional `options` object. 
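+ *
+ * A minimal illustrative sketch ('data.bin' is a placeholder; the options shown
+ * are the fields described below):
+ *
+ * ```js
+ * import { open, read } from 'node:fs';
+ * import { Buffer } from 'node:buffer';
+ *
+ * open('data.bin', 'r', (err, fd) => {
+ *   if (err) throw err;
+ *   const buffer = Buffer.alloc(64);
+ *   // Read up to 64 bytes from the start of the file into `buffer`.
+ *   read(fd, { buffer, offset: 0, length: 64, position: 0 }, (err, bytesRead) => {
+ *     if (err) throw err;
+ *     console.log(`read ${bytesRead} bytes`);
+ *   });
+ * });
+ * ```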
+ * If not otherwise specified in an `options` object, + * `buffer` defaults to `Buffer.alloc(16384)`, + * `offset` defaults to `0`, + * `length` defaults to `buffer.byteLength`, `- offset` as of Node 17.6.0 + * `position` defaults to `null` + * @since v12.17.0, 13.11.0 + */ + export function read( + fd: number, + options: ReadAsyncOptions, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void, + ): void; + export function read( + fd: number, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: NodeJS.ArrayBufferView) => void, + ): void; + export namespace read { + /** + * @param fd A file descriptor. + * @param buffer The buffer that the data will be written to. + * @param offset The offset in the buffer at which to start writing. + * @param length The number of bytes to read. + * @param position The offset from the beginning of the file from which data should be read. If `null`, data will be read from the current position. + */ + function __promisify__( + fd: number, + buffer: TBuffer, + offset: number, + length: number, + position: number | null, + ): Promise<{ + bytesRead: number; + buffer: TBuffer; + }>; + function __promisify__( + fd: number, + options: ReadAsyncOptions, + ): Promise<{ + bytesRead: number; + buffer: TBuffer; + }>; + function __promisify__(fd: number): Promise<{ + bytesRead: number; + buffer: NodeJS.ArrayBufferView; + }>; + } + /** + * Returns the number of `bytesRead`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link read}. + * @since v0.1.21 + * @param [position='null'] + */ + export function readSync( + fd: number, + buffer: NodeJS.ArrayBufferView, + offset: number, + length: number, + position: ReadPosition | null, + ): number; + /** + * Similar to the above `fs.readSync` function, this version takes an optional `options` object. + * If no `options` object is specified, it will default with the above values. + */ + export function readSync(fd: number, buffer: NodeJS.ArrayBufferView, opts?: ReadSyncOptions): number; + /** + * Asynchronously reads the entire contents of a file. + * + * ```js + * import { readFile } from 'node:fs'; + * + * readFile('/etc/passwd', (err, data) => { + * if (err) throw err; + * console.log(data); + * }); + * ``` + * + * The callback is passed two arguments `(err, data)`, where `data` is the + * contents of the file. + * + * If no encoding is specified, then the raw buffer is returned. + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { readFile } from 'node:fs'; + * + * readFile('/etc/passwd', 'utf8', callback); + * ``` + * + * When the path is a directory, the behavior of `fs.readFile()` and {@link readFileSync} is platform-specific. On macOS, Linux, and Windows, an + * error will be returned. On FreeBSD, a representation of the directory's contents + * will be returned. + * + * ```js + * import { readFile } from 'node:fs'; + * + * // macOS, Linux, and Windows + * readFile('', (err, data) => { + * // => [Error: EISDIR: illegal operation on a directory, read ] + * }); + * + * // FreeBSD + * readFile('', (err, data) => { + * // => null, + * }); + * ``` + * + * It is possible to abort an ongoing request using an `AbortSignal`. 
If a + * request is aborted the callback is called with an `AbortError`: + * + * ```js + * import { readFile } from 'node:fs'; + * + * const controller = new AbortController(); + * const signal = controller.signal; + * readFile(fileInfo[0].name, { signal }, (err, buf) => { + * // ... + * }); + * // When you want to abort the request + * controller.abort(); + * ``` + * + * The `fs.readFile()` function buffers the entire file. To minimize memory costs, + * when possible prefer streaming via `fs.createReadStream()`. + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.readFile` performs. + * @since v0.1.29 + * @param path filename or file descriptor + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | ({ + encoding?: null | undefined; + flag?: string | undefined; + } & Abortable) + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, data: Buffer) => void, + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | ({ + encoding: BufferEncoding; + flag?: string | undefined; + } & Abortable) + | BufferEncoding, + callback: (err: NodeJS.ErrnoException | null, data: string) => void, + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | (ObjectEncodingOptions & { + flag?: string | undefined; + } & Abortable) + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, data: string | Buffer) => void, + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + */ + export function readFile( + path: PathOrFileDescriptor, + callback: (err: NodeJS.ErrnoException | null, data: Buffer) => void, + ): void; + export namespace readFile { + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options?: { + encoding?: null | undefined; + flag?: string | undefined; + } | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. 
+ * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options: + | { + encoding: BufferEncoding; + flag?: string | undefined; + } + | BufferEncoding, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options?: + | (ObjectEncodingOptions & { + flag?: string | undefined; + }) + | BufferEncoding + | null, + ): Promise; + } + /** + * Returns the contents of the `path`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link readFile}. + * + * If the `encoding` option is specified then this function returns a + * string. Otherwise it returns a buffer. + * + * Similar to {@link readFile}, when the path is a directory, the behavior of`fs.readFileSync()` is platform-specific. + * + * ```js + * import { readFileSync } from 'node:fs'; + * + * // macOS, Linux, and Windows + * readFileSync(''); + * // => [Error: EISDIR: illegal operation on a directory, read ] + * + * // FreeBSD + * readFileSync(''); // => + * ``` + * @since v0.1.8 + * @param path filename or file descriptor + */ + export function readFileSync( + path: PathOrFileDescriptor, + options?: { + encoding?: null | undefined; + flag?: string | undefined; + } | null, + ): Buffer; + /** + * Synchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFileSync( + path: PathOrFileDescriptor, + options: + | { + encoding: BufferEncoding; + flag?: string | undefined; + } + | BufferEncoding, + ): string; + /** + * Synchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFileSync( + path: PathOrFileDescriptor, + options?: + | (ObjectEncodingOptions & { + flag?: string | undefined; + }) + | BufferEncoding + | null, + ): string | Buffer; + export type WriteFileOptions = + | ( + & ObjectEncodingOptions + & Abortable + & { + mode?: Mode | undefined; + flag?: string | undefined; + flush?: boolean | undefined; + } + ) + | BufferEncoding + | null; + /** + * When `file` is a filename, asynchronously writes data to the file, replacing the + * file if it already exists. `data` can be a string or a buffer. 
+ * + * When `file` is a file descriptor, the behavior is similar to calling`fs.write()` directly (which is recommended). See the notes below on using + * a file descriptor. + * + * The `encoding` option is ignored if `data` is a buffer. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { writeFile } from 'node:fs'; + * import { Buffer } from 'node:buffer'; + * + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * writeFile('message.txt', data, (err) => { + * if (err) throw err; + * console.log('The file has been saved!'); + * }); + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { writeFile } from 'node:fs'; + * + * writeFile('message.txt', 'Hello Node.js', 'utf8', callback); + * ``` + * + * It is unsafe to use `fs.writeFile()` multiple times on the same file without + * waiting for the callback. For this scenario, {@link createWriteStream} is + * recommended. + * + * Similarly to `fs.readFile` \- `fs.writeFile` is a convenience method that + * performs multiple `write` calls internally to write the buffer passed to it. + * For performance sensitive code consider using {@link createWriteStream}. + * + * It is possible to use an `AbortSignal` to cancel an `fs.writeFile()`. + * Cancelation is "best effort", and some amount of data is likely still + * to be written. + * + * ```js + * import { writeFile } from 'node:fs'; + * import { Buffer } from 'node:buffer'; + * + * const controller = new AbortController(); + * const { signal } = controller; + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * writeFile('message.txt', data, { signal }, (err) => { + * // When a request is aborted - the callback is called with an AbortError + * }); + * // When the request should be aborted + * controller.abort(); + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.writeFile` performs. + * @since v0.1.29 + * @param file filename or file descriptor + */ + export function writeFile( + file: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + options: WriteFileOptions, + callback: NoParamCallback, + ): void; + /** + * Asynchronously writes data to a file, replacing the file if it already exists. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + */ + export function writeFile( + path: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + callback: NoParamCallback, + ): void; + export namespace writeFile { + /** + * Asynchronously writes data to a file, replacing the file if it already exists. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `mode` is not supplied, the default of `0o666` is used. 
+ * If `mode` is a string, it is parsed as an octal integer. + * If `flag` is not supplied, the default of `'w'` is used. + */ + function __promisify__( + path: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + options?: WriteFileOptions, + ): Promise; + } + /** + * Returns `undefined`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link writeFile}. + * @since v0.1.29 + * @param file filename or file descriptor + */ + export function writeFileSync( + file: PathOrFileDescriptor, + data: string | NodeJS.ArrayBufferView, + options?: WriteFileOptions, + ): void; + /** + * Asynchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { appendFile } from 'node:fs'; + * + * appendFile('message.txt', 'data to append', (err) => { + * if (err) throw err; + * console.log('The "data to append" was appended to file!'); + * }); + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { appendFile } from 'node:fs'; + * + * appendFile('message.txt', 'data to append', 'utf8', callback); + * ``` + * + * The `path` may be specified as a numeric file descriptor that has been opened + * for appending (using `fs.open()` or `fs.openSync()`). The file descriptor will + * not be closed automatically. + * + * ```js + * import { open, close, appendFile } from 'node:fs'; + * + * function closeFd(fd) { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * + * open('message.txt', 'a', (err, fd) => { + * if (err) throw err; + * + * try { + * appendFile(fd, 'data to append', 'utf8', (err) => { + * closeFd(fd); + * if (err) throw err; + * }); + * } catch (err) { + * closeFd(fd); + * throw err; + * } + * }); + * ``` + * @since v0.6.7 + * @param path filename or file descriptor + */ + export function appendFile( + path: PathOrFileDescriptor, + data: string | Uint8Array, + options: WriteFileOptions, + callback: NoParamCallback, + ): void; + /** + * Asynchronously append data to a file, creating the file if it does not exist. + * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + */ + export function appendFile(file: PathOrFileDescriptor, data: string | Uint8Array, callback: NoParamCallback): void; + export namespace appendFile { + /** + * Asynchronously append data to a file, creating the file if it does not exist. + * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `mode` is not supplied, the default of `0o666` is used. + * If `mode` is a string, it is parsed as an octal integer. 
+ * If `flag` is not supplied, the default of `'a'` is used. + */ + function __promisify__( + file: PathOrFileDescriptor, + data: string | Uint8Array, + options?: WriteFileOptions, + ): Promise; + } + /** + * Synchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { appendFileSync } from 'node:fs'; + * + * try { + * appendFileSync('message.txt', 'data to append'); + * console.log('The "data to append" was appended to file!'); + * } catch (err) { + * // Handle the error + * } + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { appendFileSync } from 'node:fs'; + * + * appendFileSync('message.txt', 'data to append', 'utf8'); + * ``` + * + * The `path` may be specified as a numeric file descriptor that has been opened + * for appending (using `fs.open()` or `fs.openSync()`). The file descriptor will + * not be closed automatically. + * + * ```js + * import { openSync, closeSync, appendFileSync } from 'node:fs'; + * + * let fd; + * + * try { + * fd = openSync('message.txt', 'a'); + * appendFileSync(fd, 'data to append', 'utf8'); + * } catch (err) { + * // Handle the error + * } finally { + * if (fd !== undefined) + * closeSync(fd); + * } + * ``` + * @since v0.6.7 + * @param path filename or file descriptor + */ + export function appendFileSync( + path: PathOrFileDescriptor, + data: string | Uint8Array, + options?: WriteFileOptions, + ): void; + /** + * Watch for changes on `filename`. The callback `listener` will be called each + * time the file is accessed. + * + * The `options` argument may be omitted. If provided, it should be an object. The`options` object may contain a boolean named `persistent` that indicates + * whether the process should continue to run as long as files are being watched. + * The `options` object may specify an `interval` property indicating how often the + * target should be polled in milliseconds. + * + * The `listener` gets two arguments the current stat object and the previous + * stat object: + * + * ```js + * import { watchFile } from 'fs'; + * + * watchFile('message.text', (curr, prev) => { + * console.log(`the current mtime is: ${curr.mtime}`); + * console.log(`the previous mtime was: ${prev.mtime}`); + * }); + * ``` + * + * These stat objects are instances of `fs.Stat`. If the `bigint` option is `true`, + * the numeric values in these objects are specified as `BigInt`s. + * + * To be notified when the file was modified, not just accessed, it is necessary + * to compare `curr.mtimeMs` and `prev.mtimeMs`. + * + * When an `fs.watchFile` operation results in an `ENOENT` error, it + * will invoke the listener once, with all the fields zeroed (or, for dates, the + * Unix Epoch). If the file is created later on, the listener will be called + * again, with the latest stat objects. This is a change in functionality since + * v0.10. + * + * Using {@link watch} is more efficient than `fs.watchFile` and`fs.unwatchFile`. `fs.watch` should be used instead of `fs.watchFile` and`fs.unwatchFile` when possible. + * + * When a file being watched by `fs.watchFile()` disappears and reappears, + * then the contents of `previous` in the second callback event (the file's + * reappearance) will be the same as the contents of `previous` in the first + * callback event (its disappearance). 
+ * + * This happens when: + * + * * the file is deleted, followed by a restore + * * the file is renamed and then renamed a second time back to its original name + * @since v0.1.31 + */ + export interface WatchFileOptions { + bigint?: boolean | undefined; + persistent?: boolean | undefined; + interval?: number | undefined; + } + /** + * Watch for changes on `filename`. The callback `listener` will be called each + * time the file is accessed. + * + * The `options` argument may be omitted. If provided, it should be an object. The`options` object may contain a boolean named `persistent` that indicates + * whether the process should continue to run as long as files are being watched. + * The `options` object may specify an `interval` property indicating how often the + * target should be polled in milliseconds. + * + * The `listener` gets two arguments the current stat object and the previous + * stat object: + * + * ```js + * import { watchFile } from 'node:fs'; + * + * watchFile('message.text', (curr, prev) => { + * console.log(`the current mtime is: ${curr.mtime}`); + * console.log(`the previous mtime was: ${prev.mtime}`); + * }); + * ``` + * + * These stat objects are instances of `fs.Stat`. If the `bigint` option is `true`, + * the numeric values in these objects are specified as `BigInt`s. + * + * To be notified when the file was modified, not just accessed, it is necessary + * to compare `curr.mtimeMs` and `prev.mtimeMs`. + * + * When an `fs.watchFile` operation results in an `ENOENT` error, it + * will invoke the listener once, with all the fields zeroed (or, for dates, the + * Unix Epoch). If the file is created later on, the listener will be called + * again, with the latest stat objects. This is a change in functionality since + * v0.10. + * + * Using {@link watch} is more efficient than `fs.watchFile` and`fs.unwatchFile`. `fs.watch` should be used instead of `fs.watchFile` and`fs.unwatchFile` when possible. + * + * When a file being watched by `fs.watchFile()` disappears and reappears, + * then the contents of `previous` in the second callback event (the file's + * reappearance) will be the same as the contents of `previous` in the first + * callback event (its disappearance). + * + * This happens when: + * + * * the file is deleted, followed by a restore + * * the file is renamed and then renamed a second time back to its original name + * @since v0.1.31 + */ + export function watchFile( + filename: PathLike, + options: + | (WatchFileOptions & { + bigint?: false | undefined; + }) + | undefined, + listener: StatsListener, + ): StatWatcher; + export function watchFile( + filename: PathLike, + options: + | (WatchFileOptions & { + bigint: true; + }) + | undefined, + listener: BigIntStatsListener, + ): StatWatcher; + /** + * Watch for changes on `filename`. The callback `listener` will be called each time the file is accessed. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function watchFile(filename: PathLike, listener: StatsListener): StatWatcher; + /** + * Stop watching for changes on `filename`. If `listener` is specified, only that + * particular listener is removed. Otherwise, _all_ listeners are removed, + * effectively stopping watching of `filename`. + * + * Calling `fs.unwatchFile()` with a filename that is not being watched is a + * no-op, not an error. + * + * Using {@link watch} is more efficient than `fs.watchFile()` and`fs.unwatchFile()`. 
`fs.watch()` should be used instead of `fs.watchFile()` and `fs.unwatchFile()` when possible.
+     * @since v0.1.31
+     * @param listener Optional, a listener previously attached using `fs.watchFile()`
+     */
+    export function unwatchFile(filename: PathLike, listener?: StatsListener): void;
+    export function unwatchFile(filename: PathLike, listener?: BigIntStatsListener): void;
+    export interface WatchOptions extends Abortable {
+        encoding?: BufferEncoding | "buffer" | undefined;
+        persistent?: boolean | undefined;
+        recursive?: boolean | undefined;
+    }
+    export type WatchEventType = "rename" | "change";
+    export type WatchListener<T> = (event: WatchEventType, filename: T | null) => void;
+    export type StatsListener = (curr: Stats, prev: Stats) => void;
+    export type BigIntStatsListener = (curr: BigIntStats, prev: BigIntStats) => void;
+    /**
+     * Watch for changes on `filename`, where `filename` is either a file or a
+     * directory.
+     *
+     * The second argument is optional. If `options` is provided as a string, it
+     * specifies the `encoding`. Otherwise `options` should be passed as an object.
+     *
+     * The listener callback gets two arguments `(eventType, filename)`. `eventType` is either `'rename'` or `'change'`, and `filename` is the name of the file
+     * which triggered the event.
+     *
+     * On most platforms, `'rename'` is emitted whenever a filename appears or
+     * disappears in the directory.
+     *
+     * The listener callback is attached to the `'change'` event fired by `fs.FSWatcher`, but it is not the same thing as the `'change'` value of `eventType`.
+     *
+     * If a `signal` is passed, aborting the corresponding AbortController will close
+     * the returned `fs.FSWatcher`.
+     * @since v0.5.10
+     * @param listener
+     */
+    export function watch(
+        filename: PathLike,
+        options:
+            | (WatchOptions & {
+                encoding: "buffer";
+            })
+            | "buffer",
+        listener?: WatchListener<Buffer>,
+    ): FSWatcher;
+    /**
+     * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
+     * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
+     * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options.
+     * If `encoding` is not supplied, the default of `'utf8'` is used.
+     * If `persistent` is not supplied, the default of `true` is used.
+     * If `recursive` is not supplied, the default of `false` is used.
+     */
+    export function watch(
+        filename: PathLike,
+        options?: WatchOptions | BufferEncoding | null,
+        listener?: WatchListener<string>,
+    ): FSWatcher;
+    /**
+     * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
+     * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
+     * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options.
+     * If `encoding` is not supplied, the default of `'utf8'` is used.
+     * If `persistent` is not supplied, the default of `true` is used.
+     * If `recursive` is not supplied, the default of `false` is used.
+     */
+    export function watch(
+        filename: PathLike,
+        options: WatchOptions | string,
+        listener?: WatchListener<string | Buffer>,
+    ): FSWatcher;
+    /**
+     * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
+     * @param filename A path to a file or directory.
If a URL is provided, it must use the `file:` protocol. + */ + export function watch(filename: PathLike, listener?: WatchListener): FSWatcher; + /** + * Test whether or not the given path exists by checking with the file system. + * Then call the `callback` argument with either true or false: + * + * ```js + * import { exists } from 'node:fs'; + * + * exists('/etc/passwd', (e) => { + * console.log(e ? 'it exists' : 'no passwd!'); + * }); + * ``` + * + * **The parameters for this callback are not consistent with other Node.js** + * **callbacks.** Normally, the first parameter to a Node.js callback is an `err`parameter, optionally followed by other parameters. The `fs.exists()` callback + * has only one boolean parameter. This is one reason `fs.access()` is recommended + * instead of `fs.exists()`. + * + * Using `fs.exists()` to check for the existence of a file before calling`fs.open()`, `fs.readFile()`, or `fs.writeFile()` is not recommended. Doing + * so introduces a race condition, since other processes may change the file's + * state between the two calls. Instead, user code should open/read/write the + * file directly and handle the error raised if the file does not exist. + * + * **write (NOT RECOMMENDED)** + * + * ```js + * import { exists, open, close } from 'node:fs'; + * + * exists('myfile', (e) => { + * if (e) { + * console.error('myfile already exists'); + * } else { + * open('myfile', 'wx', (err, fd) => { + * if (err) throw err; + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * } + * }); + * ``` + * + * **write (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * open('myfile', 'wx', (err, fd) => { + * if (err) { + * if (err.code === 'EEXIST') { + * console.error('myfile already exists'); + * return; + * } + * + * throw err; + * } + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * **read (NOT RECOMMENDED)** + * + * ```js + * import { open, close, exists } from 'node:fs'; + * + * exists('myfile', (e) => { + * if (e) { + * open('myfile', 'r', (err, fd) => { + * if (err) throw err; + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * } else { + * console.error('myfile does not exist'); + * } + * }); + * ``` + * + * **read (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * + * open('myfile', 'r', (err, fd) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * The "not recommended" examples above check for existence and then use the + * file; the "recommended" examples are better because they use the file directly + * and handle the error, if any. + * + * In general, check for the existence of a file only if the file won't be + * used directly, for example when its existence is a signal from another + * process. + * @since v0.0.2 + * @deprecated Since v1.0.0 - Use {@link stat} or {@link access} instead. + */ + export function exists(path: PathLike, callback: (exists: boolean) => void): void; + /** @deprecated */ + export namespace exists { + /** + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. 
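+         *
+         * As an illustrative sketch (not from the upstream Node.js docs): this declaration
+         * describes the shape `util.promisify(fs.exists)` resolves to; `/etc/passwd` is just
+         * an example path.
+         *
+         * ```js
+         * import { exists } from 'node:fs';
+         * import { promisify } from 'node:util';
+         *
+         * // fs.exists is deprecated; prefer fs.existsSync() or fs.promises.access() in new code.
+         * const existsAsync = promisify(exists);
+         * existsAsync('/etc/passwd').then((found) => console.log(found));
+         * ```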
+ */ + function __promisify__(path: PathLike): Promise; + } + /** + * Returns `true` if the path exists, `false` otherwise. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link exists}. + * + * `fs.exists()` is deprecated, but `fs.existsSync()` is not. The `callback`parameter to `fs.exists()` accepts parameters that are inconsistent with other + * Node.js callbacks. `fs.existsSync()` does not use a callback. + * + * ```js + * import { existsSync } from 'node:fs'; + * + * if (existsSync('/etc/passwd')) + * console.log('The path exists.'); + * ``` + * @since v0.1.21 + */ + export function existsSync(path: PathLike): boolean; + export namespace constants { + // File Access Constants + /** Constant for fs.access(). File is visible to the calling process. */ + const F_OK: number; + /** Constant for fs.access(). File can be read by the calling process. */ + const R_OK: number; + /** Constant for fs.access(). File can be written by the calling process. */ + const W_OK: number; + /** Constant for fs.access(). File can be executed by the calling process. */ + const X_OK: number; + // File Copy Constants + /** Constant for fs.copyFile. Flag indicating the destination file should not be overwritten if it already exists. */ + const COPYFILE_EXCL: number; + /** + * Constant for fs.copyFile. copy operation will attempt to create a copy-on-write reflink. + * If the underlying platform does not support copy-on-write, then a fallback copy mechanism is used. + */ + const COPYFILE_FICLONE: number; + /** + * Constant for fs.copyFile. Copy operation will attempt to create a copy-on-write reflink. + * If the underlying platform does not support copy-on-write, then the operation will fail with an error. + */ + const COPYFILE_FICLONE_FORCE: number; + // File Open Constants + /** Constant for fs.open(). Flag indicating to open a file for read-only access. */ + const O_RDONLY: number; + /** Constant for fs.open(). Flag indicating to open a file for write-only access. */ + const O_WRONLY: number; + /** Constant for fs.open(). Flag indicating to open a file for read-write access. */ + const O_RDWR: number; + /** Constant for fs.open(). Flag indicating to create the file if it does not already exist. */ + const O_CREAT: number; + /** Constant for fs.open(). Flag indicating that opening a file should fail if the O_CREAT flag is set and the file already exists. */ + const O_EXCL: number; + /** + * Constant for fs.open(). Flag indicating that if path identifies a terminal device, + * opening the path shall not cause that terminal to become the controlling terminal for the process + * (if the process does not already have one). + */ + const O_NOCTTY: number; + /** Constant for fs.open(). Flag indicating that if the file exists and is a regular file, and the file is opened successfully for write access, its length shall be truncated to zero. */ + const O_TRUNC: number; + /** Constant for fs.open(). Flag indicating that data will be appended to the end of the file. */ + const O_APPEND: number; + /** Constant for fs.open(). Flag indicating that the open should fail if the path is not a directory. */ + const O_DIRECTORY: number; + /** + * constant for fs.open(). + * Flag indicating reading accesses to the file system will no longer result in + * an update to the atime information associated with the file. + * This flag is available on Linux operating systems only. + */ + const O_NOATIME: number; + /** Constant for fs.open(). 
Flag indicating that the open should fail if the path is a symbolic link. */ + const O_NOFOLLOW: number; + /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O. */ + const O_SYNC: number; + /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O with write operations waiting for data integrity. */ + const O_DSYNC: number; + /** Constant for fs.open(). Flag indicating to open the symbolic link itself rather than the resource it is pointing to. */ + const O_SYMLINK: number; + /** Constant for fs.open(). When set, an attempt will be made to minimize caching effects of file I/O. */ + const O_DIRECT: number; + /** Constant for fs.open(). Flag indicating to open the file in nonblocking mode when possible. */ + const O_NONBLOCK: number; + // File Type Constants + /** Constant for fs.Stats mode property for determining a file's type. Bit mask used to extract the file type code. */ + const S_IFMT: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a regular file. */ + const S_IFREG: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a directory. */ + const S_IFDIR: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a character-oriented device file. */ + const S_IFCHR: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a block-oriented device file. */ + const S_IFBLK: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a FIFO/pipe. */ + const S_IFIFO: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a symbolic link. */ + const S_IFLNK: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a socket. */ + const S_IFSOCK: number; + // File Mode Constants + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by owner. */ + const S_IRWXU: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by owner. */ + const S_IRUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by owner. */ + const S_IWUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by owner. */ + const S_IXUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by group. */ + const S_IRWXG: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by group. */ + const S_IRGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by group. */ + const S_IWGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by group. */ + const S_IXGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by others. 
*/ + const S_IRWXO: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by others. */ + const S_IROTH: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by others. */ + const S_IWOTH: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by others. */ + const S_IXOTH: number; + /** + * When set, a memory file mapping is used to access the file. This flag + * is available on Windows operating systems only. On other operating systems, + * this flag is ignored. + */ + const UV_FS_O_FILEMAP: number; + } + /** + * Tests a user's permissions for the file or directory specified by `path`. + * The `mode` argument is an optional integer that specifies the accessibility + * checks to be performed. `mode` should be either the value `fs.constants.F_OK`or a mask consisting of the bitwise OR of any of `fs.constants.R_OK`,`fs.constants.W_OK`, and `fs.constants.X_OK` + * (e.g.`fs.constants.W_OK | fs.constants.R_OK`). Check `File access constants` for + * possible values of `mode`. + * + * The final argument, `callback`, is a callback function that is invoked with + * a possible error argument. If any of the accessibility checks fail, the error + * argument will be an `Error` object. The following examples check if`package.json` exists, and if it is readable or writable. + * + * ```js + * import { access, constants } from 'node:fs'; + * + * const file = 'package.json'; + * + * // Check if the file exists in the current directory. + * access(file, constants.F_OK, (err) => { + * console.log(`${file} ${err ? 'does not exist' : 'exists'}`); + * }); + * + * // Check if the file is readable. + * access(file, constants.R_OK, (err) => { + * console.log(`${file} ${err ? 'is not readable' : 'is readable'}`); + * }); + * + * // Check if the file is writable. + * access(file, constants.W_OK, (err) => { + * console.log(`${file} ${err ? 'is not writable' : 'is writable'}`); + * }); + * + * // Check if the file is readable and writable. + * access(file, constants.R_OK | constants.W_OK, (err) => { + * console.log(`${file} ${err ? 'is not' : 'is'} readable and writable`); + * }); + * ``` + * + * Do not use `fs.access()` to check for the accessibility of a file before calling`fs.open()`, `fs.readFile()`, or `fs.writeFile()`. Doing + * so introduces a race condition, since other processes may change the file's + * state between the two calls. Instead, user code should open/read/write the + * file directly and handle the error raised if the file is not accessible. 
+ * + * **write (NOT RECOMMENDED)** + * + * ```js + * import { access, open, close } from 'node:fs'; + * + * access('myfile', (err) => { + * if (!err) { + * console.error('myfile already exists'); + * return; + * } + * + * open('myfile', 'wx', (err, fd) => { + * if (err) throw err; + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * }); + * ``` + * + * **write (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * + * open('myfile', 'wx', (err, fd) => { + * if (err) { + * if (err.code === 'EEXIST') { + * console.error('myfile already exists'); + * return; + * } + * + * throw err; + * } + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * **read (NOT RECOMMENDED)** + * + * ```js + * import { access, open, close } from 'node:fs'; + * access('myfile', (err) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * open('myfile', 'r', (err, fd) => { + * if (err) throw err; + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * }); + * ``` + * + * **read (RECOMMENDED)** + * + * ```js + * import { open, close } from 'node:fs'; + * + * open('myfile', 'r', (err, fd) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * The "not recommended" examples above check for accessibility and then use the + * file; the "recommended" examples are better because they use the file directly + * and handle the error, if any. + * + * In general, check for the accessibility of a file only if the file will not be + * used directly, for example when its accessibility is a signal from another + * process. + * + * On Windows, access-control policies (ACLs) on a directory may limit access to + * a file or directory. The `fs.access()` function, however, does not check the + * ACL and therefore may report that a path is accessible even if the ACL restricts + * the user from reading or writing to it. + * @since v0.11.15 + * @param [mode=fs.constants.F_OK] + */ + export function access(path: PathLike, mode: number | undefined, callback: NoParamCallback): void; + /** + * Asynchronously tests a user's permissions for the file specified by path. + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function access(path: PathLike, callback: NoParamCallback): void; + export namespace access { + /** + * Asynchronously tests a user's permissions for the file specified by path. + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(path: PathLike, mode?: number): Promise; + } + /** + * Synchronously tests a user's permissions for the file or directory specified + * by `path`. The `mode` argument is an optional integer that specifies the + * accessibility checks to be performed. `mode` should be either the value`fs.constants.F_OK` or a mask consisting of the bitwise OR of any of`fs.constants.R_OK`, `fs.constants.W_OK`, and + * `fs.constants.X_OK` (e.g.`fs.constants.W_OK | fs.constants.R_OK`). 
Check `File access constants` for + * possible values of `mode`. + * + * If any of the accessibility checks fail, an `Error` will be thrown. Otherwise, + * the method will return `undefined`. + * + * ```js + * import { accessSync, constants } from 'node:fs'; + * + * try { + * accessSync('etc/passwd', constants.R_OK | constants.W_OK); + * console.log('can read/write'); + * } catch (err) { + * console.error('no access!'); + * } + * ``` + * @since v0.11.15 + * @param [mode=fs.constants.F_OK] + */ + export function accessSync(path: PathLike, mode?: number): void; + interface StreamOptions { + flags?: string | undefined; + encoding?: BufferEncoding | undefined; + fd?: number | promises.FileHandle | undefined; + mode?: number | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + signal?: AbortSignal | null | undefined; + highWaterMark?: number | undefined; + } + interface FSImplementation { + open?: (...args: any[]) => any; + close?: (...args: any[]) => any; + } + interface CreateReadStreamFSImplementation extends FSImplementation { + read: (...args: any[]) => any; + } + interface CreateWriteStreamFSImplementation extends FSImplementation { + write: (...args: any[]) => any; + writev?: (...args: any[]) => any; + } + interface ReadStreamOptions extends StreamOptions { + fs?: CreateReadStreamFSImplementation | null | undefined; + end?: number | undefined; + } + interface WriteStreamOptions extends StreamOptions { + fs?: CreateWriteStreamFSImplementation | null | undefined; + flush?: boolean | undefined; + } + /** + * Unlike the 16 KiB default `highWaterMark` for a `stream.Readable`, the stream + * returned by this method has a default `highWaterMark` of 64 KiB. + * + * `options` can include `start` and `end` values to read a range of bytes from + * the file instead of the entire file. Both `start` and `end` are inclusive and + * start counting at 0, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. If `fd` is specified and `start` is + * omitted or `undefined`, `fs.createReadStream()` reads sequentially from the + * current file position. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `fd` is specified, `ReadStream` will ignore the `path` argument and will use + * the specified file descriptor. This means that no `'open'` event will be + * emitted. `fd` should be blocking; non-blocking `fd`s should be passed to `net.Socket`. + * + * If `fd` points to a character device that only supports blocking reads + * (such as keyboard or sound card), read operations do not finish until data is + * available. This can prevent the process from exiting and the stream from + * closing naturally. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * By providing the `fs` option, it is possible to override the corresponding `fs`implementations for `open`, `read`, and `close`. When providing the `fs` option, + * an override for `read` is required. If no `fd` is provided, an override for`open` is also required. If `autoClose` is `true`, an override for `close` is + * also required. + * + * ```js + * import { createReadStream } from 'node:fs'; + * + * // Create a stream from some character device. 
+ * const stream = createReadStream('/dev/input/event0'); + * setTimeout(() => { + * stream.close(); // This may not close the stream. + * // Artificially marking end-of-stream, as if the underlying resource had + * // indicated end-of-file by itself, allows the stream to close. + * // This does not cancel pending read operations, and if there is such an + * // operation, the process may still not be able to exit successfully + * // until it finishes. + * stream.push(null); + * stream.read(0); + * }, 100); + * ``` + * + * If `autoClose` is false, then the file descriptor won't be closed, even if + * there's an error. It is the application's responsibility to close it and make + * sure there's no file descriptor leak. If `autoClose` is set to true (default + * behavior), on `'error'` or `'end'` the file descriptor will be closed + * automatically. + * + * `mode` sets the file mode (permission and sticky bits), but only if the + * file was created. + * + * An example to read the last 10 bytes of a file which is 100 bytes long: + * + * ```js + * import { createReadStream } from 'node:fs'; + * + * createReadStream('sample.txt', { start: 90, end: 99 }); + * ``` + * + * If `options` is a string, then it specifies the encoding. + * @since v0.1.31 + */ + export function createReadStream(path: PathLike, options?: BufferEncoding | ReadStreamOptions): ReadStream; + /** + * `options` may also include a `start` option to allow writing data at some + * position past the beginning of the file, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. Modifying a file rather than + * replacing it may require the `flags` option to be set to `r+` rather than the + * default `w`. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `autoClose` is set to true (default behavior) on `'error'` or `'finish'`the file descriptor will be closed automatically. If `autoClose` is false, + * then the file descriptor won't be closed, even if there's an error. + * It is the application's responsibility to close it and make sure there's no + * file descriptor leak. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * By providing the `fs` option it is possible to override the corresponding `fs`implementations for `open`, `write`, `writev`, and `close`. Overriding `write()`without `writev()` can reduce + * performance as some optimizations (`_writev()`) + * will be disabled. When providing the `fs` option, overrides for at least one of`write` and `writev` are required. If no `fd` option is supplied, an override + * for `open` is also required. If `autoClose` is `true`, an override for `close`is also required. + * + * Like `fs.ReadStream`, if `fd` is specified, `fs.WriteStream` will ignore the`path` argument and will use the specified file descriptor. This means that no`'open'` event will be + * emitted. `fd` should be blocking; non-blocking `fd`s + * should be passed to `net.Socket`. + * + * If `options` is a string, then it specifies the encoding. + * @since v0.1.31 + */ + export function createWriteStream(path: PathLike, options?: BufferEncoding | WriteStreamOptions): WriteStream; + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. 
Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. No arguments other + * than a possible + * exception are given to the completion callback. + * @since v0.1.96 + */ + export function fdatasync(fd: number, callback: NoParamCallback): void; + export namespace fdatasync { + /** + * Asynchronous fdatasync(2) - synchronize a file's in-core state with storage device. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. Returns `undefined`. + * @since v0.1.96 + */ + export function fdatasyncSync(fd: number): void; + /** + * Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. No arguments other than a possible exception are given to the + * callback function. Node.js makes no guarantees about the atomicity of the copy + * operation. If an error occurs after the destination file has been opened for + * writing, Node.js will attempt to remove the destination. + * + * `mode` is an optional integer that specifies the behavior + * of the copy operation. It is possible to create a mask consisting of the bitwise + * OR of two or more values (e.g.`fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`). + * + * * `fs.constants.COPYFILE_EXCL`: The copy operation will fail if `dest` already + * exists. + * * `fs.constants.COPYFILE_FICLONE`: The copy operation will attempt to create a + * copy-on-write reflink. If the platform does not support copy-on-write, then a + * fallback copy mechanism is used. + * * `fs.constants.COPYFILE_FICLONE_FORCE`: The copy operation will attempt to + * create a copy-on-write reflink. If the platform does not support + * copy-on-write, then the operation will fail. + * + * ```js + * import { copyFile, constants } from 'node:fs'; + * + * function callback(err) { + * if (err) throw err; + * console.log('source.txt was copied to destination.txt'); + * } + * + * // destination.txt will be created or overwritten by default. + * copyFile('source.txt', 'destination.txt', callback); + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * copyFile('source.txt', 'destination.txt', constants.COPYFILE_EXCL, callback); + * ``` + * @since v8.5.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] modifiers for copy operation. + */ + export function copyFile(src: PathLike, dest: PathLike, callback: NoParamCallback): void; + export function copyFile(src: PathLike, dest: PathLike, mode: number, callback: NoParamCallback): void; + export namespace copyFile { + function __promisify__(src: PathLike, dst: PathLike, mode?: number): Promise; + } + /** + * Synchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. Returns `undefined`. Node.js makes no guarantees about the + * atomicity of the copy operation. If an error occurs after the destination file + * has been opened for writing, Node.js will attempt to remove the destination. + * + * `mode` is an optional integer that specifies the behavior + * of the copy operation. 
It is possible to create a mask consisting of the bitwise + * OR of two or more values (e.g.`fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`). + * + * * `fs.constants.COPYFILE_EXCL`: The copy operation will fail if `dest` already + * exists. + * * `fs.constants.COPYFILE_FICLONE`: The copy operation will attempt to create a + * copy-on-write reflink. If the platform does not support copy-on-write, then a + * fallback copy mechanism is used. + * * `fs.constants.COPYFILE_FICLONE_FORCE`: The copy operation will attempt to + * create a copy-on-write reflink. If the platform does not support + * copy-on-write, then the operation will fail. + * + * ```js + * import { copyFileSync, constants } from 'node:fs'; + * + * // destination.txt will be created or overwritten by default. + * copyFileSync('source.txt', 'destination.txt'); + * console.log('source.txt was copied to destination.txt'); + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * copyFileSync('source.txt', 'destination.txt', constants.COPYFILE_EXCL); + * ``` + * @since v8.5.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] modifiers for copy operation. + */ + export function copyFileSync(src: PathLike, dest: PathLike, mode?: number): void; + /** + * Write an array of `ArrayBufferView`s to the file specified by `fd` using`writev()`. + * + * `position` is the offset from the beginning of the file where this data + * should be written. If `typeof position !== 'number'`, the data will be written + * at the current position. + * + * The callback will be given three arguments: `err`, `bytesWritten`, and`buffers`. `bytesWritten` is how many bytes were written from `buffers`. + * + * If this method is `util.promisify()` ed, it returns a promise for an`Object` with `bytesWritten` and `buffers` properties. + * + * It is unsafe to use `fs.writev()` multiple times on the same file without + * waiting for the callback. For this scenario, use {@link createWriteStream}. + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v12.9.0 + * @param [position='null'] + */ + export function writev( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + cb: (err: NodeJS.ErrnoException | null, bytesWritten: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export function writev( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position: number, + cb: (err: NodeJS.ErrnoException | null, bytesWritten: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export interface WriteVResult { + bytesWritten: number; + buffers: NodeJS.ArrayBufferView[]; + } + export namespace writev { + function __promisify__( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position?: number, + ): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link writev}. + * @since v12.9.0 + * @param [position='null'] + * @return The number of bytes written. + */ + export function writevSync(fd: number, buffers: readonly NodeJS.ArrayBufferView[], position?: number): number; + /** + * Read from a file specified by `fd` and write to an array of `ArrayBufferView`s + * using `readv()`. + * + * `position` is the offset from the beginning of the file from where data + * should be read. 
If `typeof position !== 'number'`, the data will be read + * from the current position. + * + * The callback will be given three arguments: `err`, `bytesRead`, and`buffers`. `bytesRead` is how many bytes were read from the file. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesRead` and `buffers` properties. + * @since v13.13.0, v12.17.0 + * @param [position='null'] + */ + export function readv( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + cb: (err: NodeJS.ErrnoException | null, bytesRead: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export function readv( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position: number, + cb: (err: NodeJS.ErrnoException | null, bytesRead: number, buffers: NodeJS.ArrayBufferView[]) => void, + ): void; + export interface ReadVResult { + bytesRead: number; + buffers: NodeJS.ArrayBufferView[]; + } + export namespace readv { + function __promisify__( + fd: number, + buffers: readonly NodeJS.ArrayBufferView[], + position?: number, + ): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link readv}. + * @since v13.13.0, v12.17.0 + * @param [position='null'] + * @return The number of bytes read. + */ + export function readvSync(fd: number, buffers: readonly NodeJS.ArrayBufferView[], position?: number): number; + + export interface OpenAsBlobOptions { + /** + * An optional mime type for the blob. + * + * @default 'undefined' + */ + type?: string | undefined; + } + + /** + * Returns a `Blob` whose data is backed by the given file. + * + * The file must not be modified after the `Blob` is created. Any modifications + * will cause reading the `Blob` data to fail with a `DOMException` error. + * Synchronous stat operations on the file when the `Blob` is created, and before + * each read in order to detect whether the file data has been modified on disk. + * + * ```js + * import { openAsBlob } from 'node:fs'; + * + * const blob = await openAsBlob('the.file.txt'); + * const ab = await blob.arrayBuffer(); + * blob.stream(); + * ``` + * @since v19.8.0 + * @experimental + */ + export function openAsBlob(path: PathLike, options?: OpenAsBlobOptions): Promise; + + export interface OpenDirOptions { + /** + * @default 'utf8' + */ + encoding?: BufferEncoding | undefined; + /** + * Number of directory entries that are buffered + * internally when reading from the directory. Higher values lead to better + * performance but higher memory usage. + * @default 32 + */ + bufferSize?: number | undefined; + /** + * @default false + */ + recursive?: boolean; + } + /** + * Synchronously open a directory. See [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html). + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * @since v12.12.0 + */ + export function opendirSync(path: PathLike, options?: OpenDirOptions): Dir; + /** + * Asynchronously open a directory. See the POSIX [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html) documentation for + * more details. + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. 
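+     *
+     * As an illustrative sketch (not from the upstream Node.js docs) of consuming the
+     * callback form; `./some/dir` is a placeholder path:
+     *
+     * ```js
+     * import { opendir } from 'node:fs';
+     *
+     * opendir('./some/dir', async (err, dir) => {
+     *   if (err) throw err;
+     *   // fs.Dir is async-iterable; iterating to completion closes the handle.
+     *   for await (const dirent of dir) {
+     *     console.log(dirent.name);
+     *   }
+     * });
+     * ```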
+ * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * @since v12.12.0 + */ + export function opendir(path: PathLike, cb: (err: NodeJS.ErrnoException | null, dir: Dir) => void): void; + export function opendir( + path: PathLike, + options: OpenDirOptions, + cb: (err: NodeJS.ErrnoException | null, dir: Dir) => void, + ): void; + export namespace opendir { + function __promisify__(path: PathLike, options?: OpenDirOptions): Promise; + } + export interface BigIntStats extends StatsBase { + atimeNs: bigint; + mtimeNs: bigint; + ctimeNs: bigint; + birthtimeNs: bigint; + } + export interface BigIntOptions { + bigint: true; + } + export interface StatOptions { + bigint?: boolean | undefined; + } + export interface StatSyncOptions extends StatOptions { + throwIfNoEntry?: boolean | undefined; + } + interface CopyOptionsBase { + /** + * Dereference symlinks + * @default false + */ + dereference?: boolean; + /** + * When `force` is `false`, and the destination + * exists, throw an error. + * @default false + */ + errorOnExist?: boolean; + /** + * Overwrite existing file or directory. _The copy + * operation will ignore errors if you set this to false and the destination + * exists. Use the `errorOnExist` option to change this behavior. + * @default true + */ + force?: boolean; + /** + * Modifiers for copy operation. See `mode` flag of {@link copyFileSync()} + */ + mode?: number; + /** + * When `true` timestamps from `src` will + * be preserved. + * @default false + */ + preserveTimestamps?: boolean; + /** + * Copy directories recursively. + * @default false + */ + recursive?: boolean; + /** + * When true, path resolution for symlinks will be skipped + * @default false + */ + verbatimSymlinks?: boolean; + } + export interface CopyOptions extends CopyOptionsBase { + /** + * Function to filter copied files/directories. Return + * `true` to copy the item, `false` to ignore it. + */ + filter?(source: string, destination: string): boolean | Promise; + } + export interface CopySyncOptions extends CopyOptionsBase { + /** + * Function to filter copied files/directories. Return + * `true` to copy the item, `false` to ignore it. + */ + filter?(source: string, destination: string): boolean; + } + /** + * Asynchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + */ + export function cp( + source: string | URL, + destination: string | URL, + callback: (err: NodeJS.ErrnoException | null) => void, + ): void; + export function cp( + source: string | URL, + destination: string | URL, + opts: CopyOptions, + callback: (err: NodeJS.ErrnoException | null) => void, + ): void; + /** + * Synchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. 
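+     *
+     * As an illustrative sketch (not from the upstream Node.js docs); `./src` and `./backup`
+     * are placeholder paths:
+     *
+     * ```js
+     * import { cpSync } from 'node:fs';
+     *
+     * // Recursively copy a directory tree; existing files in the destination are overwritten.
+     * cpSync('./src', './backup', { recursive: true });
+     * ```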
+ */ + export function cpSync(source: string | URL, destination: string | URL, opts?: CopySyncOptions): void; +} +declare module "node:fs" { + export * from "fs"; +} diff --git a/dist/node_modules/@types/node/ts4.8/fs/promises.d.ts b/dist/node_modules/@types/node/ts4.8/fs/promises.d.ts new file mode 100644 index 0000000..88a9fc3 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/fs/promises.d.ts @@ -0,0 +1,1239 @@ +/** + * The `fs/promises` API provides asynchronous file system methods that return + * promises. + * + * The promise APIs use the underlying Node.js threadpool to perform file + * system operations off the event loop thread. These operations are not + * synchronized or threadsafe. Care must be taken when performing multiple + * concurrent modifications on the same file or data corruption may occur. + * @since v10.0.0 + */ +declare module "fs/promises" { + import { Abortable } from "node:events"; + import { Stream } from "node:stream"; + import { ReadableStream } from "node:stream/web"; + import { + BigIntStats, + BigIntStatsFs, + BufferEncodingOption, + constants as fsConstants, + CopyOptions, + Dir, + Dirent, + MakeDirectoryOptions, + Mode, + ObjectEncodingOptions, + OpenDirOptions, + OpenMode, + PathLike, + ReadStream, + ReadVResult, + RmDirOptions, + RmOptions, + StatFsOptions, + StatOptions, + Stats, + StatsFs, + TimeLike, + WatchEventType, + WatchOptions, + WriteStream, + WriteVResult, + } from "node:fs"; + import { Interface as ReadlineInterface } from "node:readline"; + interface FileChangeInfo { + eventType: WatchEventType; + filename: T | null; + } + interface FlagAndOpenMode { + mode?: Mode | undefined; + flag?: OpenMode | undefined; + } + interface FileReadResult { + bytesRead: number; + buffer: T; + } + interface FileReadOptions { + /** + * @default `Buffer.alloc(0xffff)` + */ + buffer?: T; + /** + * @default 0 + */ + offset?: number | null; + /** + * @default `buffer.byteLength` + */ + length?: number | null; + position?: number | null; + } + interface CreateReadStreamOptions { + encoding?: BufferEncoding | null | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + end?: number | undefined; + highWaterMark?: number | undefined; + } + interface CreateWriteStreamOptions { + encoding?: BufferEncoding | null | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + highWaterMark?: number | undefined; + flush?: boolean | undefined; + } + interface ReadableWebStreamOptions { + /** + * Whether to open a normal or a `'bytes'` stream. + * @since v20.0.0 + */ + type?: "bytes" | undefined; + } + // TODO: Add `EventEmitter` close + interface FileHandle { + /** + * The numeric file descriptor managed by the {FileHandle} object. + * @since v10.0.0 + */ + readonly fd: number; + /** + * Alias of `filehandle.writeFile()`. + * + * When operating on file handles, the mode cannot be changed from what it was set + * to with `fsPromises.open()`. Therefore, this is equivalent to `filehandle.writeFile()`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + appendFile( + data: string | Uint8Array, + options?: + | (ObjectEncodingOptions & FlagAndOpenMode & { flush?: boolean | undefined }) + | BufferEncoding + | null, + ): Promise; + /** + * Changes the ownership of the file. A wrapper for [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html). + * @since v10.0.0 + * @param uid The file's new owner's user id. 
+ * @param gid The file's new group's group id. + * @return Fulfills with `undefined` upon success. + */ + chown(uid: number, gid: number): Promise; + /** + * Modifies the permissions on the file. See [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html). + * @since v10.0.0 + * @param mode the file mode bit mask. + * @return Fulfills with `undefined` upon success. + */ + chmod(mode: Mode): Promise; + /** + * Unlike the 16 KiB default `highWaterMark` for a `stream.Readable`, the stream + * returned by this method has a default `highWaterMark` of 64 KiB. + * + * `options` can include `start` and `end` values to read a range of bytes from + * the file instead of the entire file. Both `start` and `end` are inclusive and + * start counting at 0, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. If `start` is + * omitted or `undefined`, `filehandle.createReadStream()` reads sequentially from + * the current file position. The `encoding` can be any one of those accepted by `Buffer`. + * + * If the `FileHandle` points to a character device that only supports blocking + * reads (such as keyboard or sound card), read operations do not finish until data + * is available. This can prevent the process from exiting and the stream from + * closing naturally. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const fd = await open('/dev/input/event0'); + * // Create a stream from some character device. + * const stream = fd.createReadStream(); + * setTimeout(() => { + * stream.close(); // This may not close the stream. + * // Artificially marking end-of-stream, as if the underlying resource had + * // indicated end-of-file by itself, allows the stream to close. + * // This does not cancel pending read operations, and if there is such an + * // operation, the process may still not be able to exit successfully + * // until it finishes. + * stream.push(null); + * stream.read(0); + * }, 100); + * ``` + * + * If `autoClose` is false, then the file descriptor won't be closed, even if + * there's an error. It is the application's responsibility to close it and make + * sure there's no file descriptor leak. If `autoClose` is set to true (default + * behavior), on `'error'` or `'end'` the file descriptor will be closed + * automatically. + * + * An example to read the last 10 bytes of a file which is 100 bytes long: + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const fd = await open('sample.txt'); + * fd.createReadStream({ start: 90, end: 99 }); + * ``` + * @since v16.11.0 + */ + createReadStream(options?: CreateReadStreamOptions): ReadStream; + /** + * `options` may also include a `start` option to allow writing data at some + * position past the beginning of the file, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. Modifying a file rather than + * replacing it may require the `flags` `open` option to be set to `r+` rather than + * the default `r`. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `autoClose` is set to true (default behavior) on `'error'` or `'finish'`the file descriptor will be closed automatically. 
If `autoClose` is false, + * then the file descriptor won't be closed, even if there's an error. + * It is the application's responsibility to close it and make sure there's no + * file descriptor leak. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * @since v16.11.0 + */ + createWriteStream(options?: CreateWriteStreamOptions): WriteStream; + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. + * + * Unlike `filehandle.sync` this method does not flush modified metadata. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + datasync(): Promise; + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + sync(): Promise; + /** + * Reads data from the file and stores that in the given buffer. + * + * If the file is not modified concurrently, the end-of-file is reached when the + * number of bytes read is zero. + * @since v10.0.0 + * @param buffer A buffer that will be filled with the file data read. + * @param offset The location in the buffer at which to start filling. + * @param length The number of bytes to read. + * @param position The location where to begin reading data from the file. If `null`, data will be read from the current file position, and the position will be updated. If `position` is an + * integer, the current file position will remain unchanged. + * @return Fulfills upon success with an object with two properties: + */ + read( + buffer: T, + offset?: number | null, + length?: number | null, + position?: number | null, + ): Promise>; + read(options?: FileReadOptions): Promise>; + /** + * Returns a `ReadableStream` that may be used to read the files data. + * + * An error will be thrown if this method is called more than once or is called + * after the `FileHandle` is closed or closing. + * + * ```js + * import { + * open, + * } from 'node:fs/promises'; + * + * const file = await open('./some/file/to/read'); + * + * for await (const chunk of file.readableWebStream()) + * console.log(chunk); + * + * await file.close(); + * ``` + * + * While the `ReadableStream` will read the file to completion, it will not + * close the `FileHandle` automatically. User code must still call the`fileHandle.close()` method. + * @since v17.0.0 + * @experimental + */ + readableWebStream(options?: ReadableWebStreamOptions): ReadableStream; + /** + * Asynchronously reads the entire contents of a file. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `FileHandle` has to support reading. + * + * If one or more `filehandle.read()` calls are made on a file handle and then a`filehandle.readFile()` call is made, the data will be read from the current + * position till the end of the file. It doesn't always read from the beginning + * of the file. + * @since v10.0.0 + * @return Fulfills upon a successful read with the contents of the file. If no encoding is specified (using `options.encoding`), the data is returned as a {Buffer} object. 
Otherwise, the
+         * data will be a string.
+         */
+        readFile(
+            options?: {
+                encoding?: null | undefined;
+                flag?: OpenMode | undefined;
+            } | null,
+        ): Promise<Buffer>;
+        /**
+         * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically.
+         * The `FileHandle` must have been opened for reading.
+         * @param options An object that may contain an optional flag.
+         * If a flag is not provided, it defaults to `'r'`.
+         */
+        readFile(
+            options:
+                | {
+                    encoding: BufferEncoding;
+                    flag?: OpenMode | undefined;
+                }
+                | BufferEncoding,
+        ): Promise<string>;
+        /**
+         * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically.
+         * The `FileHandle` must have been opened for reading.
+         * @param options An object that may contain an optional flag.
+         * If a flag is not provided, it defaults to `'r'`.
+         */
+        readFile(
+            options?:
+                | (ObjectEncodingOptions & {
+                    flag?: OpenMode | undefined;
+                })
+                | BufferEncoding
+                | null,
+        ): Promise<string | Buffer>;
+        /**
+         * Convenience method to create a `readline` interface and stream over the file.
+         * See `filehandle.createReadStream()` for the options.
+         *
+         * ```js
+         * import { open } from 'node:fs/promises';
+         *
+         * const file = await open('./some/file/to/read');
+         *
+         * for await (const line of file.readLines()) {
+         *   console.log(line);
+         * }
+         * ```
+         * @since v18.11.0
+         */
+        readLines(options?: CreateReadStreamOptions): ReadlineInterface;
+        /**
+         * @since v10.0.0
+         * @return Fulfills with an {fs.Stats} for the file.
+         */
+        stat(
+            opts?: StatOptions & {
+                bigint?: false | undefined;
+            },
+        ): Promise<Stats>;
+        stat(
+            opts: StatOptions & {
+                bigint: true;
+            },
+        ): Promise<BigIntStats>;
+        stat(opts?: StatOptions): Promise<Stats | BigIntStats>;
+        /**
+         * Truncates the file.
+         *
+         * If the file was larger than `len` bytes, only the first `len` bytes will be
+         * retained in the file.
+         *
+         * The following example retains only the first four bytes of the file:
+         *
+         * ```js
+         * import { open } from 'node:fs/promises';
+         *
+         * let filehandle = null;
+         * try {
+         *   filehandle = await open('temp.txt', 'r+');
+         *   await filehandle.truncate(4);
+         * } finally {
+         *   await filehandle?.close();
+         * }
+         * ```
+         *
+         * If the file previously was shorter than `len` bytes, it is extended, and the
+         * extended part is filled with null bytes (`'\0'`):
+         *
+         * If `len` is negative then `0` will be used.
+         * @since v10.0.0
+         * @param [len=0]
+         * @return Fulfills with `undefined` upon success.
+         */
+        truncate(len?: number): Promise<void>;
+        /**
+         * Change the file system timestamps of the object referenced by the `FileHandle` then fulfills the promise with no arguments upon success.
+         * @since v10.0.0
+         */
+        utimes(atime: TimeLike, mtime: TimeLike): Promise<void>;
+        /**
+         * Asynchronously writes data to a file, replacing the file if it already exists. `data` can be a string, a buffer, an
+         * [AsyncIterable](https://tc39.github.io/ecma262/#sec-asynciterable-interface), or an
+         * [Iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol) object.
+         * The promise is fulfilled with no arguments upon success.
+         *
+         * If `options` is a string, then it specifies the `encoding`.
+         *
+         * The `FileHandle` has to support writing.
+         *
+         * It is unsafe to use `filehandle.writeFile()` multiple times on the same file
+         * without waiting for the promise to be fulfilled (or rejected).
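+         *
+         * As an illustrative sketch (not from the upstream Node.js docs); `tmp.txt` and the
+         * data string are placeholders:
+         *
+         * ```js
+         * import { open } from 'node:fs/promises';
+         *
+         * let filehandle;
+         * try {
+         *   // 'w' opens the file for writing, truncating it if it already exists.
+         *   filehandle = await open('tmp.txt', 'w');
+         *   await filehandle.writeFile('Hello, world\n', 'utf8');
+         * } finally {
+         *   await filehandle?.close();
+         * }
+         * ```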
+ * + * If one or more `filehandle.write()` calls are made on a file handle and then a`filehandle.writeFile()` call is made, the data will be written from the + * current position till the end of the file. It doesn't always write from the + * beginning of the file. + * @since v10.0.0 + */ + writeFile( + data: string | Uint8Array, + options?: + | (ObjectEncodingOptions & FlagAndOpenMode & Abortable & { flush?: boolean | undefined }) + | BufferEncoding + | null, + ): Promise; + /** + * Write `buffer` to the file. + * + * The promise is fulfilled with an object containing two properties: + * + * It is unsafe to use `filehandle.write()` multiple times on the same file + * without waiting for the promise to be fulfilled (or rejected). For this + * scenario, use `filehandle.createWriteStream()`. + * + * On Linux, positional writes do not work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v10.0.0 + * @param offset The start position from within `buffer` where the data to write begins. + * @param [length=buffer.byteLength - offset] The number of bytes from `buffer` to write. + * @param [position='null'] The offset from the beginning of the file where the data from `buffer` should be written. If `position` is not a `number`, the data will be written at the current + * position. See the POSIX pwrite(2) documentation for more detail. + */ + write( + buffer: TBuffer, + offset?: number | null, + length?: number | null, + position?: number | null, + ): Promise<{ + bytesWritten: number; + buffer: TBuffer; + }>; + write( + data: string, + position?: number | null, + encoding?: BufferEncoding | null, + ): Promise<{ + bytesWritten: number; + buffer: string; + }>; + /** + * Write an array of [ArrayBufferView](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView) s to the file. + * + * The promise is fulfilled with an object containing a two properties: + * + * It is unsafe to call `writev()` multiple times on the same file without waiting + * for the promise to be fulfilled (or rejected). + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v12.9.0 + * @param [position='null'] The offset from the beginning of the file where the data from `buffers` should be written. If `position` is not a `number`, the data will be written at the current + * position. + */ + writev(buffers: readonly NodeJS.ArrayBufferView[], position?: number): Promise; + /** + * Read from a file and write to an array of [ArrayBufferView](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView) s + * @since v13.13.0, v12.17.0 + * @param [position='null'] The offset from the beginning of the file where the data should be read from. If `position` is not a `number`, the data will be read from the current position. + * @return Fulfills upon success an object containing two properties: + */ + readv(buffers: readonly NodeJS.ArrayBufferView[], position?: number): Promise; + /** + * Closes the file handle after waiting for any pending operation on the handle to + * complete. + * + * ```js + * import { open } from 'node:fs/promises'; + * + * let filehandle; + * try { + * filehandle = await open('thefile.txt', 'r'); + * } finally { + * await filehandle?.close(); + * } + * ``` + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. 
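The `write()` and `writev()` signatures above accept an optional position; when it is omitted, data is written at the handle's current position. A short sketch combining both calls, with an illustrative file name:

```ts
// Hedged sketch: a buffer write followed by a gather-write with writev().
import { open } from "node:fs/promises";
import { Buffer } from "node:buffer";

async function demoWrites(path: string) {
  const handle = await open(path, "w");
  try {
    // Write the 4-byte header at the current file position (position omitted).
    const header = Buffer.from("HEAD");
    const { bytesWritten } = await handle.write(header, 0, header.byteLength);
    console.log(`wrote ${bytesWritten} header bytes`);

    // Gather-write two buffers, continuing from the current position.
    await handle.writev([Buffer.from("hello "), Buffer.from("world\n")]);
  } finally {
    await handle.close();
  }
}

demoWrites("scratch.bin").catch(console.error);
```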
+ */ + close(): Promise; + /** + * An alias for {@link FileHandle.close()}. + * @since v20.4.0 + */ + [Symbol.asyncDispose](): Promise; + } + const constants: typeof fsConstants; + /** + * Tests a user's permissions for the file or directory specified by `path`. + * The `mode` argument is an optional integer that specifies the accessibility + * checks to be performed. `mode` should be either the value `fs.constants.F_OK`or a mask consisting of the bitwise OR of any of `fs.constants.R_OK`,`fs.constants.W_OK`, and `fs.constants.X_OK` + * (e.g.`fs.constants.W_OK | fs.constants.R_OK`). Check `File access constants` for + * possible values of `mode`. + * + * If the accessibility check is successful, the promise is fulfilled with no + * value. If any of the accessibility checks fail, the promise is rejected + * with an [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error) object. The following example checks if the file`/etc/passwd` can be read and + * written by the current process. + * + * ```js + * import { access, constants } from 'node:fs/promises'; + * + * try { + * await access('/etc/passwd', constants.R_OK | constants.W_OK); + * console.log('can access'); + * } catch { + * console.error('cannot access'); + * } + * ``` + * + * Using `fsPromises.access()` to check for the accessibility of a file before + * calling `fsPromises.open()` is not recommended. Doing so introduces a race + * condition, since other processes may change the file's state between the two + * calls. Instead, user code should open/read/write the file directly and handle + * the error raised if the file is not accessible. + * @since v10.0.0 + * @param [mode=fs.constants.F_OK] + * @return Fulfills with `undefined` upon success. + */ + function access(path: PathLike, mode?: number): Promise; + /** + * Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. + * + * No guarantees are made about the atomicity of the copy operation. If an + * error occurs after the destination file has been opened for writing, an attempt + * will be made to remove the destination. + * + * ```js + * import { copyFile, constants } from 'node:fs/promises'; + * + * try { + * await copyFile('source.txt', 'destination.txt'); + * console.log('source.txt was copied to destination.txt'); + * } catch { + * console.error('The file could not be copied'); + * } + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * try { + * await copyFile('source.txt', 'destination.txt', constants.COPYFILE_EXCL); + * console.log('source.txt was copied to destination.txt'); + * } catch { + * console.error('The file could not be copied'); + * } + * ``` + * @since v10.0.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] Optional modifiers that specify the behavior of the copy operation. It is possible to create a mask consisting of the bitwise OR of two or more values (e.g. + * `fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`) + * @return Fulfills with `undefined` upon success. + */ + function copyFile(src: PathLike, dest: PathLike, mode?: number): Promise; + /** + * Opens a `FileHandle`. + * + * Refer to the POSIX [`open(2)`](http://man7.org/linux/man-pages/man2/open.2.html) documentation for more detail. + * + * Some characters (`< > : " / \ | ? 
*`) are reserved under Windows as documented + * by [Naming Files, Paths, and Namespaces](https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file). Under NTFS, if the filename contains + * a colon, Node.js will open a file system stream, as described by [this MSDN page](https://docs.microsoft.com/en-us/windows/desktop/FileIO/using-streams). + * @since v10.0.0 + * @param [flags='r'] See `support of file system `flags``. + * @param [mode=0o666] Sets the file mode (permission and sticky bits) if the file is created. + * @return Fulfills with a {FileHandle} object. + */ + function open(path: PathLike, flags?: string | number, mode?: Mode): Promise; + /** + * Renames `oldPath` to `newPath`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function rename(oldPath: PathLike, newPath: PathLike): Promise; + /** + * Truncates (shortens or extends the length) of the content at `path` to `len`bytes. + * @since v10.0.0 + * @param [len=0] + * @return Fulfills with `undefined` upon success. + */ + function truncate(path: PathLike, len?: number): Promise; + /** + * Removes the directory identified by `path`. + * + * Using `fsPromises.rmdir()` on a file (not a directory) results in the + * promise being rejected with an `ENOENT` error on Windows and an `ENOTDIR`error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use `fsPromises.rm()` with options `{ recursive: true, force: true }`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function rmdir(path: PathLike, options?: RmDirOptions): Promise; + /** + * Removes files and directories (modeled on the standard POSIX `rm` utility). + * @since v14.14.0 + * @return Fulfills with `undefined` upon success. + */ + function rm(path: PathLike, options?: RmOptions): Promise; + /** + * Asynchronously creates a directory. + * + * The optional `options` argument can be an integer specifying `mode` (permission + * and sticky bits), or an object with a `mode` property and a `recursive`property indicating whether parent directories should be created. Calling`fsPromises.mkdir()` when `path` is a directory + * that exists results in a + * rejection only when `recursive` is false. + * + * ```js + * import { mkdir } from 'node:fs/promises'; + * + * try { + * const projectFolder = new URL('./test/project/', import.meta.url); + * const createDir = await mkdir(projectFolder, { recursive: true }); + * + * console.log(`created ${createDir}`); + * } catch (err) { + * console.error(err.message); + * } + * ``` + * @since v10.0.0 + * @return Upon success, fulfills with `undefined` if `recursive` is `false`, or the first directory path created if `recursive` is `true`. + */ + function mkdir( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function mkdir( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null, + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
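The `open()` declaration above takes explicit `flags` and `mode` arguments. A sketch using the `'wx'` exclusive-create flag; the lock-file path and permission bits are illustrative assumptions, not values from this diff:

```ts
// Hedged sketch: fsPromises.open() with explicit flags and mode.
import { open } from "node:fs/promises";
import process from "node:process";

async function createLockFile(path: string) {
  // 'wx' fails with EEXIST if the file already exists; 0o600 restricts
  // the newly created file to the current user.
  const handle = await open(path, "wx", 0o600);
  try {
    await handle.writeFile(String(process.pid));
  } finally {
    await handle.close();
  }
}

createLockFile("app.lock").catch((err) => {
  if (err.code === "EEXIST") console.error("lock already held");
  else throw err;
});
```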
+ * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function mkdir(path: PathLike, options?: Mode | MakeDirectoryOptions | null): Promise; + /** + * Reads the contents of a directory. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames. If the `encoding` is set to `'buffer'`, the filenames returned + * will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the returned array will contain `fs.Dirent` objects. + * + * ```js + * import { readdir } from 'node:fs/promises'; + * + * try { + * const files = await readdir(path); + * for (const file of files) + * console.log(file); + * } catch (err) { + * console.error(err); + * } + * ``` + * @since v10.0.0 + * @return Fulfills with an array of the names of the files in the directory excluding `'.'` and `'..'`. + */ + function readdir( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readdir( + path: PathLike, + options: + | { + encoding: "buffer"; + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + } + | "buffer", + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readdir( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + recursive?: boolean | undefined; + }) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + function readdir( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + recursive?: boolean | undefined; + }, + ): Promise; + /** + * Reads the contents of the symbolic link referred to by `path`. See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more detail. The promise is + * fulfilled with the`linkString` upon success. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path returned. If the `encoding` is set to `'buffer'`, the link path + * returned will be passed as a `Buffer` object. + * @since v10.0.0 + * @return Fulfills with the `linkString` upon success. + */ + function readlink(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. 
If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readlink(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readlink(path: PathLike, options?: ObjectEncodingOptions | string | null): Promise; + /** + * Creates a symbolic link. + * + * The `type` argument is only used on Windows platforms and can be one of `'dir'`,`'file'`, or `'junction'`. If the `type` argument is not a string, Node.js will + * autodetect `target` type and use `'file'` or `'dir'`. If the `target` does not + * exist, `'file'` will be used. Windows junction points require the destination + * path to be absolute. When using `'junction'`, the `target` argument will + * automatically be normalized to absolute path. Junction points on NTFS volumes + * can only point to directories. + * @since v10.0.0 + * @param [type='null'] + * @return Fulfills with `undefined` upon success. + */ + function symlink(target: PathLike, path: PathLike, type?: string | null): Promise; + /** + * Equivalent to `fsPromises.stat()` unless `path` refers to a symbolic link, + * in which case the link itself is stat-ed, not the file that it refers to. + * Refer to the POSIX [`lstat(2)`](http://man7.org/linux/man-pages/man2/lstat.2.html) document for more detail. + * @since v10.0.0 + * @return Fulfills with the {fs.Stats} object for the given symbolic link `path`. + */ + function lstat( + path: PathLike, + opts?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function lstat( + path: PathLike, + opts: StatOptions & { + bigint: true; + }, + ): Promise; + function lstat(path: PathLike, opts?: StatOptions): Promise; + /** + * @since v10.0.0 + * @return Fulfills with the {fs.Stats} object for the given `path`. + */ + function stat( + path: PathLike, + opts?: StatOptions & { + bigint?: false | undefined; + }, + ): Promise; + function stat( + path: PathLike, + opts: StatOptions & { + bigint: true; + }, + ): Promise; + function stat(path: PathLike, opts?: StatOptions): Promise; + /** + * @since v19.6.0, v18.15.0 + * @return Fulfills with the {fs.StatFs} object for the given `path`. + */ + function statfs( + path: PathLike, + opts?: StatFsOptions & { + bigint?: false | undefined; + }, + ): Promise; + function statfs( + path: PathLike, + opts: StatFsOptions & { + bigint: true; + }, + ): Promise; + function statfs(path: PathLike, opts?: StatFsOptions): Promise; + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function link(existingPath: PathLike, newPath: PathLike): Promise; + /** + * If `path` refers to a symbolic link, then the link is removed without affecting + * the file or directory to which that link refers. If the `path` refers to a file + * path that is not a symbolic link, the file is deleted. See the POSIX [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html) documentation for more detail. 
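The `withFileTypes` overload of `readdir()` shown above yields `Dirent` objects, which makes a recursive listing straightforward (the declarations also show a `recursive` option that can do this in one call). A sketch with an illustrative directory:

```ts
// Hedged sketch: walking a tree with the withFileTypes readdir overload.
import { readdir } from "node:fs/promises";
import { join } from "node:path";

async function listFiles(dir: string): Promise<string[]> {
  const entries = await readdir(dir, { withFileTypes: true });
  const files: string[] = [];
  for (const entry of entries) {
    const full = join(dir, entry.name);
    // Dirent exposes type checks such as isDirectory() and isFile().
    if (entry.isDirectory()) files.push(...(await listFiles(full)));
    else files.push(full);
  }
  return files;
}

listFiles("./src").then((files) => console.log(files)).catch(console.error);
```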
+ * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function unlink(path: PathLike): Promise; + /** + * Changes the permissions of a file. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function chmod(path: PathLike, mode: Mode): Promise; + /** + * Changes the permissions on a symbolic link. + * + * This method is only implemented on macOS. + * @deprecated Since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function lchmod(path: PathLike, mode: Mode): Promise; + /** + * Changes the ownership on a symbolic link. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function lchown(path: PathLike, uid: number, gid: number): Promise; + /** + * Changes the access and modification times of a file in the same way as `fsPromises.utimes()`, with the difference that if the path refers to a + * symbolic link, then the link is not dereferenced: instead, the timestamps of + * the symbolic link itself are changed. + * @since v14.5.0, v12.19.0 + * @return Fulfills with `undefined` upon success. + */ + function lutimes(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + /** + * Changes the ownership of a file. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function chown(path: PathLike, uid: number, gid: number): Promise; + /** + * Change the file system timestamps of the object referenced by `path`. + * + * The `atime` and `mtime` arguments follow these rules: + * + * * Values can be either numbers representing Unix epoch time, `Date`s, or a + * numeric string like `'123456789.0'`. + * * If the value can not be converted to a number, or is `NaN`, `Infinity`, or`-Infinity`, an `Error` will be thrown. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function utimes(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + /** + * Determines the actual location of `path` using the same semantics as the`fs.realpath.native()` function. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path. If the `encoding` is set to `'buffer'`, the path returned will be + * passed as a `Buffer` object. + * + * On Linux, when Node.js is linked against musl libc, the procfs file system must + * be mounted on `/proc` in order for this function to work. Glibc does not have + * this restriction. + * @since v10.0.0 + * @return Fulfills with the resolved path upon success. + */ + function realpath(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function realpath(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. 
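The `chmod()` and `utimes()` functions declared above are often combined when finalizing a file. A minimal sketch, with an illustrative path and permission value:

```ts
// Hedged sketch: updating timestamps and permissions with fs/promises.
import { chmod, utimes } from "node:fs/promises";

async function markReadOnlyNow(path: string) {
  const now = new Date();
  await utimes(path, now, now); // set atime and mtime to "now"
  await chmod(path, 0o444);     // read-only for user, group, and others
}

markReadOnlyNow("./report.txt").catch(console.error);
```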
+ */ + function realpath( + path: PathLike, + options?: ObjectEncodingOptions | BufferEncoding | null, + ): Promise; + /** + * Creates a unique temporary directory. A unique directory name is generated by + * appending six random characters to the end of the provided `prefix`. Due to + * platform inconsistencies, avoid trailing `X` characters in `prefix`. Some + * platforms, notably the BSDs, can return more than six random characters, and + * replace trailing `X` characters in `prefix` with random characters. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * + * ```js + * import { mkdtemp } from 'node:fs/promises'; + * import { join } from 'node:path'; + * import { tmpdir } from 'node:os'; + * + * try { + * await mkdtemp(join(tmpdir(), 'foo-')); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * The `fsPromises.mkdtemp()` method will append the six randomly selected + * characters directly to the `prefix` string. For instance, given a directory`/tmp`, if the intention is to create a temporary directory _within_`/tmp`, the`prefix` must end with a trailing + * platform-specific path separator + * (`require('node:path').sep`). + * @since v10.0.0 + * @return Fulfills with a string containing the file system path of the newly created temporary directory. + */ + function mkdtemp(prefix: string, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function mkdtemp(prefix: string, options: BufferEncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function mkdtemp(prefix: string, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronously writes data to a file, replacing the file if it already exists.`data` can be a string, a buffer, an + * [AsyncIterable](https://tc39.github.io/ecma262/#sec-asynciterable-interface), or an + * [Iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol) object. + * + * The `encoding` option is ignored if `data` is a buffer. + * + * If `options` is a string, then it specifies the encoding. + * + * The `mode` option only affects the newly created file. See `fs.open()` for more details. + * + * Any specified `FileHandle` has to support writing. + * + * It is unsafe to use `fsPromises.writeFile()` multiple times on the same file + * without waiting for the promise to be settled. + * + * Similarly to `fsPromises.readFile` \- `fsPromises.writeFile` is a convenience + * method that performs multiple `write` calls internally to write the buffer + * passed to it. For performance sensitive code consider using `fs.createWriteStream()` or `filehandle.createWriteStream()`. + * + * It is possible to use an `AbortSignal` to cancel an `fsPromises.writeFile()`. 
+ * Cancelation is "best effort", and some amount of data is likely still + * to be written. + * + * ```js + * import { writeFile } from 'node:fs/promises'; + * import { Buffer } from 'node:buffer'; + * + * try { + * const controller = new AbortController(); + * const { signal } = controller; + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * const promise = writeFile('message.txt', data, { signal }); + * + * // Abort the request before the promise settles. + * controller.abort(); + * + * await promise; + * } catch (err) { + * // When a request is aborted - err is an AbortError + * console.error(err); + * } + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.writeFile` performs. + * @since v10.0.0 + * @param file filename or `FileHandle` + * @return Fulfills with `undefined` upon success. + */ + function writeFile( + file: PathLike | FileHandle, + data: + | string + | NodeJS.ArrayBufferView + | Iterable + | AsyncIterable + | Stream, + options?: + | (ObjectEncodingOptions & { + mode?: Mode | undefined; + flag?: OpenMode | undefined; + } & Abortable) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `mode` option only affects the newly created file. See `fs.open()` for more details. + * + * The `path` may be specified as a `FileHandle` that has been opened + * for appending (using `fsPromises.open()`). + * @since v10.0.0 + * @param path filename or {FileHandle} + * @return Fulfills with `undefined` upon success. + */ + function appendFile( + path: PathLike | FileHandle, + data: string | Uint8Array, + options?: (ObjectEncodingOptions & FlagAndOpenMode & { flush?: boolean | undefined }) | BufferEncoding | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * + * If no encoding is specified (using `options.encoding`), the data is returned + * as a `Buffer` object. Otherwise, the data will be a string. + * + * If `options` is a string, then it specifies the encoding. + * + * When the `path` is a directory, the behavior of `fsPromises.readFile()` is + * platform-specific. On macOS, Linux, and Windows, the promise will be rejected + * with an error. On FreeBSD, a representation of the directory's contents will be + * returned. + * + * An example of reading a `package.json` file located in the same directory of the + * running code: + * + * ```js + * import { readFile } from 'node:fs/promises'; + * try { + * const filePath = new URL('./package.json', import.meta.url); + * const contents = await readFile(filePath, { encoding: 'utf8' }); + * console.log(contents); + * } catch (err) { + * console.error(err.message); + * } + * ``` + * + * It is possible to abort an ongoing `readFile` using an `AbortSignal`. If a + * request is aborted the promise returned is rejected with an `AbortError`: + * + * ```js + * import { readFile } from 'node:fs/promises'; + * + * try { + * const controller = new AbortController(); + * const { signal } = controller; + * const promise = readFile(fileName, { signal }); + * + * // Abort the request before the promise settles. 
+ * controller.abort(); + * + * await promise; + * } catch (err) { + * // When a request is aborted - err is an AbortError + * console.error(err); + * } + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.readFile` performs. + * + * Any specified `FileHandle` has to support reading. + * @since v10.0.0 + * @param path filename or `FileHandle` + * @return Fulfills with the contents of the file. + */ + function readFile( + path: PathLike | FileHandle, + options?: + | ({ + encoding?: null | undefined; + flag?: OpenMode | undefined; + } & Abortable) + | null, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function readFile( + path: PathLike | FileHandle, + options: + | ({ + encoding: BufferEncoding; + flag?: OpenMode | undefined; + } & Abortable) + | BufferEncoding, + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function readFile( + path: PathLike | FileHandle, + options?: + | ( + & ObjectEncodingOptions + & Abortable + & { + flag?: OpenMode | undefined; + } + ) + | BufferEncoding + | null, + ): Promise; + /** + * Asynchronously open a directory for iterative scanning. See the POSIX [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html) documentation for more detail. + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * + * Example using async iteration: + * + * ```js + * import { opendir } from 'node:fs/promises'; + * + * try { + * const dir = await opendir('./'); + * for await (const dirent of dir) + * console.log(dirent.name); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * When using the async iterator, the `fs.Dir` object will be automatically + * closed after the iterator exits. + * @since v12.12.0 + * @return Fulfills with an {fs.Dir}. + */ + function opendir(path: PathLike, options?: OpenDirOptions): Promise; + /** + * Returns an async iterator that watches for changes on `filename`, where `filename`is either a file or a directory. + * + * ```js + * const { watch } = require('node:fs/promises'); + * + * const ac = new AbortController(); + * const { signal } = ac; + * setTimeout(() => ac.abort(), 10000); + * + * (async () => { + * try { + * const watcher = watch(__filename, { signal }); + * for await (const event of watcher) + * console.log(event); + * } catch (err) { + * if (err.name === 'AbortError') + * return; + * throw err; + * } + * })(); + * ``` + * + * On most platforms, `'rename'` is emitted whenever a filename appears or + * disappears in the directory. + * + * All the `caveats` for `fs.watch()` also apply to `fsPromises.watch()`. 
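`fsPromises.watch()` described here is consumed as an async iterable and stopped via an `AbortSignal`. A sketch that waits for a change to one file and then aborts; the directory and file names are illustrative:

```ts
// Hedged sketch: stopping a fs/promises watch() loop on the first match.
import { watch } from "node:fs/promises";

async function waitForChange(dir: string, filename: string) {
  const ac = new AbortController();
  try {
    for await (const event of watch(dir, { signal: ac.signal })) {
      if (event.filename === filename) {
        console.log(`${event.eventType}: ${event.filename}`);
        ac.abort(); // ends the iteration on the next turn
      }
    }
  } catch (err: any) {
    // Aborting rejects the iterator with an AbortError.
    if (err?.name !== "AbortError") throw err;
  }
}

waitForChange("./config", "settings.json").catch(console.error);
```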
+ * @since v15.9.0, v14.18.0 + * @return of objects with the properties: + */ + function watch( + filename: PathLike, + options: + | (WatchOptions & { + encoding: "buffer"; + }) + | "buffer", + ): AsyncIterable>; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + function watch(filename: PathLike, options?: WatchOptions | BufferEncoding): AsyncIterable>; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + function watch( + filename: PathLike, + options: WatchOptions | string, + ): AsyncIterable> | AsyncIterable>; + /** + * Asynchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + * @return Fulfills with `undefined` upon success. + */ + function cp(source: string | URL, destination: string | URL, opts?: CopyOptions): Promise; +} +declare module "node:fs/promises" { + export * from "fs/promises"; +} diff --git a/dist/node_modules/@types/node/ts4.8/globals.d.ts b/dist/node_modules/@types/node/ts4.8/globals.d.ts new file mode 100644 index 0000000..5f25006 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/globals.d.ts @@ -0,0 +1,411 @@ +export {}; // Make this a module + +// #region Fetch and friends +// Conditional type aliases, used at the end of this file. +// Will either be empty if lib-dom is included, or the undici version otherwise. +type _Request = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").Request; +type _Response = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").Response; +type _FormData = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").FormData; +type _Headers = typeof globalThis extends { onmessage: any } ? {} : import("undici-types").Headers; +type _RequestInit = typeof globalThis extends { onmessage: any } ? {} + : import("undici-types").RequestInit; +type _ResponseInit = typeof globalThis extends { onmessage: any } ? {} + : import("undici-types").ResponseInit; +type _File = typeof globalThis extends { onmessage: any } ? 
{} : import("node:buffer").File; +// #endregion Fetch and friends + +declare global { + // Declare "static" methods in Error + interface ErrorConstructor { + /** Create .stack property on a target object */ + captureStackTrace(targetObject: object, constructorOpt?: Function): void; + + /** + * Optional override for formatting stack traces + * + * @see https://v8.dev/docs/stack-trace-api#customizing-stack-traces + */ + prepareStackTrace?: ((err: Error, stackTraces: NodeJS.CallSite[]) => any) | undefined; + + stackTraceLimit: number; + } + + /*-----------------------------------------------* + * * + * GLOBAL * + * * + ------------------------------------------------*/ + + // For backwards compability + interface NodeRequire extends NodeJS.Require {} + interface RequireResolve extends NodeJS.RequireResolve {} + interface NodeModule extends NodeJS.Module {} + + var process: NodeJS.Process; + var console: Console; + + var __filename: string; + var __dirname: string; + + var require: NodeRequire; + var module: NodeModule; + + // Same as module.exports + var exports: any; + + /** + * Only available if `--expose-gc` is passed to the process. + */ + var gc: undefined | (() => void); + + // #region borrowed + // from https://github.com/microsoft/TypeScript/blob/38da7c600c83e7b31193a62495239a0fe478cb67/lib/lib.webworker.d.ts#L633 until moved to separate lib + /** A controller object that allows you to abort one or more DOM requests as and when desired. */ + interface AbortController { + /** + * Returns the AbortSignal object associated with this object. + */ + + readonly signal: AbortSignal; + /** + * Invoking this method will set this object's AbortSignal's aborted flag and signal to any observers that the associated activity is to be aborted. + */ + abort(reason?: any): void; + } + + /** A signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. */ + interface AbortSignal extends EventTarget { + /** + * Returns true if this AbortSignal's AbortController has signaled to abort, and false otherwise. + */ + readonly aborted: boolean; + readonly reason: any; + onabort: null | ((this: AbortSignal, event: Event) => any); + throwIfAborted(): void; + } + + var AbortController: typeof globalThis extends { onmessage: any; AbortController: infer T } ? T + : { + prototype: AbortController; + new(): AbortController; + }; + + var AbortSignal: typeof globalThis extends { onmessage: any; AbortSignal: infer T } ? T + : { + prototype: AbortSignal; + new(): AbortSignal; + abort(reason?: any): AbortSignal; + timeout(milliseconds: number): AbortSignal; + }; + // #endregion borrowed + + // #region Disposable + interface SymbolConstructor { + /** + * A method that is used to release resources held by an object. Called by the semantics of the `using` statement. + */ + readonly dispose: unique symbol; + + /** + * A method that is used to asynchronously release resources held by an object. Called by the semantics of the `await using` statement. + */ + readonly asyncDispose: unique symbol; + } + + interface Disposable { + [Symbol.dispose](): void; + } + + interface AsyncDisposable { + [Symbol.asyncDispose](): PromiseLike; + } + // #endregion Disposable + + // #region ArrayLike.at() + interface RelativeIndexable { + /** + * Takes an integer value and returns the item at that index, + * allowing for positive and negative integers. + * Negative integers count back from the last item in the array. 
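The `Disposable`/`AsyncDisposable` contracts and `Symbol.asyncDispose` declared above pair with the earlier `FileHandle[Symbol.asyncDispose]` declaration. A sketch using `await using`, assuming TypeScript 5.2+ with explicit-resource-management support and Node 20.4+ (the file path is illustrative):

```ts
// Hedged sketch: automatic FileHandle cleanup via Symbol.asyncDispose.
import { open } from "node:fs/promises";

async function readFirstLine(path: string) {
  // FileHandle's [Symbol.asyncDispose] is an alias for close(), so the
  // handle is closed automatically when this block exits.
  await using handle = await open(path, "r");
  const text = await handle.readFile({ encoding: "utf8" });
  return text.split("\n")[0];
}

readFirstLine("notes.txt").then((line) => console.log(line), console.error);
```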
+ */ + at(index: number): T | undefined; + } + interface String extends RelativeIndexable {} + interface Array extends RelativeIndexable {} + interface ReadonlyArray extends RelativeIndexable {} + interface Int8Array extends RelativeIndexable {} + interface Uint8Array extends RelativeIndexable {} + interface Uint8ClampedArray extends RelativeIndexable {} + interface Int16Array extends RelativeIndexable {} + interface Uint16Array extends RelativeIndexable {} + interface Int32Array extends RelativeIndexable {} + interface Uint32Array extends RelativeIndexable {} + interface Float32Array extends RelativeIndexable {} + interface Float64Array extends RelativeIndexable {} + interface BigInt64Array extends RelativeIndexable {} + interface BigUint64Array extends RelativeIndexable {} + // #endregion ArrayLike.at() end + + /** + * @since v17.0.0 + * + * Creates a deep clone of an object. + */ + function structuredClone( + value: T, + transfer?: { transfer: ReadonlyArray }, + ): T; + + /*----------------------------------------------* + * * + * GLOBAL INTERFACES * + * * + *-----------------------------------------------*/ + namespace NodeJS { + interface CallSite { + /** + * Value of "this" + */ + getThis(): unknown; + + /** + * Type of "this" as a string. + * This is the name of the function stored in the constructor field of + * "this", if available. Otherwise the object's [[Class]] internal + * property. + */ + getTypeName(): string | null; + + /** + * Current function + */ + getFunction(): Function | undefined; + + /** + * Name of the current function, typically its name property. + * If a name property is not available an attempt will be made to try + * to infer a name from the function's context. + */ + getFunctionName(): string | null; + + /** + * Name of the property [of "this" or one of its prototypes] that holds + * the current function + */ + getMethodName(): string | null; + + /** + * Name of the script [if this function was defined in a script] + */ + getFileName(): string | undefined; + + /** + * Current line number [if this function was defined in a script] + */ + getLineNumber(): number | null; + + /** + * Current column number [if this function was defined in a script] + */ + getColumnNumber(): number | null; + + /** + * A call site object representing the location where eval was called + * [if this function was created using a call to eval] + */ + getEvalOrigin(): string | undefined; + + /** + * Is this a toplevel invocation, that is, is "this" the global object? + */ + isToplevel(): boolean; + + /** + * Does this call take place in code defined by a call to eval? + */ + isEval(): boolean; + + /** + * Is this call in native V8 code? + */ + isNative(): boolean; + + /** + * Is this a constructor call? + */ + isConstructor(): boolean; + + /** + * is this an async call (i.e. await, Promise.all(), or Promise.any())? + */ + isAsync(): boolean; + + /** + * is this an async call to Promise.all()? 
+ */ + isPromiseAll(): boolean; + + /** + * returns the index of the promise element that was followed in + * Promise.all() or Promise.any() for async stack traces, or null + * if the CallSite is not an async + */ + getPromiseIndex(): number | null; + + getScriptNameOrSourceURL(): string; + getScriptHash(): string; + + getEnclosingColumnNumber(): number; + getEnclosingLineNumber(): number; + getPosition(): number; + + toString(): string; + } + + interface ErrnoException extends Error { + errno?: number | undefined; + code?: string | undefined; + path?: string | undefined; + syscall?: string | undefined; + } + + interface ReadableStream extends EventEmitter { + readable: boolean; + read(size?: number): string | Buffer; + setEncoding(encoding: BufferEncoding): this; + pause(): this; + resume(): this; + isPaused(): boolean; + pipe(destination: T, options?: { end?: boolean | undefined }): T; + unpipe(destination?: WritableStream): this; + unshift(chunk: string | Uint8Array, encoding?: BufferEncoding): void; + wrap(oldStream: ReadableStream): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + + interface WritableStream extends EventEmitter { + writable: boolean; + write(buffer: Uint8Array | string, cb?: (err?: Error | null) => void): boolean; + write(str: string, encoding?: BufferEncoding, cb?: (err?: Error | null) => void): boolean; + end(cb?: () => void): this; + end(data: string | Uint8Array, cb?: () => void): this; + end(str: string, encoding?: BufferEncoding, cb?: () => void): this; + } + + interface ReadWriteStream extends ReadableStream, WritableStream {} + + interface RefCounted { + ref(): this; + unref(): this; + } + + type TypedArray = + | Uint8Array + | Uint8ClampedArray + | Uint16Array + | Uint32Array + | Int8Array + | Int16Array + | Int32Array + | BigUint64Array + | BigInt64Array + | Float32Array + | Float64Array; + type ArrayBufferView = TypedArray | DataView; + + interface Require { + (id: string): any; + resolve: RequireResolve; + cache: Dict; + /** + * @deprecated + */ + extensions: RequireExtensions; + main: Module | undefined; + } + + interface RequireResolve { + (id: string, options?: { paths?: string[] | undefined }): string; + paths(request: string): string[] | null; + } + + interface RequireExtensions extends Dict<(m: Module, filename: string) => any> { + ".js": (m: Module, filename: string) => any; + ".json": (m: Module, filename: string) => any; + ".node": (m: Module, filename: string) => any; + } + interface Module { + /** + * `true` if the module is running during the Node.js preload + */ + isPreloading: boolean; + exports: any; + require: Require; + id: string; + filename: string; + loaded: boolean; + /** @deprecated since v14.6.0 Please use `require.main` and `module.children` instead. */ + parent: Module | null | undefined; + children: Module[]; + /** + * @since v11.14.0 + * + * The directory name of the module. This is usually the same as the path.dirname() of the module.id. + */ + path: string; + paths: string[]; + } + + interface Dict { + [key: string]: T | undefined; + } + + interface ReadOnlyDict { + readonly [key: string]: T | undefined; + } + } + + interface RequestInit extends _RequestInit {} + + function fetch( + input: string | URL | globalThis.Request, + init?: RequestInit, + ): Promise; + + interface Request extends _Request {} + var Request: typeof globalThis extends { + onmessage: any; + Request: infer T; + } ? 
T + : typeof import("undici-types").Request; + + interface ResponseInit extends _ResponseInit {} + + interface Response extends _Response {} + var Response: typeof globalThis extends { + onmessage: any; + Response: infer T; + } ? T + : typeof import("undici-types").Response; + + interface FormData extends _FormData {} + var FormData: typeof globalThis extends { + onmessage: any; + FormData: infer T; + } ? T + : typeof import("undici-types").FormData; + + interface Headers extends _Headers {} + var Headers: typeof globalThis extends { + onmessage: any; + Headers: infer T; + } ? T + : typeof import("undici-types").Headers; + + interface File extends _File {} + var File: typeof globalThis extends { + onmessage: any; + File: infer T; + } ? T + : typeof import("node:buffer").File; +} diff --git a/dist/node_modules/@types/node/ts4.8/globals.global.d.ts b/dist/node_modules/@types/node/ts4.8/globals.global.d.ts new file mode 100644 index 0000000..ef1198c --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/globals.global.d.ts @@ -0,0 +1 @@ +declare var global: typeof globalThis; diff --git a/dist/node_modules/@types/node/ts4.8/http.d.ts b/dist/node_modules/@types/node/ts4.8/http.d.ts new file mode 100644 index 0000000..98479fc --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/http.d.ts @@ -0,0 +1,1889 @@ +/** + * To use the HTTP server and client one must `require('node:http')`. + * + * The HTTP interfaces in Node.js are designed to support many features + * of the protocol which have been traditionally difficult to use. + * In particular, large, possibly chunk-encoded, messages. The interface is + * careful to never buffer entire requests or responses, so the + * user is able to stream data. + * + * HTTP message headers are represented by an object like this: + * + * ```json + * { "content-length": "123", + * "content-type": "text/plain", + * "connection": "keep-alive", + * "host": "example.com", + * "accept": "*" } + * ``` + * + * Keys are lowercased. Values are not modified. + * + * In order to support the full spectrum of possible HTTP applications, the Node.js + * HTTP API is very low-level. It deals with stream handling and message + * parsing only. It parses a message into headers and body but it does not + * parse the actual headers or the body. + * + * See `message.headers` for details on how duplicate headers are handled. + * + * The raw headers as they were received are retained in the `rawHeaders`property, which is an array of `[key, value, key2, value2, ...]`. 
For + * example, the previous message header object might have a `rawHeaders`list like the following: + * + * ```js + * [ 'ConTent-Length', '123456', + * 'content-LENGTH', '123', + * 'content-type', 'text/plain', + * 'CONNECTION', 'keep-alive', + * 'Host', 'example.com', + * 'accepT', '*' ] + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/http.js) + */ +declare module "http" { + import * as stream from "node:stream"; + import { URL } from "node:url"; + import { LookupOptions } from "node:dns"; + import { EventEmitter } from "node:events"; + import { LookupFunction, Server as NetServer, Socket, TcpSocketConnectOpts } from "node:net"; + // incoming headers will never contain number + interface IncomingHttpHeaders extends NodeJS.Dict { + accept?: string | undefined; + "accept-language"?: string | undefined; + "accept-patch"?: string | undefined; + "accept-ranges"?: string | undefined; + "access-control-allow-credentials"?: string | undefined; + "access-control-allow-headers"?: string | undefined; + "access-control-allow-methods"?: string | undefined; + "access-control-allow-origin"?: string | undefined; + "access-control-expose-headers"?: string | undefined; + "access-control-max-age"?: string | undefined; + "access-control-request-headers"?: string | undefined; + "access-control-request-method"?: string | undefined; + age?: string | undefined; + allow?: string | undefined; + "alt-svc"?: string | undefined; + authorization?: string | undefined; + "cache-control"?: string | undefined; + connection?: string | undefined; + "content-disposition"?: string | undefined; + "content-encoding"?: string | undefined; + "content-language"?: string | undefined; + "content-length"?: string | undefined; + "content-location"?: string | undefined; + "content-range"?: string | undefined; + "content-type"?: string | undefined; + cookie?: string | undefined; + date?: string | undefined; + etag?: string | undefined; + expect?: string | undefined; + expires?: string | undefined; + forwarded?: string | undefined; + from?: string | undefined; + host?: string | undefined; + "if-match"?: string | undefined; + "if-modified-since"?: string | undefined; + "if-none-match"?: string | undefined; + "if-unmodified-since"?: string | undefined; + "last-modified"?: string | undefined; + location?: string | undefined; + origin?: string | undefined; + pragma?: string | undefined; + "proxy-authenticate"?: string | undefined; + "proxy-authorization"?: string | undefined; + "public-key-pins"?: string | undefined; + range?: string | undefined; + referer?: string | undefined; + "retry-after"?: string | undefined; + "sec-websocket-accept"?: string | undefined; + "sec-websocket-extensions"?: string | undefined; + "sec-websocket-key"?: string | undefined; + "sec-websocket-protocol"?: string | undefined; + "sec-websocket-version"?: string | undefined; + "set-cookie"?: string[] | undefined; + "strict-transport-security"?: string | undefined; + tk?: string | undefined; + trailer?: string | undefined; + "transfer-encoding"?: string | undefined; + upgrade?: string | undefined; + "user-agent"?: string | undefined; + vary?: string | undefined; + via?: string | undefined; + warning?: string | undefined; + "www-authenticate"?: string | undefined; + } + // outgoing headers allows numbers (as they are converted internally to strings) + type OutgoingHttpHeader = number | string | string[]; + interface OutgoingHttpHeaders extends NodeJS.Dict { + accept?: string | string[] | undefined; + "accept-charset"?: string | string[] | 
undefined; + "accept-encoding"?: string | string[] | undefined; + "accept-language"?: string | string[] | undefined; + "accept-ranges"?: string | undefined; + "access-control-allow-credentials"?: string | undefined; + "access-control-allow-headers"?: string | undefined; + "access-control-allow-methods"?: string | undefined; + "access-control-allow-origin"?: string | undefined; + "access-control-expose-headers"?: string | undefined; + "access-control-max-age"?: string | undefined; + "access-control-request-headers"?: string | undefined; + "access-control-request-method"?: string | undefined; + age?: string | undefined; + allow?: string | undefined; + authorization?: string | undefined; + "cache-control"?: string | undefined; + "cdn-cache-control"?: string | undefined; + connection?: string | string[] | undefined; + "content-disposition"?: string | undefined; + "content-encoding"?: string | undefined; + "content-language"?: string | undefined; + "content-length"?: string | number | undefined; + "content-location"?: string | undefined; + "content-range"?: string | undefined; + "content-security-policy"?: string | undefined; + "content-security-policy-report-only"?: string | undefined; + cookie?: string | string[] | undefined; + dav?: string | string[] | undefined; + dnt?: string | undefined; + date?: string | undefined; + etag?: string | undefined; + expect?: string | undefined; + expires?: string | undefined; + forwarded?: string | undefined; + from?: string | undefined; + host?: string | undefined; + "if-match"?: string | undefined; + "if-modified-since"?: string | undefined; + "if-none-match"?: string | undefined; + "if-range"?: string | undefined; + "if-unmodified-since"?: string | undefined; + "last-modified"?: string | undefined; + link?: string | string[] | undefined; + location?: string | undefined; + "max-forwards"?: string | undefined; + origin?: string | undefined; + prgama?: string | string[] | undefined; + "proxy-authenticate"?: string | string[] | undefined; + "proxy-authorization"?: string | undefined; + "public-key-pins"?: string | undefined; + "public-key-pins-report-only"?: string | undefined; + range?: string | undefined; + referer?: string | undefined; + "referrer-policy"?: string | undefined; + refresh?: string | undefined; + "retry-after"?: string | undefined; + "sec-websocket-accept"?: string | undefined; + "sec-websocket-extensions"?: string | string[] | undefined; + "sec-websocket-key"?: string | undefined; + "sec-websocket-protocol"?: string | string[] | undefined; + "sec-websocket-version"?: string | undefined; + server?: string | undefined; + "set-cookie"?: string | string[] | undefined; + "strict-transport-security"?: string | undefined; + te?: string | undefined; + trailer?: string | undefined; + "transfer-encoding"?: string | undefined; + "user-agent"?: string | undefined; + upgrade?: string | undefined; + "upgrade-insecure-requests"?: string | undefined; + vary?: string | undefined; + via?: string | string[] | undefined; + warning?: string | undefined; + "www-authenticate"?: string | string[] | undefined; + "x-content-type-options"?: string | undefined; + "x-dns-prefetch-control"?: string | undefined; + "x-frame-options"?: string | undefined; + "x-xss-protection"?: string | undefined; + } + interface ClientRequestArgs { + _defaultAgent?: Agent | undefined; + agent?: Agent | boolean | undefined; + auth?: string | null | undefined; + createConnection?: + | (( + options: ClientRequestArgs, + oncreate: (err: Error | null, socket: stream.Duplex) => void, + ) => 
stream.Duplex | null | undefined) + | undefined; + defaultPort?: number | string | undefined; + family?: number | undefined; + headers?: OutgoingHttpHeaders | undefined; + hints?: LookupOptions["hints"]; + host?: string | null | undefined; + hostname?: string | null | undefined; + insecureHTTPParser?: boolean | undefined; + localAddress?: string | undefined; + localPort?: number | undefined; + lookup?: LookupFunction | undefined; + /** + * @default 16384 + */ + maxHeaderSize?: number | undefined; + method?: string | undefined; + path?: string | null | undefined; + port?: number | string | null | undefined; + protocol?: string | null | undefined; + setHost?: boolean | undefined; + signal?: AbortSignal | undefined; + socketPath?: string | undefined; + timeout?: number | undefined; + uniqueHeaders?: Array | undefined; + joinDuplicateHeaders?: boolean; + } + interface ServerOptions< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > { + /** + * Specifies the `IncomingMessage` class to be used. Useful for extending the original `IncomingMessage`. + */ + IncomingMessage?: Request | undefined; + /** + * Specifies the `ServerResponse` class to be used. Useful for extending the original `ServerResponse`. + */ + ServerResponse?: Response | undefined; + /** + * Sets the timeout value in milliseconds for receiving the entire request from the client. + * @see Server.requestTimeout for more information. + * @default 300000 + * @since v18.0.0 + */ + requestTimeout?: number | undefined; + /** + * It joins the field line values of multiple headers in a request with `, ` instead of discarding the duplicates. + * @default false + * @since v18.14.0 + */ + joinDuplicateHeaders?: boolean; + /** + * The number of milliseconds of inactivity a server needs to wait for additional incoming data, + * after it has finished writing the last response, before a socket will be destroyed. + * @see Server.keepAliveTimeout for more information. + * @default 5000 + * @since v18.0.0 + */ + keepAliveTimeout?: number | undefined; + /** + * Sets the interval value in milliseconds to check for request and headers timeout in incomplete requests. + * @default 30000 + */ + connectionsCheckingInterval?: number | undefined; + /** + * Optionally overrides all `socket`s' `readableHighWaterMark` and `writableHighWaterMark`. + * This affects `highWaterMark` property of both `IncomingMessage` and `ServerResponse`. + * Default: @see stream.getDefaultHighWaterMark(). + * @since v20.1.0 + */ + highWaterMark?: number | undefined; + /** + * Use an insecure HTTP parser that accepts invalid HTTP headers when `true`. + * Using the insecure parser should be avoided. + * See --insecure-http-parser for more information. + * @default false + */ + insecureHTTPParser?: boolean | undefined; + /** + * Optionally overrides the value of + * `--max-http-header-size` for requests received by this server, i.e. + * the maximum length of request headers in bytes. + * @default 16384 + * @since v13.3.0 + */ + maxHeaderSize?: number | undefined; + /** + * If set to `true`, it disables the use of Nagle's algorithm immediately after a new incoming connection is received. + * @default true + * @since v16.5.0 + */ + noDelay?: boolean | undefined; + /** + * If set to `true`, it enables keep-alive functionality on the socket immediately after a new incoming connection is received, + * similarly on what is done in `socket.setKeepAlive([enable][, initialDelay])`. 
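The `ClientRequestArgs` fields above (`method`, `headers`, `timeout`, and so on) are what `http.request()` accepts. A client-side sketch; the host, path, and timeout value are illustrative assumptions:

```ts
// Hedged sketch: an HTTP request configured via ClientRequestArgs-style options.
import * as http from "node:http";

const req = http.request(
  {
    hostname: "example.com",
    port: 80,
    path: "/status",
    method: "GET",
    headers: { accept: "application/json" },
    timeout: 5000, // socket inactivity timeout in milliseconds
  },
  (res) => {
    console.log(`status: ${res.statusCode}`);
    res.resume(); // drain the body so the socket can be reused
  },
);
req.on("timeout", () => req.destroy(new Error("request timed out")));
req.on("error", (err) => console.error(err.message));
req.end();
```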
+ * @default false + * @since v16.5.0 + */ + keepAlive?: boolean | undefined; + /** + * If set to a positive number, it sets the initial delay before the first keepalive probe is sent on an idle socket. + * @default 0 + * @since v16.5.0 + */ + keepAliveInitialDelay?: number | undefined; + /** + * A list of response headers that should be sent only once. + * If the header's value is an array, the items will be joined using `; `. + */ + uniqueHeaders?: Array | undefined; + } + type RequestListener< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > = (req: InstanceType, res: InstanceType & { req: InstanceType }) => void; + /** + * @since v0.1.17 + */ + class Server< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > extends NetServer { + constructor(requestListener?: RequestListener); + constructor(options: ServerOptions, requestListener?: RequestListener); + /** + * Sets the timeout value for sockets, and emits a `'timeout'` event on + * the Server object, passing the socket as an argument, if a timeout + * occurs. + * + * If there is a `'timeout'` event listener on the Server object, then it + * will be called with the timed-out socket as an argument. + * + * By default, the Server does not timeout sockets. However, if a callback + * is assigned to the Server's `'timeout'` event, timeouts must be handled + * explicitly. + * @since v0.9.12 + * @param [msecs=0 (no timeout)] + */ + setTimeout(msecs?: number, callback?: () => void): this; + setTimeout(callback: () => void): this; + /** + * Limits maximum incoming headers count. If set to 0, no limit will be applied. + * @since v0.7.0 + */ + maxHeadersCount: number | null; + /** + * The maximum number of requests socket can handle + * before closing keep alive connection. + * + * A value of `0` will disable the limit. + * + * When the limit is reached it will set the `Connection` header value to `close`, + * but will not actually close the connection, subsequent requests sent + * after the limit is reached will get `503 Service Unavailable` as a response. + * @since v16.10.0 + */ + maxRequestsPerSocket: number | null; + /** + * The number of milliseconds of inactivity before a socket is presumed + * to have timed out. + * + * A value of `0` will disable the timeout behavior on incoming connections. + * + * The socket timeout logic is set up on connection, so changing this + * value only affects new connections to the server, not any existing connections. + * @since v0.9.12 + */ + timeout: number; + /** + * Limit the amount of time the parser will wait to receive the complete HTTP + * headers. + * + * If the timeout expires, the server responds with status 408 without + * forwarding the request to the request listener and then closes the connection. + * + * It must be set to a non-zero value (e.g. 120 seconds) to protect against + * potential Denial-of-Service attacks in case the server is deployed without a + * reverse proxy in front. + * @since v11.3.0, v10.14.0 + */ + headersTimeout: number; + /** + * The number of milliseconds of inactivity a server needs to wait for additional + * incoming data, after it has finished writing the last response, before a socket + * will be destroyed. If the server receives new data before the keep-alive + * timeout has fired, it will reset the regular inactivity timeout, i.e.,`server.timeout`. 
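The timeout-related members above (`maxHeadersCount`, `maxRequestsPerSocket`, `headersTimeout`, `requestTimeout`, `setTimeout`) can also be tuned on an existing `Server` instance; a minimal sketch with arbitrary example values:

```js
import http from 'node:http';

const server = http.createServer((req, res) => res.end('ok'));

// Properties documented on the Server class above; the numbers are arbitrary.
server.maxHeadersCount = 100;       // cap the number of incoming headers
server.maxRequestsPerSocket = 1000; // send 'Connection: close' after 1000 requests
server.headersTimeout = 60000;      // 60 s to receive the complete request headers
server.requestTimeout = 300000;     // 5 min to receive the entire request
server.setTimeout(120000, (socket) => {
  // With a 'timeout' listener installed, timed-out sockets must be
  // handled explicitly, e.g. by destroying them.
  socket.destroy();
});

server.listen(8000);
```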
+ * + * A value of `0` will disable the keep-alive timeout behavior on incoming + * connections. + * A value of `0` makes the http server behave similarly to Node.js versions prior + * to 8.0.0, which did not have a keep-alive timeout. + * + * The socket timeout logic is set up on connection, so changing this value only + * affects new connections to the server, not any existing connections. + * @since v8.0.0 + */ + keepAliveTimeout: number; + /** + * Sets the timeout value in milliseconds for receiving the entire request from + * the client. + * + * If the timeout expires, the server responds with status 408 without + * forwarding the request to the request listener and then closes the connection. + * + * It must be set to a non-zero value (e.g. 120 seconds) to protect against + * potential Denial-of-Service attacks in case the server is deployed without a + * reverse proxy in front. + * @since v14.11.0 + */ + requestTimeout: number; + /** + * Closes all connections connected to this server. + * @since v18.2.0 + */ + closeAllConnections(): void; + /** + * Closes all connections connected to this server which are not sending a request + * or waiting for a response. + * @since v18.2.0 + */ + closeIdleConnections(): void; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connection", listener: (socket: Socket) => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "checkContinue", listener: RequestListener): this; + addListener(event: "checkExpectation", listener: RequestListener): this; + addListener(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + addListener( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + addListener(event: "dropRequest", listener: (req: InstanceType, socket: stream.Duplex) => void): this; + addListener(event: "request", listener: RequestListener): this; + addListener( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + emit(event: string, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "connection", socket: Socket): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit( + event: "checkContinue", + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit( + event: "checkExpectation", + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: "clientError", err: Error, socket: stream.Duplex): boolean; + emit(event: "connect", req: InstanceType, socket: stream.Duplex, head: Buffer): boolean; + emit(event: "dropRequest", req: InstanceType, socket: stream.Duplex): boolean; + emit( + event: "request", + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: "upgrade", req: InstanceType, socket: stream.Duplex, head: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connection", listener: (socket: Socket) => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "checkContinue", listener: RequestListener): this; + on(event: "checkExpectation", listener: 
RequestListener): this; + on(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + on(event: "connect", listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void): this; + on(event: "dropRequest", listener: (req: InstanceType, socket: stream.Duplex) => void): this; + on(event: "request", listener: RequestListener): this; + on(event: "upgrade", listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connection", listener: (socket: Socket) => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "checkContinue", listener: RequestListener): this; + once(event: "checkExpectation", listener: RequestListener): this; + once(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + once( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + once(event: "dropRequest", listener: (req: InstanceType, socket: stream.Duplex) => void): this; + once(event: "request", listener: RequestListener): this; + once( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connection", listener: (socket: Socket) => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "checkContinue", listener: RequestListener): this; + prependListener(event: "checkExpectation", listener: RequestListener): this; + prependListener(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + prependListener( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependListener( + event: "dropRequest", + listener: (req: InstanceType, socket: stream.Duplex) => void, + ): this; + prependListener(event: "request", listener: RequestListener): this; + prependListener( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "connection", listener: (socket: Socket) => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "checkContinue", listener: RequestListener): this; + prependOnceListener(event: "checkExpectation", listener: RequestListener): this; + prependOnceListener(event: "clientError", listener: (err: Error, socket: stream.Duplex) => void): this; + prependOnceListener( + event: "connect", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependOnceListener( + event: "dropRequest", + listener: (req: InstanceType, socket: stream.Duplex) => void, + ): this; + prependOnceListener(event: "request", listener: RequestListener): this; + prependOnceListener( + event: "upgrade", + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => 
void, + ): this; + } + /** + * This class serves as the parent class of {@link ClientRequest} and {@link ServerResponse}. It is an abstract outgoing message from + * the perspective of the participants of an HTTP transaction. + * @since v0.1.17 + */ + class OutgoingMessage extends stream.Writable { + readonly req: Request; + chunkedEncoding: boolean; + shouldKeepAlive: boolean; + useChunkedEncodingByDefault: boolean; + sendDate: boolean; + /** + * @deprecated Use `writableEnded` instead. + */ + finished: boolean; + /** + * Read-only. `true` if the headers were sent, otherwise `false`. + * @since v0.9.3 + */ + readonly headersSent: boolean; + /** + * Alias of `outgoingMessage.socket`. + * @since v0.3.0 + * @deprecated Since v15.12.0,v14.17.1 - Use `socket` instead. + */ + readonly connection: Socket | null; + /** + * Reference to the underlying socket. Usually, users will not want to access + * this property. + * + * After calling `outgoingMessage.end()`, this property will be nulled. + * @since v0.3.0 + */ + readonly socket: Socket | null; + constructor(); + /** + * Once a socket is associated with the message and is connected,`socket.setTimeout()` will be called with `msecs` as the first parameter. + * @since v0.9.12 + * @param callback Optional function to be called when a timeout occurs. Same as binding to the `timeout` event. + */ + setTimeout(msecs: number, callback?: () => void): this; + /** + * Sets a single header value. If the header already exists in the to-be-sent + * headers, its value will be replaced. Use an array of strings to send multiple + * headers with the same name. + * @since v0.4.0 + * @param name Header name + * @param value Header value + */ + setHeader(name: string, value: number | string | readonly string[]): this; + /** + * Append a single header value for the header object. + * + * If the value is an array, this is equivalent of calling this method multiple + * times. + * + * If there were no previous value for the header, this is equivalent of calling `outgoingMessage.setHeader(name, value)`. + * + * Depending of the value of `options.uniqueHeaders` when the client request or the + * server were created, this will end up in the header being sent multiple times or + * a single time with values joined using `; `. + * @since v18.3.0, v16.17.0 + * @param name Header name + * @param value Header value + */ + appendHeader(name: string, value: string | readonly string[]): this; + /** + * Gets the value of the HTTP header with the given name. If that header is not + * set, the returned value will be `undefined`. + * @since v0.4.0 + * @param name Name of header + */ + getHeader(name: string): number | string | string[] | undefined; + /** + * Returns a shallow copy of the current outgoing headers. Since a shallow + * copy is used, array values may be mutated without additional calls to + * various header-related HTTP module methods. The keys of the returned + * object are the header names and the values are the respective header + * values. All header names are lowercase. + * + * The object returned by the `outgoingMessage.getHeaders()` method does + * not prototypically inherit from the JavaScript `Object`. This means that + * typical `Object` methods such as `obj.toString()`, `obj.hasOwnProperty()`, + * and others are not defined and will not work. 
+ * + * ```js + * outgoingMessage.setHeader('Foo', 'bar'); + * outgoingMessage.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headers = outgoingMessage.getHeaders(); + * // headers === { foo: 'bar', 'set-cookie': ['foo=bar', 'bar=baz'] } + * ``` + * @since v7.7.0 + */ + getHeaders(): OutgoingHttpHeaders; + /** + * Returns an array containing the unique names of the current outgoing headers. + * All names are lowercase. + * @since v7.7.0 + */ + getHeaderNames(): string[]; + /** + * Returns `true` if the header identified by `name` is currently set in the + * outgoing headers. The header name is case-insensitive. + * + * ```js + * const hasContentType = outgoingMessage.hasHeader('content-type'); + * ``` + * @since v7.7.0 + */ + hasHeader(name: string): boolean; + /** + * Removes a header that is queued for implicit sending. + * + * ```js + * outgoingMessage.removeHeader('Content-Encoding'); + * ``` + * @since v0.4.0 + * @param name Header name + */ + removeHeader(name: string): void; + /** + * Adds HTTP trailers (headers but at the end of the message) to the message. + * + * Trailers will **only** be emitted if the message is chunked encoded. If not, + * the trailers will be silently discarded. + * + * HTTP requires the `Trailer` header to be sent to emit trailers, + * with a list of header field names in its value, e.g. + * + * ```js + * message.writeHead(200, { 'Content-Type': 'text/plain', + * 'Trailer': 'Content-MD5' }); + * message.write(fileData); + * message.addTrailers({ 'Content-MD5': '7895bf4b8828b55ceaf47747b4bca667' }); + * message.end(); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v0.3.0 + */ + addTrailers(headers: OutgoingHttpHeaders | ReadonlyArray<[string, string]>): void; + /** + * Flushes the message headers. + * + * For efficiency reason, Node.js normally buffers the message headers + * until `outgoingMessage.end()` is called or the first chunk of message data + * is written. It then tries to pack the headers and data into a single TCP + * packet. + * + * It is usually desired (it saves a TCP round-trip), but not when the first + * data is not sent until possibly much later. `outgoingMessage.flushHeaders()`bypasses the optimization and kickstarts the message. + * @since v1.6.0 + */ + flushHeaders(): void; + } + /** + * This object is created internally by an HTTP server, not by the user. It is + * passed as the second parameter to the `'request'` event. + * @since v0.1.17 + */ + class ServerResponse extends OutgoingMessage { + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status code that will be sent to the client when + * the headers get flushed. + * + * ```js + * response.statusCode = 404; + * ``` + * + * After response header was sent to the client, this property indicates the + * status code which was sent out. + * @since v0.4.0 + */ + statusCode: number; + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status message that will be sent to the client when + * the headers get flushed. If this is left as `undefined` then the standard + * message for the status code will be used. + * + * ```js + * response.statusMessage = 'Not found'; + * ``` + * + * After response header was sent to the client, this property indicates the + * status message which was sent out. 
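A small sketch of the header helpers inherited from `OutgoingMessage` (`setHeader`, `appendHeader`, `getHeader`, `hasHeader`, `removeHeader`, `getHeaderNames`), using made-up header values inside a throwaway server:

```js
import http from 'node:http';

const server = http.createServer((req, res) => {
  // ServerResponse inherits these header helpers from OutgoingMessage.
  res.setHeader('Content-Type', 'text/plain');
  res.setHeader('Set-Cookie', ['a=1', 'b=2']);  // array: the header is sent once per value
  res.appendHeader('Set-Cookie', 'c=3');        // append instead of replace
  if (res.hasHeader('content-type')) {          // header names are case-insensitive
    console.log(res.getHeader('content-type')); // 'text/plain'
  }
  res.removeHeader('X-Powered-By');             // harmless if the header was never set
  console.log(res.getHeaderNames());            // lower-cased names queued so far
  res.end('ok');
});

server.listen(8000);
```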
+ * @since v0.11.8 + */ + statusMessage: string; + /** + * If set to `true`, Node.js will check whether the `Content-Length`header value and the size of the body, in bytes, are equal. + * Mismatching the `Content-Length` header value will result + * in an `Error` being thrown, identified by `code:``'ERR_HTTP_CONTENT_LENGTH_MISMATCH'`. + * @since v18.10.0, v16.18.0 + */ + strictContentLength: boolean; + constructor(req: Request); + assignSocket(socket: Socket): void; + detachSocket(socket: Socket): void; + /** + * Sends an HTTP/1.1 100 Continue message to the client, indicating that + * the request body should be sent. See the `'checkContinue'` event on`Server`. + * @since v0.3.0 + */ + writeContinue(callback?: () => void): void; + /** + * Sends an HTTP/1.1 103 Early Hints message to the client with a Link header, + * indicating that the user agent can preload/preconnect the linked resources. + * The `hints` is an object containing the values of headers to be sent with + * early hints message. The optional `callback` argument will be called when + * the response message has been written. + * + * **Example** + * + * ```js + * const earlyHintsLink = '; rel=preload; as=style'; + * response.writeEarlyHints({ + * 'link': earlyHintsLink, + * }); + * + * const earlyHintsLinks = [ + * '; rel=preload; as=style', + * '; rel=preload; as=script', + * ]; + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * 'x-trace-id': 'id for diagnostics', + * }); + * + * const earlyHintsCallback = () => console.log('early hints message sent'); + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * }, earlyHintsCallback); + * ``` + * @since v18.11.0 + * @param hints An object containing the values of headers + * @param callback Will be called when the response message has been written + */ + writeEarlyHints(hints: Record, callback?: () => void): void; + /** + * Sends a response header to the request. The status code is a 3-digit HTTP + * status code, like `404`. The last argument, `headers`, are the response headers. + * Optionally one can give a human-readable `statusMessage` as the second + * argument. + * + * `headers` may be an `Array` where the keys and values are in the same list. + * It is _not_ a list of tuples. So, the even-numbered offsets are key values, + * and the odd-numbered offsets are the associated values. The array is in the same + * format as `request.rawHeaders`. + * + * Returns a reference to the `ServerResponse`, so that calls can be chained. + * + * ```js + * const body = 'hello world'; + * response + * .writeHead(200, { + * 'Content-Length': Buffer.byteLength(body), + * 'Content-Type': 'text/plain', + * }) + * .end(body); + * ``` + * + * This method must only be called once on a message and it must + * be called before `response.end()` is called. + * + * If `response.write()` or `response.end()` are called before calling + * this, the implicit/mutable headers will be calculated and call this function. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * If this method is called and `response.setHeader()` has not been called, + * it will directly write the supplied header values onto the network channel + * without caching internally, and the `response.getHeader()` on the header + * will not yield the expected result. 
If progressive population of headers is + * desired with potential future retrieval and modification, use `response.setHeader()` instead. + * + * ```js + * // Returns content-type = text/plain + * const server = http.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain' }); + * res.end('ok'); + * }); + * ``` + * + * `Content-Length` is read in bytes, not characters. Use `Buffer.byteLength()` to determine the length of the body in bytes. Node.js + * will check whether `Content-Length` and the length of the body which has + * been transmitted are equal or not. + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a \[`Error`\]\[\] being thrown. + * @since v0.1.30 + */ + writeHead( + statusCode: number, + statusMessage?: string, + headers?: OutgoingHttpHeaders | OutgoingHttpHeader[], + ): this; + writeHead(statusCode: number, headers?: OutgoingHttpHeaders | OutgoingHttpHeader[]): this; + /** + * Sends a HTTP/1.1 102 Processing message to the client, indicating that + * the request body should be sent. + * @since v10.0.0 + */ + writeProcessing(): void; + } + interface InformationEvent { + statusCode: number; + statusMessage: string; + httpVersion: string; + httpVersionMajor: number; + httpVersionMinor: number; + headers: IncomingHttpHeaders; + rawHeaders: string[]; + } + /** + * This object is created internally and returned from {@link request}. It + * represents an _in-progress_ request whose header has already been queued. The + * header is still mutable using the `setHeader(name, value)`,`getHeader(name)`, `removeHeader(name)` API. The actual header will + * be sent along with the first data chunk or when calling `request.end()`. + * + * To get the response, add a listener for `'response'` to the request object.`'response'` will be emitted from the request object when the response + * headers have been received. The `'response'` event is executed with one + * argument which is an instance of {@link IncomingMessage}. + * + * During the `'response'` event, one can add listeners to the + * response object; particularly to listen for the `'data'` event. + * + * If no `'response'` handler is added, then the response will be + * entirely discarded. However, if a `'response'` event handler is added, + * then the data from the response object **must** be consumed, either by + * calling `response.read()` whenever there is a `'readable'` event, or + * by adding a `'data'` handler, or by calling the `.resume()` method. + * Until the data is consumed, the `'end'` event will not fire. Also, until + * the data is read it will consume memory that can eventually lead to a + * 'process out of memory' error. + * + * For backward compatibility, `res` will only emit `'error'` if there is an`'error'` listener registered. + * + * Set `Content-Length` header to limit the response body size. + * If `response.strictContentLength` is set to `true`, mismatching the`Content-Length` header value will result in an `Error` being thrown, + * identified by `code:``'ERR_HTTP_CONTENT_LENGTH_MISMATCH'`. + * + * `Content-Length` value should be in bytes, not characters. Use `Buffer.byteLength()` to determine the length of the body in bytes. + * @since v0.1.17 + */ + class ClientRequest extends OutgoingMessage { + /** + * The `request.aborted` property will be `true` if the request has + * been aborted. 
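To make the consumption requirement described for `ClientRequest` concrete, here is a minimal sketch that drains the response; the target URL is an assumption:

```js
import http from 'node:http';

const req = http.request('http://localhost:8000/', (res) => {
  // The response data must be consumed (here via a 'data' handler),
  // otherwise 'end' never fires and the buffered data stays in memory.
  let body = '';
  res.setEncoding('utf8');
  res.on('data', (chunk) => { body += chunk; });
  res.on('end', () => console.log('response finished:', body));
});

req.on('error', (err) => console.error(`problem with request: ${err.message}`));
req.end(); // always call end(), even when no request body is written
```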
+ * @since v0.11.14 + * @deprecated Since v17.0.0,v16.12.0 - Check `destroyed` instead. + */ + aborted: boolean; + /** + * The request host. + * @since v14.5.0, v12.19.0 + */ + host: string; + /** + * The request protocol. + * @since v14.5.0, v12.19.0 + */ + protocol: string; + /** + * When sending request through a keep-alive enabled agent, the underlying socket + * might be reused. But if server closes connection at unfortunate time, client + * may run into a 'ECONNRESET' error. + * + * ```js + * import http from 'node:http'; + * + * // Server has a 5 seconds keep-alive timeout by default + * http + * .createServer((req, res) => { + * res.write('hello\n'); + * res.end(); + * }) + * .listen(3000); + * + * setInterval(() => { + * // Adapting a keep-alive agent + * http.get('http://localhost:3000', { agent }, (res) => { + * res.on('data', (data) => { + * // Do nothing + * }); + * }); + * }, 5000); // Sending request on 5s interval so it's easy to hit idle timeout + * ``` + * + * By marking a request whether it reused socket or not, we can do + * automatic error retry base on it. + * + * ```js + * import http from 'node:http'; + * const agent = new http.Agent({ keepAlive: true }); + * + * function retriableRequest() { + * const req = http + * .get('http://localhost:3000', { agent }, (res) => { + * // ... + * }) + * .on('error', (err) => { + * // Check if retry is needed + * if (req.reusedSocket && err.code === 'ECONNRESET') { + * retriableRequest(); + * } + * }); + * } + * + * retriableRequest(); + * ``` + * @since v13.0.0, v12.16.0 + */ + reusedSocket: boolean; + /** + * Limits maximum response headers count. If set to 0, no limit will be applied. + */ + maxHeadersCount: number; + constructor(url: string | URL | ClientRequestArgs, cb?: (res: IncomingMessage) => void); + /** + * The request method. + * @since v0.1.97 + */ + method: string; + /** + * The request path. + * @since v0.4.0 + */ + path: string; + /** + * Marks the request as aborting. Calling this will cause remaining data + * in the response to be dropped and the socket to be destroyed. + * @since v0.3.8 + * @deprecated Since v14.1.0,v13.14.0 - Use `destroy` instead. + */ + abort(): void; + onSocket(socket: Socket): void; + /** + * Once a socket is assigned to this request and is connected `socket.setTimeout()` will be called. + * @since v0.5.9 + * @param timeout Milliseconds before a request times out. + * @param callback Optional function to be called when a timeout occurs. Same as binding to the `'timeout'` event. + */ + setTimeout(timeout: number, callback?: () => void): this; + /** + * Once a socket is assigned to this request and is connected `socket.setNoDelay()` will be called. + * @since v0.5.9 + */ + setNoDelay(noDelay?: boolean): void; + /** + * Once a socket is assigned to this request and is connected `socket.setKeepAlive()` will be called. + * @since v0.5.9 + */ + setSocketKeepAlive(enable?: boolean, initialDelay?: number): void; + /** + * Returns an array containing the unique names of the current outgoing raw + * headers. Header names are returned with their exact casing being set. 
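A short sketch of the per-request socket tuning methods listed above (`setTimeout`, `setNoDelay`, `setSocketKeepAlive`, `getRawHeaderNames`); the URL, header, and timings are illustrative assumptions:

```js
import http from 'node:http';

const req = http.get('http://localhost:8000/', { headers: { 'X-Trace-Id': 'abc123' } }, (res) => {
  res.resume(); // discard the body so 'end' can fire
});

// These calls take effect once a socket is assigned to the request.
req.setTimeout(5000, () => req.destroy(new Error('request timed out')));
req.setNoDelay(true);                 // disable Nagle's algorithm
req.setSocketKeepAlive(true, 1000);   // TCP keep-alive probes after 1 s of idle time

console.log(req.getRawHeaderNames()); // e.g. [ 'X-Trace-Id', 'Host' ], casing preserved
req.on('error', (err) => console.error(err.message));
```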
+ * + * ```js + * request.setHeader('Foo', 'bar'); + * request.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headerNames = request.getRawHeaderNames(); + * // headerNames === ['Foo', 'Set-Cookie'] + * ``` + * @since v15.13.0, v14.17.0 + */ + getRawHeaderNames(): string[]; + /** + * @deprecated + */ + addListener(event: "abort", listener: () => void): this; + addListener( + event: "connect", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + addListener(event: "continue", listener: () => void): this; + addListener(event: "information", listener: (info: InformationEvent) => void): this; + addListener(event: "response", listener: (response: IncomingMessage) => void): this; + addListener(event: "socket", listener: (socket: Socket) => void): this; + addListener(event: "timeout", listener: () => void): this; + addListener( + event: "upgrade", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + on(event: "abort", listener: () => void): this; + on(event: "connect", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + on(event: "continue", listener: () => void): this; + on(event: "information", listener: (info: InformationEvent) => void): this; + on(event: "response", listener: (response: IncomingMessage) => void): this; + on(event: "socket", listener: (socket: Socket) => void): this; + on(event: "timeout", listener: () => void): this; + on(event: "upgrade", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + once(event: "abort", listener: () => void): this; + once(event: "connect", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + once(event: "continue", listener: () => void): this; + once(event: "information", listener: (info: InformationEvent) => void): this; + once(event: "response", listener: (response: IncomingMessage) => void): this; + once(event: "socket", listener: (socket: Socket) => void): this; + once(event: "timeout", listener: () => void): this; + once(event: "upgrade", listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string | symbol, 
listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + prependListener(event: "abort", listener: () => void): this; + prependListener( + event: "connect", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependListener(event: "continue", listener: () => void): this; + prependListener(event: "information", listener: (info: InformationEvent) => void): this; + prependListener(event: "response", listener: (response: IncomingMessage) => void): this; + prependListener(event: "socket", listener: (socket: Socket) => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener( + event: "upgrade", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + prependOnceListener(event: "abort", listener: () => void): this; + prependOnceListener( + event: "connect", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependOnceListener(event: "continue", listener: () => void): this; + prependOnceListener(event: "information", listener: (info: InformationEvent) => void): this; + prependOnceListener(event: "response", listener: (response: IncomingMessage) => void): this; + prependOnceListener(event: "socket", listener: (socket: Socket) => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener( + event: "upgrade", + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * An `IncomingMessage` object is created by {@link Server} or {@link ClientRequest} and passed as the first argument to the `'request'` and `'response'` event respectively. It may be used to + * access response + * status, headers, and data. + * + * Different from its `socket` value which is a subclass of `stream.Duplex`, the`IncomingMessage` itself extends `stream.Readable` and is created separately to + * parse and emit the incoming HTTP headers and payload, as the underlying socket + * may be reused multiple times in case of keep-alive. + * @since v0.1.17 + */ + class IncomingMessage extends stream.Readable { + constructor(socket: Socket); + /** + * The `message.aborted` property will be `true` if the request has + * been aborted. + * @since v10.1.0 + * @deprecated Since v17.0.0,v16.12.0 - Check `message.destroyed` from stream.Readable. 
+ */ + aborted: boolean; + /** + * In case of server request, the HTTP version sent by the client. In the case of + * client response, the HTTP version of the connected-to server. + * Probably either `'1.1'` or `'1.0'`. + * + * Also `message.httpVersionMajor` is the first integer and`message.httpVersionMinor` is the second. + * @since v0.1.1 + */ + httpVersion: string; + httpVersionMajor: number; + httpVersionMinor: number; + /** + * The `message.complete` property will be `true` if a complete HTTP message has + * been received and successfully parsed. + * + * This property is particularly useful as a means of determining if a client or + * server fully transmitted a message before a connection was terminated: + * + * ```js + * const req = http.request({ + * host: '127.0.0.1', + * port: 8080, + * method: 'POST', + * }, (res) => { + * res.resume(); + * res.on('end', () => { + * if (!res.complete) + * console.error( + * 'The connection was terminated while the message was still being sent'); + * }); + * }); + * ``` + * @since v0.3.0 + */ + complete: boolean; + /** + * Alias for `message.socket`. + * @since v0.1.90 + * @deprecated Since v16.0.0 - Use `socket`. + */ + connection: Socket; + /** + * The `net.Socket` object associated with the connection. + * + * With HTTPS support, use `request.socket.getPeerCertificate()` to obtain the + * client's authentication details. + * + * This property is guaranteed to be an instance of the `net.Socket` class, + * a subclass of `stream.Duplex`, unless the user specified a socket + * type other than `net.Socket` or internally nulled. + * @since v0.3.0 + */ + socket: Socket; + /** + * The request/response headers object. + * + * Key-value pairs of header names and values. Header names are lower-cased. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': 'curl/7.22.0', + * // host: '127.0.0.1:8000', + * // accept: '*' } + * console.log(request.headers); + * ``` + * + * Duplicates in raw headers are handled in the following ways, depending on the + * header name: + * + * * Duplicates of `age`, `authorization`, `content-length`, `content-type`,`etag`, `expires`, `from`, `host`, `if-modified-since`, `if-unmodified-since`,`last-modified`, `location`, + * `max-forwards`, `proxy-authorization`, `referer`,`retry-after`, `server`, or `user-agent` are discarded. + * To allow duplicate values of the headers listed above to be joined, + * use the option `joinDuplicateHeaders` in {@link request} and {@link createServer}. See RFC 9110 Section 5.3 for more + * information. + * * `set-cookie` is always an array. Duplicates are added to the array. + * * For duplicate `cookie` headers, the values are joined together with `; `. + * * For all other headers, the values are joined together with `, `. + * @since v0.1.5 + */ + headers: IncomingHttpHeaders; + /** + * Similar to `message.headers`, but there is no join logic and the values are + * always arrays of strings, even for headers received just once. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': ['curl/7.22.0'], + * // host: ['127.0.0.1:8000'], + * // accept: ['*'] } + * console.log(request.headersDistinct); + * ``` + * @since v18.3.0, v16.17.0 + */ + headersDistinct: NodeJS.Dict; + /** + * The raw request/response headers list exactly as they were received. + * + * The keys and values are in the same list. It is _not_ a + * list of tuples. So, the even-numbered offsets are key values, and the + * odd-numbered offsets are the associated values. 
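To illustrate the duplicate-header rules just described, a throwaway request handler comparing `headers`, `headersDistinct`, and `rawHeaders`; the logged values are hypothetical:

```js
import http from 'node:http';

const server = http.createServer((req, res) => {
  // Duplicate headers are merged according to the rules described above.
  console.log(req.headers.cookie);         // duplicate cookie headers joined with '; '
  console.log(req.headers.accept);         // most other duplicates joined with ', '
  console.log(req.headers['set-cookie']);  // always an array when present

  // headersDistinct keeps every value as an array, with no join logic.
  console.log(req.headersDistinct.accept); // e.g. [ 'text/html', 'application/json' ]

  // rawHeaders is a flat [name, value, name, value, ...] list with original casing.
  console.log(req.rawHeaders);
  res.end('ok');
});

server.listen(8000);
```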
+ * + * Header names are not lowercased, and duplicates are not merged. + * + * ```js + * // Prints something like: + * // + * // [ 'user-agent', + * // 'this is invalid because there can be only one', + * // 'User-Agent', + * // 'curl/7.22.0', + * // 'Host', + * // '127.0.0.1:8000', + * // 'ACCEPT', + * // '*' ] + * console.log(request.rawHeaders); + * ``` + * @since v0.11.6 + */ + rawHeaders: string[]; + /** + * The request/response trailers object. Only populated at the `'end'` event. + * @since v0.3.0 + */ + trailers: NodeJS.Dict; + /** + * Similar to `message.trailers`, but there is no join logic and the values are + * always arrays of strings, even for headers received just once. + * Only populated at the `'end'` event. + * @since v18.3.0, v16.17.0 + */ + trailersDistinct: NodeJS.Dict; + /** + * The raw request/response trailer keys and values exactly as they were + * received. Only populated at the `'end'` event. + * @since v0.11.6 + */ + rawTrailers: string[]; + /** + * Calls `message.socket.setTimeout(msecs, callback)`. + * @since v0.5.9 + */ + setTimeout(msecs: number, callback?: () => void): this; + /** + * **Only valid for request obtained from {@link Server}.** + * + * The request method as a string. Read only. Examples: `'GET'`, `'DELETE'`. + * @since v0.1.1 + */ + method?: string | undefined; + /** + * **Only valid for request obtained from {@link Server}.** + * + * Request URL string. This contains only the URL that is present in the actual + * HTTP request. Take the following request: + * + * ```http + * GET /status?name=ryan HTTP/1.1 + * Accept: text/plain + * ``` + * + * To parse the URL into its parts: + * + * ```js + * new URL(request.url, `http://${request.headers.host}`); + * ``` + * + * When `request.url` is `'/status?name=ryan'` and `request.headers.host` is`'localhost:3000'`: + * + * ```console + * $ node + * > new URL(request.url, `http://${request.headers.host}`) + * URL { + * href: 'http://localhost:3000/status?name=ryan', + * origin: 'http://localhost:3000', + * protocol: 'http:', + * username: '', + * password: '', + * host: 'localhost:3000', + * hostname: 'localhost', + * port: '3000', + * pathname: '/status', + * search: '?name=ryan', + * searchParams: URLSearchParams { 'name' => 'ryan' }, + * hash: '' + * } + * ``` + * @since v0.1.90 + */ + url?: string | undefined; + /** + * **Only valid for response obtained from {@link ClientRequest}.** + * + * The 3-digit HTTP response status code. E.G. `404`. + * @since v0.1.1 + */ + statusCode?: number | undefined; + /** + * **Only valid for response obtained from {@link ClientRequest}.** + * + * The HTTP response status message (reason phrase). E.G. `OK` or `Internal Server Error`. + * @since v0.11.10 + */ + statusMessage?: string | undefined; + /** + * Calls `destroy()` on the socket that received the `IncomingMessage`. If `error`is provided, an `'error'` event is emitted on the socket and `error` is passed + * as an argument to any listeners on the event. + * @since v0.3.0 + */ + destroy(error?: Error): this; + } + interface AgentOptions extends Partial { + /** + * Keep sockets around in a pool to be used by other requests in the future. Default = false + */ + keepAlive?: boolean | undefined; + /** + * When using HTTP KeepAlive, how often to send TCP KeepAlive packets over sockets being kept alive. Default = 1000. + * Only relevant if keepAlive is set to true. + */ + keepAliveMsecs?: number | undefined; + /** + * Maximum number of sockets to allow per host. 
Default for Node 0.10 is 5, default for Node 0.12 is Infinity + */ + maxSockets?: number | undefined; + /** + * Maximum number of sockets allowed for all hosts in total. Each request will use a new socket until the maximum is reached. Default: Infinity. + */ + maxTotalSockets?: number | undefined; + /** + * Maximum number of sockets to leave open in a free state. Only relevant if keepAlive is set to true. Default = 256. + */ + maxFreeSockets?: number | undefined; + /** + * Socket timeout in milliseconds. This will set the timeout after the socket is connected. + */ + timeout?: number | undefined; + /** + * Scheduling strategy to apply when picking the next free socket to use. + * @default `lifo` + */ + scheduling?: "fifo" | "lifo" | undefined; + } + /** + * An `Agent` is responsible for managing connection persistence + * and reuse for HTTP clients. It maintains a queue of pending requests + * for a given host and port, reusing a single socket connection for each + * until the queue is empty, at which time the socket is either destroyed + * or put into a pool where it is kept to be used again for requests to the + * same host and port. Whether it is destroyed or pooled depends on the`keepAlive` `option`. + * + * Pooled connections have TCP Keep-Alive enabled for them, but servers may + * still close idle connections, in which case they will be removed from the + * pool and a new connection will be made when a new HTTP request is made for + * that host and port. Servers may also refuse to allow multiple requests + * over the same connection, in which case the connection will have to be + * remade for every request and cannot be pooled. The `Agent` will still make + * the requests to that server, but each one will occur over a new connection. + * + * When a connection is closed by the client or the server, it is removed + * from the pool. Any unused sockets in the pool will be unrefed so as not + * to keep the Node.js process running when there are no outstanding requests. + * (see `socket.unref()`). + * + * It is good practice, to `destroy()` an `Agent` instance when it is no + * longer in use, because unused sockets consume OS resources. + * + * Sockets are removed from an agent when the socket emits either + * a `'close'` event or an `'agentRemove'` event. When intending to keep one + * HTTP request open for a long time without keeping it in the agent, something + * like the following may be done: + * + * ```js + * http.get(options, (res) => { + * // Do stuff + * }).on('socket', (socket) => { + * socket.emit('agentRemove'); + * }); + * ``` + * + * An agent may also be used for an individual request. By providing`{agent: false}` as an option to the `http.get()` or `http.request()`functions, a one-time use `Agent` with default options + * will be used + * for the client connection. + * + * `agent:false`: + * + * ```js + * http.get({ + * hostname: 'localhost', + * port: 80, + * path: '/', + * agent: false, // Create a new agent just for this one request + * }, (res) => { + * // Do stuff with response + * }); + * ``` + * @since v0.3.4 + */ + class Agent extends EventEmitter { + /** + * By default set to 256. For agents with `keepAlive` enabled, this + * sets the maximum number of sockets that will be left open in the free + * state. + * @since v0.11.7 + */ + maxFreeSockets: number; + /** + * By default set to `Infinity`. Determines how many concurrent sockets the agent + * can have open per origin. Origin is the returned value of `agent.getName()`. 
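A minimal sketch that wires the `AgentOptions` fields above into a keep-alive `Agent`; all numbers are arbitrary examples rather than recommended settings:

```js
import http from 'node:http';

const agent = new http.Agent({
  keepAlive: true,        // pool sockets instead of destroying them after each request
  keepAliveMsecs: 1000,   // TCP keep-alive probe interval for pooled sockets
  maxSockets: 50,         // per origin
  maxTotalSockets: 200,   // across all origins
  maxFreeSockets: 10,     // idle sockets kept in the pool
  timeout: 30000,         // socket inactivity timeout after connect
  scheduling: 'lifo',
});

http.get('http://localhost:8000/', { agent }, (res) => res.resume());

// Destroy the agent once it is no longer needed so pooled sockets are closed:
// agent.destroy();
```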
+ * @since v0.3.6 + */ + maxSockets: number; + /** + * By default set to `Infinity`. Determines how many concurrent sockets the agent + * can have open. Unlike `maxSockets`, this parameter applies across all origins. + * @since v14.5.0, v12.19.0 + */ + maxTotalSockets: number; + /** + * An object which contains arrays of sockets currently awaiting use by + * the agent when `keepAlive` is enabled. Do not modify. + * + * Sockets in the `freeSockets` list will be automatically destroyed and + * removed from the array on `'timeout'`. + * @since v0.11.4 + */ + readonly freeSockets: NodeJS.ReadOnlyDict; + /** + * An object which contains arrays of sockets currently in use by the + * agent. Do not modify. + * @since v0.3.6 + */ + readonly sockets: NodeJS.ReadOnlyDict; + /** + * An object which contains queues of requests that have not yet been assigned to + * sockets. Do not modify. + * @since v0.5.9 + */ + readonly requests: NodeJS.ReadOnlyDict; + constructor(opts?: AgentOptions); + /** + * Destroy any sockets that are currently in use by the agent. + * + * It is usually not necessary to do this. However, if using an + * agent with `keepAlive` enabled, then it is best to explicitly shut down + * the agent when it is no longer needed. Otherwise, + * sockets might stay open for quite a long time before the server + * terminates them. + * @since v0.11.4 + */ + destroy(): void; + } + const METHODS: string[]; + const STATUS_CODES: { + [errorCode: number]: string | undefined; + [errorCode: string]: string | undefined; + }; + /** + * Returns a new instance of {@link Server}. + * + * The `requestListener` is a function which is automatically + * added to the `'request'` event. + * + * ```js + * import http from 'node:http'; + * + * // Create a local server to receive data from + * const server = http.createServer((req, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!', + * })); + * }); + * + * server.listen(8000); + * ``` + * + * ```js + * import http from 'node:http'; + * + * // Create a local server to receive data from + * const server = http.createServer(); + * + * // Listen to the request event + * server.on('request', (request, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!', + * })); + * }); + * + * server.listen(8000); + * ``` + * @since v0.1.13 + */ + function createServer< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + >(requestListener?: RequestListener): Server; + function createServer< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + >( + options: ServerOptions, + requestListener?: RequestListener, + ): Server; + // although RequestOptions are passed as ClientRequestArgs to ClientRequest directly, + // create interface RequestOptions would make the naming more clear to developers + interface RequestOptions extends ClientRequestArgs {} + /** + * `options` in `socket.connect()` are also supported. + * + * Node.js maintains several connections per server to make HTTP requests. + * This function allows one to transparently issue requests. + * + * `url` can be a string or a `URL` object. If `url` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. 
+ * + * If both `url` and `options` are specified, the objects are merged, with the`options` properties taking precedence. + * + * The optional `callback` parameter will be added as a one-time listener for + * the `'response'` event. + * + * `http.request()` returns an instance of the {@link ClientRequest} class. The `ClientRequest` instance is a writable stream. If one needs to + * upload a file with a POST request, then write to the `ClientRequest` object. + * + * ```js + * import http from 'node:http'; + * import { Buffer } from 'node:buffer'; + * + * const postData = JSON.stringify({ + * 'msg': 'Hello World!', + * }); + * + * const options = { + * hostname: 'www.google.com', + * port: 80, + * path: '/upload', + * method: 'POST', + * headers: { + * 'Content-Type': 'application/json', + * 'Content-Length': Buffer.byteLength(postData), + * }, + * }; + * + * const req = http.request(options, (res) => { + * console.log(`STATUS: ${res.statusCode}`); + * console.log(`HEADERS: ${JSON.stringify(res.headers)}`); + * res.setEncoding('utf8'); + * res.on('data', (chunk) => { + * console.log(`BODY: ${chunk}`); + * }); + * res.on('end', () => { + * console.log('No more data in response.'); + * }); + * }); + * + * req.on('error', (e) => { + * console.error(`problem with request: ${e.message}`); + * }); + * + * // Write data to request body + * req.write(postData); + * req.end(); + * ``` + * + * In the example `req.end()` was called. With `http.request()` one + * must always call `req.end()` to signify the end of the request - + * even if there is no data being written to the request body. + * + * If any error is encountered during the request (be that with DNS resolution, + * TCP level errors, or actual HTTP parse errors) an `'error'` event is emitted + * on the returned request object. As with all `'error'` events, if no listeners + * are registered the error will be thrown. + * + * There are a few special headers that should be noted. + * + * * Sending a 'Connection: keep-alive' will notify Node.js that the connection to + * the server should be persisted until the next request. + * * Sending a 'Content-Length' header will disable the default chunked encoding. + * * Sending an 'Expect' header will immediately send the request headers. + * Usually, when sending 'Expect: 100-continue', both a timeout and a listener + * for the `'continue'` event should be set. See RFC 2616 Section 8.2.3 for more + * information. + * * Sending an Authorization header will override using the `auth` option + * to compute basic authentication. + * + * Example using a `URL` as `options`: + * + * ```js + * const options = new URL('http://abc:xyz@example.com'); + * + * const req = http.request(options, (res) => { + * // ... 
+ * }); + * ``` + * + * In a successful request, the following events will be emitted in the following + * order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * (`'data'` will not be emitted at all if the response body is empty, for + * instance, in most redirects) + * * `'end'` on the `res` object + * * `'close'` + * + * In the case of a connection error, the following events will be emitted: + * + * * `'socket'` + * * `'error'` + * * `'close'` + * + * In the case of a premature connection close before the response is received, + * the following events will be emitted in the following order: + * + * * `'socket'` + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * In the case of a premature connection close after the response is received, + * the following events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (connection closed here) + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message`'Error: aborted'` and code `'ECONNRESET'` + * * `'close'` + * * `'close'` on the `res` object + * + * If `req.destroy()` is called before a socket is assigned, the following + * events will be emitted in the following order: + * + * * (`req.destroy()` called here) + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'`, or the error with which `req.destroy()` was called + * * `'close'` + * + * If `req.destroy()` is called before the connection succeeds, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * (`req.destroy()` called here) + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'`, or the error with which `req.destroy()` was called + * * `'close'` + * + * If `req.destroy()` is called after the response is received, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (`req.destroy()` called here) + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message `'Error: aborted'`and code `'ECONNRESET'`, or the error with which `req.destroy()` was called + * * `'close'` + * * `'close'` on the `res` object + * + * If `req.abort()` is called before a socket is assigned, the following + * events will be emitted in the following order: + * + * * (`req.abort()` called here) + * * `'abort'` + * * `'close'` + * + * If `req.abort()` is called before the connection succeeds, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * (`req.abort()` called here) + * * `'abort'` + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * If `req.abort()` is called after the response is received, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (`req.abort()` called here) + * * `'abort'` + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message`'Error: aborted'` and code `'ECONNRESET'`. 
+ * * `'close'` + * * `'close'` on the `res` object + * + * Setting the `timeout` option or using the `setTimeout()` function will + * not abort the request or do anything besides add a `'timeout'` event. + * + * Passing an `AbortSignal` and then calling `abort()` on the corresponding`AbortController` will behave the same way as calling `.destroy()` on the + * request. Specifically, the `'error'` event will be emitted with an error with + * the message `'AbortError: The operation was aborted'`, the code `'ABORT_ERR'`and the `cause`, if one was provided. + * @since v0.3.6 + */ + function request(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; + function request( + url: string | URL, + options: RequestOptions, + callback?: (res: IncomingMessage) => void, + ): ClientRequest; + /** + * Since most requests are GET requests without bodies, Node.js provides this + * convenience method. The only difference between this method and {@link request} is that it sets the method to GET by default and calls `req.end()`automatically. The callback must take care to + * consume the response + * data for reasons stated in {@link ClientRequest} section. + * + * The `callback` is invoked with a single argument that is an instance of {@link IncomingMessage}. + * + * JSON fetching example: + * + * ```js + * http.get('http://localhost:8000/', (res) => { + * const { statusCode } = res; + * const contentType = res.headers['content-type']; + * + * let error; + * // Any 2xx status code signals a successful response but + * // here we're only checking for 200. + * if (statusCode !== 200) { + * error = new Error('Request Failed.\n' + + * `Status Code: ${statusCode}`); + * } else if (!/^application\/json/.test(contentType)) { + * error = new Error('Invalid content-type.\n' + + * `Expected application/json but received ${contentType}`); + * } + * if (error) { + * console.error(error.message); + * // Consume response data to free up memory + * res.resume(); + * return; + * } + * + * res.setEncoding('utf8'); + * let rawData = ''; + * res.on('data', (chunk) => { rawData += chunk; }); + * res.on('end', () => { + * try { + * const parsedData = JSON.parse(rawData); + * console.log(parsedData); + * } catch (e) { + * console.error(e.message); + * } + * }); + * }).on('error', (e) => { + * console.error(`Got error: ${e.message}`); + * }); + * + * // Create a local server to receive data from + * const server = http.createServer((req, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!', + * })); + * }); + * + * server.listen(8000); + * ``` + * @since v0.3.6 + * @param options Accepts the same `options` as {@link request}, with the method set to GET by default. + */ + function get(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; + function get(url: string | URL, options: RequestOptions, callback?: (res: IncomingMessage) => void): ClientRequest; + /** + * Performs the low-level validations on the provided `name` that are done when`res.setHeader(name, value)` is called. + * + * Passing illegal value as `name` will result in a `TypeError` being thrown, + * identified by `code: 'ERR_INVALID_HTTP_TOKEN'`. + * + * It is not necessary to use this method before passing headers to an HTTP request + * or response. The HTTP module will automatically validate such headers. 
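A small sketch of the `AbortSignal` behaviour described above, assuming a slow endpoint at `localhost:8000`; aborting the controller destroys the request and surfaces an `'error'` with code `'ABORT_ERR'`:

```js
import http from 'node:http';

const controller = new AbortController();

const req = http.request('http://localhost:8000/slow', { signal: controller.signal }, (res) => {
  res.resume();
});

req.on('error', (err) => {
  // After controller.abort() the request is destroyed and an 'error' event
  // with code 'ABORT_ERR' is emitted, as described above.
  if (err.code === 'ABORT_ERR') {
    console.log('request aborted');
  } else {
    console.error(err);
  }
});
req.end();

// Abort if the request is still outstanding after 2 s.
setTimeout(() => controller.abort(), 2000).unref();
```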
+ * + * Example: + * + * ```js + * import { validateHeaderName } from 'node:http'; + * + * try { + * validateHeaderName(''); + * } catch (err) { + * console.error(err instanceof TypeError); // --> true + * console.error(err.code); // --> 'ERR_INVALID_HTTP_TOKEN' + * console.error(err.message); // --> 'Header name must be a valid HTTP token [""]' + * } + * ``` + * @since v14.3.0 + * @param [label='Header name'] Label for error message. + */ + function validateHeaderName(name: string): void; + /** + * Performs the low-level validations on the provided `value` that are done when`res.setHeader(name, value)` is called. + * + * Passing illegal value as `value` will result in a `TypeError` being thrown. + * + * * Undefined value error is identified by `code: 'ERR_HTTP_INVALID_HEADER_VALUE'`. + * * Invalid value character error is identified by `code: 'ERR_INVALID_CHAR'`. + * + * It is not necessary to use this method before passing headers to an HTTP request + * or response. The HTTP module will automatically validate such headers. + * + * Examples: + * + * ```js + * import { validateHeaderValue } from 'node:http'; + * + * try { + * validateHeaderValue('x-my-header', undefined); + * } catch (err) { + * console.error(err instanceof TypeError); // --> true + * console.error(err.code === 'ERR_HTTP_INVALID_HEADER_VALUE'); // --> true + * console.error(err.message); // --> 'Invalid value "undefined" for header "x-my-header"' + * } + * + * try { + * validateHeaderValue('x-my-header', 'oʊmɪɡə'); + * } catch (err) { + * console.error(err instanceof TypeError); // --> true + * console.error(err.code === 'ERR_INVALID_CHAR'); // --> true + * console.error(err.message); // --> 'Invalid character in header content ["x-my-header"]' + * } + * ``` + * @since v14.3.0 + * @param name Header name + * @param value Header value + */ + function validateHeaderValue(name: string, value: string): void; + /** + * Set the maximum number of idle HTTP parsers. + * @since v18.8.0, v16.18.0 + * @param [max=1000] + */ + function setMaxIdleHTTPParsers(max: number): void; + let globalAgent: Agent; + /** + * Read-only property specifying the maximum allowed size of HTTP headers in bytes. + * Defaults to 16KB. Configurable using the `--max-http-header-size` CLI option. + */ + const maxHeaderSize: number; +} +declare module "node:http" { + export * from "http"; +} diff --git a/dist/node_modules/@types/node/ts4.8/http2.d.ts b/dist/node_modules/@types/node/ts4.8/http2.d.ts new file mode 100644 index 0000000..c3b3e8e --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/http2.d.ts @@ -0,0 +1,2382 @@ +/** + * The `node:http2` module provides an implementation of the [HTTP/2](https://tools.ietf.org/html/rfc7540) protocol. 
+ * It can be accessed using: + * + * ```js + * const http2 = require('node:http2'); + * ``` + * @since v8.4.0 + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/http2.js) + */ +declare module "http2" { + import EventEmitter = require("node:events"); + import * as fs from "node:fs"; + import * as net from "node:net"; + import * as stream from "node:stream"; + import * as tls from "node:tls"; + import * as url from "node:url"; + import { + IncomingHttpHeaders as Http1IncomingHttpHeaders, + IncomingMessage, + OutgoingHttpHeaders, + ServerResponse, + } from "node:http"; + export { OutgoingHttpHeaders } from "node:http"; + export interface IncomingHttpStatusHeader { + ":status"?: number | undefined; + } + export interface IncomingHttpHeaders extends Http1IncomingHttpHeaders { + ":path"?: string | undefined; + ":method"?: string | undefined; + ":authority"?: string | undefined; + ":scheme"?: string | undefined; + } + // Http2Stream + export interface StreamPriorityOptions { + exclusive?: boolean | undefined; + parent?: number | undefined; + weight?: number | undefined; + silent?: boolean | undefined; + } + export interface StreamState { + localWindowSize?: number | undefined; + state?: number | undefined; + localClose?: number | undefined; + remoteClose?: number | undefined; + sumDependencyWeight?: number | undefined; + weight?: number | undefined; + } + export interface ServerStreamResponseOptions { + endStream?: boolean | undefined; + waitForTrailers?: boolean | undefined; + } + export interface StatOptions { + offset: number; + length: number; + } + export interface ServerStreamFileResponseOptions { + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + statCheck?(stats: fs.Stats, headers: OutgoingHttpHeaders, statOptions: StatOptions): void | boolean; + waitForTrailers?: boolean | undefined; + offset?: number | undefined; + length?: number | undefined; + } + export interface ServerStreamFileResponseOptionsWithError extends ServerStreamFileResponseOptions { + onError?(err: NodeJS.ErrnoException): void; + } + export interface Http2Stream extends stream.Duplex { + /** + * Set to `true` if the `Http2Stream` instance was aborted abnormally. When set, + * the `'aborted'` event will have been emitted. + * @since v8.4.0 + */ + readonly aborted: boolean; + /** + * This property shows the number of characters currently buffered to be written. + * See `net.Socket.bufferSize` for details. + * @since v11.2.0, v10.16.0 + */ + readonly bufferSize: number; + /** + * Set to `true` if the `Http2Stream` instance has been closed. + * @since v9.4.0 + */ + readonly closed: boolean; + /** + * Set to `true` if the `Http2Stream` instance has been destroyed and is no longer + * usable. + * @since v8.4.0 + */ + readonly destroyed: boolean; + /** + * Set to `true` if the `END_STREAM` flag was set in the request or response + * HEADERS frame received, indicating that no additional data should be received + * and the readable side of the `Http2Stream` will be closed. + * @since v10.11.0 + */ + readonly endAfterHeaders: boolean; + /** + * The numeric stream identifier of this `Http2Stream` instance. Set to `undefined`if the stream identifier has not yet been assigned. + * @since v8.4.0 + */ + readonly id?: number | undefined; + /** + * Set to `true` if the `Http2Stream` instance has not yet been assigned a + * numeric stream identifier. 
+ * @since v9.4.0 + */ + readonly pending: boolean; + /** + * Set to the `RST_STREAM` `error code` reported when the `Http2Stream` is + * destroyed after either receiving an `RST_STREAM` frame from the connected peer, + * calling `http2stream.close()`, or `http2stream.destroy()`. Will be`undefined` if the `Http2Stream` has not been closed. + * @since v8.4.0 + */ + readonly rstCode: number; + /** + * An object containing the outbound headers sent for this `Http2Stream`. + * @since v9.5.0 + */ + readonly sentHeaders: OutgoingHttpHeaders; + /** + * An array of objects containing the outbound informational (additional) headers + * sent for this `Http2Stream`. + * @since v9.5.0 + */ + readonly sentInfoHeaders?: OutgoingHttpHeaders[] | undefined; + /** + * An object containing the outbound trailers sent for this `HttpStream`. + * @since v9.5.0 + */ + readonly sentTrailers?: OutgoingHttpHeaders | undefined; + /** + * A reference to the `Http2Session` instance that owns this `Http2Stream`. The + * value will be `undefined` after the `Http2Stream` instance is destroyed. + * @since v8.4.0 + */ + readonly session: Http2Session | undefined; + /** + * Provides miscellaneous information about the current state of the`Http2Stream`. + * + * A current state of this `Http2Stream`. + * @since v8.4.0 + */ + readonly state: StreamState; + /** + * Closes the `Http2Stream` instance by sending an `RST_STREAM` frame to the + * connected HTTP/2 peer. + * @since v8.4.0 + * @param [code=http2.constants.NGHTTP2_NO_ERROR] Unsigned 32-bit integer identifying the error code. + * @param callback An optional function registered to listen for the `'close'` event. + */ + close(code?: number, callback?: () => void): void; + /** + * Updates the priority for this `Http2Stream` instance. + * @since v8.4.0 + */ + priority(options: StreamPriorityOptions): void; + /** + * ```js + * const http2 = require('node:http2'); + * const client = http2.connect('http://example.org:8000'); + * const { NGHTTP2_CANCEL } = http2.constants; + * const req = client.request({ ':path': '/' }); + * + * // Cancel the stream if there's no activity after 5 seconds + * req.setTimeout(5000, () => req.close(NGHTTP2_CANCEL)); + * ``` + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * Sends a trailing `HEADERS` frame to the connected HTTP/2 peer. This method + * will cause the `Http2Stream` to be immediately closed and must only be + * called after the `'wantTrailers'` event has been emitted. When sending a + * request or sending a response, the `options.waitForTrailers` option must be set + * in order to keep the `Http2Stream` open after the final `DATA` frame so that + * trailers can be sent. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond(undefined, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ xyz: 'abc' }); + * }); + * stream.end('Hello World'); + * }); + * ``` + * + * The HTTP/1 specification forbids trailers from containing HTTP/2 pseudo-header + * fields (e.g. `':method'`, `':path'`, etc). 
+ * @since v10.0.0 + */ + sendTrailers(headers: OutgoingHttpHeaders): void; + addListener(event: "aborted", listener: () => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: "streamClosed", listener: (code: number) => void): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: "wantTrailers", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "aborted"): boolean; + emit(event: "close"): boolean; + emit(event: "data", chunk: Buffer | string): boolean; + emit(event: "drain"): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "frameError", frameType: number, errorCode: number): boolean; + emit(event: "pipe", src: stream.Readable): boolean; + emit(event: "unpipe", src: stream.Readable): boolean; + emit(event: "streamClosed", code: number): boolean; + emit(event: "timeout"): boolean; + emit(event: "trailers", trailers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "wantTrailers"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "aborted", listener: () => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "drain", listener: () => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: "streamClosed", listener: (code: number) => void): this; + on(event: "timeout", listener: () => void): this; + on(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + on(event: "wantTrailers", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "aborted", listener: () => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "drain", listener: () => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: "streamClosed", listener: (code: number) => void): 
this; + once(event: "timeout", listener: () => void): this; + once(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + once(event: "wantTrailers", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "aborted", listener: () => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "streamClosed", listener: (code: number) => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: "wantTrailers", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "aborted", listener: () => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "frameError", listener: (frameType: number, errorCode: number) => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "streamClosed", listener: (code: number) => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: "trailers", listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: "wantTrailers", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ClientHttp2Stream extends Http2Stream { + addListener(event: "continue", listener: () => {}): this; + addListener( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + addListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + addListener( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "continue"): boolean; + emit(event: "headers", headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number): boolean; + emit(event: "push", headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "response", headers: IncomingHttpHeaders & IncomingHttpStatusHeader, 
flags: number): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "continue", listener: () => {}): this; + on( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + on(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + on( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "continue", listener: () => {}): this; + once( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + once(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + once( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "continue", listener: () => {}): this; + prependListener( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + prependListener( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "continue", listener: () => {}): this; + prependOnceListener( + event: "headers", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependOnceListener(event: "push", listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener( + event: "response", + listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void, + ): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ServerHttp2Stream extends Http2Stream { + /** + * True if headers were sent, false otherwise (read-only). + * @since v8.4.0 + */ + readonly headersSent: boolean; + /** + * Read-only property mapped to the `SETTINGS_ENABLE_PUSH` flag of the remote + * client's most recent `SETTINGS` frame. Will be `true` if the remote peer + * accepts push streams, `false` otherwise. Settings are the same for every`Http2Stream` in the same `Http2Session`. + * @since v8.4.0 + */ + readonly pushAllowed: boolean; + /** + * Sends an additional informational `HEADERS` frame to the connected HTTP/2 peer. + * @since v8.4.0 + */ + additionalHeaders(headers: OutgoingHttpHeaders): void; + /** + * Initiates a push stream. The callback is invoked with the new `Http2Stream`instance created for the push stream passed as the second argument, or an`Error` passed as the first argument. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }); + * stream.pushStream({ ':path': '/' }, (err, pushStream, headers) => { + * if (err) throw err; + * pushStream.respond({ ':status': 200 }); + * pushStream.end('some pushed data'); + * }); + * stream.end('some data'); + * }); + * ``` + * + * Setting the weight of a push stream is not allowed in the `HEADERS` frame. 
Pass + * a `weight` value to `http2stream.priority` with the `silent` option set to`true` to enable server-side bandwidth balancing between concurrent streams. + * + * Calling `http2stream.pushStream()` from within a pushed stream is not permitted + * and will throw an error. + * @since v8.4.0 + * @param callback Callback that is called once the push stream has been initiated. + */ + pushStream( + headers: OutgoingHttpHeaders, + callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void, + ): void; + pushStream( + headers: OutgoingHttpHeaders, + options?: StreamPriorityOptions, + callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void, + ): void; + /** + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }); + * stream.end('some data'); + * }); + * ``` + * + * Initiates a response. When the `options.waitForTrailers` option is set, the`'wantTrailers'` event will be emitted immediately after queuing the last chunk + * of payload data to be sent. The `http2stream.sendTrailers()` method can then be + * used to sent trailing header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * stream.end('some data'); + * }); + * ``` + * @since v8.4.0 + */ + respond(headers?: OutgoingHttpHeaders, options?: ServerStreamResponseOptions): void; + /** + * Initiates a response whose data is read from the given file descriptor. No + * validation is performed on the given file descriptor. If an error occurs while + * attempting to read data using the file descriptor, the `Http2Stream` will be + * closed using an `RST_STREAM` frame using the standard `INTERNAL_ERROR` code. + * + * When used, the `Http2Stream` object's `Duplex` interface will be closed + * automatically. + * + * ```js + * const http2 = require('node:http2'); + * const fs = require('node:fs'); + * + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * const fd = fs.openSync('/some/file', 'r'); + * + * const stat = fs.fstatSync(fd); + * const headers = { + * 'content-length': stat.size, + * 'last-modified': stat.mtime.toUTCString(), + * 'content-type': 'text/plain; charset=utf-8', + * }; + * stream.respondWithFD(fd, headers); + * stream.on('close', () => fs.closeSync(fd)); + * }); + * ``` + * + * The optional `options.statCheck` function may be specified to give user code + * an opportunity to set additional content headers based on the `fs.Stat` details + * of the given fd. If the `statCheck` function is provided, the`http2stream.respondWithFD()` method will perform an `fs.fstat()` call to + * collect details on the provided file descriptor. + * + * The `offset` and `length` options may be used to limit the response to a + * specific range subset. This can be used, for instance, to support HTTP Range + * requests. 
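+ *
+ * As a minimal illustrative sketch of that use (the byte values here are only
+ * an example, and `fd` and `headers` are assumed to be set up as in the
+ * example above), a subset of the file could be sent with:
+ *
+ * ```js
+ * // Send only the first 1024 bytes of the file.
+ * stream.respondWithFD(fd, { ...headers, 'content-length': 1024 },
+ *                      { offset: 0, length: 1024 });
+ * ```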
+ * + * The file descriptor or `FileHandle` is not closed when the stream is closed, + * so it will need to be closed manually once it is no longer needed. + * Using the same file descriptor concurrently for multiple streams + * is not supported and may result in data loss. Re-using a file descriptor + * after a stream has finished is supported. + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code _must_ call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('node:http2'); + * const fs = require('node:fs'); + * + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * const fd = fs.openSync('/some/file', 'r'); + * + * const stat = fs.fstatSync(fd); + * const headers = { + * 'content-length': stat.size, + * 'last-modified': stat.mtime.toUTCString(), + * 'content-type': 'text/plain; charset=utf-8', + * }; + * stream.respondWithFD(fd, headers, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * + * stream.on('close', () => fs.closeSync(fd)); + * }); + * ``` + * @since v8.4.0 + * @param fd A readable file descriptor. + */ + respondWithFD( + fd: number | fs.promises.FileHandle, + headers?: OutgoingHttpHeaders, + options?: ServerStreamFileResponseOptions, + ): void; + /** + * Sends a regular file as the response. The `path` must specify a regular file + * or an `'error'` event will be emitted on the `Http2Stream` object. + * + * When used, the `Http2Stream` object's `Duplex` interface will be closed + * automatically. + * + * The optional `options.statCheck` function may be specified to give user code + * an opportunity to set additional content headers based on the `fs.Stat` details + * of the given file: + * + * If an error occurs while attempting to read the file data, the `Http2Stream`will be closed using an `RST_STREAM` frame using the standard `INTERNAL_ERROR`code. If the `onError` callback is + * defined, then it will be called. Otherwise + * the stream will be destroyed. + * + * Example using a file path: + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * function statCheck(stat, headers) { + * headers['last-modified'] = stat.mtime.toUTCString(); + * } + * + * function onError(err) { + * // stream.respond() can throw if the stream has been destroyed by + * // the other side. + * try { + * if (err.code === 'ENOENT') { + * stream.respond({ ':status': 404 }); + * } else { + * stream.respond({ ':status': 500 }); + * } + * } catch (err) { + * // Perform actual error handling. + * console.error(err); + * } + * stream.end(); + * } + * + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { statCheck, onError }); + * }); + * ``` + * + * The `options.statCheck` function may also be used to cancel the send operation + * by returning `false`. 
For instance, a conditional request may check the stat + * results to determine if the file has been modified to return an appropriate`304` response: + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * function statCheck(stat, headers) { + * // Check the stat here... + * stream.respond({ ':status': 304 }); + * return false; // Cancel the send operation + * } + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { statCheck }); + * }); + * ``` + * + * The `content-length` header field will be automatically set. + * + * The `offset` and `length` options may be used to limit the response to a + * specific range subset. This can be used, for instance, to support HTTP Range + * requests. + * + * The `options.onError` function may also be used to handle all the errors + * that could happen before the delivery of the file is initiated. The + * default behavior is to destroy the stream. + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * }); + * ``` + * @since v8.4.0 + */ + respondWithFile( + path: string, + headers?: OutgoingHttpHeaders, + options?: ServerStreamFileResponseOptionsWithError, + ): void; + } + // Http2Session + export interface Settings { + headerTableSize?: number | undefined; + enablePush?: boolean | undefined; + initialWindowSize?: number | undefined; + maxFrameSize?: number | undefined; + maxConcurrentStreams?: number | undefined; + maxHeaderListSize?: number | undefined; + enableConnectProtocol?: boolean | undefined; + } + export interface ClientSessionRequestOptions { + endStream?: boolean | undefined; + exclusive?: boolean | undefined; + parent?: number | undefined; + weight?: number | undefined; + waitForTrailers?: boolean | undefined; + signal?: AbortSignal | undefined; + } + export interface SessionState { + effectiveLocalWindowSize?: number | undefined; + effectiveRecvDataLength?: number | undefined; + nextStreamID?: number | undefined; + localWindowSize?: number | undefined; + lastProcStreamID?: number | undefined; + remoteWindowSize?: number | undefined; + outboundQueueSize?: number | undefined; + deflateDynamicTableSize?: number | undefined; + inflateDynamicTableSize?: number | undefined; + } + export interface Http2Session extends EventEmitter { + /** + * Value will be `undefined` if the `Http2Session` is not yet connected to a + * socket, `h2c` if the `Http2Session` is not connected to a `TLSSocket`, or + * will return the value of the connected `TLSSocket`'s own `alpnProtocol`property. 
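+ *
+ * A small illustrative sketch (assuming a `session` obtained from
+ * `http2.connect()`):
+ *
+ * ```js
+ * session.on('connect', () => {
+ *   // 'h2c' over plain TCP, the socket's ALPN value (typically 'h2') over TLS,
+ *   // or undefined before a socket has been assigned.
+ *   console.log(session.alpnProtocol);
+ * });
+ * ```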
+ * @since v9.4.0 + */ + readonly alpnProtocol?: string | undefined; + /** + * Will be `true` if this `Http2Session` instance has been closed, otherwise`false`. + * @since v9.4.0 + */ + readonly closed: boolean; + /** + * Will be `true` if this `Http2Session` instance is still connecting, will be set + * to `false` before emitting `connect` event and/or calling the `http2.connect`callback. + * @since v10.0.0 + */ + readonly connecting: boolean; + /** + * Will be `true` if this `Http2Session` instance has been destroyed and must no + * longer be used, otherwise `false`. + * @since v8.4.0 + */ + readonly destroyed: boolean; + /** + * Value is `undefined` if the `Http2Session` session socket has not yet been + * connected, `true` if the `Http2Session` is connected with a `TLSSocket`, + * and `false` if the `Http2Session` is connected to any other kind of socket + * or stream. + * @since v9.4.0 + */ + readonly encrypted?: boolean | undefined; + /** + * A prototype-less object describing the current local settings of this`Http2Session`. The local settings are local to _this_`Http2Session` instance. + * @since v8.4.0 + */ + readonly localSettings: Settings; + /** + * If the `Http2Session` is connected to a `TLSSocket`, the `originSet` property + * will return an `Array` of origins for which the `Http2Session` may be + * considered authoritative. + * + * The `originSet` property is only available when using a secure TLS connection. + * @since v9.4.0 + */ + readonly originSet?: string[] | undefined; + /** + * Indicates whether the `Http2Session` is currently waiting for acknowledgment of + * a sent `SETTINGS` frame. Will be `true` after calling the`http2session.settings()` method. Will be `false` once all sent `SETTINGS`frames have been acknowledged. + * @since v8.4.0 + */ + readonly pendingSettingsAck: boolean; + /** + * A prototype-less object describing the current remote settings of this`Http2Session`. The remote settings are set by the _connected_ HTTP/2 peer. + * @since v8.4.0 + */ + readonly remoteSettings: Settings; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * limits available methods to ones safe to use with HTTP/2. + * + * `destroy`, `emit`, `end`, `pause`, `read`, `resume`, and `write` will throw + * an error with code `ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for more information. + * + * `setTimeout` method will be called on this `Http2Session`. + * + * All other interactions will be routed directly to the socket. + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * Provides miscellaneous information about the current state of the`Http2Session`. + * + * An object describing the current status of this `Http2Session`. + * @since v8.4.0 + */ + readonly state: SessionState; + /** + * The `http2session.type` will be equal to`http2.constants.NGHTTP2_SESSION_SERVER` if this `Http2Session` instance is a + * server, and `http2.constants.NGHTTP2_SESSION_CLIENT` if the instance is a + * client. + * @since v8.4.0 + */ + readonly type: number; + /** + * Gracefully closes the `Http2Session`, allowing any existing streams to + * complete on their own and preventing new `Http2Stream` instances from being + * created. Once closed, `http2session.destroy()`_might_ be called if there + * are no open `Http2Stream` instances. + * + * If specified, the `callback` function is registered as a handler for the`'close'` event. 
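+ *
+ * A minimal sketch (assuming an existing `session`), letting any in-flight
+ * streams finish before the transport is torn down:
+ *
+ * ```js
+ * session.close(() => {
+ *   console.log('No more open streams; session closed.');
+ * });
+ * ```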
+ * @since v9.4.0 + */ + close(callback?: () => void): void; + /** + * Immediately terminates the `Http2Session` and the associated `net.Socket` or`tls.TLSSocket`. + * + * Once destroyed, the `Http2Session` will emit the `'close'` event. If `error`is not undefined, an `'error'` event will be emitted immediately before the`'close'` event. + * + * If there are any remaining open `Http2Streams` associated with the`Http2Session`, those will also be destroyed. + * @since v8.4.0 + * @param error An `Error` object if the `Http2Session` is being destroyed due to an error. + * @param code The HTTP/2 error code to send in the final `GOAWAY` frame. If unspecified, and `error` is not undefined, the default is `INTERNAL_ERROR`, otherwise defaults to `NO_ERROR`. + */ + destroy(error?: Error, code?: number): void; + /** + * Transmits a `GOAWAY` frame to the connected peer _without_ shutting down the`Http2Session`. + * @since v9.4.0 + * @param code An HTTP/2 error code + * @param lastStreamID The numeric ID of the last processed `Http2Stream` + * @param opaqueData A `TypedArray` or `DataView` instance containing additional data to be carried within the `GOAWAY` frame. + */ + goaway(code?: number, lastStreamID?: number, opaqueData?: NodeJS.ArrayBufferView): void; + /** + * Sends a `PING` frame to the connected HTTP/2 peer. A `callback` function must + * be provided. The method will return `true` if the `PING` was sent, `false`otherwise. + * + * The maximum number of outstanding (unacknowledged) pings is determined by the`maxOutstandingPings` configuration option. The default maximum is 10. + * + * If provided, the `payload` must be a `Buffer`, `TypedArray`, or `DataView`containing 8 bytes of data that will be transmitted with the `PING` and + * returned with the ping acknowledgment. + * + * The callback will be invoked with three arguments: an error argument that will + * be `null` if the `PING` was successfully acknowledged, a `duration` argument + * that reports the number of milliseconds elapsed since the ping was sent and the + * acknowledgment was received, and a `Buffer` containing the 8-byte `PING`payload. + * + * ```js + * session.ping(Buffer.from('abcdefgh'), (err, duration, payload) => { + * if (!err) { + * console.log(`Ping acknowledged in ${duration} milliseconds`); + * console.log(`With payload '${payload.toString()}'`); + * } + * }); + * ``` + * + * If the `payload` argument is not specified, the default payload will be the + * 64-bit timestamp (little endian) marking the start of the `PING` duration. + * @since v8.9.3 + * @param payload Optional ping payload. + */ + ping(callback: (err: Error | null, duration: number, payload: Buffer) => void): boolean; + ping( + payload: NodeJS.ArrayBufferView, + callback: (err: Error | null, duration: number, payload: Buffer) => void, + ): boolean; + /** + * Calls `ref()` on this `Http2Session`instance's underlying `net.Socket`. + * @since v9.4.0 + */ + ref(): void; + /** + * Sets the local endpoint's window size. + * The `windowSize` is the total window size to set, not + * the delta. 
+ * + * ```js + * const http2 = require('node:http2'); + * + * const server = http2.createServer(); + * const expectedWindowSize = 2 ** 20; + * server.on('connect', (session) => { + * + * // Set local window size to be 2 ** 20 + * session.setLocalWindowSize(expectedWindowSize); + * }); + * ``` + * @since v15.3.0, v14.18.0 + */ + setLocalWindowSize(windowSize: number): void; + /** + * Used to set a callback function that is called when there is no activity on + * the `Http2Session` after `msecs` milliseconds. The given `callback` is + * registered as a listener on the `'timeout'` event. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * Updates the current local settings for this `Http2Session` and sends a new`SETTINGS` frame to the connected HTTP/2 peer. + * + * Once called, the `http2session.pendingSettingsAck` property will be `true`while the session is waiting for the remote peer to acknowledge the new + * settings. + * + * The new settings will not become effective until the `SETTINGS` acknowledgment + * is received and the `'localSettings'` event is emitted. It is possible to send + * multiple `SETTINGS` frames while acknowledgment is still pending. + * @since v8.4.0 + * @param callback Callback that is called once the session is connected or right away if the session is already connected. + */ + settings( + settings: Settings, + callback?: (err: Error | null, settings: Settings, duration: number) => void, + ): void; + /** + * Calls `unref()` on this `Http2Session`instance's underlying `net.Socket`. + * @since v9.4.0 + */ + unref(): void; + addListener(event: "close", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener( + event: "frameError", + listener: (frameType: number, errorCode: number, streamID: number) => void, + ): this; + addListener( + event: "goaway", + listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void, + ): this; + addListener(event: "localSettings", listener: (settings: Settings) => void): this; + addListener(event: "ping", listener: () => void): this; + addListener(event: "remoteSettings", listener: (settings: Settings) => void): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "frameError", frameType: number, errorCode: number, streamID: number): boolean; + emit(event: "goaway", errorCode: number, lastStreamID: number, opaqueData?: Buffer): boolean; + emit(event: "localSettings", settings: Settings): boolean; + emit(event: "ping"): boolean; + emit(event: "remoteSettings", settings: Settings): boolean; + emit(event: "timeout"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "frameError", listener: (frameType: number, errorCode: number, streamID: number) => void): this; + on(event: "goaway", listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void): this; + on(event: "localSettings", listener: (settings: Settings) => void): this; + on(event: "ping", listener: () => void): this; + on(event: "remoteSettings", listener: (settings: Settings) => void): this; + on(event: "timeout", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 
"close", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "frameError", listener: (frameType: number, errorCode: number, streamID: number) => void): this; + once(event: "goaway", listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void): this; + once(event: "localSettings", listener: (settings: Settings) => void): this; + once(event: "ping", listener: () => void): this; + once(event: "remoteSettings", listener: (settings: Settings) => void): this; + once(event: "timeout", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener( + event: "frameError", + listener: (frameType: number, errorCode: number, streamID: number) => void, + ): this; + prependListener( + event: "goaway", + listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void, + ): this; + prependListener(event: "localSettings", listener: (settings: Settings) => void): this; + prependListener(event: "ping", listener: () => void): this; + prependListener(event: "remoteSettings", listener: (settings: Settings) => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener( + event: "frameError", + listener: (frameType: number, errorCode: number, streamID: number) => void, + ): this; + prependOnceListener( + event: "goaway", + listener: (errorCode: number, lastStreamID: number, opaqueData?: Buffer) => void, + ): this; + prependOnceListener(event: "localSettings", listener: (settings: Settings) => void): this; + prependOnceListener(event: "ping", listener: () => void): this; + prependOnceListener(event: "remoteSettings", listener: (settings: Settings) => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ClientHttp2Session extends Http2Session { + /** + * For HTTP/2 Client `Http2Session` instances only, the `http2session.request()`creates and returns an `Http2Stream` instance that can be used to send an + * HTTP/2 request to the connected server. + * + * When a `ClientHttp2Session` is first created, the socket may not yet be + * connected. if `clienthttp2session.request()` is called during this time, the + * actual request will be deferred until the socket is ready to go. + * If the `session` is closed before the actual request be executed, an`ERR_HTTP2_GOAWAY_SESSION` is thrown. + * + * This method is only available if `http2session.type` is equal to`http2.constants.NGHTTP2_SESSION_CLIENT`. + * + * ```js + * const http2 = require('node:http2'); + * const clientSession = http2.connect('https://localhost:1234'); + * const { + * HTTP2_HEADER_PATH, + * HTTP2_HEADER_STATUS, + * } = http2.constants; + * + * const req = clientSession.request({ [HTTP2_HEADER_PATH]: '/' }); + * req.on('response', (headers) => { + * console.log(headers[HTTP2_HEADER_STATUS]); + * req.on('data', (chunk) => { // .. }); + * req.on('end', () => { // .. 
}); + * }); + * ``` + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * is emitted immediately after queuing the last chunk of payload data to be sent. + * The `http2stream.sendTrailers()` method can then be called to send trailing + * headers to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * When `options.signal` is set with an `AbortSignal` and then `abort` on the + * corresponding `AbortController` is called, the request will emit an `'error'`event with an `AbortError` error. + * + * The `:method` and `:path` pseudo-headers are not specified within `headers`, + * they respectively default to: + * + * * `:method` \= `'GET'` + * * `:path` \= `/` + * @since v8.4.0 + */ + request(headers?: OutgoingHttpHeaders, options?: ClientSessionRequestOptions): ClientHttp2Stream; + addListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + addListener(event: "origin", listener: (origins: string[]) => void): this; + addListener( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + addListener( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "altsvc", alt: string, origin: string, stream: number): boolean; + emit(event: "origin", origins: readonly string[]): boolean; + emit(event: "connect", session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket): boolean; + emit( + event: "stream", + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + on(event: "origin", listener: (origins: string[]) => void): this; + on(event: "connect", listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + on( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + once(event: "origin", listener: (origins: string[]) => void): this; + once( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + once( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + prependListener(event: "origin", listener: (origins: string[]) => void): this; + prependListener( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + prependListener( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & 
IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "altsvc", listener: (alt: string, origin: string, stream: number) => void): this; + prependOnceListener(event: "origin", listener: (origins: string[]) => void): this; + prependOnceListener( + event: "connect", + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + prependOnceListener( + event: "stream", + listener: ( + stream: ClientHttp2Stream, + headers: IncomingHttpHeaders & IncomingHttpStatusHeader, + flags: number, + ) => void, + ): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface AlternativeServiceOptions { + origin: number | string | url.URL; + } + export interface ServerHttp2Session extends Http2Session { + readonly server: Http2Server | Http2SecureServer; + /** + * Submits an `ALTSVC` frame (as defined by [RFC 7838](https://tools.ietf.org/html/rfc7838)) to the connected client. + * + * ```js + * const http2 = require('node:http2'); + * + * const server = http2.createServer(); + * server.on('session', (session) => { + * // Set altsvc for origin https://example.org:80 + * session.altsvc('h2=":8000"', 'https://example.org:80'); + * }); + * + * server.on('stream', (stream) => { + * // Set altsvc for a specific stream + * stream.session.altsvc('h2=":8000"', stream.id); + * }); + * ``` + * + * Sending an `ALTSVC` frame with a specific stream ID indicates that the alternate + * service is associated with the origin of the given `Http2Stream`. + * + * The `alt` and origin string _must_ contain only ASCII bytes and are + * strictly interpreted as a sequence of ASCII bytes. The special value `'clear'`may be passed to clear any previously set alternative service for a given + * domain. + * + * When a string is passed for the `originOrStream` argument, it will be parsed as + * a URL and the origin will be derived. For instance, the origin for the + * HTTP URL `'https://example.org/foo/bar'` is the ASCII string`'https://example.org'`. An error will be thrown if either the given string + * cannot be parsed as a URL or if a valid origin cannot be derived. + * + * A `URL` object, or any object with an `origin` property, may be passed as`originOrStream`, in which case the value of the `origin` property will be + * used. The value of the `origin` property _must_ be a properly serialized + * ASCII origin. + * @since v9.4.0 + * @param alt A description of the alternative service configuration as defined by `RFC 7838`. + * @param originOrStream Either a URL string specifying the origin (or an `Object` with an `origin` property) or the numeric identifier of an active `Http2Stream` as given by the + * `http2stream.id` property. + */ + altsvc(alt: string, originOrStream: number | string | url.URL | AlternativeServiceOptions): void; + /** + * Submits an `ORIGIN` frame (as defined by [RFC 8336](https://tools.ietf.org/html/rfc8336)) to the connected client + * to advertise the set of origins for which the server is capable of providing + * authoritative responses. 
+ * + * ```js + * const http2 = require('node:http2'); + * const options = getSecureOptionsSomehow(); + * const server = http2.createSecureServer(options); + * server.on('stream', (stream) => { + * stream.respond(); + * stream.end('ok'); + * }); + * server.on('session', (session) => { + * session.origin('https://example.com', 'https://example.org'); + * }); + * ``` + * + * When a string is passed as an `origin`, it will be parsed as a URL and the + * origin will be derived. For instance, the origin for the HTTP URL`'https://example.org/foo/bar'` is the ASCII string`'https://example.org'`. An error will be thrown if either the given + * string + * cannot be parsed as a URL or if a valid origin cannot be derived. + * + * A `URL` object, or any object with an `origin` property, may be passed as + * an `origin`, in which case the value of the `origin` property will be + * used. The value of the `origin` property _must_ be a properly serialized + * ASCII origin. + * + * Alternatively, the `origins` option may be used when creating a new HTTP/2 + * server using the `http2.createSecureServer()` method: + * + * ```js + * const http2 = require('node:http2'); + * const options = getSecureOptionsSomehow(); + * options.origins = ['https://example.com', 'https://example.org']; + * const server = http2.createSecureServer(options); + * server.on('stream', (stream) => { + * stream.respond(); + * stream.end('ok'); + * }); + * ``` + * @since v10.12.0 + * @param origins One or more URL Strings passed as separate arguments. + */ + origin( + ...origins: Array< + | string + | url.URL + | { + origin: string; + } + > + ): void; + addListener( + event: "connect", + listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + addListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "connect", session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket): boolean; + emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "connect", listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + on( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once( + event: "connect", + listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + once( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener( + event: "connect", + listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + prependListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "connect", + listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): this; + prependOnceListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + 
prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + // Http2Server + export interface SessionOptions { + maxDeflateDynamicTableSize?: number | undefined; + maxSessionMemory?: number | undefined; + maxHeaderListPairs?: number | undefined; + maxOutstandingPings?: number | undefined; + maxSendHeaderBlockLength?: number | undefined; + paddingStrategy?: number | undefined; + peerMaxConcurrentStreams?: number | undefined; + settings?: Settings | undefined; + /** + * Specifies a timeout in milliseconds that + * a server should wait when an [`'unknownProtocol'`][] is emitted. If the + * socket has not been destroyed by that time the server will destroy it. + * @default 100000 + */ + unknownProtocolTimeout?: number | undefined; + selectPadding?(frameLen: number, maxFrameLen: number): number; + } + export interface ClientSessionOptions extends SessionOptions { + maxReservedRemoteStreams?: number | undefined; + createConnection?: ((authority: url.URL, option: SessionOptions) => stream.Duplex) | undefined; + protocol?: "http:" | "https:" | undefined; + } + export interface ServerSessionOptions extends SessionOptions { + Http1IncomingMessage?: typeof IncomingMessage | undefined; + Http1ServerResponse?: typeof ServerResponse | undefined; + Http2ServerRequest?: typeof Http2ServerRequest | undefined; + Http2ServerResponse?: typeof Http2ServerResponse | undefined; + } + export interface SecureClientSessionOptions extends ClientSessionOptions, tls.ConnectionOptions {} + export interface SecureServerSessionOptions extends ServerSessionOptions, tls.TlsOptions {} + export interface ServerOptions extends ServerSessionOptions {} + export interface SecureServerOptions extends SecureServerSessionOptions { + allowHTTP1?: boolean | undefined; + origins?: string[] | undefined; + } + interface HTTP2ServerCommon { + setTimeout(msec?: number, callback?: () => void): this; + /** + * Throws ERR_HTTP2_INVALID_SETTING_VALUE for invalid settings values. + * Throws ERR_INVALID_ARG_TYPE for invalid settings argument. 
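+ *
+ * A brief usage sketch (assuming a `server` created with
+ * `http2.createServer()`; the value shown is only an example):
+ *
+ * ```js
+ * // Update the settings the server will use; invalid values throw as noted above.
+ * server.updateSettings({ maxConcurrentStreams: 100 });
+ * ```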
+ */ + updateSettings(settings: Settings): void; + } + export interface Http2Server extends net.Server, HTTP2ServerCommon { + addListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + addListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + addListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + addListener(event: "sessionError", listener: (err: Error) => void): this; + addListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "checkContinue", request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: "request", request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: "session", session: ServerHttp2Session): boolean; + emit(event: "sessionError", err: Error): boolean; + emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "timeout"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + on(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + on(event: "session", listener: (session: ServerHttp2Session) => void): this; + on(event: "sessionError", listener: (err: Error) => void): this; + on( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + on(event: "timeout", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + once(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + once(event: "session", listener: (session: ServerHttp2Session) => void): this; + once(event: "sessionError", listener: (err: Error) => void): this; + once( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + once(event: "timeout", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + prependListener(event: "sessionError", listener: (err: Error) => void): this; + prependListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependOnceListener( + event: "request", + 
listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependOnceListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + prependOnceListener(event: "sessionError", listener: (err: Error) => void): this; + prependOnceListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface Http2SecureServer extends tls.Server, HTTP2ServerCommon { + addListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + addListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + addListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + addListener(event: "sessionError", listener: (err: Error) => void): this; + addListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + addListener(event: "timeout", listener: () => void): this; + addListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "checkContinue", request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: "request", request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: "session", session: ServerHttp2Session): boolean; + emit(event: "sessionError", err: Error): boolean; + emit(event: "stream", stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: "timeout"): boolean; + emit(event: "unknownProtocol", socket: tls.TLSSocket): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + on(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + on(event: "session", listener: (session: ServerHttp2Session) => void): this; + on(event: "sessionError", listener: (err: Error) => void): this; + on( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + on(event: "timeout", listener: () => void): this; + on(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + once(event: "request", listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + once(event: "session", listener: (session: ServerHttp2Session) => void): this; + once(event: "sessionError", listener: (err: Error) => void): this; + once( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + once(event: "timeout", listener: () => void): this; + once(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener( + event: "checkContinue", + listener: (request: 
Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + prependListener(event: "sessionError", listener: (err: Error) => void): this; + prependListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependListener(event: "timeout", listener: () => void): this; + prependListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener( + event: "checkContinue", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependOnceListener( + event: "request", + listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): this; + prependOnceListener(event: "session", listener: (session: ServerHttp2Session) => void): this; + prependOnceListener(event: "sessionError", listener: (err: Error) => void): this; + prependOnceListener( + event: "stream", + listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void, + ): this; + prependOnceListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: "unknownProtocol", listener: (socket: tls.TLSSocket) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * A `Http2ServerRequest` object is created by {@link Server} or {@link SecureServer} and passed as the first argument to the `'request'` event. It may be used to access a request status, + * headers, and + * data. + * @since v8.4.0 + */ + export class Http2ServerRequest extends stream.Readable { + constructor( + stream: ServerHttp2Stream, + headers: IncomingHttpHeaders, + options: stream.ReadableOptions, + rawHeaders: readonly string[], + ); + /** + * The `request.aborted` property will be `true` if the request has + * been aborted. + * @since v10.1.0 + */ + readonly aborted: boolean; + /** + * The request authority pseudo header field. Because HTTP/2 allows requests + * to set either `:authority` or `host`, this value is derived from`req.headers[':authority']` if present. Otherwise, it is derived from`req.headers['host']`. + * @since v8.4.0 + */ + readonly authority: string; + /** + * See `request.socket`. + * @since v8.4.0 + * @deprecated Since v13.0.0 - Use `socket`. + */ + readonly connection: net.Socket | tls.TLSSocket; + /** + * The `request.complete` property will be `true` if the request has + * been completed, aborted, or destroyed. + * @since v12.10.0 + */ + readonly complete: boolean; + /** + * The request/response headers object. + * + * Key-value pairs of header names and values. Header names are lower-cased. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': 'curl/7.22.0', + * // host: '127.0.0.1:8000', + * // accept: '*' } + * console.log(request.headers); + * ``` + * + * See `HTTP/2 Headers Object`. + * + * In HTTP/2, the request path, host name, protocol, and method are represented as + * special headers prefixed with the `:` character (e.g. `':path'`). These special + * headers will be included in the `request.headers` object. Care must be taken not + * to inadvertently modify these special headers or errors may occur. 
For instance, + * removing all headers from the request will cause errors to occur: + * + * ```js + * removeAllHeaders(request.headers); + * assert(request.url); // Fails because the :path header has been removed + * ``` + * @since v8.4.0 + */ + readonly headers: IncomingHttpHeaders; + /** + * In case of server request, the HTTP version sent by the client. In the case of + * client response, the HTTP version of the connected-to server. Returns`'2.0'`. + * + * Also `message.httpVersionMajor` is the first integer and`message.httpVersionMinor` is the second. + * @since v8.4.0 + */ + readonly httpVersion: string; + readonly httpVersionMinor: number; + readonly httpVersionMajor: number; + /** + * The request method as a string. Read-only. Examples: `'GET'`, `'DELETE'`. + * @since v8.4.0 + */ + readonly method: string; + /** + * The raw request/response headers list exactly as they were received. + * + * The keys and values are in the same list. It is _not_ a + * list of tuples. So, the even-numbered offsets are key values, and the + * odd-numbered offsets are the associated values. + * + * Header names are not lowercased, and duplicates are not merged. + * + * ```js + * // Prints something like: + * // + * // [ 'user-agent', + * // 'this is invalid because there can be only one', + * // 'User-Agent', + * // 'curl/7.22.0', + * // 'Host', + * // '127.0.0.1:8000', + * // 'ACCEPT', + * // '*' ] + * console.log(request.rawHeaders); + * ``` + * @since v8.4.0 + */ + readonly rawHeaders: string[]; + /** + * The raw request/response trailer keys and values exactly as they were + * received. Only populated at the `'end'` event. + * @since v8.4.0 + */ + readonly rawTrailers: string[]; + /** + * The request scheme pseudo header field indicating the scheme + * portion of the target URL. + * @since v8.4.0 + */ + readonly scheme: string; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * applies getters, setters, and methods based on HTTP/2 logic. + * + * `destroyed`, `readable`, and `writable` properties will be retrieved from and + * set on `request.stream`. + * + * `destroy`, `emit`, `end`, `on` and `once` methods will be called on`request.stream`. + * + * `setTimeout` method will be called on `request.stream.session`. + * + * `pause`, `read`, `resume`, and `write` will throw an error with code`ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for + * more information. + * + * All other interactions will be routed directly to the socket. With TLS support, + * use `request.socket.getPeerCertificate()` to obtain the client's + * authentication details. + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * The `Http2Stream` object backing the request. + * @since v8.4.0 + */ + readonly stream: ServerHttp2Stream; + /** + * The request/response trailers object. Only populated at the `'end'` event. + * @since v8.4.0 + */ + readonly trailers: IncomingHttpHeaders; + /** + * Request URL string. This contains only the URL that is present in the actual + * HTTP request. 
If the request is: + * + * ```http + * GET /status?name=ryan HTTP/1.1 + * Accept: text/plain + * ``` + * + * Then `request.url` will be: + * + * ```js + * '/status?name=ryan' + * ``` + * + * To parse the url into its parts, `new URL()` can be used: + * + * ```console + * $ node + * > new URL('/status?name=ryan', 'http://example.com') + * URL { + * href: 'http://example.com/status?name=ryan', + * origin: 'http://example.com', + * protocol: 'http:', + * username: '', + * password: '', + * host: 'example.com', + * hostname: 'example.com', + * port: '', + * pathname: '/status', + * search: '?name=ryan', + * searchParams: URLSearchParams { 'name' => 'ryan' }, + * hash: '' + * } + * ``` + * @since v8.4.0 + */ + url: string; + /** + * Sets the `Http2Stream`'s timeout value to `msecs`. If a callback is + * provided, then it is added as a listener on the `'timeout'` event on + * the response object. + * + * If no `'timeout'` listener is added to the request, the response, or + * the server, then `Http2Stream` s are destroyed when they time out. If a + * handler is assigned to the request, the response, or the server's `'timeout'`events, timed out sockets must be handled explicitly. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + read(size?: number): Buffer | string | null; + addListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "aborted", hadError: boolean, code: number): boolean; + emit(event: "close"): boolean; + emit(event: "data", chunk: Buffer | string): boolean; + emit(event: "end"): boolean; + emit(event: "readable"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "end", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "end", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => 
void): this; + prependOnceListener(event: "aborted", listener: (hadError: boolean, code: number) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * This object is created internally by an HTTP server, not by the user. It is + * passed as the second parameter to the `'request'` event. + * @since v8.4.0 + */ + export class Http2ServerResponse extends stream.Writable { + constructor(stream: ServerHttp2Stream); + /** + * See `response.socket`. + * @since v8.4.0 + * @deprecated Since v13.0.0 - Use `socket`. + */ + readonly connection: net.Socket | tls.TLSSocket; + /** + * Boolean value that indicates whether the response has completed. Starts + * as `false`. After `response.end()` executes, the value will be `true`. + * @since v8.4.0 + * @deprecated Since v13.4.0,v12.16.0 - Use `writableEnded`. + */ + readonly finished: boolean; + /** + * True if headers were sent, false otherwise (read-only). + * @since v8.4.0 + */ + readonly headersSent: boolean; + /** + * A reference to the original HTTP2 `request` object. + * @since v15.7.0 + */ + readonly req: Http2ServerRequest; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * applies getters, setters, and methods based on HTTP/2 logic. + * + * `destroyed`, `readable`, and `writable` properties will be retrieved from and + * set on `response.stream`. + * + * `destroy`, `emit`, `end`, `on` and `once` methods will be called on`response.stream`. + * + * `setTimeout` method will be called on `response.stream.session`. + * + * `pause`, `read`, `resume`, and `write` will throw an error with code`ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for + * more information. + * + * All other interactions will be routed directly to the socket. + * + * ```js + * const http2 = require('node:http2'); + * const server = http2.createServer((req, res) => { + * const ip = req.socket.remoteAddress; + * const port = req.socket.remotePort; + * res.end(`Your IP address is ${ip} and your source port is ${port}.`); + * }).listen(3000); + * ``` + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * The `Http2Stream` object backing the response. + * @since v8.4.0 + */ + readonly stream: ServerHttp2Stream; + /** + * When true, the Date header will be automatically generated and sent in + * the response if it is not already present in the headers. Defaults to true. + * + * This should only be disabled for testing; HTTP requires the Date header + * in responses. + * @since v8.4.0 + */ + sendDate: boolean; + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status code that will be sent to the client when + * the headers get flushed. + * + * ```js + * response.statusCode = 404; + * ``` + * + * After response header was sent to the client, this property indicates the + * status code which was sent out. + * @since v8.4.0 + */ + statusCode: number; + /** + * Status message is not supported by HTTP/2 (RFC 7540 8.1.2.4). It returns + * an empty string. 
+ * @since v8.4.0 + */ + statusMessage: ""; + /** + * This method adds HTTP trailing headers (a header but at the end of the + * message) to the response. + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v8.4.0 + */ + addTrailers(trailers: OutgoingHttpHeaders): void; + /** + * This method signals to the server that all of the response headers and body + * have been sent; that server should consider this message complete. + * The method, `response.end()`, MUST be called on each response. + * + * If `data` is specified, it is equivalent to calling `response.write(data, encoding)` followed by `response.end(callback)`. + * + * If `callback` is specified, it will be called when the response stream + * is finished. + * @since v8.4.0 + */ + end(callback?: () => void): this; + end(data: string | Uint8Array, callback?: () => void): this; + end(data: string | Uint8Array, encoding: BufferEncoding, callback?: () => void): this; + /** + * Reads out a header that has already been queued but not sent to the client. + * The name is case-insensitive. + * + * ```js + * const contentType = response.getHeader('content-type'); + * ``` + * @since v8.4.0 + */ + getHeader(name: string): string; + /** + * Returns an array containing the unique names of the current outgoing headers. + * All header names are lowercase. + * + * ```js + * response.setHeader('Foo', 'bar'); + * response.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headerNames = response.getHeaderNames(); + * // headerNames === ['foo', 'set-cookie'] + * ``` + * @since v8.4.0 + */ + getHeaderNames(): string[]; + /** + * Returns a shallow copy of the current outgoing headers. Since a shallow copy + * is used, array values may be mutated without additional calls to various + * header-related http module methods. The keys of the returned object are the + * header names and the values are the respective header values. All header names + * are lowercase. + * + * The object returned by the `response.getHeaders()` method _does not_prototypically inherit from the JavaScript `Object`. This means that typical`Object` methods such as `obj.toString()`, + * `obj.hasOwnProperty()`, and others + * are not defined and _will not work_. + * + * ```js + * response.setHeader('Foo', 'bar'); + * response.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headers = response.getHeaders(); + * // headers === { foo: 'bar', 'set-cookie': ['foo=bar', 'bar=baz'] } + * ``` + * @since v8.4.0 + */ + getHeaders(): OutgoingHttpHeaders; + /** + * Returns `true` if the header identified by `name` is currently set in the + * outgoing headers. The header name matching is case-insensitive. + * + * ```js + * const hasContentType = response.hasHeader('content-type'); + * ``` + * @since v8.4.0 + */ + hasHeader(name: string): boolean; + /** + * Removes a header that has been queued for implicit sending. + * + * ```js + * response.removeHeader('Content-Encoding'); + * ``` + * @since v8.4.0 + */ + removeHeader(name: string): void; + /** + * Sets a single header value for implicit headers. If this header already exists + * in the to-be-sent headers, its value will be replaced. Use an array of strings + * here to send multiple headers with the same name. 
+ * + * ```js + * response.setHeader('Content-Type', 'text/html; charset=utf-8'); + * ``` + * + * or + * + * ```js + * response.setHeader('Set-Cookie', ['type=ninja', 'language=javascript']); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * ```js + * // Returns content-type = text/plain + * const server = http2.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html; charset=utf-8'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' }); + * res.end('ok'); + * }); + * ``` + * @since v8.4.0 + */ + setHeader(name: string, value: number | string | readonly string[]): void; + /** + * Sets the `Http2Stream`'s timeout value to `msecs`. If a callback is + * provided, then it is added as a listener on the `'timeout'` event on + * the response object. + * + * If no `'timeout'` listener is added to the request, the response, or + * the server, then `Http2Stream` s are destroyed when they time out. If a + * handler is assigned to the request, the response, or the server's `'timeout'`events, timed out sockets must be handled explicitly. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * If this method is called and `response.writeHead()` has not been called, + * it will switch to implicit header mode and flush the implicit headers. + * + * This sends a chunk of the response body. This method may + * be called multiple times to provide successive parts of the body. + * + * In the `node:http` module, the response body is omitted when the + * request is a HEAD request. Similarly, the `204` and `304` responses _must not_ include a message body. + * + * `chunk` can be a string or a buffer. If `chunk` is a string, + * the second parameter specifies how to encode it into a byte stream. + * By default the `encoding` is `'utf8'`. `callback` will be called when this chunk + * of data is flushed. + * + * This is the raw HTTP body and has nothing to do with higher-level multi-part + * body encodings that may be used. + * + * The first time `response.write()` is called, it will send the buffered + * header information and the first chunk of the body to the client. The second + * time `response.write()` is called, Node.js assumes data will be streamed, + * and sends the new data separately. That is, the response is buffered up to the + * first chunk of the body. + * + * Returns `true` if the entire data was flushed successfully to the kernel + * buffer. Returns `false` if all or part of the data was queued in user memory.`'drain'` will be emitted when the buffer is free again. + * @since v8.4.0 + */ + write(chunk: string | Uint8Array, callback?: (err: Error) => void): boolean; + write(chunk: string | Uint8Array, encoding: BufferEncoding, callback?: (err: Error) => void): boolean; + /** + * Sends a status `100 Continue` to the client, indicating that the request body + * should be sent. See the `'checkContinue'` event on `Http2Server` and`Http2SecureServer`. + * @since v8.4.0 + */ + writeContinue(): void; + /** + * Sends a status `103 Early Hints` to the client with a Link header, + * indicating that the user agent can preload/preconnect the linked resources. 
+ * The `hints` is an object containing the values of headers to be sent with + * early hints message. + * + * **Example** + * + * ```js + * const earlyHintsLink = '</styles.css>; rel=preload; as=style'; + * response.writeEarlyHints({ + * 'link': earlyHintsLink, + * }); + * + * const earlyHintsLinks = [ + * '</styles.css>; rel=preload; as=style', + * '</scripts.js>; rel=preload; as=script', + * ]; + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * }); + * ``` + * @since v18.11.0 + */ + writeEarlyHints(hints: Record<string, string | string[]>): void; + /** + * Sends a response header to the request. The status code is a 3-digit HTTP + * status code, like `404`. The last argument, `headers`, are the response headers. + * + * Returns a reference to the `Http2ServerResponse`, so that calls can be chained. + * + * For compatibility with `HTTP/1`, a human-readable `statusMessage` may be + * passed as the second argument. However, because the `statusMessage` has no + * meaning within HTTP/2, the argument will have no effect and a process warning + * will be emitted. + * + * ```js + * const body = 'hello world'; + * response.writeHead(200, { + * 'Content-Length': Buffer.byteLength(body), + * 'Content-Type': 'text/plain; charset=utf-8', + * }); + * ``` + * + * `Content-Length` is given in bytes not characters. The`Buffer.byteLength()` API may be used to determine the number of bytes in a + * given encoding. On outbound messages, Node.js does not check if Content-Length + * and the length of the body being transmitted are equal or not. However, when + * receiving messages, Node.js will automatically reject messages when the`Content-Length` does not match the actual payload size. + * + * This method may be called at most one time on a message before `response.end()` is called. + * + * If `response.write()` or `response.end()` are called before calling + * this, the implicit/mutable headers will be calculated and call this function. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * ```js + * // Returns content-type = text/plain + * const server = http2.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html; charset=utf-8'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' }); + * res.end('ok'); + * }); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v8.4.0 + */ + writeHead(statusCode: number, headers?: OutgoingHttpHeaders): this; + writeHead(statusCode: number, statusMessage: string, headers?: OutgoingHttpHeaders): this; + /** + * Call `http2stream.pushStream()` with the given headers, and wrap the + * given `Http2Stream` on a newly created `Http2ServerResponse` as the callback + * parameter if successful. When `Http2ServerRequest` is closed, the callback is + * called with an error `ERR_HTTP2_INVALID_STREAM`.
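A minimal sketch of the `writeHead()`/`createPushResponse()` compatibility API described above; the pushed `:path`, the headers, and the response bodies are assumptions chosen for illustration:

```ts
import * as http2 from "node:http2";

const server = http2.createServer((_request, response) => {
    // Offer a pushed resource before sending the main response body.
    response.createPushResponse({ ":path": "/style.css" }, (err, pushResponse) => {
        if (err) {
            // e.g. ERR_HTTP2_INVALID_STREAM when the request closed first,
            // or the client has disabled push.
            return;
        }
        pushResponse.writeHead(200, { "content-type": "text/css" });
        pushResponse.end("h1 { color: teal; }");
    });

    response.writeHead(200, { "content-type": "text/html; charset=utf-8" });
    response.end('<link rel="stylesheet" href="/style.css"><h1>Hello World</h1>');
});

server.listen(8000);
```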
+ * @since v8.4.0 + * @param headers An object describing the headers + * @param callback Called once `http2stream.pushStream()` is finished, or either when the attempt to create the pushed `Http2Stream` has failed or has been rejected, or the state of + * `Http2ServerRequest` is closed prior to calling the `http2stream.pushStream()` method + */ + createPushResponse( + headers: OutgoingHttpHeaders, + callback: (err: Error | null, res: Http2ServerResponse) => void, + ): void; + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (error: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "drain"): boolean; + emit(event: "error", error: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "pipe", src: stream.Readable): boolean; + emit(event: "unpipe", src: stream.Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (error: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (error: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (error: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (error: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export namespace constants { + const NGHTTP2_SESSION_SERVER: number; + const NGHTTP2_SESSION_CLIENT: number; + const NGHTTP2_STREAM_STATE_IDLE: number; + const NGHTTP2_STREAM_STATE_OPEN: number; + const NGHTTP2_STREAM_STATE_RESERVED_LOCAL: number; + const NGHTTP2_STREAM_STATE_RESERVED_REMOTE: number; + const 
NGHTTP2_STREAM_STATE_HALF_CLOSED_LOCAL: number; + const NGHTTP2_STREAM_STATE_HALF_CLOSED_REMOTE: number; + const NGHTTP2_STREAM_STATE_CLOSED: number; + const NGHTTP2_NO_ERROR: number; + const NGHTTP2_PROTOCOL_ERROR: number; + const NGHTTP2_INTERNAL_ERROR: number; + const NGHTTP2_FLOW_CONTROL_ERROR: number; + const NGHTTP2_SETTINGS_TIMEOUT: number; + const NGHTTP2_STREAM_CLOSED: number; + const NGHTTP2_FRAME_SIZE_ERROR: number; + const NGHTTP2_REFUSED_STREAM: number; + const NGHTTP2_CANCEL: number; + const NGHTTP2_COMPRESSION_ERROR: number; + const NGHTTP2_CONNECT_ERROR: number; + const NGHTTP2_ENHANCE_YOUR_CALM: number; + const NGHTTP2_INADEQUATE_SECURITY: number; + const NGHTTP2_HTTP_1_1_REQUIRED: number; + const NGHTTP2_ERR_FRAME_SIZE_ERROR: number; + const NGHTTP2_FLAG_NONE: number; + const NGHTTP2_FLAG_END_STREAM: number; + const NGHTTP2_FLAG_END_HEADERS: number; + const NGHTTP2_FLAG_ACK: number; + const NGHTTP2_FLAG_PADDED: number; + const NGHTTP2_FLAG_PRIORITY: number; + const DEFAULT_SETTINGS_HEADER_TABLE_SIZE: number; + const DEFAULT_SETTINGS_ENABLE_PUSH: number; + const DEFAULT_SETTINGS_INITIAL_WINDOW_SIZE: number; + const DEFAULT_SETTINGS_MAX_FRAME_SIZE: number; + const MAX_MAX_FRAME_SIZE: number; + const MIN_MAX_FRAME_SIZE: number; + const MAX_INITIAL_WINDOW_SIZE: number; + const NGHTTP2_DEFAULT_WEIGHT: number; + const NGHTTP2_SETTINGS_HEADER_TABLE_SIZE: number; + const NGHTTP2_SETTINGS_ENABLE_PUSH: number; + const NGHTTP2_SETTINGS_MAX_CONCURRENT_STREAMS: number; + const NGHTTP2_SETTINGS_INITIAL_WINDOW_SIZE: number; + const NGHTTP2_SETTINGS_MAX_FRAME_SIZE: number; + const NGHTTP2_SETTINGS_MAX_HEADER_LIST_SIZE: number; + const PADDING_STRATEGY_NONE: number; + const PADDING_STRATEGY_MAX: number; + const PADDING_STRATEGY_CALLBACK: number; + const HTTP2_HEADER_STATUS: string; + const HTTP2_HEADER_METHOD: string; + const HTTP2_HEADER_AUTHORITY: string; + const HTTP2_HEADER_SCHEME: string; + const HTTP2_HEADER_PATH: string; + const HTTP2_HEADER_ACCEPT_CHARSET: string; + const HTTP2_HEADER_ACCEPT_ENCODING: string; + const HTTP2_HEADER_ACCEPT_LANGUAGE: string; + const HTTP2_HEADER_ACCEPT_RANGES: string; + const HTTP2_HEADER_ACCEPT: string; + const HTTP2_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN: string; + const HTTP2_HEADER_AGE: string; + const HTTP2_HEADER_ALLOW: string; + const HTTP2_HEADER_AUTHORIZATION: string; + const HTTP2_HEADER_CACHE_CONTROL: string; + const HTTP2_HEADER_CONNECTION: string; + const HTTP2_HEADER_CONTENT_DISPOSITION: string; + const HTTP2_HEADER_CONTENT_ENCODING: string; + const HTTP2_HEADER_CONTENT_LANGUAGE: string; + const HTTP2_HEADER_CONTENT_LENGTH: string; + const HTTP2_HEADER_CONTENT_LOCATION: string; + const HTTP2_HEADER_CONTENT_MD5: string; + const HTTP2_HEADER_CONTENT_RANGE: string; + const HTTP2_HEADER_CONTENT_TYPE: string; + const HTTP2_HEADER_COOKIE: string; + const HTTP2_HEADER_DATE: string; + const HTTP2_HEADER_ETAG: string; + const HTTP2_HEADER_EXPECT: string; + const HTTP2_HEADER_EXPIRES: string; + const HTTP2_HEADER_FROM: string; + const HTTP2_HEADER_HOST: string; + const HTTP2_HEADER_IF_MATCH: string; + const HTTP2_HEADER_IF_MODIFIED_SINCE: string; + const HTTP2_HEADER_IF_NONE_MATCH: string; + const HTTP2_HEADER_IF_RANGE: string; + const HTTP2_HEADER_IF_UNMODIFIED_SINCE: string; + const HTTP2_HEADER_LAST_MODIFIED: string; + const HTTP2_HEADER_LINK: string; + const HTTP2_HEADER_LOCATION: string; + const HTTP2_HEADER_MAX_FORWARDS: string; + const HTTP2_HEADER_PREFER: string; + const HTTP2_HEADER_PROXY_AUTHENTICATE: string; + const 
HTTP2_HEADER_PROXY_AUTHORIZATION: string; + const HTTP2_HEADER_RANGE: string; + const HTTP2_HEADER_REFERER: string; + const HTTP2_HEADER_REFRESH: string; + const HTTP2_HEADER_RETRY_AFTER: string; + const HTTP2_HEADER_SERVER: string; + const HTTP2_HEADER_SET_COOKIE: string; + const HTTP2_HEADER_STRICT_TRANSPORT_SECURITY: string; + const HTTP2_HEADER_TRANSFER_ENCODING: string; + const HTTP2_HEADER_TE: string; + const HTTP2_HEADER_UPGRADE: string; + const HTTP2_HEADER_USER_AGENT: string; + const HTTP2_HEADER_VARY: string; + const HTTP2_HEADER_VIA: string; + const HTTP2_HEADER_WWW_AUTHENTICATE: string; + const HTTP2_HEADER_HTTP2_SETTINGS: string; + const HTTP2_HEADER_KEEP_ALIVE: string; + const HTTP2_HEADER_PROXY_CONNECTION: string; + const HTTP2_METHOD_ACL: string; + const HTTP2_METHOD_BASELINE_CONTROL: string; + const HTTP2_METHOD_BIND: string; + const HTTP2_METHOD_CHECKIN: string; + const HTTP2_METHOD_CHECKOUT: string; + const HTTP2_METHOD_CONNECT: string; + const HTTP2_METHOD_COPY: string; + const HTTP2_METHOD_DELETE: string; + const HTTP2_METHOD_GET: string; + const HTTP2_METHOD_HEAD: string; + const HTTP2_METHOD_LABEL: string; + const HTTP2_METHOD_LINK: string; + const HTTP2_METHOD_LOCK: string; + const HTTP2_METHOD_MERGE: string; + const HTTP2_METHOD_MKACTIVITY: string; + const HTTP2_METHOD_MKCALENDAR: string; + const HTTP2_METHOD_MKCOL: string; + const HTTP2_METHOD_MKREDIRECTREF: string; + const HTTP2_METHOD_MKWORKSPACE: string; + const HTTP2_METHOD_MOVE: string; + const HTTP2_METHOD_OPTIONS: string; + const HTTP2_METHOD_ORDERPATCH: string; + const HTTP2_METHOD_PATCH: string; + const HTTP2_METHOD_POST: string; + const HTTP2_METHOD_PRI: string; + const HTTP2_METHOD_PROPFIND: string; + const HTTP2_METHOD_PROPPATCH: string; + const HTTP2_METHOD_PUT: string; + const HTTP2_METHOD_REBIND: string; + const HTTP2_METHOD_REPORT: string; + const HTTP2_METHOD_SEARCH: string; + const HTTP2_METHOD_TRACE: string; + const HTTP2_METHOD_UNBIND: string; + const HTTP2_METHOD_UNCHECKOUT: string; + const HTTP2_METHOD_UNLINK: string; + const HTTP2_METHOD_UNLOCK: string; + const HTTP2_METHOD_UPDATE: string; + const HTTP2_METHOD_UPDATEREDIRECTREF: string; + const HTTP2_METHOD_VERSION_CONTROL: string; + const HTTP_STATUS_CONTINUE: number; + const HTTP_STATUS_SWITCHING_PROTOCOLS: number; + const HTTP_STATUS_PROCESSING: number; + const HTTP_STATUS_OK: number; + const HTTP_STATUS_CREATED: number; + const HTTP_STATUS_ACCEPTED: number; + const HTTP_STATUS_NON_AUTHORITATIVE_INFORMATION: number; + const HTTP_STATUS_NO_CONTENT: number; + const HTTP_STATUS_RESET_CONTENT: number; + const HTTP_STATUS_PARTIAL_CONTENT: number; + const HTTP_STATUS_MULTI_STATUS: number; + const HTTP_STATUS_ALREADY_REPORTED: number; + const HTTP_STATUS_IM_USED: number; + const HTTP_STATUS_MULTIPLE_CHOICES: number; + const HTTP_STATUS_MOVED_PERMANENTLY: number; + const HTTP_STATUS_FOUND: number; + const HTTP_STATUS_SEE_OTHER: number; + const HTTP_STATUS_NOT_MODIFIED: number; + const HTTP_STATUS_USE_PROXY: number; + const HTTP_STATUS_TEMPORARY_REDIRECT: number; + const HTTP_STATUS_PERMANENT_REDIRECT: number; + const HTTP_STATUS_BAD_REQUEST: number; + const HTTP_STATUS_UNAUTHORIZED: number; + const HTTP_STATUS_PAYMENT_REQUIRED: number; + const HTTP_STATUS_FORBIDDEN: number; + const HTTP_STATUS_NOT_FOUND: number; + const HTTP_STATUS_METHOD_NOT_ALLOWED: number; + const HTTP_STATUS_NOT_ACCEPTABLE: number; + const HTTP_STATUS_PROXY_AUTHENTICATION_REQUIRED: number; + const HTTP_STATUS_REQUEST_TIMEOUT: number; + const HTTP_STATUS_CONFLICT: number; + 
const HTTP_STATUS_GONE: number; + const HTTP_STATUS_LENGTH_REQUIRED: number; + const HTTP_STATUS_PRECONDITION_FAILED: number; + const HTTP_STATUS_PAYLOAD_TOO_LARGE: number; + const HTTP_STATUS_URI_TOO_LONG: number; + const HTTP_STATUS_UNSUPPORTED_MEDIA_TYPE: number; + const HTTP_STATUS_RANGE_NOT_SATISFIABLE: number; + const HTTP_STATUS_EXPECTATION_FAILED: number; + const HTTP_STATUS_TEAPOT: number; + const HTTP_STATUS_MISDIRECTED_REQUEST: number; + const HTTP_STATUS_UNPROCESSABLE_ENTITY: number; + const HTTP_STATUS_LOCKED: number; + const HTTP_STATUS_FAILED_DEPENDENCY: number; + const HTTP_STATUS_UNORDERED_COLLECTION: number; + const HTTP_STATUS_UPGRADE_REQUIRED: number; + const HTTP_STATUS_PRECONDITION_REQUIRED: number; + const HTTP_STATUS_TOO_MANY_REQUESTS: number; + const HTTP_STATUS_REQUEST_HEADER_FIELDS_TOO_LARGE: number; + const HTTP_STATUS_UNAVAILABLE_FOR_LEGAL_REASONS: number; + const HTTP_STATUS_INTERNAL_SERVER_ERROR: number; + const HTTP_STATUS_NOT_IMPLEMENTED: number; + const HTTP_STATUS_BAD_GATEWAY: number; + const HTTP_STATUS_SERVICE_UNAVAILABLE: number; + const HTTP_STATUS_GATEWAY_TIMEOUT: number; + const HTTP_STATUS_HTTP_VERSION_NOT_SUPPORTED: number; + const HTTP_STATUS_VARIANT_ALSO_NEGOTIATES: number; + const HTTP_STATUS_INSUFFICIENT_STORAGE: number; + const HTTP_STATUS_LOOP_DETECTED: number; + const HTTP_STATUS_BANDWIDTH_LIMIT_EXCEEDED: number; + const HTTP_STATUS_NOT_EXTENDED: number; + const HTTP_STATUS_NETWORK_AUTHENTICATION_REQUIRED: number; + } + /** + * This symbol can be set as a property on the HTTP/2 headers object with + * an array value in order to provide a list of headers considered sensitive. + */ + export const sensitiveHeaders: symbol; + /** + * Returns an object containing the default settings for an `Http2Session`instance. This method returns a new object instance every time it is called + * so instances returned may be safely modified for use. + * @since v8.4.0 + */ + export function getDefaultSettings(): Settings; + /** + * Returns a `Buffer` instance containing serialized representation of the given + * HTTP/2 settings as specified in the [HTTP/2](https://tools.ietf.org/html/rfc7540) specification. This is intended + * for use with the `HTTP2-Settings` header field. + * + * ```js + * const http2 = require('node:http2'); + * + * const packed = http2.getPackedSettings({ enablePush: false }); + * + * console.log(packed.toString('base64')); + * // Prints: AAIAAAAA + * ``` + * @since v8.4.0 + */ + export function getPackedSettings(settings: Settings): Buffer; + /** + * Returns a `HTTP/2 Settings Object` containing the deserialized settings from + * the given `Buffer` as generated by `http2.getPackedSettings()`. + * @since v8.4.0 + * @param buf The packed settings. + */ + export function getUnpackedSettings(buf: Uint8Array): Settings; + /** + * Returns a `net.Server` instance that creates and manages `Http2Session`instances. + * + * Since there are no browsers known that support [unencrypted HTTP/2](https://http2.github.io/faq/#does-http2-require-encryption), the use of {@link createSecureServer} is necessary when + * communicating + * with browser clients. + * + * ```js + * const http2 = require('node:http2'); + * + * // Create an unencrypted HTTP/2 server. + * // Since there are no browsers known that support + * // unencrypted HTTP/2, the use of `http2.createSecureServer()` + * // is necessary when communicating with browser clients. 
+ * const server = http2.createServer(); + * + * server.on('stream', (stream, headers) => { + * stream.respond({ + * 'content-type': 'text/html; charset=utf-8', + * ':status': 200, + * }); + * stream.end('
<h1>Hello World</h1>
'); + * }); + * + * server.listen(8000); + * ``` + * @since v8.4.0 + * @param onRequestHandler See `Compatibility API` + */ + export function createServer( + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2Server; + export function createServer( + options: ServerOptions, + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2Server; + /** + * Returns a `tls.Server` instance that creates and manages `Http2Session`instances. + * + * ```js + * const http2 = require('node:http2'); + * const fs = require('node:fs'); + * + * const options = { + * key: fs.readFileSync('server-key.pem'), + * cert: fs.readFileSync('server-cert.pem'), + * }; + * + * // Create a secure HTTP/2 server + * const server = http2.createSecureServer(options); + * + * server.on('stream', (stream, headers) => { + * stream.respond({ + * 'content-type': 'text/html; charset=utf-8', + * ':status': 200, + * }); + * stream.end('
<h1>Hello World</h1>
'); + * }); + * + * server.listen(8443); + * ``` + * @since v8.4.0 + * @param onRequestHandler See `Compatibility API` + */ + export function createSecureServer( + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2SecureServer; + export function createSecureServer( + options: SecureServerOptions, + onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void, + ): Http2SecureServer; + /** + * Returns a `ClientHttp2Session` instance. + * + * ```js + * const http2 = require('node:http2'); + * const client = http2.connect('https://localhost:1234'); + * + * // Use the client + * + * client.close(); + * ``` + * @since v8.4.0 + * @param authority The remote HTTP/2 server to connect to. This must be in the form of a minimal, valid URL with the `http://` or `https://` prefix, host name, and IP port (if a non-default port + * is used). Userinfo (user ID and password), path, querystring, and fragment details in the URL will be ignored. + * @param listener Will be registered as a one-time listener of the {@link 'connect'} event. + */ + export function connect( + authority: string | url.URL, + listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): ClientHttp2Session; + export function connect( + authority: string | url.URL, + options?: ClientSessionOptions | SecureClientSessionOptions, + listener?: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void, + ): ClientHttp2Session; +} +declare module "node:http2" { + export * from "http2"; +} diff --git a/dist/node_modules/@types/node/ts4.8/https.d.ts b/dist/node_modules/@types/node/ts4.8/https.d.ts new file mode 100644 index 0000000..36ae5b2 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/https.d.ts @@ -0,0 +1,550 @@ +/** + * HTTPS is the HTTP protocol over TLS/SSL. In Node.js this is implemented as a + * separate module. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/https.js) + */ +declare module "https" { + import { Duplex } from "node:stream"; + import * as tls from "node:tls"; + import * as http from "node:http"; + import { URL } from "node:url"; + type ServerOptions< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > = tls.SecureContextOptions & tls.TlsOptions & http.ServerOptions; + type RequestOptions = + & http.RequestOptions + & tls.SecureContextOptions + & { + checkServerIdentity?: typeof tls.checkServerIdentity | undefined; + rejectUnauthorized?: boolean | undefined; // Defaults to true + servername?: string | undefined; // SNI TLS Extension + }; + interface AgentOptions extends http.AgentOptions, tls.ConnectionOptions { + rejectUnauthorized?: boolean | undefined; + maxCachedSessions?: number | undefined; + } + /** + * An `Agent` object for HTTPS similar to `http.Agent`. See {@link request} for more information. + * @since v0.4.5 + */ + class Agent extends http.Agent { + constructor(options?: AgentOptions); + options: AgentOptions; + } + interface Server< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > extends http.Server {} + /** + * See `http.Server` for more information. 
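A minimal sketch of how the `AgentOptions` and `RequestOptions` shapes declared above fit together in a call to `https.request()`; the host name and option values are assumptions, and `keepAlive` comes from the inherited `http.AgentOptions`:

```ts
import * as https from "node:https";

// AgentOptions: cache and reuse TLS sessions across requests.
const agent = new https.Agent({
    keepAlive: true,       // from http.AgentOptions
    maxCachedSessions: 50,
});

// RequestOptions: http.RequestOptions plus TLS-specific fields such as `servername`.
const req = https.request(
    {
        hostname: "example.com",
        port: 443,
        path: "/",
        method: "GET",
        servername: "example.com", // SNI
        agent,
    },
    (res) => {
        console.log("statusCode:", res.statusCode);
        res.resume(); // drain the body so the socket can be reused
    },
);
req.on("error", (err) => console.error(err));
req.end();
```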
+ * @since v0.3.4 + */ + class Server< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > extends tls.Server { + constructor(requestListener?: http.RequestListener); + constructor( + options: ServerOptions, + requestListener?: http.RequestListener, + ); + /** + * Closes all connections connected to this server. + * @since v18.2.0 + */ + closeAllConnections(): void; + /** + * Closes all connections connected to this server which are not sending a request or waiting for a response. + * @since v18.2.0 + */ + closeIdleConnections(): void; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + addListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + addListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + addListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + addListener(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + addListener(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connection", listener: (socket: Duplex) => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "checkContinue", listener: http.RequestListener): this; + addListener(event: "checkExpectation", listener: http.RequestListener): this; + addListener(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + addListener( + event: "connect", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + addListener(event: "request", listener: http.RequestListener): this; + addListener( + event: "upgrade", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + emit(event: string, ...args: any[]): boolean; + emit(event: "keylog", line: Buffer, tlsSocket: tls.TLSSocket): boolean; + emit( + event: "newSession", + sessionId: Buffer, + sessionData: Buffer, + callback: (err: Error, resp: Buffer) => void, + ): boolean; + emit( + event: "OCSPRequest", + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ): boolean; + emit(event: "resumeSession", sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void): boolean; + emit(event: "secureConnection", tlsSocket: tls.TLSSocket): boolean; + emit(event: "tlsClientError", err: Error, tlsSocket: tls.TLSSocket): boolean; + emit(event: "close"): boolean; + emit(event: "connection", socket: Duplex): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit( + event: "checkContinue", + req: InstanceType, + res: InstanceType & { + req: InstanceType; + }, + ): boolean; + emit( + event: "checkExpectation", + req: InstanceType, + res: InstanceType & { + req: InstanceType; + }, + ): boolean; + emit(event: "clientError", err: Error, socket: Duplex): boolean; + emit(event: "connect", req: InstanceType, socket: Duplex, head: Buffer): boolean; + emit( + event: 
"request", + req: InstanceType, + res: InstanceType & { + req: InstanceType; + }, + ): boolean; + emit(event: "upgrade", req: InstanceType, socket: Duplex, head: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + on( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + on( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + on( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + on(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + on(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connection", listener: (socket: Duplex) => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "checkContinue", listener: http.RequestListener): this; + on(event: "checkExpectation", listener: http.RequestListener): this; + on(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + on(event: "connect", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + on(event: "request", listener: http.RequestListener): this; + on(event: "upgrade", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + once( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + once( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + once( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + once(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + once(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connection", listener: (socket: Duplex) => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "checkContinue", listener: http.RequestListener): this; + once(event: "checkExpectation", listener: http.RequestListener): this; + once(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + once(event: "connect", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + once(event: "request", listener: http.RequestListener): this; + once(event: "upgrade", listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + prependListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + 
prependListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + prependListener(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + prependListener(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connection", listener: (socket: Duplex) => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "checkContinue", listener: http.RequestListener): this; + prependListener(event: "checkExpectation", listener: http.RequestListener): this; + prependListener(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + prependListener( + event: "connect", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependListener(event: "request", listener: http.RequestListener): this; + prependListener( + event: "upgrade", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "keylog", listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + prependOnceListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependOnceListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + prependOnceListener(event: "secureConnection", listener: (tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener(event: "tlsClientError", listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "connection", listener: (socket: Duplex) => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "checkContinue", listener: http.RequestListener): this; + prependOnceListener(event: "checkExpectation", listener: http.RequestListener): this; + prependOnceListener(event: "clientError", listener: (err: Error, socket: Duplex) => void): this; + prependOnceListener( + event: "connect", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: "request", listener: http.RequestListener): this; + prependOnceListener( + event: "upgrade", + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + } + /** + * ```js + * // curl -k https://localhost:8000/ + * const https = require('node:https'); + * const fs = require('node:fs'); + * + * const options = { + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * }; + * + * https.createServer(options, (req, res) => { + * 
res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * ``` + * + * Or + * + * ```js + * const https = require('node:https'); + * const fs = require('node:fs'); + * + * const options = { + * pfx: fs.readFileSync('test/fixtures/test_cert.pfx'), + * passphrase: 'sample', + * }; + * + * https.createServer(options, (req, res) => { + * res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * ``` + * @since v0.3.4 + * @param options Accepts `options` from `createServer`, `createSecureContext` and `createServer`. + * @param requestListener A listener to be added to the `'request'` event. + */ + function createServer< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + >(requestListener?: http.RequestListener): Server; + function createServer< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + >( + options: ServerOptions, + requestListener?: http.RequestListener, + ): Server; + /** + * Makes a request to a secure web server. + * + * The following additional `options` from `tls.connect()` are also accepted:`ca`, `cert`, `ciphers`, `clientCertEngine`, `crl`, `dhparam`, `ecdhCurve`,`honorCipherOrder`, `key`, `passphrase`, + * `pfx`, `rejectUnauthorized`,`secureOptions`, `secureProtocol`, `servername`, `sessionIdContext`,`highWaterMark`. + * + * `options` can be an object, a string, or a `URL` object. If `options` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * `https.request()` returns an instance of the `http.ClientRequest` class. The `ClientRequest` instance is a writable stream. If one needs to + * upload a file with a POST request, then write to the `ClientRequest` object. + * + * ```js + * const https = require('node:https'); + * + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * }; + * + * const req = https.request(options, (res) => { + * console.log('statusCode:', res.statusCode); + * console.log('headers:', res.headers); + * + * res.on('data', (d) => { + * process.stdout.write(d); + * }); + * }); + * + * req.on('error', (e) => { + * console.error(e); + * }); + * req.end(); + * ``` + * + * Example using options from `tls.connect()`: + * + * ```js + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * }; + * options.agent = new https.Agent(options); + * + * const req = https.request(options, (res) => { + * // ... + * }); + * ``` + * + * Alternatively, opt out of connection pooling by not using an `Agent`. + * + * ```js + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * agent: false, + * }; + * + * const req = https.request(options, (res) => { + * // ... + * }); + * ``` + * + * Example using a `URL` as `options`: + * + * ```js + * const options = new URL('https://abc:xyz@example.com'); + * + * const req = https.request(options, (res) => { + * // ... 
+ * }); + * ``` + * + * Example pinning on certificate fingerprint, or the public key (similar to`pin-sha256`): + * + * ```js + * const tls = require('node:tls'); + * const https = require('node:https'); + * const crypto = require('node:crypto'); + * + * function sha256(s) { + * return crypto.createHash('sha256').update(s).digest('base64'); + * } + * const options = { + * hostname: 'github.com', + * port: 443, + * path: '/', + * method: 'GET', + * checkServerIdentity: function(host, cert) { + * // Make sure the certificate is issued to the host we are connected to + * const err = tls.checkServerIdentity(host, cert); + * if (err) { + * return err; + * } + * + * // Pin the public key, similar to HPKP pin-sha256 pinning + * const pubkey256 = 'pL1+qb9HTMRZJmuC/bB/ZI9d302BYrrqiVuRyW+DGrU='; + * if (sha256(cert.pubkey) !== pubkey256) { + * const msg = 'Certificate verification error: ' + + * `The public key of '${cert.subject.CN}' ` + + * 'does not match our pinned fingerprint'; + * return new Error(msg); + * } + * + * // Pin the exact certificate, rather than the pub key + * const cert256 = '25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:' + + * 'D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16'; + * if (cert.fingerprint256 !== cert256) { + * const msg = 'Certificate verification error: ' + + * `The certificate of '${cert.subject.CN}' ` + + * 'does not match our pinned fingerprint'; + * return new Error(msg); + * } + * + * // This loop is informational only. + * // Print the certificate and public key fingerprints of all certs in the + * // chain. Its common to pin the public key of the issuer on the public + * // internet, while pinning the public key of the service in sensitive + * // environments. + * do { + * console.log('Subject Common Name:', cert.subject.CN); + * console.log(' Certificate SHA256 fingerprint:', cert.fingerprint256); + * + * hash = crypto.createHash('sha256'); + * console.log(' Public key ping-sha256:', sha256(cert.pubkey)); + * + * lastprint256 = cert.fingerprint256; + * cert = cert.issuerCertificate; + * } while (cert.fingerprint256 !== lastprint256); + * + * }, + * }; + * + * options.agent = new https.Agent(options); + * const req = https.request(options, (res) => { + * console.log('All OK. Server matched our pinned cert or public key'); + * console.log('statusCode:', res.statusCode); + * // Print the HPKP values + * console.log('headers:', res.headers['public-key-pins']); + * + * res.on('data', (d) => {}); + * }); + * + * req.on('error', (e) => { + * console.error(e.message); + * }); + * req.end(); + * ``` + * + * Outputs for example: + * + * ```text + * Subject Common Name: github.com + * Certificate SHA256 fingerprint: 25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16 + * Public key ping-sha256: pL1+qb9HTMRZJmuC/bB/ZI9d302BYrrqiVuRyW+DGrU= + * Subject Common Name: DigiCert SHA2 Extended Validation Server CA + * Certificate SHA256 fingerprint: 40:3E:06:2A:26:53:05:91:13:28:5B:AF:80:A0:D4:AE:42:2C:84:8C:9F:78:FA:D0:1F:C9:4B:C5:B8:7F:EF:1A + * Public key ping-sha256: RRM1dGqnDFsCJXBTHky16vi1obOlCgFFn/yOhI/y+ho= + * Subject Common Name: DigiCert High Assurance EV Root CA + * Certificate SHA256 fingerprint: 74:31:E5:F4:C3:C1:CE:46:90:77:4F:0B:61:E0:54:40:88:3B:A9:A0:1E:D0:0B:A6:AB:D7:80:6E:D3:B1:18:CF + * Public key ping-sha256: WoiWRyIOVNa9ihaBciRSC7XHjliYS9VwUGOIud4PB18= + * All OK. 
Server matched our pinned cert or public key + * statusCode: 200 + * headers: max-age=0; pin-sha256="WoiWRyIOVNa9ihaBciRSC7XHjliYS9VwUGOIud4PB18="; pin-sha256="RRM1dGqnDFsCJXBTHky16vi1obOlCgFFn/yOhI/y+ho="; + * pin-sha256="k2v657xBsOVe1PQRwOsHsw3bsGT2VzIqz5K+59sNQws="; pin-sha256="K87oWBWM9UZfyddvDfoxL+8lpNyoUB2ptGtn0fv6G2Q="; pin-sha256="IQBnNBEiFuhj+8x6X8XLgh01V9Ic5/V3IRQLNFFc7v4="; + * pin-sha256="iie1VXtL7HzAMF+/PVPR9xzT80kQxdZeJ+zduCB3uj0="; pin-sha256="LvRiGEjRqfzurezaWuj8Wie2gyHMrW5Q06LspMnox7A="; includeSubDomains + * ``` + * @since v0.3.6 + * @param options Accepts all `options` from `request`, with some differences in default values: + */ + function request( + options: RequestOptions | string | URL, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + function request( + url: string | URL, + options: RequestOptions, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + /** + * Like `http.get()` but for HTTPS. + * + * `options` can be an object, a string, or a `URL` object. If `options` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * ```js + * const https = require('node:https'); + * + * https.get('https://encrypted.google.com/', (res) => { + * console.log('statusCode:', res.statusCode); + * console.log('headers:', res.headers); + * + * res.on('data', (d) => { + * process.stdout.write(d); + * }); + * + * }).on('error', (e) => { + * console.error(e); + * }); + * ``` + * @since v0.3.6 + * @param options Accepts the same `options` as {@link request}, with the `method` always set to `GET`. + */ + function get( + options: RequestOptions | string | URL, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + function get( + url: string | URL, + options: RequestOptions, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + let globalAgent: Agent; +} +declare module "node:https" { + export * from "https"; +} diff --git a/dist/node_modules/@types/node/ts4.8/index.d.ts b/dist/node_modules/@types/node/ts4.8/index.d.ts new file mode 100644 index 0000000..7c8b38c --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/index.d.ts @@ -0,0 +1,88 @@ +/** + * License for programmatically and manually incorporated + * documentation aka. `JSDoc` from https://github.com/nodejs/node/tree/master/doc + * + * Copyright Node.js contributors. All rights reserved. + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + */ + +// NOTE: These definitions support NodeJS and TypeScript 4.8 and earlier. + +// Reference required types from the default lib: +/// +/// +/// +/// + +// Base definitions for all NodeJS modules that are not specific to any version of TypeScript: +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// + +/// diff --git a/dist/node_modules/@types/node/ts4.8/inspector.d.ts b/dist/node_modules/@types/node/ts4.8/inspector.d.ts new file mode 100644 index 0000000..3927b81 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/inspector.d.ts @@ -0,0 +1,2747 @@ +// Type definitions for inspector + +// These definitions are auto-generated. +// Please see https://github.com/DefinitelyTyped/DefinitelyTyped/pull/19330 +// for more information. + + +/** + * The `node:inspector` module provides an API for interacting with the V8 + * inspector. + * + * It can be accessed using: + * + * ```js + * import * as inspector from 'node:inspector/promises'; + * ``` + * + * or + * + * ```js + * import * as inspector from 'node:inspector'; + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/inspector.js) + */ +declare module 'inspector' { + import EventEmitter = require('node:events'); + interface InspectorNotification { + method: string; + params: T; + } + namespace Schema { + /** + * Description of the protocol domain. + */ + interface Domain { + /** + * Domain name. + */ + name: string; + /** + * Domain version. + */ + version: string; + } + interface GetDomainsReturnType { + /** + * List of supported domains. + */ + domains: Domain[]; + } + } + namespace Runtime { + /** + * Unique script identifier. + */ + type ScriptId = string; + /** + * Unique object identifier. + */ + type RemoteObjectId = string; + /** + * Primitive value which cannot be JSON-stringified. + */ + type UnserializableValue = string; + /** + * Mirror object referencing original JavaScript object. + */ + interface RemoteObject { + /** + * Object type. + */ + type: string; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + /** + * Object class (constructor) name. Specified for object type values only. + */ + className?: string | undefined; + /** + * Remote object value in case of primitive values or JSON values (if it was requested). + */ + value?: any; + /** + * Primitive value which can not be JSON-stringified does not have value, but gets this property. + */ + unserializableValue?: UnserializableValue | undefined; + /** + * String representation of the object. + */ + description?: string | undefined; + /** + * Unique object identifier (for non-primitive values). + */ + objectId?: RemoteObjectId | undefined; + /** + * Preview containing abbreviated property values. Specified for object type values only. 
+ * @experimental + */ + preview?: ObjectPreview | undefined; + /** + * @experimental + */ + customPreview?: CustomPreview | undefined; + } + /** + * @experimental + */ + interface CustomPreview { + header: string; + hasBody: boolean; + formatterObjectId: RemoteObjectId; + bindRemoteObjectFunctionId: RemoteObjectId; + configObjectId?: RemoteObjectId | undefined; + } + /** + * Object containing abbreviated remote object value. + * @experimental + */ + interface ObjectPreview { + /** + * Object type. + */ + type: string; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + /** + * String representation of the object. + */ + description?: string | undefined; + /** + * True iff some of the properties or entries of the original object did not fit. + */ + overflow: boolean; + /** + * List of the properties. + */ + properties: PropertyPreview[]; + /** + * List of the entries. Specified for map and set subtype values only. + */ + entries?: EntryPreview[] | undefined; + } + /** + * @experimental + */ + interface PropertyPreview { + /** + * Property name. + */ + name: string; + /** + * Object type. Accessor means that the property itself is an accessor property. + */ + type: string; + /** + * User-friendly property value string. + */ + value?: string | undefined; + /** + * Nested value preview. + */ + valuePreview?: ObjectPreview | undefined; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + } + /** + * @experimental + */ + interface EntryPreview { + /** + * Preview of the key. Specified for map-like collection entries. + */ + key?: ObjectPreview | undefined; + /** + * Preview of the value. + */ + value: ObjectPreview; + } + /** + * Object property descriptor. + */ + interface PropertyDescriptor { + /** + * Property name or symbol description. + */ + name: string; + /** + * The value associated with the property. + */ + value?: RemoteObject | undefined; + /** + * True if the value associated with the property may be changed (data descriptors only). + */ + writable?: boolean | undefined; + /** + * A function which serves as a getter for the property, or undefined if there is no getter (accessor descriptors only). + */ + get?: RemoteObject | undefined; + /** + * A function which serves as a setter for the property, or undefined if there is no setter (accessor descriptors only). + */ + set?: RemoteObject | undefined; + /** + * True if the type of this property descriptor may be changed and if the property may be deleted from the corresponding object. + */ + configurable: boolean; + /** + * True if this property shows up during enumeration of the properties on the corresponding object. + */ + enumerable: boolean; + /** + * True if the result was thrown during the evaluation. + */ + wasThrown?: boolean | undefined; + /** + * True if the property is owned for the object. + */ + isOwn?: boolean | undefined; + /** + * Property symbol object, if the property is of the symbol type. + */ + symbol?: RemoteObject | undefined; + } + /** + * Object internal property descriptor. This property isn't normally visible in JavaScript code. + */ + interface InternalPropertyDescriptor { + /** + * Conventional property name. + */ + name: string; + /** + * The value associated with the property. + */ + value?: RemoteObject | undefined; + } + /** + * Represents function call argument. 
Either remote object id objectId, primitive value, unserializable primitive value or neither of (for undefined) them should be specified. + */ + interface CallArgument { + /** + * Primitive value or serializable javascript object. + */ + value?: any; + /** + * Primitive value which can not be JSON-stringified. + */ + unserializableValue?: UnserializableValue | undefined; + /** + * Remote object handle. + */ + objectId?: RemoteObjectId | undefined; + } + /** + * Id of an execution context. + */ + type ExecutionContextId = number; + /** + * Description of an isolated world. + */ + interface ExecutionContextDescription { + /** + * Unique id of the execution context. It can be used to specify in which execution context script evaluation should be performed. + */ + id: ExecutionContextId; + /** + * Execution context origin. + */ + origin: string; + /** + * Human readable name describing given context. + */ + name: string; + /** + * Embedder-specific auxiliary data. + */ + auxData?: {} | undefined; + } + /** + * Detailed information about exception (or error) that was thrown during script compilation or execution. + */ + interface ExceptionDetails { + /** + * Exception id. + */ + exceptionId: number; + /** + * Exception text, which should be used together with exception object when available. + */ + text: string; + /** + * Line number of the exception location (0-based). + */ + lineNumber: number; + /** + * Column number of the exception location (0-based). + */ + columnNumber: number; + /** + * Script ID of the exception location. + */ + scriptId?: ScriptId | undefined; + /** + * URL of the exception location, to be used when the script was not reported. + */ + url?: string | undefined; + /** + * JavaScript stack trace if available. + */ + stackTrace?: StackTrace | undefined; + /** + * Exception object if available. + */ + exception?: RemoteObject | undefined; + /** + * Identifier of the context where exception happened. + */ + executionContextId?: ExecutionContextId | undefined; + } + /** + * Number of milliseconds since epoch. + */ + type Timestamp = number; + /** + * Stack entry for runtime errors and assertions. + */ + interface CallFrame { + /** + * JavaScript function name. + */ + functionName: string; + /** + * JavaScript script id. + */ + scriptId: ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * JavaScript script line number (0-based). + */ + lineNumber: number; + /** + * JavaScript script column number (0-based). + */ + columnNumber: number; + } + /** + * Call frames for assertions or error messages. + */ + interface StackTrace { + /** + * String label of this stack trace. For async traces this may be a name of the function that initiated the async call. + */ + description?: string | undefined; + /** + * JavaScript function name. + */ + callFrames: CallFrame[]; + /** + * Asynchronous JavaScript stack trace that preceded this stack, if available. + */ + parent?: StackTrace | undefined; + /** + * Asynchronous JavaScript stack trace that preceded this stack, if available. + * @experimental + */ + parentId?: StackTraceId | undefined; + } + /** + * Unique identifier of current debugger. + * @experimental + */ + type UniqueDebuggerId = string; + /** + * If debuggerId is set stack trace comes from another debugger and can be resolved there. This allows to track cross-debugger calls. See Runtime.StackTrace and Debugger.paused for usages. 
+ * @experimental + */ + interface StackTraceId { + id: string; + debuggerId?: UniqueDebuggerId | undefined; + } + interface EvaluateParameterType { + /** + * Expression to evaluate. + */ + expression: string; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + /** + * Determines whether Command Line API should be available during the evaluation. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Specifies in which execution context to perform evaluation. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + contextId?: ExecutionContextId | undefined; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should be treated as initiated by user in the UI. + */ + userGesture?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + } + interface AwaitPromiseParameterType { + /** + * Identifier of the promise. + */ + promiseObjectId: RemoteObjectId; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + */ + generatePreview?: boolean | undefined; + } + interface CallFunctionOnParameterType { + /** + * Declaration of the function to call. + */ + functionDeclaration: string; + /** + * Identifier of the object to call function on. Either objectId or executionContextId should be specified. + */ + objectId?: RemoteObjectId | undefined; + /** + * Call arguments. All call arguments must belong to the same JavaScript world as the target object. + */ + arguments?: CallArgument[] | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object which should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should be treated as initiated by user in the UI. + */ + userGesture?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + /** + * Specifies execution context which global object will be used to call function on. Either executionContextId or objectId should be specified. + */ + executionContextId?: ExecutionContextId | undefined; + /** + * Symbolic group name that can be used to release multiple objects. If objectGroup is not specified and objectId is, objectGroup will be inherited from object. + */ + objectGroup?: string | undefined; + } + interface GetPropertiesParameterType { + /** + * Identifier of the object to return properties for. 
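+             *
+             * A minimal sketch of how an `objectId` is typically obtained and then passed to
+             * `Runtime.getProperties`, assuming a connected `Session` named `session`
+             * (illustrative only; the evaluated expression is arbitrary):
+             *
+             * ```js
+             * session.post('Runtime.evaluate', { expression: 'process.versions' }, (err, res) => {
+             *   if (err) return console.error(err);
+             *   session.post('Runtime.getProperties', { objectId: res.result.objectId }, (err2, props) => {
+             *     if (err2) return console.error(err2);
+             *     console.log(props.result.map((p) => p.name));
+             *   });
+             * });
+             * ```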
+ */ + objectId: RemoteObjectId; + /** + * If true, returns properties belonging only to the element itself, not to its prototype chain. + */ + ownProperties?: boolean | undefined; + /** + * If true, returns accessor properties (with getter/setter) only; internal properties are not returned either. + * @experimental + */ + accessorPropertiesOnly?: boolean | undefined; + /** + * Whether preview should be generated for the results. + * @experimental + */ + generatePreview?: boolean | undefined; + } + interface ReleaseObjectParameterType { + /** + * Identifier of the object to release. + */ + objectId: RemoteObjectId; + } + interface ReleaseObjectGroupParameterType { + /** + * Symbolic object group name. + */ + objectGroup: string; + } + interface SetCustomObjectFormatterEnabledParameterType { + enabled: boolean; + } + interface CompileScriptParameterType { + /** + * Expression to compile. + */ + expression: string; + /** + * Source url to be set for the script. + */ + sourceURL: string; + /** + * Specifies whether the compiled script should be persisted. + */ + persistScript: boolean; + /** + * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + executionContextId?: ExecutionContextId | undefined; + } + interface RunScriptParameterType { + /** + * Id of the script to run. + */ + scriptId: ScriptId; + /** + * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + executionContextId?: ExecutionContextId | undefined; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Determines whether Command Line API should be available during the evaluation. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object which should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + } + interface QueryObjectsParameterType { + /** + * Identifier of the prototype to return objects for. + */ + prototypeObjectId: RemoteObjectId; + } + interface GlobalLexicalScopeNamesParameterType { + /** + * Specifies in which execution context to lookup global scope variables. + */ + executionContextId?: ExecutionContextId | undefined; + } + interface EvaluateReturnType { + /** + * Evaluation result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface AwaitPromiseReturnType { + /** + * Promise result. Will contain rejected value if promise was rejected. + */ + result: RemoteObject; + /** + * Exception details if stack strace is available. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface CallFunctionOnReturnType { + /** + * Call result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface GetPropertiesReturnType { + /** + * Object properties. 
+ */ + result: PropertyDescriptor[]; + /** + * Internal object properties (only of the element itself). + */ + internalProperties?: InternalPropertyDescriptor[] | undefined; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface CompileScriptReturnType { + /** + * Id of the script. + */ + scriptId?: ScriptId | undefined; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface RunScriptReturnType { + /** + * Run result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface QueryObjectsReturnType { + /** + * Array with objects. + */ + objects: RemoteObject; + } + interface GlobalLexicalScopeNamesReturnType { + names: string[]; + } + interface ExecutionContextCreatedEventDataType { + /** + * A newly created execution context. + */ + context: ExecutionContextDescription; + } + interface ExecutionContextDestroyedEventDataType { + /** + * Id of the destroyed context + */ + executionContextId: ExecutionContextId; + } + interface ExceptionThrownEventDataType { + /** + * Timestamp of the exception. + */ + timestamp: Timestamp; + exceptionDetails: ExceptionDetails; + } + interface ExceptionRevokedEventDataType { + /** + * Reason describing why exception was revoked. + */ + reason: string; + /** + * The id of revoked exception, as reported in exceptionThrown. + */ + exceptionId: number; + } + interface ConsoleAPICalledEventDataType { + /** + * Type of the call. + */ + type: string; + /** + * Call arguments. + */ + args: RemoteObject[]; + /** + * Identifier of the context where the call was made. + */ + executionContextId: ExecutionContextId; + /** + * Call timestamp. + */ + timestamp: Timestamp; + /** + * Stack trace captured when the call was made. + */ + stackTrace?: StackTrace | undefined; + /** + * Console context descriptor for calls on non-default console context (not console.*): 'anonymous#unique-logger-id' for call on unnamed context, 'name#unique-logger-id' for call on named context. + * @experimental + */ + context?: string | undefined; + } + interface InspectRequestedEventDataType { + object: RemoteObject; + hints: {}; + } + } + namespace Debugger { + /** + * Breakpoint identifier. + */ + type BreakpointId = string; + /** + * Call frame identifier. + */ + type CallFrameId = string; + /** + * Location in the source code. + */ + interface Location { + /** + * Script identifier as reported in the Debugger.scriptParsed. + */ + scriptId: Runtime.ScriptId; + /** + * Line number in the script (0-based). + */ + lineNumber: number; + /** + * Column number in the script (0-based). + */ + columnNumber?: number | undefined; + } + /** + * Location in the source code. + * @experimental + */ + interface ScriptPosition { + lineNumber: number; + columnNumber: number; + } + /** + * JavaScript call frame. Array of call frames form the call stack. + */ + interface CallFrame { + /** + * Call frame identifier. This identifier is only valid while the virtual machine is paused. + */ + callFrameId: CallFrameId; + /** + * Name of the JavaScript function called on this call frame. + */ + functionName: string; + /** + * Location in the source code. + */ + functionLocation?: Location | undefined; + /** + * Location in the source code. + */ + location: Location; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Scope chain for this call frame. + */ + scopeChain: Scope[]; + /** + * this object for this call frame. 
+ */ + this: Runtime.RemoteObject; + /** + * The value being returned, if the function is at return point. + */ + returnValue?: Runtime.RemoteObject | undefined; + } + /** + * Scope description. + */ + interface Scope { + /** + * Scope type. + */ + type: string; + /** + * Object representing the scope. For global and with scopes it represents the actual object; for the rest of the scopes, it is artificial transient object enumerating scope variables as its properties. + */ + object: Runtime.RemoteObject; + name?: string | undefined; + /** + * Location in the source code where scope starts + */ + startLocation?: Location | undefined; + /** + * Location in the source code where scope ends + */ + endLocation?: Location | undefined; + } + /** + * Search match for resource. + */ + interface SearchMatch { + /** + * Line number in resource content. + */ + lineNumber: number; + /** + * Line with match content. + */ + lineContent: string; + } + interface BreakLocation { + /** + * Script identifier as reported in the Debugger.scriptParsed. + */ + scriptId: Runtime.ScriptId; + /** + * Line number in the script (0-based). + */ + lineNumber: number; + /** + * Column number in the script (0-based). + */ + columnNumber?: number | undefined; + type?: string | undefined; + } + interface SetBreakpointsActiveParameterType { + /** + * New value for breakpoints active state. + */ + active: boolean; + } + interface SetSkipAllPausesParameterType { + /** + * New value for skip pauses state. + */ + skip: boolean; + } + interface SetBreakpointByUrlParameterType { + /** + * Line number to set breakpoint at. + */ + lineNumber: number; + /** + * URL of the resources to set breakpoint on. + */ + url?: string | undefined; + /** + * Regex pattern for the URLs of the resources to set breakpoints on. Either url or urlRegex must be specified. + */ + urlRegex?: string | undefined; + /** + * Script hash of the resources to set breakpoint on. + */ + scriptHash?: string | undefined; + /** + * Offset in the line to set breakpoint at. + */ + columnNumber?: number | undefined; + /** + * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. + */ + condition?: string | undefined; + } + interface SetBreakpointParameterType { + /** + * Location to set breakpoint in. + */ + location: Location; + /** + * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. + */ + condition?: string | undefined; + } + interface RemoveBreakpointParameterType { + breakpointId: BreakpointId; + } + interface GetPossibleBreakpointsParameterType { + /** + * Start of range to search possible breakpoint locations in. + */ + start: Location; + /** + * End of range to search possible breakpoint locations in (excluding). When not specified, end of scripts is used as end of range. + */ + end?: Location | undefined; + /** + * Only consider locations which are in the same (non-nested) function as start. + */ + restrictToFunction?: boolean | undefined; + } + interface ContinueToLocationParameterType { + /** + * Location to continue to. + */ + location: Location; + targetCallFrames?: string | undefined; + } + interface PauseOnAsyncCallParameterType { + /** + * Debugger will pause when async call with given stack trace is started. 
+ */ + parentStackTraceId: Runtime.StackTraceId; + } + interface StepIntoParameterType { + /** + * Debugger will issue additional Debugger.paused notification if any async task is scheduled before next pause. + * @experimental + */ + breakOnAsyncCall?: boolean | undefined; + } + interface GetStackTraceParameterType { + stackTraceId: Runtime.StackTraceId; + } + interface SearchInContentParameterType { + /** + * Id of the script to search in. + */ + scriptId: Runtime.ScriptId; + /** + * String to search for. + */ + query: string; + /** + * If true, search is case sensitive. + */ + caseSensitive?: boolean | undefined; + /** + * If true, treats string parameter as regex. + */ + isRegex?: boolean | undefined; + } + interface SetScriptSourceParameterType { + /** + * Id of the script to edit. + */ + scriptId: Runtime.ScriptId; + /** + * New content of the script. + */ + scriptSource: string; + /** + * If true the change will not actually be applied. Dry run may be used to get result description without actually modifying the code. + */ + dryRun?: boolean | undefined; + } + interface RestartFrameParameterType { + /** + * Call frame identifier to evaluate on. + */ + callFrameId: CallFrameId; + } + interface GetScriptSourceParameterType { + /** + * Id of the script to get source for. + */ + scriptId: Runtime.ScriptId; + } + interface SetPauseOnExceptionsParameterType { + /** + * Pause on exceptions mode. + */ + state: string; + } + interface EvaluateOnCallFrameParameterType { + /** + * Call frame identifier to evaluate on. + */ + callFrameId: CallFrameId; + /** + * Expression to evaluate. + */ + expression: string; + /** + * String object group name to put result into (allows rapid releasing resulting object handles using releaseObjectGroup). + */ + objectGroup?: string | undefined; + /** + * Specifies whether command line API should be available to the evaluated expression, defaults to false. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether to throw an exception if side effect cannot be ruled out during evaluation. + */ + throwOnSideEffect?: boolean | undefined; + } + interface SetVariableValueParameterType { + /** + * 0-based number of scope as was listed in scope chain. Only 'local', 'closure' and 'catch' scope types are allowed. Other scopes could be manipulated manually. + */ + scopeNumber: number; + /** + * Variable name. + */ + variableName: string; + /** + * New variable value. + */ + newValue: Runtime.CallArgument; + /** + * Id of callframe that holds variable. + */ + callFrameId: CallFrameId; + } + interface SetReturnValueParameterType { + /** + * New return value. + */ + newValue: Runtime.CallArgument; + } + interface SetAsyncCallStackDepthParameterType { + /** + * Maximum depth of async call stacks. Setting to 0 will effectively disable collecting async call stacks (default). + */ + maxDepth: number; + } + interface SetBlackboxPatternsParameterType { + /** + * Array of regexps that will be used to check script url for blackbox state. 
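+             *
+             * For example (illustrative), a pattern such as `"/node_modules/"` would mark
+             * scripts loaded from dependency directories as blackboxed, so the debugger
+             * skips over their frames.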
+ */ + patterns: string[]; + } + interface SetBlackboxedRangesParameterType { + /** + * Id of the script. + */ + scriptId: Runtime.ScriptId; + positions: ScriptPosition[]; + } + interface EnableReturnType { + /** + * Unique identifier of the debugger. + * @experimental + */ + debuggerId: Runtime.UniqueDebuggerId; + } + interface SetBreakpointByUrlReturnType { + /** + * Id of the created breakpoint for further reference. + */ + breakpointId: BreakpointId; + /** + * List of the locations this breakpoint resolved into upon addition. + */ + locations: Location[]; + } + interface SetBreakpointReturnType { + /** + * Id of the created breakpoint for further reference. + */ + breakpointId: BreakpointId; + /** + * Location this breakpoint resolved into. + */ + actualLocation: Location; + } + interface GetPossibleBreakpointsReturnType { + /** + * List of the possible breakpoint locations. + */ + locations: BreakLocation[]; + } + interface GetStackTraceReturnType { + stackTrace: Runtime.StackTrace; + } + interface SearchInContentReturnType { + /** + * List of search matches. + */ + result: SearchMatch[]; + } + interface SetScriptSourceReturnType { + /** + * New stack trace in case editing has happened while VM was stopped. + */ + callFrames?: CallFrame[] | undefined; + /** + * Whether current call stack was modified after applying the changes. + */ + stackChanged?: boolean | undefined; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + /** + * Exception details if any. + */ + exceptionDetails?: Runtime.ExceptionDetails | undefined; + } + interface RestartFrameReturnType { + /** + * New stack trace. + */ + callFrames: CallFrame[]; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + } + interface GetScriptSourceReturnType { + /** + * Script source. + */ + scriptSource: string; + } + interface EvaluateOnCallFrameReturnType { + /** + * Object wrapper for the evaluation result. + */ + result: Runtime.RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: Runtime.ExceptionDetails | undefined; + } + interface ScriptParsedEventDataType { + /** + * Identifier of the script parsed. + */ + scriptId: Runtime.ScriptId; + /** + * URL or name of the script parsed (if any). + */ + url: string; + /** + * Line offset of the script within the resource with given URL (for script tags). + */ + startLine: number; + /** + * Column offset of the script within the resource with given URL. + */ + startColumn: number; + /** + * Last line of the script. + */ + endLine: number; + /** + * Length of the last line of the script. + */ + endColumn: number; + /** + * Specifies script creation context. + */ + executionContextId: Runtime.ExecutionContextId; + /** + * Content hash of the script. + */ + hash: string; + /** + * Embedder-specific auxiliary data. + */ + executionContextAuxData?: {} | undefined; + /** + * True, if this script is generated as a result of the live edit operation. + * @experimental + */ + isLiveEdit?: boolean | undefined; + /** + * URL of source map associated with script (if any). + */ + sourceMapURL?: string | undefined; + /** + * True, if this script has sourceURL. + */ + hasSourceURL?: boolean | undefined; + /** + * True, if this script is ES6 module. 
+ */ + isModule?: boolean | undefined; + /** + * This script length. + */ + length?: number | undefined; + /** + * JavaScript top stack frame of where the script parsed event was triggered if available. + * @experimental + */ + stackTrace?: Runtime.StackTrace | undefined; + } + interface ScriptFailedToParseEventDataType { + /** + * Identifier of the script parsed. + */ + scriptId: Runtime.ScriptId; + /** + * URL or name of the script parsed (if any). + */ + url: string; + /** + * Line offset of the script within the resource with given URL (for script tags). + */ + startLine: number; + /** + * Column offset of the script within the resource with given URL. + */ + startColumn: number; + /** + * Last line of the script. + */ + endLine: number; + /** + * Length of the last line of the script. + */ + endColumn: number; + /** + * Specifies script creation context. + */ + executionContextId: Runtime.ExecutionContextId; + /** + * Content hash of the script. + */ + hash: string; + /** + * Embedder-specific auxiliary data. + */ + executionContextAuxData?: {} | undefined; + /** + * URL of source map associated with script (if any). + */ + sourceMapURL?: string | undefined; + /** + * True, if this script has sourceURL. + */ + hasSourceURL?: boolean | undefined; + /** + * True, if this script is ES6 module. + */ + isModule?: boolean | undefined; + /** + * This script length. + */ + length?: number | undefined; + /** + * JavaScript top stack frame of where the script parsed event was triggered if available. + * @experimental + */ + stackTrace?: Runtime.StackTrace | undefined; + } + interface BreakpointResolvedEventDataType { + /** + * Breakpoint unique identifier. + */ + breakpointId: BreakpointId; + /** + * Actual breakpoint location. + */ + location: Location; + } + interface PausedEventDataType { + /** + * Call stack the virtual machine stopped on. + */ + callFrames: CallFrame[]; + /** + * Pause reason. + */ + reason: string; + /** + * Object containing break-specific auxiliary properties. + */ + data?: {} | undefined; + /** + * Hit breakpoints IDs + */ + hitBreakpoints?: string[] | undefined; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + /** + * Just scheduled async call will have this stack trace as parent stack during async execution. This field is available only after Debugger.stepInto call with breakOnAsynCall flag. + * @experimental + */ + asyncCallStackTraceId?: Runtime.StackTraceId | undefined; + } + } + namespace Console { + /** + * Console message. + */ + interface ConsoleMessage { + /** + * Message source. + */ + source: string; + /** + * Message severity. + */ + level: string; + /** + * Message text. + */ + text: string; + /** + * URL of the message origin. + */ + url?: string | undefined; + /** + * Line number in the resource that generated this message (1-based). + */ + line?: number | undefined; + /** + * Column number in the resource that generated this message (1-based). + */ + column?: number | undefined; + } + interface MessageAddedEventDataType { + /** + * Console message that has been added. + */ + message: ConsoleMessage; + } + } + namespace Profiler { + /** + * Profile node. Holds callsite information, execution statistics and child nodes. + */ + interface ProfileNode { + /** + * Unique id of the node. + */ + id: number; + /** + * Function location. 
+ */ + callFrame: Runtime.CallFrame; + /** + * Number of samples where this node was on top of the call stack. + */ + hitCount?: number | undefined; + /** + * Child node ids. + */ + children?: number[] | undefined; + /** + * The reason of being not optimized. The function may be deoptimized or marked as don't optimize. + */ + deoptReason?: string | undefined; + /** + * An array of source position ticks. + */ + positionTicks?: PositionTickInfo[] | undefined; + } + /** + * Profile. + */ + interface Profile { + /** + * The list of profile nodes. First item is the root node. + */ + nodes: ProfileNode[]; + /** + * Profiling start timestamp in microseconds. + */ + startTime: number; + /** + * Profiling end timestamp in microseconds. + */ + endTime: number; + /** + * Ids of samples top nodes. + */ + samples?: number[] | undefined; + /** + * Time intervals between adjacent samples in microseconds. The first delta is relative to the profile startTime. + */ + timeDeltas?: number[] | undefined; + } + /** + * Specifies a number of samples attributed to a certain source position. + */ + interface PositionTickInfo { + /** + * Source line number (1-based). + */ + line: number; + /** + * Number of samples attributed to the source line. + */ + ticks: number; + } + /** + * Coverage data for a source range. + */ + interface CoverageRange { + /** + * JavaScript script source offset for the range start. + */ + startOffset: number; + /** + * JavaScript script source offset for the range end. + */ + endOffset: number; + /** + * Collected execution count of the source range. + */ + count: number; + } + /** + * Coverage data for a JavaScript function. + */ + interface FunctionCoverage { + /** + * JavaScript function name. + */ + functionName: string; + /** + * Source ranges inside the function with coverage data. + */ + ranges: CoverageRange[]; + /** + * Whether coverage data for this function has block granularity. + */ + isBlockCoverage: boolean; + } + /** + * Coverage data for a JavaScript script. + */ + interface ScriptCoverage { + /** + * JavaScript script id. + */ + scriptId: Runtime.ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Functions contained in the script that has coverage data. + */ + functions: FunctionCoverage[]; + } + /** + * Describes a type collected during runtime. + * @experimental + */ + interface TypeObject { + /** + * Name of a type collected with type profiling. + */ + name: string; + } + /** + * Source offset and types for a parameter or return value. + * @experimental + */ + interface TypeProfileEntry { + /** + * Source offset of the parameter or end of function for return values. + */ + offset: number; + /** + * The types for this parameter or return value. + */ + types: TypeObject[]; + } + /** + * Type profile data collected during runtime for a JavaScript script. + * @experimental + */ + interface ScriptTypeProfile { + /** + * JavaScript script id. + */ + scriptId: Runtime.ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Type profile entries for parameters and return values of the functions in the script. + */ + entries: TypeProfileEntry[]; + } + interface SetSamplingIntervalParameterType { + /** + * New sampling interval in microseconds. + */ + interval: number; + } + interface StartPreciseCoverageParameterType { + /** + * Collect accurate call counts beyond simple 'covered' or 'not covered'. + */ + callCount?: boolean | undefined; + /** + * Collect block-based coverage. 
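+             *
+             * A minimal sketch of starting precise coverage through the generic
+             * `post(method, params, callback)` overload, assuming a connected `Session`
+             * (illustrative only):
+             *
+             * ```js
+             * const inspector = require('node:inspector');
+             * const session = new inspector.Session();
+             * session.connect();
+             * session.post('Profiler.enable', () => {
+             *   session.post('Profiler.startPreciseCoverage', { callCount: true, detailed: true }, (err) => {
+             *     if (err) console.error(err);
+             *   });
+             * });
+             * ```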
+ */ + detailed?: boolean | undefined; + } + interface StopReturnType { + /** + * Recorded profile. + */ + profile: Profile; + } + interface TakePreciseCoverageReturnType { + /** + * Coverage data for the current isolate. + */ + result: ScriptCoverage[]; + } + interface GetBestEffortCoverageReturnType { + /** + * Coverage data for the current isolate. + */ + result: ScriptCoverage[]; + } + interface TakeTypeProfileReturnType { + /** + * Type profile for all scripts since startTypeProfile() was turned on. + */ + result: ScriptTypeProfile[]; + } + interface ConsoleProfileStartedEventDataType { + id: string; + /** + * Location of console.profile(). + */ + location: Debugger.Location; + /** + * Profile title passed as an argument to console.profile(). + */ + title?: string | undefined; + } + interface ConsoleProfileFinishedEventDataType { + id: string; + /** + * Location of console.profileEnd(). + */ + location: Debugger.Location; + profile: Profile; + /** + * Profile title passed as an argument to console.profile(). + */ + title?: string | undefined; + } + } + namespace HeapProfiler { + /** + * Heap snapshot object id. + */ + type HeapSnapshotObjectId = string; + /** + * Sampling Heap Profile node. Holds callsite information, allocation statistics and child nodes. + */ + interface SamplingHeapProfileNode { + /** + * Function location. + */ + callFrame: Runtime.CallFrame; + /** + * Allocations size in bytes for the node excluding children. + */ + selfSize: number; + /** + * Child nodes. + */ + children: SamplingHeapProfileNode[]; + } + /** + * Profile. + */ + interface SamplingHeapProfile { + head: SamplingHeapProfileNode; + } + interface StartTrackingHeapObjectsParameterType { + trackAllocations?: boolean | undefined; + } + interface StopTrackingHeapObjectsParameterType { + /** + * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken when the tracking is stopped. + */ + reportProgress?: boolean | undefined; + } + interface TakeHeapSnapshotParameterType { + /** + * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken. + */ + reportProgress?: boolean | undefined; + } + interface GetObjectByHeapObjectIdParameterType { + objectId: HeapSnapshotObjectId; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + } + interface AddInspectedHeapObjectParameterType { + /** + * Heap snapshot object id to be accessible by means of $x command line API. + */ + heapObjectId: HeapSnapshotObjectId; + } + interface GetHeapObjectIdParameterType { + /** + * Identifier of the object to get heap object id for. + */ + objectId: Runtime.RemoteObjectId; + } + interface StartSamplingParameterType { + /** + * Average sample interval in bytes. Poisson distribution is used for the intervals. The default value is 32768 bytes. + */ + samplingInterval?: number | undefined; + } + interface GetObjectByHeapObjectIdReturnType { + /** + * Evaluation result. + */ + result: Runtime.RemoteObject; + } + interface GetHeapObjectIdReturnType { + /** + * Id of the heap snapshot object corresponding to the passed remote object id. + */ + heapSnapshotObjectId: HeapSnapshotObjectId; + } + interface StopSamplingReturnType { + /** + * Recorded sampling heap profile. + */ + profile: SamplingHeapProfile; + } + interface GetSamplingProfileReturnType { + /** + * Return the sampling profile being collected. 
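+             *
+             * A minimal sketch, assuming a connected `Session` named `session` on which
+             * sampling was previously started via `HeapProfiler.startSampling`
+             * (illustrative only):
+             *
+             * ```js
+             * session.post('HeapProfiler.getSamplingProfile', (err, res) => {
+             *   if (err) return console.error(err);
+             *   console.log(res.profile.head.children.length);
+             * });
+             * ```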
+ */ + profile: SamplingHeapProfile; + } + interface AddHeapSnapshotChunkEventDataType { + chunk: string; + } + interface ReportHeapSnapshotProgressEventDataType { + done: number; + total: number; + finished?: boolean | undefined; + } + interface LastSeenObjectIdEventDataType { + lastSeenObjectId: number; + timestamp: number; + } + interface HeapStatsUpdateEventDataType { + /** + * An array of triplets. Each triplet describes a fragment. The first integer is the fragment index, the second integer is a total count of objects for the fragment, the third integer is a total size of the objects for the fragment. + */ + statsUpdate: number[]; + } + } + namespace NodeTracing { + interface TraceConfig { + /** + * Controls how the trace buffer stores data. + */ + recordMode?: string | undefined; + /** + * Included category filters. + */ + includedCategories: string[]; + } + interface StartParameterType { + traceConfig: TraceConfig; + } + interface GetCategoriesReturnType { + /** + * A list of supported tracing categories. + */ + categories: string[]; + } + interface DataCollectedEventDataType { + value: Array<{}>; + } + } + namespace NodeWorker { + type WorkerID = string; + /** + * Unique identifier of attached debugging session. + */ + type SessionID = string; + interface WorkerInfo { + workerId: WorkerID; + type: string; + title: string; + url: string; + } + interface SendMessageToWorkerParameterType { + message: string; + /** + * Identifier of the session. + */ + sessionId: SessionID; + } + interface EnableParameterType { + /** + * Whether to new workers should be paused until the frontend sends `Runtime.runIfWaitingForDebugger` + * message to run them. + */ + waitForDebuggerOnStart: boolean; + } + interface DetachParameterType { + sessionId: SessionID; + } + interface AttachedToWorkerEventDataType { + /** + * Identifier assigned to the session used to send/receive messages. + */ + sessionId: SessionID; + workerInfo: WorkerInfo; + waitingForDebugger: boolean; + } + interface DetachedFromWorkerEventDataType { + /** + * Detached session identifier. + */ + sessionId: SessionID; + } + interface ReceivedMessageFromWorkerEventDataType { + /** + * Identifier of a session which sends a message. + */ + sessionId: SessionID; + message: string; + } + } + namespace NodeRuntime { + interface NotifyWhenWaitingForDisconnectParameterType { + enabled: boolean; + } + } + /** + * The `inspector.Session` is used for dispatching messages to the V8 inspector + * back-end and receiving message responses and notifications. + */ + class Session extends EventEmitter { + /** + * Create a new instance of the inspector.Session class. + * The inspector session needs to be connected through session.connect() before the messages can be dispatched to the inspector backend. + */ + constructor(); + /** + * Connects a session to the inspector back-end. + * @since v8.0.0 + */ + connect(): void; + /** + * Immediately close the session. All pending message callbacks will be called + * with an error. `session.connect()` will need to be called to be able to send + * messages again. Reconnected session will lose all inspector state, such as + * enabled agents or configured breakpoints. + * @since v8.0.0 + */ + disconnect(): void; + /** + * Posts a message to the inspector back-end. `callback` will be notified when + * a response is received. `callback` is a function that accepts two optional + * arguments: error and message-specific result. 
+ * + * ```js + * session.post('Runtime.evaluate', { expression: '2 + 2' }, + * (error, { result }) => console.log(result)); + * // Output: { type: 'number', value: 4, description: '4' } + * ``` + * + * The latest version of the V8 inspector protocol is published on the [Chrome DevTools Protocol Viewer](https://chromedevtools.github.io/devtools-protocol/v8/). + * + * Node.js inspector supports all the Chrome DevTools Protocol domains declared + * by V8\. Chrome DevTools Protocol domain provides an interface for interacting + * with one of the runtime agents used to inspect the application state and listen + * to the run-time events. + * + * ## Example usage + * + * Apart from the debugger, various V8 Profilers are available through the DevTools + * protocol. + * @since v8.0.0 + */ + post(method: string, params?: {}, callback?: (err: Error | null, params?: {}) => void): void; + post(method: string, callback?: (err: Error | null, params?: {}) => void): void; + /** + * Returns supported domains. + */ + post(method: 'Schema.getDomains', callback?: (err: Error | null, params: Schema.GetDomainsReturnType) => void): void; + /** + * Evaluates expression on global object. + */ + post(method: 'Runtime.evaluate', params?: Runtime.EvaluateParameterType, callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; + post(method: 'Runtime.evaluate', callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; + /** + * Add handler to promise with given promise object id. + */ + post(method: 'Runtime.awaitPromise', params?: Runtime.AwaitPromiseParameterType, callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; + post(method: 'Runtime.awaitPromise', callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; + /** + * Calls function with given declaration on the given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.callFunctionOn', params?: Runtime.CallFunctionOnParameterType, callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; + post(method: 'Runtime.callFunctionOn', callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; + /** + * Returns properties of a given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.getProperties', params?: Runtime.GetPropertiesParameterType, callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; + post(method: 'Runtime.getProperties', callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; + /** + * Releases remote object with given id. + */ + post(method: 'Runtime.releaseObject', params?: Runtime.ReleaseObjectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.releaseObject', callback?: (err: Error | null) => void): void; + /** + * Releases all remote objects that belong to a given group. + */ + post(method: 'Runtime.releaseObjectGroup', params?: Runtime.ReleaseObjectGroupParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.releaseObjectGroup', callback?: (err: Error | null) => void): void; + /** + * Tells inspected instance to run if it was waiting for debugger to attach. 
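+         *
+         * A minimal sketch, assuming the inspected process was started in a state where it
+         * waits for a debugger (for example with `--inspect-brk`) and `session` is a
+         * connected `Session` (illustrative only):
+         *
+         * ```js
+         * session.post('Runtime.runIfWaitingForDebugger', (err) => {
+         *   if (err) console.error(err);
+         * });
+         * ```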
+ */ + post(method: 'Runtime.runIfWaitingForDebugger', callback?: (err: Error | null) => void): void; + /** + * Enables reporting of execution contexts creation by means of executionContextCreated event. When the reporting gets enabled the event will be sent immediately for each existing execution context. + */ + post(method: 'Runtime.enable', callback?: (err: Error | null) => void): void; + /** + * Disables reporting of execution contexts creation. + */ + post(method: 'Runtime.disable', callback?: (err: Error | null) => void): void; + /** + * Discards collected exceptions and console API calls. + */ + post(method: 'Runtime.discardConsoleEntries', callback?: (err: Error | null) => void): void; + /** + * @experimental + */ + post(method: 'Runtime.setCustomObjectFormatterEnabled', params?: Runtime.SetCustomObjectFormatterEnabledParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.setCustomObjectFormatterEnabled', callback?: (err: Error | null) => void): void; + /** + * Compiles expression. + */ + post(method: 'Runtime.compileScript', params?: Runtime.CompileScriptParameterType, callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; + post(method: 'Runtime.compileScript', callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; + /** + * Runs script with given id in a given context. + */ + post(method: 'Runtime.runScript', params?: Runtime.RunScriptParameterType, callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; + post(method: 'Runtime.runScript', callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; + post(method: 'Runtime.queryObjects', params?: Runtime.QueryObjectsParameterType, callback?: (err: Error | null, params: Runtime.QueryObjectsReturnType) => void): void; + post(method: 'Runtime.queryObjects', callback?: (err: Error | null, params: Runtime.QueryObjectsReturnType) => void): void; + /** + * Returns all let, const and class variables from global scope. + */ + post( + method: 'Runtime.globalLexicalScopeNames', + params?: Runtime.GlobalLexicalScopeNamesParameterType, + callback?: (err: Error | null, params: Runtime.GlobalLexicalScopeNamesReturnType) => void + ): void; + post(method: 'Runtime.globalLexicalScopeNames', callback?: (err: Error | null, params: Runtime.GlobalLexicalScopeNamesReturnType) => void): void; + /** + * Enables debugger for the given page. Clients should not assume that the debugging has been enabled until the result for this command is received. + */ + post(method: 'Debugger.enable', callback?: (err: Error | null, params: Debugger.EnableReturnType) => void): void; + /** + * Disables debugger for given page. + */ + post(method: 'Debugger.disable', callback?: (err: Error | null) => void): void; + /** + * Activates / deactivates all breakpoints on the page. + */ + post(method: 'Debugger.setBreakpointsActive', params?: Debugger.SetBreakpointsActiveParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBreakpointsActive', callback?: (err: Error | null) => void): void; + /** + * Makes page not interrupt on any pauses (breakpoint, exception, dom exception etc). 
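+         *
+         * A minimal sketch, assuming a connected `Session` named `session` with the
+         * `Debugger` domain enabled (illustrative only):
+         *
+         * ```js
+         * session.post('Debugger.setSkipAllPauses', { skip: true }, (err) => {
+         *   if (err) console.error(err);
+         * });
+         * ```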
+ */ + post(method: 'Debugger.setSkipAllPauses', params?: Debugger.SetSkipAllPausesParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setSkipAllPauses', callback?: (err: Error | null) => void): void; + /** + * Sets JavaScript breakpoint at given location specified either by URL or URL regex. Once this command is issued, all existing parsed scripts will have breakpoints resolved and returned in locations property. Further matching script parsing will result in subsequent breakpointResolved events issued. This logical breakpoint will survive page reloads. + */ + post(method: 'Debugger.setBreakpointByUrl', params?: Debugger.SetBreakpointByUrlParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; + post(method: 'Debugger.setBreakpointByUrl', callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; + /** + * Sets JavaScript breakpoint at a given location. + */ + post(method: 'Debugger.setBreakpoint', params?: Debugger.SetBreakpointParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; + post(method: 'Debugger.setBreakpoint', callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; + /** + * Removes JavaScript breakpoint. + */ + post(method: 'Debugger.removeBreakpoint', params?: Debugger.RemoveBreakpointParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.removeBreakpoint', callback?: (err: Error | null) => void): void; + /** + * Returns possible locations for breakpoint. scriptId in start and end range locations should be the same. + */ + post( + method: 'Debugger.getPossibleBreakpoints', + params?: Debugger.GetPossibleBreakpointsParameterType, + callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void + ): void; + post(method: 'Debugger.getPossibleBreakpoints', callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void): void; + /** + * Continues execution until specific location is reached. + */ + post(method: 'Debugger.continueToLocation', params?: Debugger.ContinueToLocationParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.continueToLocation', callback?: (err: Error | null) => void): void; + /** + * @experimental + */ + post(method: 'Debugger.pauseOnAsyncCall', params?: Debugger.PauseOnAsyncCallParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.pauseOnAsyncCall', callback?: (err: Error | null) => void): void; + /** + * Steps over the statement. + */ + post(method: 'Debugger.stepOver', callback?: (err: Error | null) => void): void; + /** + * Steps into the function call. + */ + post(method: 'Debugger.stepInto', params?: Debugger.StepIntoParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.stepInto', callback?: (err: Error | null) => void): void; + /** + * Steps out of the function call. + */ + post(method: 'Debugger.stepOut', callback?: (err: Error | null) => void): void; + /** + * Stops on the next JavaScript statement. + */ + post(method: 'Debugger.pause', callback?: (err: Error | null) => void): void; + /** + * This method is deprecated - use Debugger.stepInto with breakOnAsyncCall and Debugger.pauseOnAsyncTask instead. Steps into next scheduled async task if any is scheduled before next pause. 
Returns success when async task is actually scheduled, returns error if no task were scheduled or another scheduleStepIntoAsync was called. + * @experimental + */ + post(method: 'Debugger.scheduleStepIntoAsync', callback?: (err: Error | null) => void): void; + /** + * Resumes JavaScript execution. + */ + post(method: 'Debugger.resume', callback?: (err: Error | null) => void): void; + /** + * Returns stack trace with given stackTraceId. + * @experimental + */ + post(method: 'Debugger.getStackTrace', params?: Debugger.GetStackTraceParameterType, callback?: (err: Error | null, params: Debugger.GetStackTraceReturnType) => void): void; + post(method: 'Debugger.getStackTrace', callback?: (err: Error | null, params: Debugger.GetStackTraceReturnType) => void): void; + /** + * Searches for given string in script content. + */ + post(method: 'Debugger.searchInContent', params?: Debugger.SearchInContentParameterType, callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; + post(method: 'Debugger.searchInContent', callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; + /** + * Edits JavaScript source live. + */ + post(method: 'Debugger.setScriptSource', params?: Debugger.SetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; + post(method: 'Debugger.setScriptSource', callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; + /** + * Restarts particular call frame from the beginning. + */ + post(method: 'Debugger.restartFrame', params?: Debugger.RestartFrameParameterType, callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; + post(method: 'Debugger.restartFrame', callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; + /** + * Returns source for the script with given id. + */ + post(method: 'Debugger.getScriptSource', params?: Debugger.GetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; + post(method: 'Debugger.getScriptSource', callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; + /** + * Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions or no exceptions. Initial pause on exceptions state is none. + */ + post(method: 'Debugger.setPauseOnExceptions', params?: Debugger.SetPauseOnExceptionsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setPauseOnExceptions', callback?: (err: Error | null) => void): void; + /** + * Evaluates expression on a given call frame. + */ + post(method: 'Debugger.evaluateOnCallFrame', params?: Debugger.EvaluateOnCallFrameParameterType, callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; + post(method: 'Debugger.evaluateOnCallFrame', callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; + /** + * Changes value of variable in a callframe. Object-based scopes are not supported and must be mutated manually. + */ + post(method: 'Debugger.setVariableValue', params?: Debugger.SetVariableValueParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setVariableValue', callback?: (err: Error | null) => void): void; + /** + * Changes return value in top frame. Available only at return break position. 
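A sketch of the typical breakpoint workflow built from the `Debugger` methods declared here, assuming a hypothetical script URL, line number, and inspected expression:

```js
const inspector = require('node:inspector');

const session = new inspector.Session();
session.connect();

session.on('Debugger.paused', ({ params }) => {
  const { callFrameId } = params.callFrames[0];
  // Inspect a value in the paused frame, then let execution continue.
  session.post('Debugger.evaluateOnCallFrame', {
    callFrameId,
    expression: 'someLocalVariable', // hypothetical expression
  }, (err, res) => {
    if (!err) console.log('paused value:', res.result);
    session.post('Debugger.resume');
  });
});

session.post('Debugger.enable', () => {
  session.post('Debugger.setBreakpointByUrl', {
    url: 'file:///path/to/app.js', // hypothetical script URL
    lineNumber: 9,                 // zero-based, as the protocol expects
  });
});
```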
+ * @experimental
+ */
+ post(method: 'Debugger.setReturnValue', params?: Debugger.SetReturnValueParameterType, callback?: (err: Error | null) => void): void;
+ post(method: 'Debugger.setReturnValue', callback?: (err: Error | null) => void): void;
+ /**
+ * Enables or disables async call stacks tracking.
+ */
+ post(method: 'Debugger.setAsyncCallStackDepth', params?: Debugger.SetAsyncCallStackDepthParameterType, callback?: (err: Error | null) => void): void;
+ post(method: 'Debugger.setAsyncCallStackDepth', callback?: (err: Error | null) => void): void;
+ /**
+ * Replace previous blackbox patterns with passed ones. Forces backend to skip stepping/pausing in scripts with url matching one of the patterns. VM will try to leave blackboxed script by performing 'step in' several times, finally resorting to 'step out' if unsuccessful.
+ * @experimental
+ */
+ post(method: 'Debugger.setBlackboxPatterns', params?: Debugger.SetBlackboxPatternsParameterType, callback?: (err: Error | null) => void): void;
+ post(method: 'Debugger.setBlackboxPatterns', callback?: (err: Error | null) => void): void;
+ /**
+ * Makes backend skip steps in the script in blackboxed ranges. VM will try to leave blacklisted scripts by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. Positions array contains positions where blackbox state is changed. First interval isn't blackboxed. Array should be sorted.
+ * @experimental
+ */
+ post(method: 'Debugger.setBlackboxedRanges', params?: Debugger.SetBlackboxedRangesParameterType, callback?: (err: Error | null) => void): void;
+ post(method: 'Debugger.setBlackboxedRanges', callback?: (err: Error | null) => void): void;
+ /**
+ * Enables console domain, sends the messages collected so far to the client by means of the messageAdded notification.
+ */
+ post(method: 'Console.enable', callback?: (err: Error | null) => void): void;
+ /**
+ * Disables console domain, prevents further console messages from being reported to the client.
+ */
+ post(method: 'Console.disable', callback?: (err: Error | null) => void): void;
+ /**
+ * Does nothing.
+ */
+ post(method: 'Console.clearMessages', callback?: (err: Error | null) => void): void;
+ post(method: 'Profiler.enable', callback?: (err: Error | null) => void): void;
+ post(method: 'Profiler.disable', callback?: (err: Error | null) => void): void;
+ /**
+ * Changes CPU profiler sampling interval. Must be called before CPU profile recording is started.
+ */
+ post(method: 'Profiler.setSamplingInterval', params?: Profiler.SetSamplingIntervalParameterType, callback?: (err: Error | null) => void): void;
+ post(method: 'Profiler.setSamplingInterval', callback?: (err: Error | null) => void): void;
+ post(method: 'Profiler.start', callback?: (err: Error | null) => void): void;
+ post(method: 'Profiler.stop', callback?: (err: Error | null, params: Profiler.StopReturnType) => void): void;
+ /**
+ * Enable precise code coverage. Coverage data for JavaScript executed before enabling precise code coverage may be incomplete. Enabling prevents running optimized code and resets execution counters.
+ */
+ post(method: 'Profiler.startPreciseCoverage', params?: Profiler.StartPreciseCoverageParameterType, callback?: (err: Error | null) => void): void;
+ post(method: 'Profiler.startPreciseCoverage', callback?: (err: Error | null) => void): void;
+ /**
+ * Disable precise code coverage. Disabling releases unnecessary execution count records and allows executing optimized code.
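Putting the `Profiler` methods above together, a minimal sketch that records a CPU profile around a hypothetical `runWorkload()` function and writes it to an arbitrarily named file:

```js
const fs = require('node:fs');
const inspector = require('node:inspector');

const session = new inspector.Session();
session.connect();

session.post('Profiler.enable', () => {
  session.post('Profiler.start', () => {
    runWorkload(); // hypothetical function whose CPU usage we want to sample

    // Profiler.stop reports { profile }, ready to load into DevTools.
    session.post('Profiler.stop', (err, { profile }) => {
      if (!err) fs.writeFileSync('./profile.cpuprofile', JSON.stringify(profile));
      session.disconnect();
    });
  });
});
```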
+ */ + post(method: 'Profiler.stopPreciseCoverage', callback?: (err: Error | null) => void): void; + /** + * Collect coverage data for the current isolate, and resets execution counters. Precise code coverage needs to have started. + */ + post(method: 'Profiler.takePreciseCoverage', callback?: (err: Error | null, params: Profiler.TakePreciseCoverageReturnType) => void): void; + /** + * Collect coverage data for the current isolate. The coverage data may be incomplete due to garbage collection. + */ + post(method: 'Profiler.getBestEffortCoverage', callback?: (err: Error | null, params: Profiler.GetBestEffortCoverageReturnType) => void): void; + /** + * Enable type profile. + * @experimental + */ + post(method: 'Profiler.startTypeProfile', callback?: (err: Error | null) => void): void; + /** + * Disable type profile. Disabling releases type profile data collected so far. + * @experimental + */ + post(method: 'Profiler.stopTypeProfile', callback?: (err: Error | null) => void): void; + /** + * Collect type profile. + * @experimental + */ + post(method: 'Profiler.takeTypeProfile', callback?: (err: Error | null, params: Profiler.TakeTypeProfileReturnType) => void): void; + post(method: 'HeapProfiler.enable', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.disable', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startTrackingHeapObjects', params?: HeapProfiler.StartTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startTrackingHeapObjects', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopTrackingHeapObjects', params?: HeapProfiler.StopTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopTrackingHeapObjects', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.takeHeapSnapshot', params?: HeapProfiler.TakeHeapSnapshotParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.takeHeapSnapshot', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.collectGarbage', callback?: (err: Error | null) => void): void; + post( + method: 'HeapProfiler.getObjectByHeapObjectId', + params?: HeapProfiler.GetObjectByHeapObjectIdParameterType, + callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void + ): void; + post(method: 'HeapProfiler.getObjectByHeapObjectId', callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void): void; + /** + * Enables console to refer to the node with given id via $x (see Command Line API for more details $x functions). 
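The heap-snapshot method above streams its payload through `HeapProfiler.addHeapSnapshotChunk` notifications (typed further below). A minimal sketch, with an arbitrarily chosen output path:

```js
const fs = require('node:fs');
const inspector = require('node:inspector');

const session = new inspector.Session();
const fd = fs.openSync('./app.heapsnapshot', 'w'); // arbitrary output file
session.connect();

// Snapshot data arrives in chunks while takeHeapSnapshot is running.
session.on('HeapProfiler.addHeapSnapshotChunk', (m) => {
  fs.writeSync(fd, m.params.chunk);
});

session.post('HeapProfiler.takeHeapSnapshot', null, (err) => {
  if (err) console.error(err);
  session.disconnect();
  fs.closeSync(fd);
});
```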
+ */
+ post(method: 'HeapProfiler.addInspectedHeapObject', params?: HeapProfiler.AddInspectedHeapObjectParameterType, callback?: (err: Error | null) => void): void;
+ post(method: 'HeapProfiler.addInspectedHeapObject', callback?: (err: Error | null) => void): void;
+ post(method: 'HeapProfiler.getHeapObjectId', params?: HeapProfiler.GetHeapObjectIdParameterType, callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void;
+ post(method: 'HeapProfiler.getHeapObjectId', callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void;
+ post(method: 'HeapProfiler.startSampling', params?: HeapProfiler.StartSamplingParameterType, callback?: (err: Error | null) => void): void;
+ post(method: 'HeapProfiler.startSampling', callback?: (err: Error | null) => void): void;
+ post(method: 'HeapProfiler.stopSampling', callback?: (err: Error | null, params: HeapProfiler.StopSamplingReturnType) => void): void;
+ post(method: 'HeapProfiler.getSamplingProfile', callback?: (err: Error | null, params: HeapProfiler.GetSamplingProfileReturnType) => void): void;
+ /**
+ * Gets supported tracing categories.
+ */
+ post(method: 'NodeTracing.getCategories', callback?: (err: Error | null, params: NodeTracing.GetCategoriesReturnType) => void): void;
+ /**
+ * Start trace events collection.
+ */
+ post(method: 'NodeTracing.start', params?: NodeTracing.StartParameterType, callback?: (err: Error | null) => void): void;
+ post(method: 'NodeTracing.start', callback?: (err: Error | null) => void): void;
+ /**
+ * Stop trace events collection. Remaining collected events will be sent as a sequence of
+ * dataCollected events followed by tracingComplete event.
+ */
+ post(method: 'NodeTracing.stop', callback?: (err: Error | null) => void): void;
+ /**
+ * Sends protocol message over session with given id.
+ */
+ post(method: 'NodeWorker.sendMessageToWorker', params?: NodeWorker.SendMessageToWorkerParameterType, callback?: (err: Error | null) => void): void;
+ post(method: 'NodeWorker.sendMessageToWorker', callback?: (err: Error | null) => void): void;
+ /**
+ * Instructs the inspector to attach to running workers. Will also attach to new workers
+ * as they start.
+ */
+ post(method: 'NodeWorker.enable', params?: NodeWorker.EnableParameterType, callback?: (err: Error | null) => void): void;
+ post(method: 'NodeWorker.enable', callback?: (err: Error | null) => void): void;
+ /**
+ * Detaches from all running workers and disables attaching to new workers as they are started.
+ */
+ post(method: 'NodeWorker.disable', callback?: (err: Error | null) => void): void;
+ /**
+ * Detaches from the worker with given sessionId.
+ */
+ post(method: 'NodeWorker.detach', params?: NodeWorker.DetachParameterType, callback?: (err: Error | null) => void): void;
+ post(method: 'NodeWorker.detach', callback?: (err: Error | null) => void): void;
+ /**
+ * Enable the `NodeRuntime.waitingForDisconnect`.
+ */
+ post(method: 'NodeRuntime.notifyWhenWaitingForDisconnect', params?: NodeRuntime.NotifyWhenWaitingForDisconnectParameterType, callback?: (err: Error | null) => void): void;
+ post(method: 'NodeRuntime.notifyWhenWaitingForDisconnect', callback?: (err: Error | null) => void): void;
+ // Events
+ addListener(event: string, listener: (...args: any[]) => void): this;
+ /**
+ * Emitted when any notification from the V8 Inspector is received.
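A sketch of trace-event collection with the `NodeTracing` methods above; the `node.perf` category and the one-second window are illustrative assumptions:

```js
const inspector = require('node:inspector');

const session = new inspector.Session();
session.connect();

const events = [];
session.on('NodeTracing.dataCollected', ({ params }) => {
  events.push(...params.value);
});
session.on('NodeTracing.tracingComplete', () => {
  console.log(`collected ${events.length} trace events`);
  session.disconnect();
});

session.post('NodeTracing.start', {
  traceConfig: { includedCategories: ['node.perf'] }, // assumed category
}, (err) => {
  if (err) throw err;
  // Stop after an arbitrary interval; remaining events are flushed as
  // dataCollected notifications followed by tracingComplete.
  setTimeout(() => session.post('NodeTracing.stop'), 1000);
});
```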
+ */ + addListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + addListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + addListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + addListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + addListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + addListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + addListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + addListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + addListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + addListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + addListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + addListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + addListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + addListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + addListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + addListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + addListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. 
+ */ + addListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + addListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + addListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + addListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + addListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + addListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + addListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + addListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'inspectorNotification', message: InspectorNotification<{}>): boolean; + emit(event: 'Runtime.executionContextCreated', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextDestroyed', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextsCleared'): boolean; + emit(event: 'Runtime.exceptionThrown', message: InspectorNotification): boolean; + emit(event: 'Runtime.exceptionRevoked', message: InspectorNotification): boolean; + emit(event: 'Runtime.consoleAPICalled', message: InspectorNotification): boolean; + emit(event: 'Runtime.inspectRequested', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptParsed', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptFailedToParse', message: InspectorNotification): boolean; + emit(event: 'Debugger.breakpointResolved', message: InspectorNotification): boolean; + emit(event: 'Debugger.paused', message: InspectorNotification): boolean; + emit(event: 'Debugger.resumed'): boolean; + emit(event: 'Console.messageAdded', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileStarted', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileFinished', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.addHeapSnapshotChunk', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.resetProfiles'): boolean; + emit(event: 'HeapProfiler.reportHeapSnapshotProgress', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.lastSeenObjectId', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.heapStatsUpdate', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.dataCollected', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.tracingComplete'): boolean; + 
emit(event: 'NodeWorker.attachedToWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.detachedFromWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.receivedMessageFromWorker', message: InspectorNotification): boolean; + emit(event: 'NodeRuntime.waitingForDisconnect'): boolean; + on(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + on(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + on(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + on(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + on(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + on(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + on(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + on(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + on(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + on(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + on(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + on(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + on(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + on(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + on(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. 
+ */ + on(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + on(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + on(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + on(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + on(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + on(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + on(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + on(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + on(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + on(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + on(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + once(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + once(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + once(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + once(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + once(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + once(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. 
+ */ + once(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + once(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + once(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + once(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + once(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + once(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + once(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + once(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + once(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + once(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + once(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + once(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + once(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + once(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + once(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + once(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + once(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). 
+ */ + once(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + once(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + prependListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + prependListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + prependListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. 
+ */ + prependListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + prependListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + prependListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + prependListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + prependListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + prependListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependOnceListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + prependOnceListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependOnceListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependOnceListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. 
+ */ + prependOnceListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependOnceListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependOnceListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependOnceListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependOnceListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependOnceListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependOnceListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependOnceListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependOnceListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + prependOnceListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + prependOnceListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependOnceListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependOnceListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependOnceListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependOnceListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. 
+ */
+ prependOnceListener(event: 'NodeTracing.tracingComplete', listener: () => void): this;
+ /**
+ * Issued when attached to a worker.
+ */
+ prependOnceListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this;
+ /**
+ * Issued when detached from the worker.
+ */
+ prependOnceListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this;
+ /**
+ * Notifies about a new protocol message received from the session
+ * (session ID is provided in attachedToWorker notification).
+ */
+ prependOnceListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this;
+ /**
+ * This event is fired instead of `Runtime.executionContextDestroyed` when
+ * enabled.
+ * It is fired when the Node process finished all code execution and is
+ * waiting for all frontends to disconnect.
+ */
+ prependOnceListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this;
+ }
+ /**
+ * Activate inspector on host and port. Equivalent to `node --inspect=[[host:]port]`, but can be done programmatically after node has
+ * started.
+ *
+ * If wait is `true`, will block until a client has connected to the inspect port
+ * and flow control has been passed to the debugger client.
+ *
+ * See the `security warning` regarding the `host` parameter usage.
+ * @param [port='what was specified on the CLI'] Port to listen on for inspector connections. Optional.
+ * @param [host='what was specified on the CLI'] Host to listen on for inspector connections. Optional.
+ * @param [wait=false] Block until a client has connected. Optional.
+ * @returns Disposable that calls `inspector.close()`.
+ */
+ function open(port?: number, host?: string, wait?: boolean): Disposable;
+ /**
+ * Deactivate the inspector. Blocks until there are no active connections.
+ */
+ function close(): void;
+ /**
+ * Return the URL of the active inspector, or `undefined` if there is none.
+ *
+ * ```console
+ * $ node --inspect -p 'inspector.url()'
+ * Debugger listening on ws://127.0.0.1:9229/166e272e-7a30-4d09-97ce-f1c012b43c34
+ * For help, see: https://nodejs.org/en/docs/inspector
+ * ws://127.0.0.1:9229/166e272e-7a30-4d09-97ce-f1c012b43c34
+ *
+ * $ node --inspect=localhost:3000 -p 'inspector.url()'
+ * Debugger listening on ws://localhost:3000/51cf8d0e-3c36-4c59-8efd-54519839e56a
+ * For help, see: https://nodejs.org/en/docs/inspector
+ * ws://localhost:3000/51cf8d0e-3c36-4c59-8efd-54519839e56a
+ *
+ * $ node -p 'inspector.url()'
+ * undefined
+ * ```
+ */
+ function url(): string | undefined;
+ /**
+ * Blocks until a client (existing or connected later) has sent `Runtime.runIfWaitingForDebugger` command.
+ *
+ * An exception will be thrown if there is no active inspector.
+ * @since v12.7.0
+ */
+ function waitForDebugger(): void;
+}
+/**
+ * The inspector module provides an API for interacting with the V8 inspector.
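A short sketch of the module-level functions documented above (`open()`, `url()`, `close()`); the port is arbitrary:

```js
const inspector = require('node:inspector');

// Start the inspector on an arbitrary port without blocking.
inspector.open(9230, '127.0.0.1');
console.log(inspector.url()); // e.g. ws://127.0.0.1:9230/<uuid>

// Optionally block until a client sends Runtime.runIfWaitingForDebugger:
// inspector.waitForDebugger();

// When no frontend is needed any more:
inspector.close();
```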
+ */ +declare module 'node:inspector' { + import inspector = require('inspector'); + export = inspector; +} diff --git a/dist/node_modules/@types/node/ts4.8/module.d.ts b/dist/node_modules/@types/node/ts4.8/module.d.ts new file mode 100644 index 0000000..c38a9b8 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/module.d.ts @@ -0,0 +1,315 @@ +/** + * @since v0.3.7 + * @experimental + */ +declare module "module" { + import { URL } from "node:url"; + import { MessagePort } from "node:worker_threads"; + namespace Module { + /** + * The `module.syncBuiltinESMExports()` method updates all the live bindings for + * builtin `ES Modules` to match the properties of the `CommonJS` exports. It + * does not add or remove exported names from the `ES Modules`. + * + * ```js + * const fs = require('node:fs'); + * const assert = require('node:assert'); + * const { syncBuiltinESMExports } = require('node:module'); + * + * fs.readFile = newAPI; + * + * delete fs.readFileSync; + * + * function newAPI() { + * // ... + * } + * + * fs.newAPI = newAPI; + * + * syncBuiltinESMExports(); + * + * import('node:fs').then((esmFS) => { + * // It syncs the existing readFile property with the new value + * assert.strictEqual(esmFS.readFile, newAPI); + * // readFileSync has been deleted from the required fs + * assert.strictEqual('readFileSync' in fs, false); + * // syncBuiltinESMExports() does not remove readFileSync from esmFS + * assert.strictEqual('readFileSync' in esmFS, true); + * // syncBuiltinESMExports() does not add names + * assert.strictEqual(esmFS.newAPI, undefined); + * }); + * ``` + * @since v12.12.0 + */ + function syncBuiltinESMExports(): void; + /** + * `path` is the resolved path for the file for which a corresponding source map + * should be fetched. + * @since v13.7.0, v12.17.0 + * @return Returns `module.SourceMap` if a source map is found, `undefined` otherwise. + */ + function findSourceMap(path: string, error?: Error): SourceMap; + interface SourceMapPayload { + file: string; + version: number; + sources: string[]; + sourcesContent: string[]; + names: string[]; + mappings: string; + sourceRoot: string; + } + interface SourceMapping { + generatedLine: number; + generatedColumn: number; + originalSource: string; + originalLine: number; + originalColumn: number; + } + interface SourceOrigin { + /** + * The name of the range in the source map, if one was provided + */ + name?: string; + /** + * The file name of the original source, as reported in the SourceMap + */ + fileName: string; + /** + * The 1-indexed lineNumber of the corresponding call site in the original source + */ + lineNumber: number; + /** + * The 1-indexed columnNumber of the corresponding call site in the original source + */ + columnNumber: number; + } + /** + * @since v13.7.0, v12.17.0 + */ + class SourceMap { + /** + * Getter for the payload used to construct the `SourceMap` instance. + */ + readonly payload: SourceMapPayload; + constructor(payload: SourceMapPayload); + /** + * Given a line offset and column offset in the generated source + * file, returns an object representing the SourceMap range in the + * original file if found, or an empty object if not. + * + * The object returned contains the following keys: + * + * The returned value represents the raw range as it appears in the + * SourceMap, based on zero-indexed offsets, _not_ 1-indexed line and + * column numbers as they appear in Error messages and CallSite + * objects. 
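As a usage sketch for `findSourceMap()` and the `SourceMap` methods declared here (`findEntry()` above, `findOrigin()` just below), assuming the process was started with `--enable-source-maps` and using a hypothetical generated-file path:

```js
const { findSourceMap } = require('node:module');

// Hypothetical path to a generated (transpiled/bundled) file.
const map = findSourceMap('/srv/app/dist/app.js');
if (map) {
  // 1-indexed positions, as they appear in Error stack traces.
  const origin = map.findOrigin(10, 5);
  console.log(origin.fileName, origin.lineNumber, origin.columnNumber);

  // Zero-indexed offsets, as stored in the raw source-map payload.
  const entry = map.findEntry(9, 4);
  console.log(entry.originalSource, entry.originalLine, entry.originalColumn);
}
```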
+ *
+ * To get the corresponding 1-indexed line and column numbers from a
+ * lineNumber and columnNumber as they are reported by Error stacks
+ * and CallSite objects, use `sourceMap.findOrigin(lineNumber, columnNumber)`
+ * @param lineOffset The zero-indexed line number offset in the generated source
+ * @param columnOffset The zero-indexed column number offset in the generated source
+ */
+ findEntry(lineOffset: number, columnOffset: number): SourceMapping;
+ /**
+ * Given a 1-indexed `lineNumber` and `columnNumber` from a call site in the generated source,
+ * find the corresponding call site location in the original source.
+ *
+ * If the `lineNumber` and `columnNumber` provided are not found in any source map,
+ * then an empty object is returned.
+ * @param lineNumber The 1-indexed line number of the call site in the generated source
+ * @param columnNumber The 1-indexed column number of the call site in the generated source
+ */
+ findOrigin(lineNumber: number, columnNumber: number): SourceOrigin | {};
+ }
+ /** @deprecated Use `ImportAttributes` instead */
+ interface ImportAssertions extends ImportAttributes {}
+ interface ImportAttributes extends NodeJS.Dict<string> {
+ type?: string | undefined;
+ }
+ type ModuleFormat = "builtin" | "commonjs" | "json" | "module" | "wasm";
+ type ModuleSource = string | ArrayBuffer | NodeJS.TypedArray;
+ interface GlobalPreloadContext {
+ port: MessagePort;
+ }
+ /**
+ * @deprecated This hook will be removed in a future version.
+ * Use `initialize` instead. When a loader has an `initialize` export, `globalPreload` will be ignored.
+ *
+ * Sometimes it might be necessary to run some code inside of the same global scope that the application runs in.
+ * This hook allows the return of a string that is run as a sloppy-mode script on startup.
+ *
+ * @param context Information to assist the preload code
+ * @return Code to run before application startup
+ */
+ type GlobalPreloadHook = (context: GlobalPreloadContext) => string;
+ /**
+ * The `initialize` hook provides a way to define a custom function that runs in the hooks thread
+ * when the hooks module is initialized. Initialization happens when the hooks module is registered via `register`.
+ *
+ * This hook can receive data from a `register` invocation, including ports and other transferable objects.
+ * The return value of `initialize` can be a `Promise`, in which case it will be awaited before the main application thread execution resumes.
+ */
+ type InitializeHook<Data = any> = (data: Data) => void | Promise<void>;
+ interface ResolveHookContext {
+ /**
+ * Export conditions of the relevant `package.json`
+ */
+ conditions: string[];
+ /**
+ * @deprecated Use `importAttributes` instead
+ */
+ importAssertions: ImportAttributes;
+ /**
+ * An object whose key-value pairs represent the assertions for the module to import
+ */
+ importAttributes: ImportAttributes;
+ /**
+ * The module importing this one, or undefined if this is the Node.js entry point
+ */
+ parentURL: string | undefined;
+ }
+ interface ResolveFnOutput {
+ /**
+ * A hint to the load hook (it might be ignored)
+ */
+ format?: ModuleFormat | null | undefined;
+ /**
+ * @deprecated Use `importAttributes` instead
+ */
+ importAssertions?: ImportAttributes | undefined;
+ /**
+ * The import attributes to use when caching the module (optional; if excluded the input will be used)
+ */
+ importAttributes?: ImportAttributes | undefined;
+ /**
+ * A signal that this hook intends to terminate the chain of `resolve` hooks.
+ * @default false
+ */
+ shortCircuit?: boolean | undefined;
+ /**
+ * The absolute URL to which this input resolves
+ */
+ url: string;
+ }
+ /**
+ * The `resolve` hook chain is responsible for resolving file URL for a given module specifier and parent URL, and optionally its format (such as `'module'`) as a hint to the `load` hook.
+ * If a format is specified, the load hook is ultimately responsible for providing the final `format` value (and it is free to ignore the hint provided by `resolve`);
+ * if `resolve` provides a format, a custom `load` hook is required even if only to pass the value to the Node.js default `load` hook.
+ *
+ * @param specifier The specified URL path of the module to be resolved
+ * @param context
+ * @param nextResolve The subsequent `resolve` hook in the chain, or the Node.js default `resolve` hook after the last user-supplied resolve hook
+ */
+ type ResolveHook = (
+ specifier: string,
+ context: ResolveHookContext,
+ nextResolve: (
+ specifier: string,
+ context?: ResolveHookContext,
+ ) => ResolveFnOutput | Promise<ResolveFnOutput>,
+ ) => ResolveFnOutput | Promise<ResolveFnOutput>;
+ interface LoadHookContext {
+ /**
+ * Export conditions of the relevant `package.json`
+ */
+ conditions: string[];
+ /**
+ * The format optionally supplied by the `resolve` hook chain
+ */
+ format: ModuleFormat;
+ /**
+ * @deprecated Use `importAttributes` instead
+ */
+ importAssertions: ImportAttributes;
+ /**
+ * An object whose key-value pairs represent the assertions for the module to import
+ */
+ importAttributes: ImportAttributes;
+ }
+ interface LoadFnOutput {
+ format: ModuleFormat;
+ /**
+ * A signal that this hook intends to terminate the chain of `load` hooks.
+ * @default false
+ */
+ shortCircuit?: boolean | undefined;
+ /**
+ * The source for Node.js to evaluate
+ */
+ source?: ModuleSource;
+ }
+ /**
+ * The `load` hook provides a way to define a custom method of determining how a URL should be interpreted, retrieved, and parsed.
+ * It is also in charge of validating the import assertion.
+ *
+ * @param url The URL/path of the module to be loaded
+ * @param context Metadata about the module
+ * @param nextLoad The subsequent `load` hook in the chain, or the Node.js default `load` hook after the last user-supplied `load` hook
+ */
+ type LoadHook = (
+ url: string,
+ context: LoadHookContext,
+ nextLoad: (url: string, context?: LoadHookContext) => LoadFnOutput | Promise<LoadFnOutput>,
+ ) => LoadFnOutput | Promise<LoadFnOutput>;
+ }
+ interface RegisterOptions<Data> {
+ parentURL: string | URL;
+ data?: Data | undefined;
+ transferList?: any[] | undefined;
+ }
+ interface Module extends NodeModule {}
+ class Module {
+ static runMain(): void;
+ static wrap(code: string): string;
+ static createRequire(path: string | URL): NodeRequire;
+ static builtinModules: string[];
+ static isBuiltin(moduleName: string): boolean;
+ static Module: typeof Module;
+ static register<Data = any>(
+ specifier: string | URL,
+ parentURL?: string | URL,
+ options?: RegisterOptions<Data>,
+ ): void;
+ static register<Data = any>(specifier: string | URL, options?: RegisterOptions<Data>): void;
+ constructor(id: string, parent?: Module);
+ }
+ global {
+ interface ImportMeta {
+ /**
+ * The directory name of the current module. This is the same as the `path.dirname()` of the `import.meta.filename`.
+ * **Caveat:** only present on `file:` modules.
+ */
+ dirname: string;
+ /**
+ * The full absolute path and filename of the current module, with symlinks resolved.
+ * This is the same as the `url.fileURLToPath()` of the `import.meta.url`.
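A sketch of a hooks module shaped like the `ResolveHook` type above; the file name `hooks.mjs` and the `app-config` alias are hypothetical.

```js
// hooks.mjs — a resolve hook matching the ResolveHook shape.
export async function resolve(specifier, context, nextResolve) {
  if (specifier === 'app-config') {
    // Rewrite a hypothetical bare alias; everything else falls through
    // to the next resolver in the chain.
    return nextResolve('./config.mjs', context);
  }
  return nextResolve(specifier, context);
}
```

It would be registered from the application entry point with something like `register('./hooks.mjs', pathToFileURL(__filename))` from `node:module`.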
+ * **Caveat:** only local modules support this property. Modules not using the `file:` protocol will not provide it. + */ + filename: string; + /** + * The absolute `file:` URL of the module. + */ + url: string; + /** + * Provides a module-relative resolution function scoped to each module, returning + * the URL string. + * + * Second `parent` parameter is only used when the `--experimental-import-meta-resolve` + * command flag enabled. + * + * @since v20.6.0 + * + * @param specifier The module specifier to resolve relative to `parent`. + * @param parent The absolute parent module URL to resolve from. + * @returns The absolute (`file:`) URL string for the resolved module. + */ + resolve(specifier: string, parent?: string | URL | undefined): string; + } + } + export = Module; +} +declare module "node:module" { + import module = require("module"); + export = module; +} diff --git a/dist/node_modules/@types/node/ts4.8/net.d.ts b/dist/node_modules/@types/node/ts4.8/net.d.ts new file mode 100644 index 0000000..c09b95d --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/net.d.ts @@ -0,0 +1,954 @@ +/** + * > Stability: 2 - Stable + * + * The `node:net` module provides an asynchronous network API for creating stream-based + * TCP or `IPC` servers ({@link createServer}) and clients + * ({@link createConnection}). + * + * It can be accessed using: + * + * ```js + * const net = require('node:net'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/net.js) + */ +declare module "net" { + import * as stream from "node:stream"; + import { Abortable, EventEmitter } from "node:events"; + import * as dns from "node:dns"; + type LookupFunction = ( + hostname: string, + options: dns.LookupOptions, + callback: (err: NodeJS.ErrnoException | null, address: string | dns.LookupAddress[], family?: number) => void, + ) => void; + interface AddressInfo { + address: string; + family: string; + port: number; + } + interface SocketConstructorOpts { + fd?: number | undefined; + allowHalfOpen?: boolean | undefined; + readable?: boolean | undefined; + writable?: boolean | undefined; + signal?: AbortSignal; + } + interface OnReadOpts { + buffer: Uint8Array | (() => Uint8Array); + /** + * This function is called for every chunk of incoming data. + * Two arguments are passed to it: the number of bytes written to buffer and a reference to buffer. + * Return false from this function to implicitly pause() the socket. + */ + callback(bytesWritten: number, buf: Uint8Array): boolean; + } + interface ConnectOpts { + /** + * If specified, incoming data is stored in a single buffer and passed to the supplied callback when data arrives on the socket. + * Note: this will cause the streaming functionality to not provide any data, however events like 'error', 'end', and 'close' will + * still be emitted as normal and methods like pause() and resume() will also behave as expected. 
+ */ + onread?: OnReadOpts | undefined; + } + interface TcpSocketConnectOpts extends ConnectOpts { + port: number; + host?: string | undefined; + localAddress?: string | undefined; + localPort?: number | undefined; + hints?: number | undefined; + family?: number | undefined; + lookup?: LookupFunction | undefined; + noDelay?: boolean | undefined; + keepAlive?: boolean | undefined; + keepAliveInitialDelay?: number | undefined; + /** + * @since v18.13.0 + */ + autoSelectFamily?: boolean | undefined; + /** + * @since v18.13.0 + */ + autoSelectFamilyAttemptTimeout?: number | undefined; + } + interface IpcSocketConnectOpts extends ConnectOpts { + path: string; + } + type SocketConnectOpts = TcpSocketConnectOpts | IpcSocketConnectOpts; + type SocketReadyState = "opening" | "open" | "readOnly" | "writeOnly" | "closed"; + /** + * This class is an abstraction of a TCP socket or a streaming `IPC` endpoint + * (uses named pipes on Windows, and Unix domain sockets otherwise). It is also + * an `EventEmitter`. + * + * A `net.Socket` can be created by the user and used directly to interact with + * a server. For example, it is returned by {@link createConnection}, + * so the user can use it to talk to the server. + * + * It can also be created by Node.js and passed to the user when a connection + * is received. For example, it is passed to the listeners of a `'connection'` event emitted on a {@link Server}, so the user can use + * it to interact with the client. + * @since v0.3.4 + */ + class Socket extends stream.Duplex { + constructor(options?: SocketConstructorOpts); + /** + * Destroys the socket after all data is written. If the `finish` event was already emitted the socket is destroyed immediately. + * If the socket is still writable it implicitly calls `socket.end()`. + * @since v0.3.4 + */ + destroySoon(): void; + /** + * Sends data on the socket. The second parameter specifies the encoding in the + * case of a string. It defaults to UTF8 encoding. + * + * Returns `true` if the entire data was flushed successfully to the kernel + * buffer. Returns `false` if all or part of the data was queued in user memory.`'drain'` will be emitted when the buffer is again free. + * + * The optional `callback` parameter will be executed when the data is finally + * written out, which may not be immediately. + * + * See `Writable` stream `write()` method for more + * information. + * @since v0.1.90 + * @param [encoding='utf8'] Only used when data is `string`. + */ + write(buffer: Uint8Array | string, cb?: (err?: Error) => void): boolean; + write(str: Uint8Array | string, encoding?: BufferEncoding, cb?: (err?: Error) => void): boolean; + /** + * Initiate a connection on a given socket. + * + * Possible signatures: + * + * * `socket.connect(options[, connectListener])` + * * `socket.connect(path[, connectListener])` for `IPC` connections. + * * `socket.connect(port[, host][, connectListener])` for TCP connections. + * * Returns: `net.Socket` The socket itself. + * + * This function is asynchronous. When the connection is established, the `'connect'` event will be emitted. If there is a problem connecting, + * instead of a `'connect'` event, an `'error'` event will be emitted with + * the error passed to the `'error'` listener. + * The last parameter `connectListener`, if supplied, will be added as a listener + * for the `'connect'` event **once**. + * + * This function should only be used for reconnecting a socket after`'close'` has been emitted or otherwise it may lead to undefined + * behavior. 
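+ *
+ * A short sketch (the host and port below are placeholders):
+ *
+ * ```js
+ * const net = require('node:net');
+ * const socket = new net.Socket();
+ * socket.on('error', (err) => {
+ *   console.error('connection failed:', err.message);
+ * });
+ * socket.connect({ port: 8124, host: 'localhost' }, () => {
+ *   // 'connect' listener
+ *   socket.write('hello\r\n');
+ * });
+ * ```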
+ */ + connect(options: SocketConnectOpts, connectionListener?: () => void): this; + connect(port: number, host: string, connectionListener?: () => void): this; + connect(port: number, connectionListener?: () => void): this; + connect(path: string, connectionListener?: () => void): this; + /** + * Set the encoding for the socket as a `Readable Stream`. See `readable.setEncoding()` for more information. + * @since v0.1.90 + * @return The socket itself. + */ + setEncoding(encoding?: BufferEncoding): this; + /** + * Pauses the reading of data. That is, `'data'` events will not be emitted. + * Useful to throttle back an upload. + * @return The socket itself. + */ + pause(): this; + /** + * Close the TCP connection by sending an RST packet and destroy the stream. + * If this TCP socket is in connecting status, it will send an RST packet and destroy this TCP socket once it is connected. + * Otherwise, it will call `socket.destroy` with an `ERR_SOCKET_CLOSED` Error. + * If this is not a TCP socket (for example, a pipe), calling this method will immediately throw an `ERR_INVALID_HANDLE_TYPE` Error. + * @since v18.3.0, v16.17.0 + */ + resetAndDestroy(): this; + /** + * Resumes reading after a call to `socket.pause()`. + * @return The socket itself. + */ + resume(): this; + /** + * Sets the socket to timeout after `timeout` milliseconds of inactivity on + * the socket. By default `net.Socket` do not have a timeout. + * + * When an idle timeout is triggered the socket will receive a `'timeout'` event but the connection will not be severed. The user must manually call `socket.end()` or `socket.destroy()` to + * end the connection. + * + * ```js + * socket.setTimeout(3000); + * socket.on('timeout', () => { + * console.log('socket timeout'); + * socket.end(); + * }); + * ``` + * + * If `timeout` is 0, then the existing idle timeout is disabled. + * + * The optional `callback` parameter will be added as a one-time listener for the `'timeout'` event. + * @since v0.1.90 + * @return The socket itself. + */ + setTimeout(timeout: number, callback?: () => void): this; + /** + * Enable/disable the use of Nagle's algorithm. + * + * When a TCP connection is created, it will have Nagle's algorithm enabled. + * + * Nagle's algorithm delays data before it is sent via the network. It attempts + * to optimize throughput at the expense of latency. + * + * Passing `true` for `noDelay` or not passing an argument will disable Nagle's + * algorithm for the socket. Passing `false` for `noDelay` will enable Nagle's + * algorithm. + * @since v0.1.90 + * @param [noDelay=true] + * @return The socket itself. + */ + setNoDelay(noDelay?: boolean): this; + /** + * Enable/disable keep-alive functionality, and optionally set the initial + * delay before the first keepalive probe is sent on an idle socket. + * + * Set `initialDelay` (in milliseconds) to set the delay between the last + * data packet received and the first keepalive probe. Setting `0` for`initialDelay` will leave the value unchanged from the default + * (or previous) setting. + * + * Enabling the keep-alive functionality will set the following socket options: + * + * * `SO_KEEPALIVE=1` + * * `TCP_KEEPIDLE=initialDelay` + * * `TCP_KEEPCNT=10` + * * `TCP_KEEPINTVL=1` + * @since v0.1.92 + * @param [enable=false] + * @param [initialDelay=0] + * @return The socket itself. 
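+ *
+ * A usage sketch (the port and the 60-second delay are arbitrary choices):
+ *
+ * ```js
+ * const net = require('node:net');
+ * const socket = net.connect({ port: 8124 }, () => {
+ *   socket.setKeepAlive(true, 60000); // first keepalive probe after 60s of idleness
+ * });
+ * ```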
+ */ + setKeepAlive(enable?: boolean, initialDelay?: number): this; + /** + * Returns the bound `address`, the address `family` name and `port` of the + * socket as reported by the operating system:`{ port: 12346, family: 'IPv4', address: '127.0.0.1' }` + * @since v0.1.90 + */ + address(): AddressInfo | {}; + /** + * Calling `unref()` on a socket will allow the program to exit if this is the only + * active socket in the event system. If the socket is already `unref`ed calling`unref()` again will have no effect. + * @since v0.9.1 + * @return The socket itself. + */ + unref(): this; + /** + * Opposite of `unref()`, calling `ref()` on a previously `unref`ed socket will _not_ let the program exit if it's the only socket left (the default behavior). + * If the socket is `ref`ed calling `ref` again will have no effect. + * @since v0.9.1 + * @return The socket itself. + */ + ref(): this; + /** + * This property is only present if the family autoselection algorithm is enabled in `socket.connect(options)` + * and it is an array of the addresses that have been attempted. + * + * Each address is a string in the form of `$IP:$PORT`. + * If the connection was successful, then the last address is the one that the socket is currently connected to. + * @since v19.4.0 + */ + readonly autoSelectFamilyAttemptedAddresses: string[]; + /** + * This property shows the number of characters buffered for writing. The buffer + * may contain strings whose length after encoding is not yet known. So this number + * is only an approximation of the number of bytes in the buffer. + * + * `net.Socket` has the property that `socket.write()` always works. This is to + * help users get up and running quickly. The computer cannot always keep up + * with the amount of data that is written to a socket. The network connection + * simply might be too slow. Node.js will internally queue up the data written to a + * socket and send it out over the wire when it is possible. + * + * The consequence of this internal buffering is that memory may grow. + * Users who experience large or growing `bufferSize` should attempt to + * "throttle" the data flows in their program with `socket.pause()` and `socket.resume()`. + * @since v0.3.8 + * @deprecated Since v14.6.0 - Use `writableLength` instead. + */ + readonly bufferSize: number; + /** + * The amount of received bytes. + * @since v0.5.3 + */ + readonly bytesRead: number; + /** + * The amount of bytes sent. + * @since v0.5.3 + */ + readonly bytesWritten: number; + /** + * If `true`,`socket.connect(options[, connectListener])` was + * called and has not yet finished. It will stay `true` until the socket becomes + * connected, then it is set to `false` and the `'connect'` event is emitted. Note + * that the `socket.connect(options[, connectListener])` callback is a listener for the `'connect'` event. + * @since v6.1.0 + */ + readonly connecting: boolean; + /** + * This is `true` if the socket is not connected yet, either because `.connect()`has not yet been called or because it is still in the process of connecting + * (see `socket.connecting`). + * @since v11.2.0, v10.16.0 + */ + readonly pending: boolean; + /** + * See `writable.destroyed` for further details. + */ + readonly destroyed: boolean; + /** + * The string representation of the local IP address the remote client is + * connecting on. For example, in a server listening on `'0.0.0.0'`, if a client + * connects on `'192.168.1.1'`, the value of `socket.localAddress` would be`'192.168.1.1'`. 
+ * @since v0.9.6 + */ + readonly localAddress?: string; + /** + * The numeric representation of the local port. For example, `80` or `21`. + * @since v0.9.6 + */ + readonly localPort?: number; + /** + * The string representation of the local IP family. `'IPv4'` or `'IPv6'`. + * @since v18.8.0, v16.18.0 + */ + readonly localFamily?: string; + /** + * This property represents the state of the connection as a string. + * + * * If the stream is connecting `socket.readyState` is `opening`. + * * If the stream is readable and writable, it is `open`. + * * If the stream is readable and not writable, it is `readOnly`. + * * If the stream is not readable and writable, it is `writeOnly`. + * @since v0.5.0 + */ + readonly readyState: SocketReadyState; + /** + * The string representation of the remote IP address. For example,`'74.125.127.100'` or `'2001:4860:a005::68'`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.5.10 + */ + readonly remoteAddress?: string | undefined; + /** + * The string representation of the remote IP family. `'IPv4'` or `'IPv6'`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.11.14 + */ + readonly remoteFamily?: string | undefined; + /** + * The numeric representation of the remote port. For example, `80` or `21`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.5.10 + */ + readonly remotePort?: number | undefined; + /** + * The socket timeout in milliseconds as set by `socket.setTimeout()`. + * It is `undefined` if a timeout has not been set. + * @since v10.7.0 + */ + readonly timeout?: number | undefined; + /** + * Half-closes the socket. i.e., it sends a FIN packet. It is possible the + * server will still send some data. + * + * See `writable.end()` for further details. + * @since v0.1.90 + * @param [encoding='utf8'] Only used when data is `string`. + * @param callback Optional callback for when the socket is finished. + * @return The socket itself. + */ + end(callback?: () => void): this; + end(buffer: Uint8Array | string, callback?: () => void): this; + end(str: Uint8Array | string, encoding?: BufferEncoding, callback?: () => void): this; + /** + * events.EventEmitter + * 1. close + * 2. connect + * 3. data + * 4. drain + * 5. end + * 6. error + * 7. lookup + * 8. ready + * 9. 
timeout + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: (hadError: boolean) => void): this; + addListener(event: "connect", listener: () => void): this; + addListener(event: "data", listener: (data: Buffer) => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + addListener(event: "ready", listener: () => void): this; + addListener(event: "timeout", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close", hadError: boolean): boolean; + emit(event: "connect"): boolean; + emit(event: "data", data: Buffer): boolean; + emit(event: "drain"): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "lookup", err: Error, address: string, family: string | number, host: string): boolean; + emit(event: "ready"): boolean; + emit(event: "timeout"): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: (hadError: boolean) => void): this; + on(event: "connect", listener: () => void): this; + on(event: "data", listener: (data: Buffer) => void): this; + on(event: "drain", listener: () => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + on(event: "ready", listener: () => void): this; + on(event: "timeout", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: (hadError: boolean) => void): this; + once(event: "connect", listener: () => void): this; + once(event: "data", listener: (data: Buffer) => void): this; + once(event: "drain", listener: () => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + once(event: "ready", listener: () => void): this; + once(event: "timeout", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: (hadError: boolean) => void): this; + prependListener(event: "connect", listener: () => void): this; + prependListener(event: "data", listener: (data: Buffer) => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + prependListener(event: "ready", listener: () => void): this; + prependListener(event: "timeout", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: (hadError: boolean) => void): this; + prependOnceListener(event: "connect", listener: () => void): this; + prependOnceListener(event: "data", listener: (data: Buffer) => void): this; + 
prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener( + event: "lookup", + listener: (err: Error, address: string, family: string | number, host: string) => void, + ): this; + prependOnceListener(event: "ready", listener: () => void): this; + prependOnceListener(event: "timeout", listener: () => void): this; + } + interface ListenOptions extends Abortable { + port?: number | undefined; + host?: string | undefined; + backlog?: number | undefined; + path?: string | undefined; + exclusive?: boolean | undefined; + readableAll?: boolean | undefined; + writableAll?: boolean | undefined; + /** + * @default false + */ + ipv6Only?: boolean | undefined; + } + interface ServerOpts { + /** + * Indicates whether half-opened TCP connections are allowed. + * @default false + */ + allowHalfOpen?: boolean | undefined; + /** + * Indicates whether the socket should be paused on incoming connections. + * @default false + */ + pauseOnConnect?: boolean | undefined; + /** + * If set to `true`, it disables the use of Nagle's algorithm immediately after a new incoming connection is received. + * @default false + * @since v16.5.0 + */ + noDelay?: boolean | undefined; + /** + * If set to `true`, it enables keep-alive functionality on the socket immediately after a new incoming connection is received, + * similarly on what is done in `socket.setKeepAlive([enable][, initialDelay])`. + * @default false + * @since v16.5.0 + */ + keepAlive?: boolean | undefined; + /** + * If set to a positive number, it sets the initial delay before the first keepalive probe is sent on an idle socket. + * @default 0 + * @since v16.5.0 + */ + keepAliveInitialDelay?: number | undefined; + } + interface DropArgument { + localAddress?: string; + localPort?: number; + localFamily?: string; + remoteAddress?: string; + remotePort?: number; + remoteFamily?: string; + } + /** + * This class is used to create a TCP or `IPC` server. + * @since v0.1.90 + */ + class Server extends EventEmitter { + constructor(connectionListener?: (socket: Socket) => void); + constructor(options?: ServerOpts, connectionListener?: (socket: Socket) => void); + /** + * Start a server listening for connections. A `net.Server` can be a TCP or + * an `IPC` server depending on what it listens to. + * + * Possible signatures: + * + * * `server.listen(handle[, backlog][, callback])` + * * `server.listen(options[, callback])` + * * `server.listen(path[, backlog][, callback])` for `IPC` servers + * * `server.listen([port[, host[, backlog]]][, callback])` for TCP servers + * + * This function is asynchronous. When the server starts listening, the `'listening'` event will be emitted. The last parameter `callback`will be added as a listener for the `'listening'` + * event. + * + * All `listen()` methods can take a `backlog` parameter to specify the maximum + * length of the queue of pending connections. The actual length will be determined + * by the OS through sysctl settings such as `tcp_max_syn_backlog` and `somaxconn`on Linux. The default value of this parameter is 511 (not 512). + * + * All {@link Socket} are set to `SO_REUSEADDR` (see [`socket(7)`](https://man7.org/linux/man-pages/man7/socket.7.html) for + * details). + * + * The `server.listen()` method can be called again if and only if there was an + * error during the first `server.listen()` call or `server.close()` has been + * called. 
Otherwise, an `ERR_SERVER_ALREADY_LISTEN` error will be thrown. + * + * One of the most common errors raised when listening is `EADDRINUSE`. + * This happens when another server is already listening on the requested`port`/`path`/`handle`. One way to handle this would be to retry + * after a certain amount of time: + * + * ```js + * server.on('error', (e) => { + * if (e.code === 'EADDRINUSE') { + * console.error('Address in use, retrying...'); + * setTimeout(() => { + * server.close(); + * server.listen(PORT, HOST); + * }, 1000); + * } + * }); + * ``` + */ + listen(port?: number, hostname?: string, backlog?: number, listeningListener?: () => void): this; + listen(port?: number, hostname?: string, listeningListener?: () => void): this; + listen(port?: number, backlog?: number, listeningListener?: () => void): this; + listen(port?: number, listeningListener?: () => void): this; + listen(path: string, backlog?: number, listeningListener?: () => void): this; + listen(path: string, listeningListener?: () => void): this; + listen(options: ListenOptions, listeningListener?: () => void): this; + listen(handle: any, backlog?: number, listeningListener?: () => void): this; + listen(handle: any, listeningListener?: () => void): this; + /** + * Stops the server from accepting new connections and keeps existing + * connections. This function is asynchronous, the server is finally closed + * when all connections are ended and the server emits a `'close'` event. + * The optional `callback` will be called once the `'close'` event occurs. Unlike + * that event, it will be called with an `Error` as its only argument if the server + * was not open when it was closed. + * @since v0.1.90 + * @param callback Called when the server is closed. + */ + close(callback?: (err?: Error) => void): this; + /** + * Returns the bound `address`, the address `family` name, and `port` of the server + * as reported by the operating system if listening on an IP socket + * (useful to find which port was assigned when getting an OS-assigned address):`{ port: 12346, family: 'IPv4', address: '127.0.0.1' }`. + * + * For a server listening on a pipe or Unix domain socket, the name is returned + * as a string. + * + * ```js + * const server = net.createServer((socket) => { + * socket.end('goodbye\n'); + * }).on('error', (err) => { + * // Handle errors here. + * throw err; + * }); + * + * // Grab an arbitrary unused port. + * server.listen(() => { + * console.log('opened server on', server.address()); + * }); + * ``` + * + * `server.address()` returns `null` before the `'listening'` event has been + * emitted or after calling `server.close()`. + * @since v0.1.90 + */ + address(): AddressInfo | string | null; + /** + * Asynchronously get the number of concurrent connections on the server. Works + * when sockets were sent to forks. + * + * Callback should take two arguments `err` and `count`. + * @since v0.9.7 + */ + getConnections(cb: (error: Error | null, count: number) => void): void; + /** + * Opposite of `unref()`, calling `ref()` on a previously `unref`ed server will _not_ let the program exit if it's the only server left (the default behavior). + * If the server is `ref`ed calling `ref()` again will have no effect. + * @since v0.9.1 + */ + ref(): this; + /** + * Calling `unref()` on a server will allow the program to exit if this is the only + * active server in the event system. If the server is already `unref`ed calling`unref()` again will have no effect. 
+ * @since v0.9.1 + */ + unref(): this; + /** + * Set this property to reject connections when the server's connection count gets + * high. + * + * It is not recommended to use this option once a socket has been sent to a child + * with `child_process.fork()`. + * @since v0.2.0 + */ + maxConnections: number; + connections: number; + /** + * Indicates whether or not the server is listening for connections. + * @since v5.7.0 + */ + readonly listening: boolean; + /** + * events.EventEmitter + * 1. close + * 2. connection + * 3. error + * 4. listening + * 5. drop + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "connection", listener: (socket: Socket) => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "listening", listener: () => void): this; + addListener(event: "drop", listener: (data?: DropArgument) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "connection", socket: Socket): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "listening"): boolean; + emit(event: "drop", data?: DropArgument): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "connection", listener: (socket: Socket) => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "listening", listener: () => void): this; + on(event: "drop", listener: (data?: DropArgument) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "connection", listener: (socket: Socket) => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "listening", listener: () => void): this; + once(event: "drop", listener: (data?: DropArgument) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "connection", listener: (socket: Socket) => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "listening", listener: () => void): this; + prependListener(event: "drop", listener: (data?: DropArgument) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "connection", listener: (socket: Socket) => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "listening", listener: () => void): this; + prependOnceListener(event: "drop", listener: (data?: DropArgument) => void): this; + /** + * Calls {@link Server.close()} and returns a promise that fulfills when the server has closed. + * @since v20.5.0 + */ + [Symbol.asyncDispose](): Promise; + } + type IPVersion = "ipv4" | "ipv6"; + /** + * The `BlockList` object can be used with some network APIs to specify rules for + * disabling inbound or outbound access to specific IP addresses, IP ranges, or + * IP subnets. + * @since v15.0.0, v14.18.0 + */ + class BlockList { + /** + * Adds a rule to block the given IP address. + * @since v15.0.0, v14.18.0 + * @param address An IPv4 or IPv6 address. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. 
+ */ + addAddress(address: string, type?: IPVersion): void; + addAddress(address: SocketAddress): void; + /** + * Adds a rule to block a range of IP addresses from `start` (inclusive) to`end` (inclusive). + * @since v15.0.0, v14.18.0 + * @param start The starting IPv4 or IPv6 address in the range. + * @param end The ending IPv4 or IPv6 address in the range. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + addRange(start: string, end: string, type?: IPVersion): void; + addRange(start: SocketAddress, end: SocketAddress): void; + /** + * Adds a rule to block a range of IP addresses specified as a subnet mask. + * @since v15.0.0, v14.18.0 + * @param net The network IPv4 or IPv6 address. + * @param prefix The number of CIDR prefix bits. For IPv4, this must be a value between `0` and `32`. For IPv6, this must be between `0` and `128`. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + addSubnet(net: SocketAddress, prefix: number): void; + addSubnet(net: string, prefix: number, type?: IPVersion): void; + /** + * Returns `true` if the given IP address matches any of the rules added to the`BlockList`. + * + * ```js + * const blockList = new net.BlockList(); + * blockList.addAddress('123.123.123.123'); + * blockList.addRange('10.0.0.1', '10.0.0.10'); + * blockList.addSubnet('8592:757c:efae:4e45::', 64, 'ipv6'); + * + * console.log(blockList.check('123.123.123.123')); // Prints: true + * console.log(blockList.check('10.0.0.3')); // Prints: true + * console.log(blockList.check('222.111.111.222')); // Prints: false + * + * // IPv6 notation for IPv4 addresses works: + * console.log(blockList.check('::ffff:7b7b:7b7b', 'ipv6')); // Prints: true + * console.log(blockList.check('::ffff:123.123.123.123', 'ipv6')); // Prints: true + * ``` + * @since v15.0.0, v14.18.0 + * @param address The IP address to check + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + check(address: SocketAddress): boolean; + check(address: string, type?: IPVersion): boolean; + /** + * The list of rules added to the blocklist. + * @since v15.0.0, v14.18.0 + */ + rules: readonly string[]; + } + interface TcpNetConnectOpts extends TcpSocketConnectOpts, SocketConstructorOpts { + timeout?: number | undefined; + } + interface IpcNetConnectOpts extends IpcSocketConnectOpts, SocketConstructorOpts { + timeout?: number | undefined; + } + type NetConnectOpts = TcpNetConnectOpts | IpcNetConnectOpts; + /** + * Creates a new TCP or `IPC` server. + * + * If `allowHalfOpen` is set to `true`, when the other end of the socket + * signals the end of transmission, the server will only send back the end of + * transmission when `socket.end()` is explicitly called. For example, in the + * context of TCP, when a FIN packed is received, a FIN packed is sent + * back only when `socket.end()` is explicitly called. Until then the + * connection is half-closed (non-readable but still writable). See `'end'` event and [RFC 1122](https://tools.ietf.org/html/rfc1122) (section 4.2.2.13) for more information. + * + * If `pauseOnConnect` is set to `true`, then the socket associated with each + * incoming connection will be paused, and no data will be read from its handle. + * This allows connections to be passed between processes without any data being + * read by the original process. To begin reading data from a paused socket, call `socket.resume()`. + * + * The server can be a TCP server or an `IPC` server, depending on what it `listen()` to. 
+ * + * Here is an example of a TCP echo server which listens for connections + * on port 8124: + * + * ```js + * const net = require('node:net'); + * const server = net.createServer((c) => { + * // 'connection' listener. + * console.log('client connected'); + * c.on('end', () => { + * console.log('client disconnected'); + * }); + * c.write('hello\r\n'); + * c.pipe(c); + * }); + * server.on('error', (err) => { + * throw err; + * }); + * server.listen(8124, () => { + * console.log('server bound'); + * }); + * ``` + * + * Test this by using `telnet`: + * + * ```bash + * telnet localhost 8124 + * ``` + * + * To listen on the socket `/tmp/echo.sock`: + * + * ```js + * server.listen('/tmp/echo.sock', () => { + * console.log('server bound'); + * }); + * ``` + * + * Use `nc` to connect to a Unix domain socket server: + * + * ```bash + * nc -U /tmp/echo.sock + * ``` + * @since v0.5.0 + * @param connectionListener Automatically set as a listener for the {@link 'connection'} event. + */ + function createServer(connectionListener?: (socket: Socket) => void): Server; + function createServer(options?: ServerOpts, connectionListener?: (socket: Socket) => void): Server; + /** + * Aliases to {@link createConnection}. + * + * Possible signatures: + * + * * {@link connect} + * * {@link connect} for `IPC` connections. + * * {@link connect} for TCP connections. + */ + function connect(options: NetConnectOpts, connectionListener?: () => void): Socket; + function connect(port: number, host?: string, connectionListener?: () => void): Socket; + function connect(path: string, connectionListener?: () => void): Socket; + /** + * A factory function, which creates a new {@link Socket}, + * immediately initiates connection with `socket.connect()`, + * then returns the `net.Socket` that starts the connection. + * + * When the connection is established, a `'connect'` event will be emitted + * on the returned socket. The last parameter `connectListener`, if supplied, + * will be added as a listener for the `'connect'` event **once**. + * + * Possible signatures: + * + * * {@link createConnection} + * * {@link createConnection} for `IPC` connections. + * * {@link createConnection} for TCP connections. + * + * The {@link connect} function is an alias to this function. + */ + function createConnection(options: NetConnectOpts, connectionListener?: () => void): Socket; + function createConnection(port: number, host?: string, connectionListener?: () => void): Socket; + function createConnection(path: string, connectionListener?: () => void): Socket; + /** + * Gets the current default value of the `autoSelectFamily` option of `socket.connect(options)`. + * The initial default value is `true`, unless the command line option`--no-network-family-autoselection` is provided. + * @since v19.4.0 + */ + function getDefaultAutoSelectFamily(): boolean; + /** + * Sets the default value of the `autoSelectFamily` option of `socket.connect(options)`. + * @since v19.4.0 + */ + function setDefaultAutoSelectFamily(value: boolean): void; + /** + * Gets the current default value of the `autoSelectFamilyAttemptTimeout` option of `socket.connect(options)`. + * The initial default value is `250`. + * @since v19.8.0 + */ + function getDefaultAutoSelectFamilyAttemptTimeout(): number; + /** + * Sets the default value of the `autoSelectFamilyAttemptTimeout` option of `socket.connect(options)`. + * @since v19.8.0 + */ + function setDefaultAutoSelectFamilyAttemptTimeout(value: number): void; + /** + * Returns `6` if `input` is an IPv6 address. 
Returns `4` if `input` is an IPv4 + * address in [dot-decimal notation](https://en.wikipedia.org/wiki/Dot-decimal_notation) with no leading zeroes. Otherwise, returns`0`. + * + * ```js + * net.isIP('::1'); // returns 6 + * net.isIP('127.0.0.1'); // returns 4 + * net.isIP('127.000.000.001'); // returns 0 + * net.isIP('127.0.0.1/24'); // returns 0 + * net.isIP('fhqwhgads'); // returns 0 + * ``` + * @since v0.3.0 + */ + function isIP(input: string): number; + /** + * Returns `true` if `input` is an IPv4 address in [dot-decimal notation](https://en.wikipedia.org/wiki/Dot-decimal_notation) with no + * leading zeroes. Otherwise, returns `false`. + * + * ```js + * net.isIPv4('127.0.0.1'); // returns true + * net.isIPv4('127.000.000.001'); // returns false + * net.isIPv4('127.0.0.1/24'); // returns false + * net.isIPv4('fhqwhgads'); // returns false + * ``` + * @since v0.3.0 + */ + function isIPv4(input: string): boolean; + /** + * Returns `true` if `input` is an IPv6 address. Otherwise, returns `false`. + * + * ```js + * net.isIPv6('::1'); // returns true + * net.isIPv6('fhqwhgads'); // returns false + * ``` + * @since v0.3.0 + */ + function isIPv6(input: string): boolean; + interface SocketAddressInitOptions { + /** + * The network address as either an IPv4 or IPv6 string. + * @default 127.0.0.1 + */ + address?: string | undefined; + /** + * @default `'ipv4'` + */ + family?: IPVersion | undefined; + /** + * An IPv6 flow-label used only if `family` is `'ipv6'`. + * @default 0 + */ + flowlabel?: number | undefined; + /** + * An IP port. + * @default 0 + */ + port?: number | undefined; + } + /** + * @since v15.14.0, v14.18.0 + */ + class SocketAddress { + constructor(options: SocketAddressInitOptions); + /** + * Either \`'ipv4'\` or \`'ipv6'\`. + * @since v15.14.0, v14.18.0 + */ + readonly address: string; + /** + * Either \`'ipv4'\` or \`'ipv6'\`. + * @since v15.14.0, v14.18.0 + */ + readonly family: IPVersion; + /** + * @since v15.14.0, v14.18.0 + */ + readonly port: number; + /** + * @since v15.14.0, v14.18.0 + */ + readonly flowlabel: number; + } +} +declare module "node:net" { + export * from "net"; +} diff --git a/dist/node_modules/@types/node/ts4.8/os.d.ts b/dist/node_modules/@types/node/ts4.8/os.d.ts new file mode 100644 index 0000000..39a33f7 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/os.d.ts @@ -0,0 +1,478 @@ +/** + * The `node:os` module provides operating system-related utility methods and + * properties. It can be accessed using: + * + * ```js + * const os = require('node:os'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/os.js) + */ +declare module "os" { + interface CpuInfo { + model: string; + speed: number; + times: { + user: number; + nice: number; + sys: number; + idle: number; + irq: number; + }; + } + interface NetworkInterfaceBase { + address: string; + netmask: string; + mac: string; + internal: boolean; + cidr: string | null; + } + interface NetworkInterfaceInfoIPv4 extends NetworkInterfaceBase { + family: "IPv4"; + scopeid?: undefined; + } + interface NetworkInterfaceInfoIPv6 extends NetworkInterfaceBase { + family: "IPv6"; + scopeid: number; + } + interface UserInfo { + username: T; + uid: number; + gid: number; + shell: T | null; + homedir: T; + } + type NetworkInterfaceInfo = NetworkInterfaceInfoIPv4 | NetworkInterfaceInfoIPv6; + /** + * Returns the host name of the operating system as a string. + * @since v0.3.3 + */ + function hostname(): string; + /** + * Returns an array containing the 1, 5, and 15 minute load averages. 
+ * + * The load average is a measure of system activity calculated by the operating + * system and expressed as a fractional number. + * + * The load average is a Unix-specific concept. On Windows, the return value is + * always `[0, 0, 0]`. + * @since v0.3.3 + */ + function loadavg(): number[]; + /** + * Returns the system uptime in number of seconds. + * @since v0.3.3 + */ + function uptime(): number; + /** + * Returns the amount of free system memory in bytes as an integer. + * @since v0.3.3 + */ + function freemem(): number; + /** + * Returns the total amount of system memory in bytes as an integer. + * @since v0.3.3 + */ + function totalmem(): number; + /** + * Returns an array of objects containing information about each logical CPU core. + * The array will be empty if no CPU information is available, such as if the`/proc` file system is unavailable. + * + * The properties included on each object include: + * + * ```js + * [ + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 252020, + * nice: 0, + * sys: 30340, + * idle: 1070356870, + * irq: 0, + * }, + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 306960, + * nice: 0, + * sys: 26980, + * idle: 1071569080, + * irq: 0, + * }, + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 248450, + * nice: 0, + * sys: 21750, + * idle: 1070919370, + * irq: 0, + * }, + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 256880, + * nice: 0, + * sys: 19430, + * idle: 1070905480, + * irq: 20, + * }, + * }, + * ] + * ``` + * + * `nice` values are POSIX-only. On Windows, the `nice` values of all processors + * are always 0. + * + * `os.cpus().length` should not be used to calculate the amount of parallelism + * available to an application. Use {@link availableParallelism} for this purpose. + * @since v0.3.3 + */ + function cpus(): CpuInfo[]; + /** + * Returns an estimate of the default amount of parallelism a program should use. + * Always returns a value greater than zero. + * + * This function is a small wrapper about libuv's [`uv_available_parallelism()`](https://docs.libuv.org/en/v1.x/misc.html#c.uv_available_parallelism). + * @since v19.4.0, v18.14.0 + */ + function availableParallelism(): number; + /** + * Returns the operating system name as returned by [`uname(3)`](https://linux.die.net/man/3/uname). For example, it + * returns `'Linux'` on Linux, `'Darwin'` on macOS, and `'Windows_NT'` on Windows. + * + * See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for additional information + * about the output of running [`uname(3)`](https://linux.die.net/man/3/uname) on various operating systems. + * @since v0.3.3 + */ + function type(): string; + /** + * Returns the operating system as a string. + * + * On POSIX systems, the operating system release is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `GetVersionExW()` is used. See + * [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v0.3.3 + */ + function release(): string; + /** + * Returns an object containing network interfaces that have been assigned a + * network address. + * + * Each key on the returned object identifies a network interface. 
The associated + * value is an array of objects that each describe an assigned network address. + * + * The properties available on the assigned network address object include: + * + * ```js + * { + * lo: [ + * { + * address: '127.0.0.1', + * netmask: '255.0.0.0', + * family: 'IPv4', + * mac: '00:00:00:00:00:00', + * internal: true, + * cidr: '127.0.0.1/8' + * }, + * { + * address: '::1', + * netmask: 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', + * family: 'IPv6', + * mac: '00:00:00:00:00:00', + * scopeid: 0, + * internal: true, + * cidr: '::1/128' + * } + * ], + * eth0: [ + * { + * address: '192.168.1.108', + * netmask: '255.255.255.0', + * family: 'IPv4', + * mac: '01:02:03:0a:0b:0c', + * internal: false, + * cidr: '192.168.1.108/24' + * }, + * { + * address: 'fe80::a00:27ff:fe4e:66a1', + * netmask: 'ffff:ffff:ffff:ffff::', + * family: 'IPv6', + * mac: '01:02:03:0a:0b:0c', + * scopeid: 1, + * internal: false, + * cidr: 'fe80::a00:27ff:fe4e:66a1/64' + * } + * ] + * } + * ``` + * @since v0.6.0 + */ + function networkInterfaces(): NodeJS.Dict; + /** + * Returns the string path of the current user's home directory. + * + * On POSIX, it uses the `$HOME` environment variable if defined. Otherwise it + * uses the [effective UID](https://en.wikipedia.org/wiki/User_identifier#Effective_user_ID) to look up the user's home directory. + * + * On Windows, it uses the `USERPROFILE` environment variable if defined. + * Otherwise it uses the path to the profile directory of the current user. + * @since v2.3.0 + */ + function homedir(): string; + /** + * Returns information about the currently effective user. On POSIX platforms, + * this is typically a subset of the password file. The returned object includes + * the `username`, `uid`, `gid`, `shell`, and `homedir`. On Windows, the `uid` and`gid` fields are `-1`, and `shell` is `null`. + * + * The value of `homedir` returned by `os.userInfo()` is provided by the operating + * system. This differs from the result of `os.homedir()`, which queries + * environment variables for the home directory before falling back to the + * operating system response. + * + * Throws a `SystemError` if a user has no `username` or `homedir`. 
+ * @since v6.0.0 + */ + function userInfo(options: { encoding: "buffer" }): UserInfo; + function userInfo(options?: { encoding: BufferEncoding }): UserInfo; + type SignalConstants = { + [key in NodeJS.Signals]: number; + }; + namespace constants { + const UV_UDP_REUSEADDR: number; + namespace signals {} + const signals: SignalConstants; + namespace errno { + const E2BIG: number; + const EACCES: number; + const EADDRINUSE: number; + const EADDRNOTAVAIL: number; + const EAFNOSUPPORT: number; + const EAGAIN: number; + const EALREADY: number; + const EBADF: number; + const EBADMSG: number; + const EBUSY: number; + const ECANCELED: number; + const ECHILD: number; + const ECONNABORTED: number; + const ECONNREFUSED: number; + const ECONNRESET: number; + const EDEADLK: number; + const EDESTADDRREQ: number; + const EDOM: number; + const EDQUOT: number; + const EEXIST: number; + const EFAULT: number; + const EFBIG: number; + const EHOSTUNREACH: number; + const EIDRM: number; + const EILSEQ: number; + const EINPROGRESS: number; + const EINTR: number; + const EINVAL: number; + const EIO: number; + const EISCONN: number; + const EISDIR: number; + const ELOOP: number; + const EMFILE: number; + const EMLINK: number; + const EMSGSIZE: number; + const EMULTIHOP: number; + const ENAMETOOLONG: number; + const ENETDOWN: number; + const ENETRESET: number; + const ENETUNREACH: number; + const ENFILE: number; + const ENOBUFS: number; + const ENODATA: number; + const ENODEV: number; + const ENOENT: number; + const ENOEXEC: number; + const ENOLCK: number; + const ENOLINK: number; + const ENOMEM: number; + const ENOMSG: number; + const ENOPROTOOPT: number; + const ENOSPC: number; + const ENOSR: number; + const ENOSTR: number; + const ENOSYS: number; + const ENOTCONN: number; + const ENOTDIR: number; + const ENOTEMPTY: number; + const ENOTSOCK: number; + const ENOTSUP: number; + const ENOTTY: number; + const ENXIO: number; + const EOPNOTSUPP: number; + const EOVERFLOW: number; + const EPERM: number; + const EPIPE: number; + const EPROTO: number; + const EPROTONOSUPPORT: number; + const EPROTOTYPE: number; + const ERANGE: number; + const EROFS: number; + const ESPIPE: number; + const ESRCH: number; + const ESTALE: number; + const ETIME: number; + const ETIMEDOUT: number; + const ETXTBSY: number; + const EWOULDBLOCK: number; + const EXDEV: number; + const WSAEINTR: number; + const WSAEBADF: number; + const WSAEACCES: number; + const WSAEFAULT: number; + const WSAEINVAL: number; + const WSAEMFILE: number; + const WSAEWOULDBLOCK: number; + const WSAEINPROGRESS: number; + const WSAEALREADY: number; + const WSAENOTSOCK: number; + const WSAEDESTADDRREQ: number; + const WSAEMSGSIZE: number; + const WSAEPROTOTYPE: number; + const WSAENOPROTOOPT: number; + const WSAEPROTONOSUPPORT: number; + const WSAESOCKTNOSUPPORT: number; + const WSAEOPNOTSUPP: number; + const WSAEPFNOSUPPORT: number; + const WSAEAFNOSUPPORT: number; + const WSAEADDRINUSE: number; + const WSAEADDRNOTAVAIL: number; + const WSAENETDOWN: number; + const WSAENETUNREACH: number; + const WSAENETRESET: number; + const WSAECONNABORTED: number; + const WSAECONNRESET: number; + const WSAENOBUFS: number; + const WSAEISCONN: number; + const WSAENOTCONN: number; + const WSAESHUTDOWN: number; + const WSAETOOMANYREFS: number; + const WSAETIMEDOUT: number; + const WSAECONNREFUSED: number; + const WSAELOOP: number; + const WSAENAMETOOLONG: number; + const WSAEHOSTDOWN: number; + const WSAEHOSTUNREACH: number; + const WSAENOTEMPTY: number; + const WSAEPROCLIM: number; + 
const WSAEUSERS: number; + const WSAEDQUOT: number; + const WSAESTALE: number; + const WSAEREMOTE: number; + const WSASYSNOTREADY: number; + const WSAVERNOTSUPPORTED: number; + const WSANOTINITIALISED: number; + const WSAEDISCON: number; + const WSAENOMORE: number; + const WSAECANCELLED: number; + const WSAEINVALIDPROCTABLE: number; + const WSAEINVALIDPROVIDER: number; + const WSAEPROVIDERFAILEDINIT: number; + const WSASYSCALLFAILURE: number; + const WSASERVICE_NOT_FOUND: number; + const WSATYPE_NOT_FOUND: number; + const WSA_E_NO_MORE: number; + const WSA_E_CANCELLED: number; + const WSAEREFUSED: number; + } + namespace priority { + const PRIORITY_LOW: number; + const PRIORITY_BELOW_NORMAL: number; + const PRIORITY_NORMAL: number; + const PRIORITY_ABOVE_NORMAL: number; + const PRIORITY_HIGH: number; + const PRIORITY_HIGHEST: number; + } + } + const devNull: string; + const EOL: string; + /** + * Returns the operating system CPU architecture for which the Node.js binary was + * compiled. Possible values are `'arm'`, `'arm64'`, `'ia32'`, `'loong64'`,`'mips'`, `'mipsel'`, `'ppc'`, `'ppc64'`, `'riscv64'`, `'s390'`, `'s390x'`, + * and `'x64'`. + * + * The return value is equivalent to `process.arch`. + * @since v0.5.0 + */ + function arch(): string; + /** + * Returns a string identifying the kernel version. + * + * On POSIX systems, the operating system release is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `RtlGetVersion()` is used, and if it is not + * available, `GetVersionExW()` will be used. See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v13.11.0, v12.17.0 + */ + function version(): string; + /** + * Returns a string identifying the operating system platform for which + * the Node.js binary was compiled. The value is set at compile time. + * Possible values are `'aix'`, `'darwin'`, `'freebsd'`,`'linux'`,`'openbsd'`, `'sunos'`, and `'win32'`. + * + * The return value is equivalent to `process.platform`. + * + * The value `'android'` may also be returned if Node.js is built on the Android + * operating system. [Android support is experimental](https://github.com/nodejs/node/blob/HEAD/BUILDING.md#androidandroid-based-devices-eg-firefox-os). + * @since v0.5.0 + */ + function platform(): NodeJS.Platform; + /** + * Returns the machine type as a string, such as `arm`, `arm64`, `aarch64`,`mips`, `mips64`, `ppc64`, `ppc64le`, `s390`, `s390x`, `i386`, `i686`, `x86_64`. + * + * On POSIX systems, the machine type is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `RtlGetVersion()` is used, and if it is not + * available, `GetVersionExW()` will be used. See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v18.9.0, v16.18.0 + */ + function machine(): string; + /** + * Returns the operating system's default directory for temporary files as a + * string. + * @since v0.9.9 + */ + function tmpdir(): string; + /** + * Returns a string identifying the endianness of the CPU for which the Node.js + * binary was compiled. + * + * Possible values are `'BE'` for big endian and `'LE'` for little endian. + * @since v0.9.4 + */ + function endianness(): "BE" | "LE"; + /** + * Returns the scheduling priority for the process specified by `pid`. If `pid` is + * not provided or is `0`, the priority of the current process is returned. 
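+ *
+ * For example, reading the priority of the current process:
+ *
+ * ```js
+ * const os = require('node:os');
+ * console.log(os.getPriority());            // priority of the current process
+ * console.log(os.getPriority(process.pid)); // same process, explicit pid
+ * ```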
+ * @since v10.10.0 + * @param [pid=0] The process ID to retrieve scheduling priority for. + */ + function getPriority(pid?: number): number; + /** + * Attempts to set the scheduling priority for the process specified by `pid`. If`pid` is not provided or is `0`, the process ID of the current process is used. + * + * The `priority` input must be an integer between `-20` (high priority) and `19`(low priority). Due to differences between Unix priority levels and Windows + * priority classes, `priority` is mapped to one of six priority constants in`os.constants.priority`. When retrieving a process priority level, this range + * mapping may cause the return value to be slightly different on Windows. To avoid + * confusion, set `priority` to one of the priority constants. + * + * On Windows, setting priority to `PRIORITY_HIGHEST` requires elevated user + * privileges. Otherwise the set priority will be silently reduced to`PRIORITY_HIGH`. + * @since v10.10.0 + * @param [pid=0] The process ID to set scheduling priority for. + * @param priority The scheduling priority to assign to the process. + */ + function setPriority(priority: number): void; + function setPriority(pid: number, priority: number): void; +} +declare module "node:os" { + export * from "os"; +} diff --git a/dist/node_modules/@types/node/ts4.8/path.d.ts b/dist/node_modules/@types/node/ts4.8/path.d.ts new file mode 100644 index 0000000..6f07681 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/path.d.ts @@ -0,0 +1,191 @@ +declare module "path/posix" { + import path = require("path"); + export = path; +} +declare module "path/win32" { + import path = require("path"); + export = path; +} +/** + * The `node:path` module provides utilities for working with file and directory + * paths. It can be accessed using: + * + * ```js + * const path = require('node:path'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/path.js) + */ +declare module "path" { + namespace path { + /** + * A parsed path object generated by path.parse() or consumed by path.format(). + */ + interface ParsedPath { + /** + * The root of the path such as '/' or 'c:\' + */ + root: string; + /** + * The full directory path such as '/home/user/dir' or 'c:\path\dir' + */ + dir: string; + /** + * The file name including extension (if any) such as 'index.html' + */ + base: string; + /** + * The file extension (if any) such as '.html' + */ + ext: string; + /** + * The file name without extension (if any) such as 'index' + */ + name: string; + } + interface FormatInputPathObject { + /** + * The root of the path such as '/' or 'c:\' + */ + root?: string | undefined; + /** + * The full directory path such as '/home/user/dir' or 'c:\path\dir' + */ + dir?: string | undefined; + /** + * The file name including extension (if any) such as 'index.html' + */ + base?: string | undefined; + /** + * The file extension (if any) such as '.html' + */ + ext?: string | undefined; + /** + * The file name without extension (if any) such as 'index' + */ + name?: string | undefined; + } + interface PlatformPath { + /** + * Normalize a string path, reducing '..' and '.' parts. + * When multiple slashes are found, they're replaced by a single one; when the path contains a trailing slash, it is preserved. On Windows backslashes are used. + * + * @param path string path to normalize. + * @throws {TypeError} if `path` is not a string. + */ + normalize(path: string): string; + /** + * Join all arguments together and normalize the resulting path. 
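+ *
+ * For example (shown on POSIX; Windows would use backslashes):
+ *
+ * ```js
+ * const path = require('node:path');
+ * path.join('/foo', 'bar', 'baz/asdf', 'quux', '..');
+ * // Returns: '/foo/bar/baz/asdf'
+ * ```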
+ * + * @param paths paths to join. + * @throws {TypeError} if any of the path segments is not a string. + */ + join(...paths: string[]): string; + /** + * The right-most parameter is considered {to}. Other parameters are considered an array of {from}. + * + * Starting from leftmost {from} parameter, resolves {to} to an absolute path. + * + * If {to} isn't already absolute, {from} arguments are prepended in right to left order, + * until an absolute path is found. If after using all {from} paths still no absolute path is found, + * the current working directory is used as well. The resulting path is normalized, + * and trailing slashes are removed unless the path gets resolved to the root directory. + * + * @param paths A sequence of paths or path segments. + * @throws {TypeError} if any of the arguments is not a string. + */ + resolve(...paths: string[]): string; + /** + * Determines whether {path} is an absolute path. An absolute path will always resolve to the same location, regardless of the working directory. + * + * If the given {path} is a zero-length string, `false` will be returned. + * + * @param path path to test. + * @throws {TypeError} if `path` is not a string. + */ + isAbsolute(path: string): boolean; + /** + * Solve the relative path from {from} to {to} based on the current working directory. + * At times we have two absolute paths, and we need to derive the relative path from one to the other. This is actually the reverse transform of path.resolve. + * + * @throws {TypeError} if either `from` or `to` is not a string. + */ + relative(from: string, to: string): string; + /** + * Return the directory name of a path. Similar to the Unix dirname command. + * + * @param path the path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + dirname(path: string): string; + /** + * Return the last portion of a path. Similar to the Unix basename command. + * Often used to extract the file name from a fully qualified path. + * + * @param path the path to evaluate. + * @param suffix optionally, an extension to remove from the result. + * @throws {TypeError} if `path` is not a string or if `ext` is given and is not a string. + */ + basename(path: string, suffix?: string): string; + /** + * Return the extension of the path, from the last '.' to end of string in the last portion of the path. + * If there is no '.' in the last portion of the path or the first character of it is '.', then it returns an empty string. + * + * @param path the path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + extname(path: string): string; + /** + * The platform-specific file separator. '\\' or '/'. + */ + readonly sep: "\\" | "/"; + /** + * The platform-specific file delimiter. ';' or ':'. + */ + readonly delimiter: ";" | ":"; + /** + * Returns an object from a path string - the opposite of format(). + * + * @param path path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + parse(path: string): ParsedPath; + /** + * Returns a path string from an object - the opposite of parse(). + * + * @param pathObject path to evaluate. + */ + format(pathObject: FormatInputPathObject): string; + /** + * On Windows systems only, returns an equivalent namespace-prefixed path for the given path. + * If path is not a string, path will be returned without modifications. + * This method is meaningful only on Windows system. + * On POSIX systems, the method is non-operational and always returns path without modifications. 
+ */ + toNamespacedPath(path: string): string; + /** + * Posix specific pathing. + * Same as parent object on posix. + */ + readonly posix: PlatformPath; + /** + * Windows specific pathing. + * Same as parent object on windows + */ + readonly win32: PlatformPath; + } + } + const path: path.PlatformPath; + export = path; +} +declare module "node:path" { + import path = require("path"); + export = path; +} +declare module "node:path/posix" { + import path = require("path/posix"); + export = path; +} +declare module "node:path/win32" { + import path = require("path/win32"); + export = path; +} diff --git a/dist/node_modules/@types/node/ts4.8/perf_hooks.d.ts b/dist/node_modules/@types/node/ts4.8/perf_hooks.d.ts new file mode 100644 index 0000000..c0ce852 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/perf_hooks.d.ts @@ -0,0 +1,645 @@ +/** + * This module provides an implementation of a subset of the W3C [Web Performance APIs](https://w3c.github.io/perf-timing-primer/) as well as additional APIs for + * Node.js-specific performance measurements. + * + * Node.js supports the following [Web Performance APIs](https://w3c.github.io/perf-timing-primer/): + * + * * [High Resolution Time](https://www.w3.org/TR/hr-time-2) + * * [Performance Timeline](https://w3c.github.io/performance-timeline/) + * * [User Timing](https://www.w3.org/TR/user-timing/) + * * [Resource Timing](https://www.w3.org/TR/resource-timing-2/) + * + * ```js + * const { PerformanceObserver, performance } = require('node:perf_hooks'); + * + * const obs = new PerformanceObserver((items) => { + * console.log(items.getEntries()[0].duration); + * performance.clearMarks(); + * }); + * obs.observe({ type: 'measure' }); + * performance.measure('Start to Now'); + * + * performance.mark('A'); + * doSomeLongRunningProcess(() => { + * performance.measure('A to Now', 'A'); + * + * performance.mark('B'); + * performance.measure('A to B', 'A', 'B'); + * }); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/perf_hooks.js) + */ +declare module "perf_hooks" { + import { AsyncResource } from "node:async_hooks"; + type EntryType = "node" | "mark" | "measure" | "gc" | "function" | "http2" | "http" | "dns" | "net"; + interface NodeGCPerformanceDetail { + /** + * When `performanceEntry.entryType` is equal to 'gc', `the performance.kind` property identifies + * the type of garbage collection operation that occurred. + * See perf_hooks.constants for valid values. + */ + readonly kind?: number | undefined; + /** + * When `performanceEntry.entryType` is equal to 'gc', the `performance.flags` + * property contains additional information about garbage collection operation. + * See perf_hooks.constants for valid values. + */ + readonly flags?: number | undefined; + } + /** + * The constructor of this class is not exposed to users directly. + * @since v8.5.0 + */ + class PerformanceEntry { + protected constructor(); + /** + * The total number of milliseconds elapsed for this entry. This value will not + * be meaningful for all Performance Entry types. + * @since v8.5.0 + */ + readonly duration: number; + /** + * The name of the performance entry. + * @since v8.5.0 + */ + readonly name: string; + /** + * The high resolution millisecond timestamp marking the starting time of the + * Performance Entry. + * @since v8.5.0 + */ + readonly startTime: number; + /** + * The type of the performance entry. 
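The `posix` and `win32` sub-objects declared just above apply their platform's rules regardless of the host operating system; a minimal sketch (not part of the vendored file):

```ts
// Sketch: path.posix and path.win32 expose the same PlatformPath API with
// fixed separators, independent of process.platform.
import path from "node:path";

console.log(path.posix.join("a", "b", "c"));    // 'a/b/c'
console.log(path.win32.join("a", "b", "c"));    // 'a\\b\\c'
console.log(path.posix.sep, path.win32.sep);    // '/' '\'
console.log(path.win32.isAbsolute("C:\\temp")); // true, even on Linux/macOS
```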
It may be one of: + * + * * `'node'` (Node.js only) + * * `'mark'` (available on the Web) + * * `'measure'` (available on the Web) + * * `'gc'` (Node.js only) + * * `'function'` (Node.js only) + * * `'http2'` (Node.js only) + * * `'http'` (Node.js only) + * @since v8.5.0 + */ + readonly entryType: EntryType; + /** + * Additional detail specific to the `entryType`. + * @since v16.0.0 + */ + readonly detail?: NodeGCPerformanceDetail | unknown | undefined; // TODO: Narrow this based on entry type. + toJSON(): any; + } + /** + * Exposes marks created via the `Performance.mark()` method. + * @since v18.2.0, v16.17.0 + */ + class PerformanceMark extends PerformanceEntry { + readonly duration: 0; + readonly entryType: "mark"; + } + /** + * Exposes measures created via the `Performance.measure()` method. + * + * The constructor of this class is not exposed to users directly. + * @since v18.2.0, v16.17.0 + */ + class PerformanceMeasure extends PerformanceEntry { + readonly entryType: "measure"; + } + /** + * _This property is an extension by Node.js. It is not available in Web browsers._ + * + * Provides timing details for Node.js itself. The constructor of this class + * is not exposed to users. + * @since v8.5.0 + */ + class PerformanceNodeTiming extends PerformanceEntry { + /** + * The high resolution millisecond timestamp at which the Node.js process + * completed bootstrapping. If bootstrapping has not yet finished, the property + * has the value of -1. + * @since v8.5.0 + */ + readonly bootstrapComplete: number; + /** + * The high resolution millisecond timestamp at which the Node.js environment was + * initialized. + * @since v8.5.0 + */ + readonly environment: number; + /** + * The high resolution millisecond timestamp of the amount of time the event loop + * has been idle within the event loop's event provider (e.g. `epoll_wait`). This + * does not take CPU usage into consideration. If the event loop has not yet + * started (e.g., in the first tick of the main script), the property has the + * value of 0. + * @since v14.10.0, v12.19.0 + */ + readonly idleTime: number; + /** + * The high resolution millisecond timestamp at which the Node.js event loop + * exited. If the event loop has not yet exited, the property has the value of -1\. + * It can only have a value of not -1 in a handler of the `'exit'` event. + * @since v8.5.0 + */ + readonly loopExit: number; + /** + * The high resolution millisecond timestamp at which the Node.js event loop + * started. If the event loop has not yet started (e.g., in the first tick of the + * main script), the property has the value of -1. + * @since v8.5.0 + */ + readonly loopStart: number; + /** + * The high resolution millisecond timestamp at which the V8 platform was + * initialized. + * @since v8.5.0 + */ + readonly v8Start: number; + } + interface EventLoopUtilization { + idle: number; + active: number; + utilization: number; + } + /** + * @param util1 The result of a previous call to eventLoopUtilization() + * @param util2 The result of a previous call to eventLoopUtilization() prior to util1 + */ + type EventLoopUtilityFunction = ( + util1?: EventLoopUtilization, + util2?: EventLoopUtilization, + ) => EventLoopUtilization; + interface MarkOptions { + /** + * Additional optional detail to include with the mark. + */ + detail?: unknown | undefined; + /** + * An optional timestamp to be used as the mark time. + * @default `performance.now()`. 
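A hedged sketch of reading the `PerformanceNodeTiming` milestones and taking an event-loop-utilization delta with the `EventLoopUtilityFunction` typed above (the 100 ms timer is arbitrary):

```ts
// Sketch: nodeTiming milestones plus an eventLoopUtilization() delta
// between two samples.
import { performance } from "node:perf_hooks";

const { bootstrapComplete, loopStart, idleTime } = performance.nodeTiming;
console.log({ bootstrapComplete, loopStart, idleTime });

const sample = performance.eventLoopUtilization();
setTimeout(() => {
  // Passing the earlier sample yields the utilization accumulated since it.
  const delta = performance.eventLoopUtilization(sample);
  console.log(delta.utilization);                       // between 0 and 1
}, 100);
```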
+ */ + startTime?: number | undefined; + } + interface MeasureOptions { + /** + * Additional optional detail to include with the mark. + */ + detail?: unknown | undefined; + /** + * Duration between start and end times. + */ + duration?: number | undefined; + /** + * Timestamp to be used as the end time, or a string identifying a previously recorded mark. + */ + end?: number | string | undefined; + /** + * Timestamp to be used as the start time, or a string identifying a previously recorded mark. + */ + start?: number | string | undefined; + } + interface TimerifyOptions { + /** + * A histogram object created using + * `perf_hooks.createHistogram()` that will record runtime durations in + * nanoseconds. + */ + histogram?: RecordableHistogram | undefined; + } + interface Performance { + /** + * If name is not provided, removes all PerformanceMark objects from the Performance Timeline. + * If name is provided, removes only the named mark. + * @param name + */ + clearMarks(name?: string): void; + /** + * If name is not provided, removes all PerformanceMeasure objects from the Performance Timeline. + * If name is provided, removes only the named measure. + * @param name + * @since v16.7.0 + */ + clearMeasures(name?: string): void; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime`. + * If you are only interested in performance entries of certain types or that have certain names, see + * `performance.getEntriesByType()` and `performance.getEntriesByName()`. + * @since v16.7.0 + */ + getEntries(): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime` + * whose `performanceEntry.name` is equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to `type`. + * @param name + * @param type + * @since v16.7.0 + */ + getEntriesByName(name: string, type?: EntryType): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime` + * whose `performanceEntry.entryType` is equal to `type`. + * @param type + * @since v16.7.0 + */ + getEntriesByType(type: EntryType): PerformanceEntry[]; + /** + * Creates a new PerformanceMark entry in the Performance Timeline. + * A PerformanceMark is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'mark', + * and whose performanceEntry.duration is always 0. + * Performance marks are used to mark specific significant moments in the Performance Timeline. + * @param name + * @return The PerformanceMark entry that was created + */ + mark(name?: string, options?: MarkOptions): PerformanceMark; + /** + * Creates a new PerformanceMeasure entry in the Performance Timeline. + * A PerformanceMeasure is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'measure', + * and whose performanceEntry.duration measures the number of milliseconds elapsed since startMark and endMark. + * + * The startMark argument may identify any existing PerformanceMark in the the Performance Timeline, or may identify + * any of the timestamp properties provided by the PerformanceNodeTiming class. If the named startMark does not exist, + * then startMark is set to timeOrigin by default. + * + * The endMark argument must identify any existing PerformanceMark in the the Performance Timeline or any of the timestamp + * properties provided by the PerformanceNodeTiming class. 
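The `MarkOptions` and `MeasureOptions` shapes above allow a `detail` payload and explicit start/end references; a minimal sketch (the mark names and query string are hypothetical):

```ts
// Sketch: marks carrying a detail payload, then a measure between them
// using the MeasureOptions form of performance.measure().
import { performance, PerformanceObserver } from "node:perf_hooks";

const obs = new PerformanceObserver((list) => {
  for (const entry of list.getEntries()) {
    console.log(entry.name, entry.duration, entry.detail);
  }
  obs.disconnect();
});
obs.observe({ type: "measure" });

performance.mark("db:start", { detail: { query: "SELECT 1" } });
// ... perform the work being timed ...
performance.mark("db:end");
performance.measure("db", { start: "db:start", end: "db:end", detail: "round trip" });
```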
If the named endMark does not exist, an error will be thrown. + * @param name + * @param startMark + * @param endMark + * @return The PerformanceMeasure entry that was created + */ + measure(name: string, startMark?: string, endMark?: string): PerformanceMeasure; + measure(name: string, options: MeasureOptions): PerformanceMeasure; + /** + * An instance of the PerformanceNodeTiming class that provides performance metrics for specific Node.js operational milestones. + */ + readonly nodeTiming: PerformanceNodeTiming; + /** + * @return the current high resolution millisecond timestamp + */ + now(): number; + /** + * The timeOrigin specifies the high resolution millisecond timestamp from which all performance metric durations are measured. + */ + readonly timeOrigin: number; + /** + * Wraps a function within a new function that measures the running time of the wrapped function. + * A PerformanceObserver must be subscribed to the 'function' event type in order for the timing details to be accessed. + * @param fn + */ + timerify any>(fn: T, options?: TimerifyOptions): T; + /** + * eventLoopUtilization is similar to CPU utilization except that it is calculated using high precision wall-clock time. + * It represents the percentage of time the event loop has spent outside the event loop's event provider (e.g. epoll_wait). + * No other CPU idle time is taken into consideration. + */ + eventLoopUtilization: EventLoopUtilityFunction; + } + interface PerformanceObserverEntryList { + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime`. + * + * ```js + * const { + * performance, + * PerformanceObserver, + * } = require('node:perf_hooks'); + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntries()); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 81.465639, + * * duration: 0, + * * detail: null + * * }, + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 81.860064, + * * duration: 0, + * * detail: null + * * } + * * ] + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ type: 'mark' }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntries(): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime` whose `performanceEntry.name` is + * equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to`type`. 
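A sketch of `timerify()` together with the optional histogram from `TimerifyOptions`: each call is reported both as a `'function'` entry (milliseconds) and into the histogram (nanoseconds). Illustrative only, assuming `JSON.parse` as the wrapped function:

```ts
// Sketch: wrap a function with timerify() and record its run times into a
// RecordableHistogram, then read them back from a 'function' observer.
import { performance, PerformanceObserver, createHistogram } from "node:perf_hooks";

const histogram = createHistogram();
const timedParse = performance.timerify(JSON.parse, { histogram });

const obs = new PerformanceObserver((list) => {
  console.log(list.getEntriesByType("function")[0].duration); // milliseconds
  console.log(histogram.mean);                                // nanoseconds
  obs.disconnect();
});
obs.observe({ entryTypes: ["function"] });

timedParse('{"ok": true}');
```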
+ * + * ```js + * const { + * performance, + * PerformanceObserver, + * } = require('node:perf_hooks'); + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntriesByName('meow')); + * + * * [ + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 98.545991, + * * duration: 0, + * * detail: null + * * } + * * ] + * + * console.log(perfObserverList.getEntriesByName('nope')); // [] + * + * console.log(perfObserverList.getEntriesByName('test', 'mark')); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 63.518931, + * * duration: 0, + * * detail: null + * * } + * * ] + * + * console.log(perfObserverList.getEntriesByName('test', 'measure')); // [] + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ entryTypes: ['mark', 'measure'] }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntriesByName(name: string, type?: EntryType): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime` whose `performanceEntry.entryType`is equal to `type`. + * + * ```js + * const { + * performance, + * PerformanceObserver, + * } = require('node:perf_hooks'); + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntriesByType('mark')); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 55.897834, + * * duration: 0, + * * detail: null + * * }, + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 56.350146, + * * duration: 0, + * * detail: null + * * } + * * ] + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ type: 'mark' }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntriesByType(type: EntryType): PerformanceEntry[]; + } + type PerformanceObserverCallback = (list: PerformanceObserverEntryList, observer: PerformanceObserver) => void; + /** + * @since v8.5.0 + */ + class PerformanceObserver extends AsyncResource { + constructor(callback: PerformanceObserverCallback); + /** + * Disconnects the `PerformanceObserver` instance from all notifications. + * @since v8.5.0 + */ + disconnect(): void; + /** + * Subscribes the `PerformanceObserver` instance to notifications of new `PerformanceEntry` instances identified either by `options.entryTypes`or `options.type`: + * + * ```js + * const { + * performance, + * PerformanceObserver, + * } = require('node:perf_hooks'); + * + * const obs = new PerformanceObserver((list, observer) => { + * // Called once asynchronously. `list` contains three items. 
+ * }); + * obs.observe({ type: 'mark' }); + * + * for (let n = 0; n < 3; n++) + * performance.mark(`test${n}`); + * ``` + * @since v8.5.0 + */ + observe( + options: + | { + entryTypes: readonly EntryType[]; + buffered?: boolean | undefined; + } + | { + type: EntryType; + buffered?: boolean | undefined; + }, + ): void; + } + namespace constants { + const NODE_PERFORMANCE_GC_MAJOR: number; + const NODE_PERFORMANCE_GC_MINOR: number; + const NODE_PERFORMANCE_GC_INCREMENTAL: number; + const NODE_PERFORMANCE_GC_WEAKCB: number; + const NODE_PERFORMANCE_GC_FLAGS_NO: number; + const NODE_PERFORMANCE_GC_FLAGS_CONSTRUCT_RETAINED: number; + const NODE_PERFORMANCE_GC_FLAGS_FORCED: number; + const NODE_PERFORMANCE_GC_FLAGS_SYNCHRONOUS_PHANTOM_PROCESSING: number; + const NODE_PERFORMANCE_GC_FLAGS_ALL_AVAILABLE_GARBAGE: number; + const NODE_PERFORMANCE_GC_FLAGS_ALL_EXTERNAL_MEMORY: number; + const NODE_PERFORMANCE_GC_FLAGS_SCHEDULE_IDLE: number; + } + const performance: Performance; + interface EventLoopMonitorOptions { + /** + * The sampling rate in milliseconds. + * Must be greater than zero. + * @default 10 + */ + resolution?: number | undefined; + } + interface Histogram { + /** + * Returns a `Map` object detailing the accumulated percentile distribution. + * @since v11.10.0 + */ + readonly percentiles: Map; + /** + * The number of times the event loop delay exceeded the maximum 1 hour event + * loop delay threshold. + * @since v11.10.0 + */ + readonly exceeds: number; + /** + * The minimum recorded event loop delay. + * @since v11.10.0 + */ + readonly min: number; + /** + * The maximum recorded event loop delay. + * @since v11.10.0 + */ + readonly max: number; + /** + * The mean of the recorded event loop delays. + * @since v11.10.0 + */ + readonly mean: number; + /** + * The standard deviation of the recorded event loop delays. + * @since v11.10.0 + */ + readonly stddev: number; + /** + * Resets the collected histogram data. + * @since v11.10.0 + */ + reset(): void; + /** + * Returns the value at the given percentile. + * @since v11.10.0 + * @param percentile A percentile value in the range (0, 100]. + */ + percentile(percentile: number): number; + } + interface IntervalHistogram extends Histogram { + /** + * Enables the update interval timer. Returns `true` if the timer was + * started, `false` if it was already started. + * @since v11.10.0 + */ + enable(): boolean; + /** + * Disables the update interval timer. Returns `true` if the timer was + * stopped, `false` if it was already stopped. + * @since v11.10.0 + */ + disable(): boolean; + } + interface RecordableHistogram extends Histogram { + /** + * @since v15.9.0, v14.18.0 + * @param val The amount to record in the histogram. + */ + record(val: number | bigint): void; + /** + * Calculates the amount of time (in nanoseconds) that has passed since the + * previous call to `recordDelta()` and records that amount in the histogram. + * + * ## Examples + * @since v15.9.0, v14.18.0 + */ + recordDelta(): void; + /** + * Adds the values from `other` to this histogram. + * @since v17.4.0, v16.14.0 + */ + add(other: RecordableHistogram): void; + } + /** + * _This property is an extension by Node.js. It is not available in Web browsers._ + * + * Creates an `IntervalHistogram` object that samples and reports the event loop + * delay over time. The delays will be reported in nanoseconds. + * + * Using a timer to detect approximate event loop delay works because the + * execution of timers is tied specifically to the lifecycle of the libuv + * event loop. 
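A minimal sketch of feeding a `RecordableHistogram` by hand through the `record()` declaration above; the timed work is arbitrary and only there to produce nanosecond durations:

```ts
// Sketch: manually recording nanosecond durations into a histogram created
// by createHistogram(), then reading the summary statistics.
import { createHistogram } from "node:perf_hooks";
import { hrtime } from "node:process";

const h = createHistogram();

for (let i = 0; i < 100; i++) {
  const start = hrtime.bigint();
  JSON.stringify({ i, payload: "x".repeat(256) });
  h.record(hrtime.bigint() - start);                    // bigint values are accepted
}

console.log(h.min, h.max, Math.round(h.mean), Math.round(h.stddev));
console.log(h.percentile(99));                          // 99th percentile, in ns
```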
That is, a delay in the loop will cause a delay in the execution + * of the timer, and those delays are specifically what this API is intended to + * detect. + * + * ```js + * const { monitorEventLoopDelay } = require('node:perf_hooks'); + * const h = monitorEventLoopDelay({ resolution: 20 }); + * h.enable(); + * // Do something. + * h.disable(); + * console.log(h.min); + * console.log(h.max); + * console.log(h.mean); + * console.log(h.stddev); + * console.log(h.percentiles); + * console.log(h.percentile(50)); + * console.log(h.percentile(99)); + * ``` + * @since v11.10.0 + */ + function monitorEventLoopDelay(options?: EventLoopMonitorOptions): IntervalHistogram; + interface CreateHistogramOptions { + /** + * The minimum recordable value. Must be an integer value greater than 0. + * @default 1 + */ + min?: number | bigint | undefined; + /** + * The maximum recordable value. Must be an integer value greater than min. + * @default Number.MAX_SAFE_INTEGER + */ + max?: number | bigint | undefined; + /** + * The number of accuracy digits. Must be a number between 1 and 5. + * @default 3 + */ + figures?: number | undefined; + } + /** + * Returns a `RecordableHistogram`. + * @since v15.9.0, v14.18.0 + */ + function createHistogram(options?: CreateHistogramOptions): RecordableHistogram; + import { performance as _performance } from "perf_hooks"; + global { + /** + * `performance` is a global reference for `require('perf_hooks').performance` + * https://nodejs.org/api/globals.html#performance + * @since v16.0.0 + */ + var performance: typeof globalThis extends { + onmessage: any; + performance: infer T; + } ? T + : typeof _performance; + } +} +declare module "node:perf_hooks" { + export * from "perf_hooks"; +} diff --git a/dist/node_modules/@types/node/ts4.8/process.d.ts b/dist/node_modules/@types/node/ts4.8/process.d.ts new file mode 100644 index 0000000..d22308f --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/process.d.ts @@ -0,0 +1,1561 @@ +declare module "process" { + import * as tty from "node:tty"; + import { Worker } from "node:worker_threads"; + global { + var process: NodeJS.Process; + namespace NodeJS { + // this namespace merge is here because these are specifically used + // as the type for process.stdin, process.stdout, and process.stderr. + // they can't live in tty.d.ts because we need to disambiguate the imported name. + interface ReadStream extends tty.ReadStream {} + interface WriteStream extends tty.WriteStream {} + interface MemoryUsageFn { + /** + * The `process.memoryUsage()` method iterate over each page to gather informations about memory + * usage which can be slow depending on the program memory allocations. + */ + (): MemoryUsage; + /** + * method returns an integer representing the Resident Set Size (RSS) in bytes. 
+ */ + rss(): number; + } + interface MemoryUsage { + rss: number; + heapTotal: number; + heapUsed: number; + external: number; + arrayBuffers: number; + } + interface CpuUsage { + user: number; + system: number; + } + interface ProcessRelease { + name: string; + sourceUrl?: string | undefined; + headersUrl?: string | undefined; + libUrl?: string | undefined; + lts?: string | undefined; + } + interface ProcessVersions extends Dict { + http_parser: string; + node: string; + v8: string; + ares: string; + uv: string; + zlib: string; + modules: string; + openssl: string; + } + type Platform = + | "aix" + | "android" + | "darwin" + | "freebsd" + | "haiku" + | "linux" + | "openbsd" + | "sunos" + | "win32" + | "cygwin" + | "netbsd"; + type Architecture = + | "arm" + | "arm64" + | "ia32" + | "mips" + | "mipsel" + | "ppc" + | "ppc64" + | "riscv64" + | "s390" + | "s390x" + | "x64"; + type Signals = + | "SIGABRT" + | "SIGALRM" + | "SIGBUS" + | "SIGCHLD" + | "SIGCONT" + | "SIGFPE" + | "SIGHUP" + | "SIGILL" + | "SIGINT" + | "SIGIO" + | "SIGIOT" + | "SIGKILL" + | "SIGPIPE" + | "SIGPOLL" + | "SIGPROF" + | "SIGPWR" + | "SIGQUIT" + | "SIGSEGV" + | "SIGSTKFLT" + | "SIGSTOP" + | "SIGSYS" + | "SIGTERM" + | "SIGTRAP" + | "SIGTSTP" + | "SIGTTIN" + | "SIGTTOU" + | "SIGUNUSED" + | "SIGURG" + | "SIGUSR1" + | "SIGUSR2" + | "SIGVTALRM" + | "SIGWINCH" + | "SIGXCPU" + | "SIGXFSZ" + | "SIGBREAK" + | "SIGLOST" + | "SIGINFO"; + type UncaughtExceptionOrigin = "uncaughtException" | "unhandledRejection"; + type MultipleResolveType = "resolve" | "reject"; + type BeforeExitListener = (code: number) => void; + type DisconnectListener = () => void; + type ExitListener = (code: number) => void; + type RejectionHandledListener = (promise: Promise) => void; + type UncaughtExceptionListener = (error: Error, origin: UncaughtExceptionOrigin) => void; + /** + * Most of the time the unhandledRejection will be an Error, but this should not be relied upon + * as *anything* can be thrown/rejected, it is therefore unsafe to assume that the value is an Error. + */ + type UnhandledRejectionListener = (reason: unknown, promise: Promise) => void; + type WarningListener = (warning: Error) => void; + type MessageListener = (message: unknown, sendHandle: unknown) => void; + type SignalsListener = (signal: Signals) => void; + type MultipleResolveListener = ( + type: MultipleResolveType, + promise: Promise, + value: unknown, + ) => void; + type WorkerListener = (worker: Worker) => void; + interface Socket extends ReadWriteStream { + isTTY?: true | undefined; + } + // Alias for compatibility + interface ProcessEnv extends Dict { + /** + * Can be used to change the default timezone at runtime + */ + TZ?: string; + } + interface HRTime { + (time?: [number, number]): [number, number]; + bigint(): bigint; + } + interface ProcessReport { + /** + * Directory where the report is written. + * working directory of the Node.js process. + * @default '' indicating that reports are written to the current + */ + directory: string; + /** + * Filename where the report is written. + * The default value is the empty string. + * @default '' the output filename will be comprised of a timestamp, + * PID, and sequence number. + */ + filename: string; + /** + * Returns a JSON-formatted diagnostic report for the running process. + * The report's JavaScript stack trace is taken from err, if present. + */ + getReport(err?: Error): string; + /** + * If true, a diagnostic report is generated on fatal errors, + * such as out of memory errors or failed C++ assertions. 
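A sketch contrasting the two call shapes of `MemoryUsageFn` declared above: the full breakdown versus the cheaper `rss()`-only read (illustrative, not part of the vendored file):

```ts
// Sketch: process.memoryUsage() walks every page for a full breakdown,
// while process.memoryUsage.rss() only reads the resident set size.
import process from "node:process";

const usage = process.memoryUsage();
console.log({
  rss: usage.rss,
  heapTotal: usage.heapTotal,
  heapUsed: usage.heapUsed,
  external: usage.external,
  arrayBuffers: usage.arrayBuffers,
});

console.log(process.memoryUsage.rss());                 // bytes, cheaper call
```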
+ * @default false + */ + reportOnFatalError: boolean; + /** + * If true, a diagnostic report is generated when the process + * receives the signal specified by process.report.signal. + * @default false + */ + reportOnSignal: boolean; + /** + * If true, a diagnostic report is generated on uncaught exception. + * @default false + */ + reportOnUncaughtException: boolean; + /** + * The signal used to trigger the creation of a diagnostic report. + * @default 'SIGUSR2' + */ + signal: Signals; + /** + * Writes a diagnostic report to a file. If filename is not provided, the default filename + * includes the date, time, PID, and a sequence number. + * The report's JavaScript stack trace is taken from err, if present. + * + * @param fileName Name of the file where the report is written. + * This should be a relative path, that will be appended to the directory specified in + * `process.report.directory`, or the current working directory of the Node.js process, + * if unspecified. + * @param error A custom error used for reporting the JavaScript stack. + * @return Filename of the generated report. + */ + writeReport(fileName?: string): string; + writeReport(error?: Error): string; + writeReport(fileName?: string, err?: Error): string; + } + interface ResourceUsage { + fsRead: number; + fsWrite: number; + involuntaryContextSwitches: number; + ipcReceived: number; + ipcSent: number; + majorPageFault: number; + maxRSS: number; + minorPageFault: number; + sharedMemorySize: number; + signalsCount: number; + swappedOut: number; + systemCPUTime: number; + unsharedDataSize: number; + unsharedStackSize: number; + userCPUTime: number; + voluntaryContextSwitches: number; + } + interface EmitWarningOptions { + /** + * When `warning` is a `string`, `type` is the name to use for the _type_ of warning being emitted. + * + * @default 'Warning' + */ + type?: string | undefined; + /** + * A unique identifier for the warning instance being emitted. + */ + code?: string | undefined; + /** + * When `warning` is a `string`, `ctor` is an optional function used to limit the generated stack trace. + * + * @default process.emitWarning + */ + ctor?: Function | undefined; + /** + * Additional text to include with the error. + */ + detail?: string | undefined; + } + interface ProcessConfig { + readonly target_defaults: { + readonly cflags: any[]; + readonly default_configuration: string; + readonly defines: string[]; + readonly include_dirs: string[]; + readonly libraries: string[]; + }; + readonly variables: { + readonly clang: number; + readonly host_arch: string; + readonly node_install_npm: boolean; + readonly node_install_waf: boolean; + readonly node_prefix: string; + readonly node_shared_openssl: boolean; + readonly node_shared_v8: boolean; + readonly node_shared_zlib: boolean; + readonly node_use_dtrace: boolean; + readonly node_use_etw: boolean; + readonly node_use_openssl: boolean; + readonly target_arch: string; + readonly v8_no_strict_aliasing: number; + readonly v8_use_snapshot: boolean; + readonly visibility: string; + }; + } + interface Process extends EventEmitter { + /** + * The `process.stdout` property returns a stream connected to`stdout` (fd `1`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `1` refers to a file, in which case it is + * a `Writable` stream. 
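A hedged sketch of the `ProcessReport` surface above; `process.report` can be absent on some builds, and the output directory shown is purely illustrative:

```ts
// Sketch: enable on-signal diagnostic reports and write one on demand.
// The guard matters because process.report may be undefined.
import process from "node:process";

if (process.report) {
  process.report.reportOnSignal = true;                 // reacts to SIGUSR2 by default
  process.report.directory = "/tmp/diagnostics";        // illustrative path

  const file = process.report.writeReport();            // returns the filename used
  console.log(`diagnostic report written to ${file}`);
}
```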
+ * + * For example, to copy `process.stdin` to `process.stdout`: + * + * ```js + * import { stdin, stdout } from 'node:process'; + * + * stdin.pipe(stdout); + * ``` + * + * `process.stdout` differs from other Node.js streams in important ways. See `note on process I/O` for more information. + */ + stdout: WriteStream & { + fd: 1; + }; + /** + * The `process.stderr` property returns a stream connected to`stderr` (fd `2`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `2` refers to a file, in which case it is + * a `Writable` stream. + * + * `process.stderr` differs from other Node.js streams in important ways. See `note on process I/O` for more information. + */ + stderr: WriteStream & { + fd: 2; + }; + /** + * The `process.stdin` property returns a stream connected to`stdin` (fd `0`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `0` refers to a file, in which case it is + * a `Readable` stream. + * + * For details of how to read from `stdin` see `readable.read()`. + * + * As a `Duplex` stream, `process.stdin` can also be used in "old" mode that + * is compatible with scripts written for Node.js prior to v0.10\. + * For more information see `Stream compatibility`. + * + * In "old" streams mode the `stdin` stream is paused by default, so one + * must call `process.stdin.resume()` to read from it. Note also that calling`process.stdin.resume()` itself would switch stream to "old" mode. + */ + stdin: ReadStream & { + fd: 0; + }; + openStdin(): Socket; + /** + * The `process.argv` property returns an array containing the command-line + * arguments passed when the Node.js process was launched. The first element will + * be {@link execPath}. See `process.argv0` if access to the original value + * of `argv[0]` is needed. The second element will be the path to the JavaScript + * file being executed. The remaining elements will be any additional command-line + * arguments. + * + * For example, assuming the following script for `process-args.js`: + * + * ```js + * import { argv } from 'node:process'; + * + * // print process.argv + * argv.forEach((val, index) => { + * console.log(`${index}: ${val}`); + * }); + * ``` + * + * Launching the Node.js process as: + * + * ```bash + * node process-args.js one two=three four + * ``` + * + * Would generate the output: + * + * ```text + * 0: /usr/local/bin/node + * 1: /Users/mjr/work/node/process-args.js + * 2: one + * 3: two=three + * 4: four + * ``` + * @since v0.1.27 + */ + argv: string[]; + /** + * The `process.argv0` property stores a read-only copy of the original value of`argv[0]` passed when Node.js starts. + * + * ```console + * $ bash -c 'exec -a customArgv0 ./node' + * > process.argv[0] + * '/Volumes/code/external/node/out/Release/node' + * > process.argv0 + * 'customArgv0' + * ``` + * @since v6.4.0 + */ + argv0: string; + /** + * The `process.execArgv` property returns the set of Node.js-specific command-line + * options passed when the Node.js process was launched. These options do not + * appear in the array returned by the {@link argv} property, and do not + * include the Node.js executable, the name of the script, or any options following + * the script name. These options are useful in order to spawn child processes with + * the same execution environment as the parent. 
+ * + * ```bash + * node --harmony script.js --version + * ``` + * + * Results in `process.execArgv`: + * + * ```js + * ['--harmony'] + * ``` + * + * And `process.argv`: + * + * ```js + * ['/usr/local/bin/node', 'script.js', '--version'] + * ``` + * + * Refer to `Worker constructor` for the detailed behavior of worker + * threads with this property. + * @since v0.7.7 + */ + execArgv: string[]; + /** + * The `process.execPath` property returns the absolute pathname of the executable + * that started the Node.js process. Symbolic links, if any, are resolved. + * + * ```js + * '/usr/local/bin/node' + * ``` + * @since v0.1.100 + */ + execPath: string; + /** + * The `process.abort()` method causes the Node.js process to exit immediately and + * generate a core file. + * + * This feature is not available in `Worker` threads. + * @since v0.7.0 + */ + abort(): never; + /** + * The `process.chdir()` method changes the current working directory of the + * Node.js process or throws an exception if doing so fails (for instance, if + * the specified `directory` does not exist). + * + * ```js + * import { chdir, cwd } from 'node:process'; + * + * console.log(`Starting directory: ${cwd()}`); + * try { + * chdir('/tmp'); + * console.log(`New directory: ${cwd()}`); + * } catch (err) { + * console.error(`chdir: ${err}`); + * } + * ``` + * + * This feature is not available in `Worker` threads. + * @since v0.1.17 + */ + chdir(directory: string): void; + /** + * The `process.cwd()` method returns the current working directory of the Node.js + * process. + * + * ```js + * import { cwd } from 'node:process'; + * + * console.log(`Current directory: ${cwd()}`); + * ``` + * @since v0.1.8 + */ + cwd(): string; + /** + * The port used by the Node.js debugger when enabled. + * + * ```js + * import process from 'node:process'; + * + * process.debugPort = 5858; + * ``` + * @since v0.7.2 + */ + debugPort: number; + /** + * The `process.emitWarning()` method can be used to emit custom or application + * specific process warnings. These can be listened for by adding a handler to the `'warning'` event. + * + * ```js + * import { emitWarning } from 'node:process'; + * + * // Emit a warning with a code and additional detail. + * emitWarning('Something happened!', { + * code: 'MY_WARNING', + * detail: 'This is some additional information', + * }); + * // Emits: + * // (node:56338) [MY_WARNING] Warning: Something happened! + * // This is some additional information + * ``` + * + * In this example, an `Error` object is generated internally by`process.emitWarning()` and passed through to the `'warning'` handler. + * + * ```js + * import process from 'node:process'; + * + * process.on('warning', (warning) => { + * console.warn(warning.name); // 'Warning' + * console.warn(warning.message); // 'Something happened!' + * console.warn(warning.code); // 'MY_WARNING' + * console.warn(warning.stack); // Stack trace + * console.warn(warning.detail); // 'This is some additional information' + * }); + * ``` + * + * If `warning` is passed as an `Error` object, the `options` argument is ignored. + * @since v8.0.0 + * @param warning The warning to emit. 
+ */ + emitWarning(warning: string | Error, ctor?: Function): void; + emitWarning(warning: string | Error, type?: string, ctor?: Function): void; + emitWarning(warning: string | Error, type?: string, code?: string, ctor?: Function): void; + emitWarning(warning: string | Error, options?: EmitWarningOptions): void; + /** + * The `process.env` property returns an object containing the user environment. + * See [`environ(7)`](http://man7.org/linux/man-pages/man7/environ.7.html). + * + * An example of this object looks like: + * + * ```js + * { + * TERM: 'xterm-256color', + * SHELL: '/usr/local/bin/bash', + * USER: 'maciej', + * PATH: '~/.bin/:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin', + * PWD: '/Users/maciej', + * EDITOR: 'vim', + * SHLVL: '1', + * HOME: '/Users/maciej', + * LOGNAME: 'maciej', + * _: '/usr/local/bin/node' + * } + * ``` + * + * It is possible to modify this object, but such modifications will not be + * reflected outside the Node.js process, or (unless explicitly requested) + * to other `Worker` threads. + * In other words, the following example would not work: + * + * ```bash + * node -e 'process.env.foo = "bar"' && echo $foo + * ``` + * + * While the following will: + * + * ```js + * import { env } from 'node:process'; + * + * env.foo = 'bar'; + * console.log(env.foo); + * ``` + * + * Assigning a property on `process.env` will implicitly convert the value + * to a string. **This behavior is deprecated.** Future versions of Node.js may + * throw an error when the value is not a string, number, or boolean. + * + * ```js + * import { env } from 'node:process'; + * + * env.test = null; + * console.log(env.test); + * // => 'null' + * env.test = undefined; + * console.log(env.test); + * // => 'undefined' + * ``` + * + * Use `delete` to delete a property from `process.env`. + * + * ```js + * import { env } from 'node:process'; + * + * env.TEST = 1; + * delete env.TEST; + * console.log(env.TEST); + * // => undefined + * ``` + * + * On Windows operating systems, environment variables are case-insensitive. + * + * ```js + * import { env } from 'node:process'; + * + * env.TEST = 1; + * console.log(env.test); + * // => 1 + * ``` + * + * Unless explicitly specified when creating a `Worker` instance, + * each `Worker` thread has its own copy of `process.env`, based on its + * parent thread's `process.env`, or whatever was specified as the `env` option + * to the `Worker` constructor. Changes to `process.env` will not be visible + * across `Worker` threads, and only the main thread can make changes that + * are visible to the operating system or to native add-ons. On Windows, a copy of`process.env` on a `Worker` instance operates in a case-sensitive manner + * unlike the main thread. + * @since v0.1.27 + */ + env: ProcessEnv; + /** + * The `process.exit()` method instructs Node.js to terminate the process + * synchronously with an exit status of `code`. If `code` is omitted, exit uses + * either the 'success' code `0` or the value of `process.exitCode` if it has been + * set. Node.js will not terminate until all the `'exit'` event listeners are + * called. + * + * To exit with a 'failure' code: + * + * ```js + * import { exit } from 'node:process'; + * + * exit(1); + * ``` + * + * The shell that executed Node.js should see the exit code as `1`. 
+ * + * Calling `process.exit()` will force the process to exit as quickly as possible + * even if there are still asynchronous operations pending that have not yet + * completed fully, including I/O operations to `process.stdout` and`process.stderr`. + * + * In most situations, it is not actually necessary to call `process.exit()`explicitly. The Node.js process will exit on its own _if there is no additional_ + * _work pending_ in the event loop. The `process.exitCode` property can be set to + * tell the process which exit code to use when the process exits gracefully. + * + * For instance, the following example illustrates a _misuse_ of the`process.exit()` method that could lead to data printed to stdout being + * truncated and lost: + * + * ```js + * import { exit } from 'node:process'; + * + * // This is an example of what *not* to do: + * if (someConditionNotMet()) { + * printUsageToStdout(); + * exit(1); + * } + * ``` + * + * The reason this is problematic is because writes to `process.stdout` in Node.js + * are sometimes _asynchronous_ and may occur over multiple ticks of the Node.js + * event loop. Calling `process.exit()`, however, forces the process to exit _before_ those additional writes to `stdout` can be performed. + * + * Rather than calling `process.exit()` directly, the code _should_ set the`process.exitCode` and allow the process to exit naturally by avoiding + * scheduling any additional work for the event loop: + * + * ```js + * import process from 'node:process'; + * + * // How to properly set the exit code while letting + * // the process exit gracefully. + * if (someConditionNotMet()) { + * printUsageToStdout(); + * process.exitCode = 1; + * } + * ``` + * + * If it is necessary to terminate the Node.js process due to an error condition, + * throwing an _uncaught_ error and allowing the process to terminate accordingly + * is safer than calling `process.exit()`. + * + * In `Worker` threads, this function stops the current thread rather + * than the current process. + * @since v0.1.13 + * @param [code=0] The exit code. For string type, only integer strings (e.g.,'1') are allowed. + */ + exit(code?: number): never; + /** + * A number which will be the process exit code, when the process either + * exits gracefully, or is exited via {@link exit} without specifying + * a code. + * + * Specifying a code to {@link exit} will override any + * previous setting of `process.exitCode`. + * @since v0.11.8 + */ + exitCode?: number | undefined; + /** + * The `process.getgid()` method returns the numerical group identity of the + * process. (See [`getgid(2)`](http://man7.org/linux/man-pages/man2/getgid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.getgid) { + * console.log(`Current gid: ${process.getgid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.1.31 + */ + getgid?: () => number; + /** + * The `process.setgid()` method sets the group identity of the process. (See [`setgid(2)`](http://man7.org/linux/man-pages/man2/setgid.2.html).) The `id` can be passed as either a + * numeric ID or a group name + * string. If a group name is specified, this method blocks while resolving the + * associated numeric ID. 
+ * + * ```js + * import process from 'process'; + * + * if (process.getgid && process.setgid) { + * console.log(`Current gid: ${process.getgid()}`); + * try { + * process.setgid(501); + * console.log(`New gid: ${process.getgid()}`); + * } catch (err) { + * console.log(`Failed to set gid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.1.31 + * @param id The group name or ID + */ + setgid?: (id: number | string) => void; + /** + * The `process.getuid()` method returns the numeric user identity of the process. + * (See [`getuid(2)`](http://man7.org/linux/man-pages/man2/getuid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.getuid) { + * console.log(`Current uid: ${process.getuid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.1.28 + */ + getuid?: () => number; + /** + * The `process.setuid(id)` method sets the user identity of the process. (See [`setuid(2)`](http://man7.org/linux/man-pages/man2/setuid.2.html).) The `id` can be passed as either a + * numeric ID or a username string. + * If a username is specified, the method blocks while resolving the associated + * numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.getuid && process.setuid) { + * console.log(`Current uid: ${process.getuid()}`); + * try { + * process.setuid(501); + * console.log(`New uid: ${process.getuid()}`); + * } catch (err) { + * console.log(`Failed to set uid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.1.28 + */ + setuid?: (id: number | string) => void; + /** + * The `process.geteuid()` method returns the numerical effective user identity of + * the process. (See [`geteuid(2)`](http://man7.org/linux/man-pages/man2/geteuid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.geteuid) { + * console.log(`Current uid: ${process.geteuid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v2.0.0 + */ + geteuid?: () => number; + /** + * The `process.seteuid()` method sets the effective user identity of the process. + * (See [`seteuid(2)`](http://man7.org/linux/man-pages/man2/seteuid.2.html).) The `id` can be passed as either a numeric ID or a username + * string. If a username is specified, the method blocks while resolving the + * associated numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.geteuid && process.seteuid) { + * console.log(`Current uid: ${process.geteuid()}`); + * try { + * process.seteuid(501); + * console.log(`New uid: ${process.geteuid()}`); + * } catch (err) { + * console.log(`Failed to set uid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v2.0.0 + * @param id A user name or ID + */ + seteuid?: (id: number | string) => void; + /** + * The `process.getegid()` method returns the numerical effective group identity + * of the Node.js process. (See [`getegid(2)`](http://man7.org/linux/man-pages/man2/getegid.2.html).) 
+ * + * ```js + * import process from 'process'; + * + * if (process.getegid) { + * console.log(`Current gid: ${process.getegid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v2.0.0 + */ + getegid?: () => number; + /** + * The `process.setegid()` method sets the effective group identity of the process. + * (See [`setegid(2)`](http://man7.org/linux/man-pages/man2/setegid.2.html).) The `id` can be passed as either a numeric ID or a group + * name string. If a group name is specified, this method blocks while resolving + * the associated a numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.getegid && process.setegid) { + * console.log(`Current gid: ${process.getegid()}`); + * try { + * process.setegid(501); + * console.log(`New gid: ${process.getegid()}`); + * } catch (err) { + * console.log(`Failed to set gid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v2.0.0 + * @param id A group name or ID + */ + setegid?: (id: number | string) => void; + /** + * The `process.getgroups()` method returns an array with the supplementary group + * IDs. POSIX leaves it unspecified if the effective group ID is included but + * Node.js ensures it always is. + * + * ```js + * import process from 'process'; + * + * if (process.getgroups) { + * console.log(process.getgroups()); // [ 16, 21, 297 ] + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.9.4 + */ + getgroups?: () => number[]; + /** + * The `process.setgroups()` method sets the supplementary group IDs for the + * Node.js process. This is a privileged operation that requires the Node.js + * process to have `root` or the `CAP_SETGID` capability. + * + * The `groups` array can contain numeric group IDs, group names, or both. + * + * ```js + * import process from 'process'; + * + * if (process.getgroups && process.setgroups) { + * try { + * process.setgroups([501]); + * console.log(process.getgroups()); // new groups + * } catch (err) { + * console.log(`Failed to set groups: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.9.4 + */ + setgroups?: (groups: ReadonlyArray) => void; + /** + * The `process.setUncaughtExceptionCaptureCallback()` function sets a function + * that will be invoked when an uncaught exception occurs, which will receive the + * exception value itself as its first argument. + * + * If such a function is set, the `'uncaughtException'` event will + * not be emitted. If `--abort-on-uncaught-exception` was passed from the + * command line or set through `v8.setFlagsFromString()`, the process will + * not abort. Actions configured to take place on exceptions such as report + * generations will be affected too + * + * To unset the capture function,`process.setUncaughtExceptionCaptureCallback(null)` may be used. Calling this + * method with a non-`null` argument while another capture function is set will + * throw an error. + * + * Using this function is mutually exclusive with using the deprecated `domain` built-in module. 
+ * @since v9.3.0 + */ + setUncaughtExceptionCaptureCallback(cb: ((err: Error) => void) | null): void; + /** + * Indicates whether a callback has been set using {@link setUncaughtExceptionCaptureCallback}. + * @since v9.3.0 + */ + hasUncaughtExceptionCaptureCallback(): boolean; + /** + * The `process.sourceMapsEnabled` property returns whether the [Source Map v3](https://sourcemaps.info/spec.html) support for stack traces is enabled. + * @since v20.7.0 + * @experimental + */ + readonly sourceMapsEnabled: boolean; + /** + * This function enables or disables the [Source Map v3](https://sourcemaps.info/spec.html) support for + * stack traces. + * + * It provides same features as launching Node.js process with commandline options`--enable-source-maps`. + * + * Only source maps in JavaScript files that are loaded after source maps has been + * enabled will be parsed and loaded. + * @since v16.6.0, v14.18.0 + * @experimental + */ + setSourceMapsEnabled(value: boolean): void; + /** + * The `process.version` property contains the Node.js version string. + * + * ```js + * import { version } from 'node:process'; + * + * console.log(`Version: ${version}`); + * // Version: v14.8.0 + * ``` + * + * To get the version string without the prepended _v_, use`process.versions.node`. + * @since v0.1.3 + */ + readonly version: string; + /** + * The `process.versions` property returns an object listing the version strings of + * Node.js and its dependencies. `process.versions.modules` indicates the current + * ABI version, which is increased whenever a C++ API changes. Node.js will refuse + * to load modules that were compiled against a different module ABI version. + * + * ```js + * import { versions } from 'node:process'; + * + * console.log(versions); + * ``` + * + * Will generate an object similar to: + * + * ```console + * { node: '20.2.0', + * acorn: '8.8.2', + * ada: '2.4.0', + * ares: '1.19.0', + * base64: '0.5.0', + * brotli: '1.0.9', + * cjs_module_lexer: '1.2.2', + * cldr: '43.0', + * icu: '73.1', + * llhttp: '8.1.0', + * modules: '115', + * napi: '8', + * nghttp2: '1.52.0', + * nghttp3: '0.7.0', + * ngtcp2: '0.8.1', + * openssl: '3.0.8+quic', + * simdutf: '3.2.9', + * tz: '2023c', + * undici: '5.22.0', + * unicode: '15.0', + * uv: '1.44.2', + * uvwasi: '0.0.16', + * v8: '11.3.244.8-node.9', + * zlib: '1.2.13' } + * ``` + * @since v0.2.0 + */ + readonly versions: ProcessVersions; + /** + * The `process.config` property returns a frozen `Object` containing the + * JavaScript representation of the configure options used to compile the current + * Node.js executable. This is the same as the `config.gypi` file that was produced + * when running the `./configure` script. + * + * An example of the possible output looks like: + * + * ```js + * { + * target_defaults: + * { cflags: [], + * default_configuration: 'Release', + * defines: [], + * include_dirs: [], + * libraries: [] }, + * variables: + * { + * host_arch: 'x64', + * napi_build_version: 5, + * node_install_npm: 'true', + * node_prefix: '', + * node_shared_cares: 'false', + * node_shared_http_parser: 'false', + * node_shared_libuv: 'false', + * node_shared_zlib: 'false', + * node_use_openssl: 'true', + * node_shared_openssl: 'false', + * strict_aliasing: 'true', + * target_arch: 'x64', + * v8_use_snapshot: 1 + * } + * } + * ``` + * @since v0.7.7 + */ + readonly config: ProcessConfig; + /** + * The `process.kill()` method sends the `signal` to the process identified by`pid`. + * + * Signal names are strings such as `'SIGINT'` or `'SIGHUP'`. 
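A sketch of the capture-callback pair declared above: while a callback is installed, `'uncaughtException'` is not emitted and the process keeps running (timings are arbitrary):

```ts
// Sketch: install, exercise, and clear an uncaught-exception capture callback.
import process from "node:process";

process.setUncaughtExceptionCaptureCallback((err) => {
  console.error("captured:", err.message);
});
console.log(process.hasUncaughtExceptionCaptureCallback()); // true

setTimeout(() => {
  throw new Error("boom");                // delivered to the callback above
}, 10);

setTimeout(() => {
  process.setUncaughtExceptionCaptureCallback(null);        // unset it again
  console.log(process.hasUncaughtExceptionCaptureCallback()); // false
}, 50);
```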
See `Signal Events` and [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) for more information. + * + * This method will throw an error if the target `pid` does not exist. As a special + * case, a signal of `0` can be used to test for the existence of a process. + * Windows platforms will throw an error if the `pid` is used to kill a process + * group. + * + * Even though the name of this function is `process.kill()`, it is really just a + * signal sender, like the `kill` system call. The signal sent may do something + * other than kill the target process. + * + * ```js + * import process, { kill } from 'node:process'; + * + * process.on('SIGHUP', () => { + * console.log('Got SIGHUP signal.'); + * }); + * + * setTimeout(() => { + * console.log('Exiting.'); + * process.exit(0); + * }, 100); + * + * kill(process.pid, 'SIGHUP'); + * ``` + * + * When `SIGUSR1` is received by a Node.js process, Node.js will start the + * debugger. See `Signal Events`. + * @since v0.0.6 + * @param pid A process ID + * @param [signal='SIGTERM'] The signal to send, either as a string or number. + */ + kill(pid: number, signal?: string | number): true; + /** + * The `process.pid` property returns the PID of the process. + * + * ```js + * import { pid } from 'node:process'; + * + * console.log(`This process is pid ${pid}`); + * ``` + * @since v0.1.15 + */ + readonly pid: number; + /** + * The `process.ppid` property returns the PID of the parent of the + * current process. + * + * ```js + * import { ppid } from 'node:process'; + * + * console.log(`The parent process is pid ${ppid}`); + * ``` + * @since v9.2.0, v8.10.0, v6.13.0 + */ + readonly ppid: number; + /** + * The `process.title` property returns the current process title (i.e. returns + * the current value of `ps`). Assigning a new value to `process.title` modifies + * the current value of `ps`. + * + * When a new value is assigned, different platforms will impose different maximum + * length restrictions on the title. Usually such restrictions are quite limited. + * For instance, on Linux and macOS, `process.title` is limited to the size of the + * binary name plus the length of the command-line arguments because setting the`process.title` overwrites the `argv` memory of the process. Node.js v0.8 + * allowed for longer process title strings by also overwriting the `environ`memory but that was potentially insecure and confusing in some (rather obscure) + * cases. + * + * Assigning a value to `process.title` might not result in an accurate label + * within process manager applications such as macOS Activity Monitor or Windows + * Services Manager. + * @since v0.1.104 + */ + title: string; + /** + * The operating system CPU architecture for which the Node.js binary was compiled. + * Possible values are: `'arm'`, `'arm64'`, `'ia32'`, `'loong64'`, `'mips'`,`'mipsel'`, `'ppc'`, `'ppc64'`, `'riscv64'`, `'s390'`, `'s390x'`, and `'x64'`. + * + * ```js + * import { arch } from 'node:process'; + * + * console.log(`This processor architecture is ${arch}`); + * ``` + * @since v0.5.0 + */ + readonly arch: Architecture; + /** + * The `process.platform` property returns a string identifying the operating + * system platform for which the Node.js binary was compiled. 
+ * + * Currently possible values are: + * + * * `'aix'` + * * `'darwin'` + * * `'freebsd'` + * * `'linux'` + * * `'openbsd'` + * * `'sunos'` + * * `'win32'` + * + * ```js + * import { platform } from 'node:process'; + * + * console.log(`This platform is ${platform}`); + * ``` + * + * The value `'android'` may also be returned if the Node.js is built on the + * Android operating system. However, Android support in Node.js [is experimental](https://github.com/nodejs/node/blob/HEAD/BUILDING.md#androidandroid-based-devices-eg-firefox-os). + * @since v0.1.16 + */ + readonly platform: Platform; + /** + * The `process.mainModule` property provides an alternative way of retrieving `require.main`. The difference is that if the main module changes at + * runtime, `require.main` may still refer to the original main module in + * modules that were required before the change occurred. Generally, it's + * safe to assume that the two refer to the same module. + * + * As with `require.main`, `process.mainModule` will be `undefined` if there + * is no entry script. + * @since v0.1.17 + * @deprecated Since v14.0.0 - Use `main` instead. + */ + mainModule?: Module | undefined; + memoryUsage: MemoryUsageFn; + /** + * Gets the amount of memory available to the process (in bytes) based on + * limits imposed by the OS. If there is no such constraint, or the constraint + * is unknown, `undefined` is returned. + * + * See [`uv_get_constrained_memory`](https://docs.libuv.org/en/v1.x/misc.html#c.uv_get_constrained_memory) for more + * information. + * @since v19.6.0, v18.15.0 + * @experimental + */ + constrainedMemory(): number | undefined; + /** + * The `process.cpuUsage()` method returns the user and system CPU time usage of + * the current process, in an object with properties `user` and `system`, whose + * values are microsecond values (millionth of a second). These values measure time + * spent in user and system code respectively, and may end up being greater than + * actual elapsed time if multiple CPU cores are performing work for this process. + * + * The result of a previous call to `process.cpuUsage()` can be passed as the + * argument to the function, to get a diff reading. + * + * ```js + * import { cpuUsage } from 'node:process'; + * + * const startUsage = cpuUsage(); + * // { user: 38579, system: 6986 } + * + * // spin the CPU for 500 milliseconds + * const now = Date.now(); + * while (Date.now() - now < 500); + * + * console.log(cpuUsage(startUsage)); + * // { user: 514883, system: 11226 } + * ``` + * @since v6.1.0 + * @param previousValue A previous return value from calling `process.cpuUsage()` + */ + cpuUsage(previousValue?: CpuUsage): CpuUsage; + /** + * `process.nextTick()` adds `callback` to the "next tick queue". This queue is + * fully drained after the current operation on the JavaScript stack runs to + * completion and before the event loop is allowed to continue. It's possible to + * create an infinite loop if one were to recursively call `process.nextTick()`. + * See the [Event Loop](https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick/#process-nexttick) guide for more background. 
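A small sketch combining the `platform`/`arch` identifiers with the experimental `constrainedMemory()` reading documented above; behaviour of the latter varies between Node.js releases, so the result is handled defensively:

```ts
// Sketch: report where we are running and whether the OS imposes a memory
// constraint; constrainedMemory() may return undefined when unknown.
import process from "node:process";

console.log(`running on ${process.platform}/${process.arch}`);

const limit = process.constrainedMemory();
if (limit === undefined || limit === 0) {
  console.log("no known OS-imposed memory constraint");
} else {
  console.log(`constrained to ${(limit / 1024 / 1024).toFixed(0)} MiB`);
}
```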
+ * + * ```js + * import { nextTick } from 'node:process'; + * + * console.log('start'); + * nextTick(() => { + * console.log('nextTick callback'); + * }); + * console.log('scheduled'); + * // Output: + * // start + * // scheduled + * // nextTick callback + * ``` + * + * This is important when developing APIs in order to give users the opportunity + * to assign event handlers _after_ an object has been constructed but before any + * I/O has occurred: + * + * ```js + * import { nextTick } from 'node:process'; + * + * function MyThing(options) { + * this.setupOptions(options); + * + * nextTick(() => { + * this.startDoingStuff(); + * }); + * } + * + * const thing = new MyThing(); + * thing.getReadyForStuff(); + * + * // thing.startDoingStuff() gets called now, not before. + * ``` + * + * It is very important for APIs to be either 100% synchronous or 100% + * asynchronous. Consider this example: + * + * ```js + * // WARNING! DO NOT USE! BAD UNSAFE HAZARD! + * function maybeSync(arg, cb) { + * if (arg) { + * cb(); + * return; + * } + * + * fs.stat('file', cb); + * } + * ``` + * + * This API is hazardous because in the following case: + * + * ```js + * const maybeTrue = Math.random() > 0.5; + * + * maybeSync(maybeTrue, () => { + * foo(); + * }); + * + * bar(); + * ``` + * + * It is not clear whether `foo()` or `bar()` will be called first. + * + * The following approach is much better: + * + * ```js + * import { nextTick } from 'node:process'; + * + * function definitelyAsync(arg, cb) { + * if (arg) { + * nextTick(cb); + * return; + * } + * + * fs.stat('file', cb); + * } + * ``` + * @since v0.1.26 + * @param args Additional arguments to pass when invoking the `callback` + */ + nextTick(callback: Function, ...args: any[]): void; + /** + * The `process.release` property returns an `Object` containing metadata related + * to the current release, including URLs for the source tarball and headers-only + * tarball. + * + * `process.release` contains the following properties: + * + * ```js + * { + * name: 'node', + * lts: 'Hydrogen', + * sourceUrl: 'https://nodejs.org/download/release/v18.12.0/node-v18.12.0.tar.gz', + * headersUrl: 'https://nodejs.org/download/release/v18.12.0/node-v18.12.0-headers.tar.gz', + * libUrl: 'https://nodejs.org/download/release/v18.12.0/win-x64/node.lib' + * } + * ``` + * + * In custom builds from non-release versions of the source tree, only the`name` property may be present. The additional properties should not be + * relied upon to exist. + * @since v3.0.0 + */ + readonly release: ProcessRelease; + features: { + inspector: boolean; + debug: boolean; + uv: boolean; + ipv6: boolean; + tls_alpn: boolean; + tls_sni: boolean; + tls_ocsp: boolean; + tls: boolean; + }; + /** + * `process.umask()` returns the Node.js process's file mode creation mask. Child + * processes inherit the mask from the parent process. + * @since v0.1.19 + * @deprecated Calling `process.umask()` with no argument causes the process-wide umask to be written twice. This introduces a race condition between threads, and is a potential * + * security vulnerability. There is no safe, cross-platform alternative API. + */ + umask(): number; + /** + * Can only be set if not in worker thread. + */ + umask(mask: string | number): number; + /** + * The `process.uptime()` method returns the number of seconds the current Node.js + * process has been running. + * + * The return value includes fractions of a second. Use `Math.floor()` to get whole + * seconds. 
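+ *
+ * A minimal sketch of reporting whole seconds, per the note above:
+ *
+ * ```js
+ * import process from 'node:process';
+ *
+ * // Drop the fractional part to report whole seconds.
+ * console.log(`Up for ${Math.floor(process.uptime())} seconds`);
+ * ```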
+ * @since v0.5.0 + */ + uptime(): number; + hrtime: HRTime; + /** + * If the Node.js process was spawned with an IPC channel, the process.channel property is a reference to the IPC channel. + * If no IPC channel exists, this property is undefined. + * @since v7.1.0 + */ + channel?: { + /** + * This method makes the IPC channel keep the event loop of the process running if .unref() has been called before. + * @since v7.1.0 + */ + ref(): void; + /** + * This method makes the IPC channel not keep the event loop of the process running, and lets it finish even while the channel is open. + * @since v7.1.0 + */ + unref(): void; + }; + /** + * If Node.js is spawned with an IPC channel, the `process.send()` method can be + * used to send messages to the parent process. Messages will be received as a `'message'` event on the parent's `ChildProcess` object. + * + * If Node.js was not spawned with an IPC channel, `process.send` will be `undefined`. + * + * The message goes through serialization and parsing. The resulting message might + * not be the same as what is originally sent. + * @since v0.5.9 + * @param options used to parameterize the sending of certain types of handles.`options` supports the following properties: + */ + send?( + message: any, + sendHandle?: any, + options?: { + swallowErrors?: boolean | undefined; + }, + callback?: (error: Error | null) => void, + ): boolean; + /** + * If the Node.js process is spawned with an IPC channel (see the `Child Process` and `Cluster` documentation), the `process.disconnect()` method will close the + * IPC channel to the parent process, allowing the child process to exit gracefully + * once there are no other connections keeping it alive. + * + * The effect of calling `process.disconnect()` is the same as calling `ChildProcess.disconnect()` from the parent process. + * + * If the Node.js process was not spawned with an IPC channel,`process.disconnect()` will be `undefined`. + * @since v0.7.2 + */ + disconnect(): void; + /** + * If the Node.js process is spawned with an IPC channel (see the `Child Process` and `Cluster` documentation), the `process.connected` property will return`true` so long as the IPC + * channel is connected and will return `false` after`process.disconnect()` is called. + * + * Once `process.connected` is `false`, it is no longer possible to send messages + * over the IPC channel using `process.send()`. + * @since v0.7.2 + */ + connected: boolean; + /** + * The `process.allowedNodeEnvironmentFlags` property is a special, + * read-only `Set` of flags allowable within the `NODE_OPTIONS` environment variable. + * + * `process.allowedNodeEnvironmentFlags` extends `Set`, but overrides`Set.prototype.has` to recognize several different possible flag + * representations. `process.allowedNodeEnvironmentFlags.has()` will + * return `true` in the following cases: + * + * * Flags may omit leading single (`-`) or double (`--`) dashes; e.g.,`inspect-brk` for `--inspect-brk`, or `r` for `-r`. + * * Flags passed through to V8 (as listed in `--v8-options`) may replace + * one or more _non-leading_ dashes for an underscore, or vice-versa; + * e.g., `--perf_basic_prof`, `--perf-basic-prof`, `--perf_basic-prof`, + * etc. + * * Flags may contain one or more equals (`=`) characters; all + * characters after and including the first equals will be ignored; + * e.g., `--stack-trace-limit=100`. + * * Flags _must_ be allowable within `NODE_OPTIONS`. 
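+ *
+ * For illustration, a minimal sketch (reusing the flags named above) of how
+ * these relaxed lookups behave:
+ *
+ * ```js
+ * import { allowedNodeEnvironmentFlags } from 'node:process';
+ *
+ * // Leading dashes may be omitted, and V8 flags accept `_` or `-`.
+ * allowedNodeEnvironmentFlags.has('--inspect-brk');     // true
+ * allowedNodeEnvironmentFlags.has('inspect-brk');       // true
+ * allowedNodeEnvironmentFlags.has('r');                 // true
+ * allowedNodeEnvironmentFlags.has('--perf_basic_prof'); // true
+ * ```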
+ * + * When iterating over `process.allowedNodeEnvironmentFlags`, flags will + * appear only _once_; each will begin with one or more dashes. Flags + * passed through to V8 will contain underscores instead of non-leading + * dashes: + * + * ```js + * import { allowedNodeEnvironmentFlags } from 'node:process'; + * + * allowedNodeEnvironmentFlags.forEach((flag) => { + * // -r + * // --inspect-brk + * // --abort_on_uncaught_exception + * // ... + * }); + * ``` + * + * The methods `add()`, `clear()`, and `delete()` of`process.allowedNodeEnvironmentFlags` do nothing, and will fail + * silently. + * + * If Node.js was compiled _without_ `NODE_OPTIONS` support (shown in {@link config}), `process.allowedNodeEnvironmentFlags` will + * contain what _would have_ been allowable. + * @since v10.10.0 + */ + allowedNodeEnvironmentFlags: ReadonlySet; + /** + * `process.report` is an object whose methods are used to generate diagnostic + * reports for the current process. Additional documentation is available in the `report documentation`. + * @since v11.8.0 + */ + report?: ProcessReport | undefined; + /** + * ```js + * import { resourceUsage } from 'node:process'; + * + * console.log(resourceUsage()); + * /* + * Will output: + * { + * userCPUTime: 82872, + * systemCPUTime: 4143, + * maxRSS: 33164, + * sharedMemorySize: 0, + * unsharedDataSize: 0, + * unsharedStackSize: 0, + * minorPageFault: 2469, + * majorPageFault: 0, + * swappedOut: 0, + * fsRead: 0, + * fsWrite: 8, + * ipcSent: 0, + * ipcReceived: 0, + * signalsCount: 0, + * voluntaryContextSwitches: 79, + * involuntaryContextSwitches: 1 + * } + * + * ``` + * @since v12.6.0 + * @return the resource usage for the current process. All of these values come from the `uv_getrusage` call which returns a [`uv_rusage_t` struct][uv_rusage_t]. + */ + resourceUsage(): ResourceUsage; + /** + * The `process.traceDeprecation` property indicates whether the`--trace-deprecation` flag is set on the current Node.js process. See the + * documentation for the `'warning' event` and the `emitWarning() method` for more information about this + * flag's behavior. 
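+ *
+ * A minimal sketch of checking the property at runtime:
+ *
+ * ```js
+ * // Run with: node --trace-deprecation app.js
+ * import process from 'node:process';
+ *
+ * if (process.traceDeprecation) {
+ *   console.log('Deprecation warnings will include stack traces.');
+ * }
+ * ```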
+ * @since v0.8.0 + */ + traceDeprecation: boolean; + /* EventEmitter */ + addListener(event: "beforeExit", listener: BeforeExitListener): this; + addListener(event: "disconnect", listener: DisconnectListener): this; + addListener(event: "exit", listener: ExitListener): this; + addListener(event: "rejectionHandled", listener: RejectionHandledListener): this; + addListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; + addListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + addListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + addListener(event: "warning", listener: WarningListener): this; + addListener(event: "message", listener: MessageListener): this; + addListener(event: Signals, listener: SignalsListener): this; + addListener(event: "multipleResolves", listener: MultipleResolveListener): this; + addListener(event: "worker", listener: WorkerListener): this; + emit(event: "beforeExit", code: number): boolean; + emit(event: "disconnect"): boolean; + emit(event: "exit", code: number): boolean; + emit(event: "rejectionHandled", promise: Promise): boolean; + emit(event: "uncaughtException", error: Error): boolean; + emit(event: "uncaughtExceptionMonitor", error: Error): boolean; + emit(event: "unhandledRejection", reason: unknown, promise: Promise): boolean; + emit(event: "warning", warning: Error): boolean; + emit(event: "message", message: unknown, sendHandle: unknown): this; + emit(event: Signals, signal?: Signals): boolean; + emit( + event: "multipleResolves", + type: MultipleResolveType, + promise: Promise, + value: unknown, + ): this; + emit(event: "worker", listener: WorkerListener): this; + on(event: "beforeExit", listener: BeforeExitListener): this; + on(event: "disconnect", listener: DisconnectListener): this; + on(event: "exit", listener: ExitListener): this; + on(event: "rejectionHandled", listener: RejectionHandledListener): this; + on(event: "uncaughtException", listener: UncaughtExceptionListener): this; + on(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + on(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + on(event: "warning", listener: WarningListener): this; + on(event: "message", listener: MessageListener): this; + on(event: Signals, listener: SignalsListener): this; + on(event: "multipleResolves", listener: MultipleResolveListener): this; + on(event: "worker", listener: WorkerListener): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "beforeExit", listener: BeforeExitListener): this; + once(event: "disconnect", listener: DisconnectListener): this; + once(event: "exit", listener: ExitListener): this; + once(event: "rejectionHandled", listener: RejectionHandledListener): this; + once(event: "uncaughtException", listener: UncaughtExceptionListener): this; + once(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + once(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + once(event: "warning", listener: WarningListener): this; + once(event: "message", listener: MessageListener): this; + once(event: Signals, listener: SignalsListener): this; + once(event: "multipleResolves", listener: MultipleResolveListener): this; + once(event: "worker", listener: WorkerListener): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "beforeExit", listener: BeforeExitListener): this; + 
prependListener(event: "disconnect", listener: DisconnectListener): this; + prependListener(event: "exit", listener: ExitListener): this; + prependListener(event: "rejectionHandled", listener: RejectionHandledListener): this; + prependListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; + prependListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + prependListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + prependListener(event: "warning", listener: WarningListener): this; + prependListener(event: "message", listener: MessageListener): this; + prependListener(event: Signals, listener: SignalsListener): this; + prependListener(event: "multipleResolves", listener: MultipleResolveListener): this; + prependListener(event: "worker", listener: WorkerListener): this; + prependOnceListener(event: "beforeExit", listener: BeforeExitListener): this; + prependOnceListener(event: "disconnect", listener: DisconnectListener): this; + prependOnceListener(event: "exit", listener: ExitListener): this; + prependOnceListener(event: "rejectionHandled", listener: RejectionHandledListener): this; + prependOnceListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; + prependOnceListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; + prependOnceListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; + prependOnceListener(event: "warning", listener: WarningListener): this; + prependOnceListener(event: "message", listener: MessageListener): this; + prependOnceListener(event: Signals, listener: SignalsListener): this; + prependOnceListener(event: "multipleResolves", listener: MultipleResolveListener): this; + prependOnceListener(event: "worker", listener: WorkerListener): this; + listeners(event: "beforeExit"): BeforeExitListener[]; + listeners(event: "disconnect"): DisconnectListener[]; + listeners(event: "exit"): ExitListener[]; + listeners(event: "rejectionHandled"): RejectionHandledListener[]; + listeners(event: "uncaughtException"): UncaughtExceptionListener[]; + listeners(event: "uncaughtExceptionMonitor"): UncaughtExceptionListener[]; + listeners(event: "unhandledRejection"): UnhandledRejectionListener[]; + listeners(event: "warning"): WarningListener[]; + listeners(event: "message"): MessageListener[]; + listeners(event: Signals): SignalsListener[]; + listeners(event: "multipleResolves"): MultipleResolveListener[]; + listeners(event: "worker"): WorkerListener[]; + } + } + } + export = process; +} +declare module "node:process" { + import process = require("process"); + export = process; +} diff --git a/dist/node_modules/@types/node/ts4.8/punycode.d.ts b/dist/node_modules/@types/node/ts4.8/punycode.d.ts new file mode 100644 index 0000000..d2fc9f9 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/punycode.d.ts @@ -0,0 +1,117 @@ +/** + * **The version of the punycode module bundled in Node.js is being deprecated.**In a future major version of Node.js this module will be removed. Users + * currently depending on the `punycode` module should switch to using the + * userland-provided [Punycode.js](https://github.com/bestiejs/punycode.js) module instead. For punycode-based URL + * encoding, see `url.domainToASCII` or, more generally, the `WHATWG URL API`. + * + * The `punycode` module is a bundled version of the [Punycode.js](https://github.com/bestiejs/punycode.js) module. 
It + * can be accessed using: + * + * ```js + * const punycode = require('punycode'); + * ``` + * + * [Punycode](https://tools.ietf.org/html/rfc3492) is a character encoding scheme defined by RFC 3492 that is + * primarily intended for use in Internationalized Domain Names. Because host + * names in URLs are limited to ASCII characters only, Domain Names that contain + * non-ASCII characters must be converted into ASCII using the Punycode scheme. + * For instance, the Japanese character that translates into the English word,`'example'` is `'例'`. The Internationalized Domain Name, `'例.com'` (equivalent + * to `'example.com'`) is represented by Punycode as the ASCII string`'xn--fsq.com'`. + * + * The `punycode` module provides a simple implementation of the Punycode standard. + * + * The `punycode` module is a third-party dependency used by Node.js and + * made available to developers as a convenience. Fixes or other modifications to + * the module must be directed to the [Punycode.js](https://github.com/bestiejs/punycode.js) project. + * @deprecated Since v7.0.0 - Deprecated + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/punycode.js) + */ +declare module "punycode" { + /** + * The `punycode.decode()` method converts a [Punycode](https://tools.ietf.org/html/rfc3492) string of ASCII-only + * characters to the equivalent string of Unicode codepoints. + * + * ```js + * punycode.decode('maana-pta'); // 'mañana' + * punycode.decode('--dqo34k'); // '☃-⌘' + * ``` + * @since v0.5.1 + */ + function decode(string: string): string; + /** + * The `punycode.encode()` method converts a string of Unicode codepoints to a [Punycode](https://tools.ietf.org/html/rfc3492) string of ASCII-only characters. + * + * ```js + * punycode.encode('mañana'); // 'maana-pta' + * punycode.encode('☃-⌘'); // '--dqo34k' + * ``` + * @since v0.5.1 + */ + function encode(string: string): string; + /** + * The `punycode.toUnicode()` method converts a string representing a domain name + * containing [Punycode](https://tools.ietf.org/html/rfc3492) encoded characters into Unicode. Only the [Punycode](https://tools.ietf.org/html/rfc3492) encoded parts of the domain name are be + * converted. + * + * ```js + * // decode domain names + * punycode.toUnicode('xn--maana-pta.com'); // 'mañana.com' + * punycode.toUnicode('xn----dqo34k.com'); // '☃-⌘.com' + * punycode.toUnicode('example.com'); // 'example.com' + * ``` + * @since v0.6.1 + */ + function toUnicode(domain: string): string; + /** + * The `punycode.toASCII()` method converts a Unicode string representing an + * Internationalized Domain Name to [Punycode](https://tools.ietf.org/html/rfc3492). Only the non-ASCII parts of the + * domain name will be converted. Calling `punycode.toASCII()` on a string that + * already only contains ASCII characters will have no effect. + * + * ```js + * // encode domain names + * punycode.toASCII('mañana.com'); // 'xn--maana-pta.com' + * punycode.toASCII('☃-⌘.com'); // 'xn----dqo34k.com' + * punycode.toASCII('example.com'); // 'example.com' + * ``` + * @since v0.6.1 + */ + function toASCII(domain: string): string; + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. 
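+ *
+ * A short sketch of the two helpers exposed on `punycode.ucs2` (reusing the
+ * `punycode` binding required above):
+ *
+ * ```js
+ * punycode.ucs2.decode('abc');              // [0x61, 0x62, 0x63]
+ * punycode.ucs2.encode([0x61, 0x62, 0x63]); // 'abc'
+ * ```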
+ */ + const ucs2: ucs2; + interface ucs2 { + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + decode(string: string): number[]; + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + encode(codePoints: readonly number[]): string; + } + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + const version: string; +} +declare module "node:punycode" { + export * from "punycode"; +} diff --git a/dist/node_modules/@types/node/ts4.8/querystring.d.ts b/dist/node_modules/@types/node/ts4.8/querystring.d.ts new file mode 100644 index 0000000..b36ea2c --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/querystring.d.ts @@ -0,0 +1,141 @@ +/** + * The `node:querystring` module provides utilities for parsing and formatting URL + * query strings. It can be accessed using: + * + * ```js + * const querystring = require('node:querystring'); + * ``` + * + * `querystring` is more performant than `URLSearchParams` but is not a + * standardized API. Use `URLSearchParams` when performance is not critical or + * when compatibility with browser code is desirable. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/querystring.js) + */ +declare module "querystring" { + interface StringifyOptions { + encodeURIComponent?: ((str: string) => string) | undefined; + } + interface ParseOptions { + maxKeys?: number | undefined; + decodeURIComponent?: ((str: string) => string) | undefined; + } + interface ParsedUrlQuery extends NodeJS.Dict {} + interface ParsedUrlQueryInput extends + NodeJS.Dict< + | string + | number + | boolean + | readonly string[] + | readonly number[] + | readonly boolean[] + | null + > + {} + /** + * The `querystring.stringify()` method produces a URL query string from a + * given `obj` by iterating through the object's "own properties". + * + * It serializes the following types of values passed in `obj`:[string](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) | + * [number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) | + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) | + * [boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) | + * [string\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) | + * [number\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) | + * [bigint\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) | + * [boolean\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) The numeric values must be finite. 
Any other input values will be coerced to + * empty strings. + * + * ```js + * querystring.stringify({ foo: 'bar', baz: ['qux', 'quux'], corge: '' }); + * // Returns 'foo=bar&baz=qux&baz=quux&corge=' + * + * querystring.stringify({ foo: 'bar', baz: 'qux' }, ';', ':'); + * // Returns 'foo:bar;baz:qux' + * ``` + * + * By default, characters requiring percent-encoding within the query string will + * be encoded as UTF-8\. If an alternative encoding is required, then an alternative`encodeURIComponent` option will need to be specified: + * + * ```js + * // Assuming gbkEncodeURIComponent function already exists, + * + * querystring.stringify({ w: '中文', foo: 'bar' }, null, null, + * { encodeURIComponent: gbkEncodeURIComponent }); + * ``` + * @since v0.1.25 + * @param obj The object to serialize into a URL query string + * @param [sep='&'] The substring used to delimit key and value pairs in the query string. + * @param [eq='='] . The substring used to delimit keys and values in the query string. + */ + function stringify(obj?: ParsedUrlQueryInput, sep?: string, eq?: string, options?: StringifyOptions): string; + /** + * The `querystring.parse()` method parses a URL query string (`str`) into a + * collection of key and value pairs. + * + * For example, the query string `'foo=bar&abc=xyz&abc=123'` is parsed into: + * + * ```json + * { + * "foo": "bar", + * "abc": ["xyz", "123"] + * } + * ``` + * + * The object returned by the `querystring.parse()` method _does not_prototypically inherit from the JavaScript `Object`. This means that typical`Object` methods such as `obj.toString()`, + * `obj.hasOwnProperty()`, and others + * are not defined and _will not work_. + * + * By default, percent-encoded characters within the query string will be assumed + * to use UTF-8 encoding. If an alternative character encoding is used, then an + * alternative `decodeURIComponent` option will need to be specified: + * + * ```js + * // Assuming gbkDecodeURIComponent function already exists... + * + * querystring.parse('w=%D6%D0%CE%C4&foo=bar', null, null, + * { decodeURIComponent: gbkDecodeURIComponent }); + * ``` + * @since v0.1.25 + * @param str The URL query string to parse + * @param [sep='&'] The substring used to delimit key and value pairs in the query string. + * @param [eq='='] . The substring used to delimit keys and values in the query string. + */ + function parse(str: string, sep?: string, eq?: string, options?: ParseOptions): ParsedUrlQuery; + /** + * The querystring.encode() function is an alias for querystring.stringify(). + */ + const encode: typeof stringify; + /** + * The querystring.decode() function is an alias for querystring.parse(). + */ + const decode: typeof parse; + /** + * The `querystring.escape()` method performs URL percent-encoding on the given`str` in a manner that is optimized for the specific requirements of URL + * query strings. + * + * The `querystring.escape()` method is used by `querystring.stringify()` and is + * generally not expected to be used directly. It is exported primarily to allow + * application code to provide a replacement percent-encoding implementation if + * necessary by assigning `querystring.escape` to an alternative function. + * @since v0.1.25 + */ + function escape(str: string): string; + /** + * The `querystring.unescape()` method performs decoding of URL percent-encoded + * characters on the given `str`. + * + * The `querystring.unescape()` method is used by `querystring.parse()` and is + * generally not expected to be used directly. 
It is exported primarily to allow + * application code to provide a replacement decoding implementation if + * necessary by assigning `querystring.unescape` to an alternative function. + * + * By default, the `querystring.unescape()` method will attempt to use the + * JavaScript built-in `decodeURIComponent()` method to decode. If that fails, + * a safer equivalent that does not throw on malformed URLs will be used. + * @since v0.1.25 + */ + function unescape(str: string): string; +} +declare module "node:querystring" { + export * from "querystring"; +} diff --git a/dist/node_modules/@types/node/ts4.8/readline.d.ts b/dist/node_modules/@types/node/ts4.8/readline.d.ts new file mode 100644 index 0000000..b06d58b --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/readline.d.ts @@ -0,0 +1,539 @@ +/** + * The `node:readline` module provides an interface for reading data from a `Readable` stream (such as `process.stdin`) one line at a time. + * + * To use the promise-based APIs: + * + * ```js + * import * as readline from 'node:readline/promises'; + * ``` + * + * To use the callback and sync APIs: + * + * ```js + * import * as readline from 'node:readline'; + * ``` + * + * The following simple example illustrates the basic use of the `node:readline`module. + * + * ```js + * import * as readline from 'node:readline/promises'; + * import { stdin as input, stdout as output } from 'node:process'; + * + * const rl = readline.createInterface({ input, output }); + * + * const answer = await rl.question('What do you think of Node.js? '); + * + * console.log(`Thank you for your valuable feedback: ${answer}`); + * + * rl.close(); + * ``` + * + * Once this code is invoked, the Node.js application will not terminate until the`readline.Interface` is closed because the interface waits for data to be + * received on the `input` stream. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/readline.js) + */ +declare module "readline" { + import { Abortable, EventEmitter } from "node:events"; + import * as promises from "node:readline/promises"; + export { promises }; + export interface Key { + sequence?: string | undefined; + name?: string | undefined; + ctrl?: boolean | undefined; + meta?: boolean | undefined; + shift?: boolean | undefined; + } + /** + * Instances of the `readline.Interface` class are constructed using the`readline.createInterface()` method. Every instance is associated with a + * single `input` `Readable` stream and a single `output` `Writable` stream. + * The `output` stream is used to print prompts for user input that arrives on, + * and is read from, the `input` stream. + * @since v0.1.104 + */ + export class Interface extends EventEmitter { + readonly terminal: boolean; + /** + * The current input data being processed by node. + * + * This can be used when collecting input from a TTY stream to retrieve the + * current value that has been processed thus far, prior to the `line` event + * being emitted. Once the `line` event has been emitted, this property will + * be an empty string. + * + * Be aware that modifying the value during the instance runtime may have + * unintended consequences if `rl.cursor` is not also controlled. 
+ * + * **If not using a TTY stream for input, use the `'line'` event.** + * + * One possible use case would be as follows: + * + * ```js + * const values = ['lorem ipsum', 'dolor sit amet']; + * const rl = readline.createInterface(process.stdin); + * const showResults = debounce(() => { + * console.log( + * '\n', + * values.filter((val) => val.startsWith(rl.line)).join(' '), + * ); + * }, 300); + * process.stdin.on('keypress', (c, k) => { + * showResults(); + * }); + * ``` + * @since v0.1.98 + */ + readonly line: string; + /** + * The cursor position relative to `rl.line`. + * + * This will track where the current cursor lands in the input string, when + * reading input from a TTY stream. The position of cursor determines the + * portion of the input string that will be modified as input is processed, + * as well as the column where the terminal caret will be rendered. + * @since v0.1.98 + */ + readonly cursor: number; + /** + * NOTE: According to the documentation: + * + * > Instances of the `readline.Interface` class are constructed using the + * > `readline.createInterface()` method. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/readline.html#class-interfaceconstructor + */ + protected constructor( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer | AsyncCompleter, + terminal?: boolean, + ); + /** + * NOTE: According to the documentation: + * + * > Instances of the `readline.Interface` class are constructed using the + * > `readline.createInterface()` method. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/readline.html#class-interfaceconstructor + */ + protected constructor(options: ReadLineOptions); + /** + * The `rl.getPrompt()` method returns the current prompt used by `rl.prompt()`. + * @since v15.3.0, v14.17.0 + * @return the current prompt string + */ + getPrompt(): string; + /** + * The `rl.setPrompt()` method sets the prompt that will be written to `output`whenever `rl.prompt()` is called. + * @since v0.1.98 + */ + setPrompt(prompt: string): void; + /** + * The `rl.prompt()` method writes the `Interface` instances configured`prompt` to a new line in `output` in order to provide a user with a new + * location at which to provide input. + * + * When called, `rl.prompt()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or`undefined` the prompt is not written. + * @since v0.1.98 + * @param preserveCursor If `true`, prevents the cursor placement from being reset to `0`. + */ + prompt(preserveCursor?: boolean): void; + /** + * The `rl.question()` method displays the `query` by writing it to the `output`, + * waits for user input to be provided on `input`, then invokes the `callback`function passing the provided input as the first argument. + * + * When called, `rl.question()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or`undefined` the `query` is not written. + * + * The `callback` function passed to `rl.question()` does not follow the typical + * pattern of accepting an `Error` object or `null` as the first argument. + * The `callback` is called with the provided answer as the only argument. + * + * An error will be thrown if calling `rl.question()` after `rl.close()`. + * + * Example usage: + * + * ```js + * rl.question('What is your favorite food? 
', (answer) => { + * console.log(`Oh, so your favorite food is ${answer}`); + * }); + * ``` + * + * Using an `AbortController` to cancel a question. + * + * ```js + * const ac = new AbortController(); + * const signal = ac.signal; + * + * rl.question('What is your favorite food? ', { signal }, (answer) => { + * console.log(`Oh, so your favorite food is ${answer}`); + * }); + * + * signal.addEventListener('abort', () => { + * console.log('The food question timed out'); + * }, { once: true }); + * + * setTimeout(() => ac.abort(), 10000); + * ``` + * @since v0.3.3 + * @param query A statement or query to write to `output`, prepended to the prompt. + * @param callback A callback function that is invoked with the user's input in response to the `query`. + */ + question(query: string, callback: (answer: string) => void): void; + question(query: string, options: Abortable, callback: (answer: string) => void): void; + /** + * The `rl.pause()` method pauses the `input` stream, allowing it to be resumed + * later if necessary. + * + * Calling `rl.pause()` does not immediately pause other events (including`'line'`) from being emitted by the `Interface` instance. + * @since v0.3.4 + */ + pause(): this; + /** + * The `rl.resume()` method resumes the `input` stream if it has been paused. + * @since v0.3.4 + */ + resume(): this; + /** + * The `rl.close()` method closes the `Interface` instance and + * relinquishes control over the `input` and `output` streams. When called, + * the `'close'` event will be emitted. + * + * Calling `rl.close()` does not immediately stop other events (including `'line'`) + * from being emitted by the `Interface` instance. + * @since v0.1.98 + */ + close(): void; + /** + * The `rl.write()` method will write either `data` or a key sequence identified + * by `key` to the `output`. The `key` argument is supported only if `output` is + * a `TTY` text terminal. See `TTY keybindings` for a list of key + * combinations. + * + * If `key` is specified, `data` is ignored. + * + * When called, `rl.write()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or`undefined` the `data` and `key` are not written. + * + * ```js + * rl.write('Delete this!'); + * // Simulate Ctrl+U to delete the line written previously + * rl.write(null, { ctrl: true, name: 'u' }); + * ``` + * + * The `rl.write()` method will write the data to the `readline` `Interface`'s`input`_as if it were provided by the user_. + * @since v0.1.98 + */ + write(data: string | Buffer, key?: Key): void; + write(data: undefined | null | string | Buffer, key: Key): void; + /** + * Returns the real position of the cursor in relation to the input + * prompt + string. Long input (wrapping) strings, as well as multiple + * line prompts are included in the calculations. + * @since v13.5.0, v12.16.0 + */ + getCursorPos(): CursorPos; + /** + * events.EventEmitter + * 1. close + * 2. line + * 3. pause + * 4. resume + * 5. SIGCONT + * 6. SIGINT + * 7. SIGTSTP + * 8. 
history + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "line", listener: (input: string) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: "SIGCONT", listener: () => void): this; + addListener(event: "SIGINT", listener: () => void): this; + addListener(event: "SIGTSTP", listener: () => void): this; + addListener(event: "history", listener: (history: string[]) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "line", input: string): boolean; + emit(event: "pause"): boolean; + emit(event: "resume"): boolean; + emit(event: "SIGCONT"): boolean; + emit(event: "SIGINT"): boolean; + emit(event: "SIGTSTP"): boolean; + emit(event: "history", history: string[]): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "line", listener: (input: string) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: "SIGCONT", listener: () => void): this; + on(event: "SIGINT", listener: () => void): this; + on(event: "SIGTSTP", listener: () => void): this; + on(event: "history", listener: (history: string[]) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "line", listener: (input: string) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: "SIGCONT", listener: () => void): this; + once(event: "SIGINT", listener: () => void): this; + once(event: "SIGTSTP", listener: () => void): this; + once(event: "history", listener: (history: string[]) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "line", listener: (input: string) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: "SIGCONT", listener: () => void): this; + prependListener(event: "SIGINT", listener: () => void): this; + prependListener(event: "SIGTSTP", listener: () => void): this; + prependListener(event: "history", listener: (history: string[]) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "line", listener: (input: string) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: "SIGCONT", listener: () => void): this; + prependOnceListener(event: "SIGINT", listener: () => void): this; + prependOnceListener(event: "SIGTSTP", listener: () => void): this; + prependOnceListener(event: "history", listener: (history: string[]) => void): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + export type ReadLine = Interface; // type forwarded for backwards compatibility + export type Completer = (line: string) => CompleterResult; + export type AsyncCompleter = ( + line: string, + callback: (err?: null | Error, result?: CompleterResult) => 
void, + ) => void; + export type CompleterResult = [string[], string]; + export interface ReadLineOptions { + input: NodeJS.ReadableStream; + output?: NodeJS.WritableStream | undefined; + completer?: Completer | AsyncCompleter | undefined; + terminal?: boolean | undefined; + /** + * Initial list of history lines. This option makes sense + * only if `terminal` is set to `true` by the user or by an internal `output` + * check, otherwise the history caching mechanism is not initialized at all. + * @default [] + */ + history?: string[] | undefined; + historySize?: number | undefined; + prompt?: string | undefined; + crlfDelay?: number | undefined; + /** + * If `true`, when a new input line added + * to the history list duplicates an older one, this removes the older line + * from the list. + * @default false + */ + removeHistoryDuplicates?: boolean | undefined; + escapeCodeTimeout?: number | undefined; + tabSize?: number | undefined; + } + /** + * The `readline.createInterface()` method creates a new `readline.Interface`instance. + * + * ```js + * const readline = require('node:readline'); + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout, + * }); + * ``` + * + * Once the `readline.Interface` instance is created, the most common case is to + * listen for the `'line'` event: + * + * ```js + * rl.on('line', (line) => { + * console.log(`Received: ${line}`); + * }); + * ``` + * + * If `terminal` is `true` for this instance then the `output` stream will get + * the best compatibility if it defines an `output.columns` property and emits + * a `'resize'` event on the `output` if or when the columns ever change + * (`process.stdout` does this automatically when it is a TTY). + * + * When creating a `readline.Interface` using `stdin` as input, the program + * will not terminate until it receives an [EOF character](https://en.wikipedia.org/wiki/End-of-file#EOF_character). To exit without + * waiting for user input, call `process.stdin.unref()`. + * @since v0.1.98 + */ + export function createInterface( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer | AsyncCompleter, + terminal?: boolean, + ): Interface; + export function createInterface(options: ReadLineOptions): Interface; + /** + * The `readline.emitKeypressEvents()` method causes the given `Readable` stream to begin emitting `'keypress'` events corresponding to received input. + * + * Optionally, `interface` specifies a `readline.Interface` instance for which + * autocompletion is disabled when copy-pasted input is detected. + * + * If the `stream` is a `TTY`, then it must be in raw mode. + * + * This is automatically called by any readline instance on its `input` if the`input` is a terminal. Closing the `readline` instance does not stop + * the `input` from emitting `'keypress'` events. + * + * ```js + * readline.emitKeypressEvents(process.stdin); + * if (process.stdin.isTTY) + * process.stdin.setRawMode(true); + * ``` + * + * ## Example: Tiny CLI + * + * The following example illustrates the use of `readline.Interface` class to + * implement a small command-line interface: + * + * ```js + * const readline = require('node:readline'); + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout, + * prompt: 'OHAI> ', + * }); + * + * rl.prompt(); + * + * rl.on('line', (line) => { + * switch (line.trim()) { + * case 'hello': + * console.log('world!'); + * break; + * default: + * console.log(`Say what? 
I might have heard '${line.trim()}'`); + * break; + * } + * rl.prompt(); + * }).on('close', () => { + * console.log('Have a great day!'); + * process.exit(0); + * }); + * ``` + * + * ## Example: Read file stream line-by-Line + * + * A common use case for `readline` is to consume an input file one line at a + * time. The easiest way to do so is leveraging the `fs.ReadStream` API as + * well as a `for await...of` loop: + * + * ```js + * const fs = require('node:fs'); + * const readline = require('node:readline'); + * + * async function processLineByLine() { + * const fileStream = fs.createReadStream('input.txt'); + * + * const rl = readline.createInterface({ + * input: fileStream, + * crlfDelay: Infinity, + * }); + * // Note: we use the crlfDelay option to recognize all instances of CR LF + * // ('\r\n') in input.txt as a single line break. + * + * for await (const line of rl) { + * // Each line in input.txt will be successively available here as `line`. + * console.log(`Line from file: ${line}`); + * } + * } + * + * processLineByLine(); + * ``` + * + * Alternatively, one could use the `'line'` event: + * + * ```js + * const fs = require('node:fs'); + * const readline = require('node:readline'); + * + * const rl = readline.createInterface({ + * input: fs.createReadStream('sample.txt'), + * crlfDelay: Infinity, + * }); + * + * rl.on('line', (line) => { + * console.log(`Line from file: ${line}`); + * }); + * ``` + * + * Currently, `for await...of` loop can be a bit slower. If `async` / `await`flow and speed are both essential, a mixed approach can be applied: + * + * ```js + * const { once } = require('node:events'); + * const { createReadStream } = require('node:fs'); + * const { createInterface } = require('node:readline'); + * + * (async function processLineByLine() { + * try { + * const rl = createInterface({ + * input: createReadStream('big-file.txt'), + * crlfDelay: Infinity, + * }); + * + * rl.on('line', (line) => { + * // Process the line. + * }); + * + * await once(rl, 'close'); + * + * console.log('File processed.'); + * } catch (err) { + * console.error(err); + * } + * })(); + * ``` + * @since v0.7.7 + */ + export function emitKeypressEvents(stream: NodeJS.ReadableStream, readlineInterface?: Interface): void; + export type Direction = -1 | 0 | 1; + export interface CursorPos { + rows: number; + cols: number; + } + /** + * The `readline.clearLine()` method clears current line of given `TTY` stream + * in a specified direction identified by `dir`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function clearLine(stream: NodeJS.WritableStream, dir: Direction, callback?: () => void): boolean; + /** + * The `readline.clearScreenDown()` method clears the given `TTY` stream from + * the current position of the cursor down. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function clearScreenDown(stream: NodeJS.WritableStream, callback?: () => void): boolean; + /** + * The `readline.cursorTo()` method moves cursor to the specified position in a + * given `TTY` `stream`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. 
+ * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function cursorTo(stream: NodeJS.WritableStream, x: number, y?: number, callback?: () => void): boolean; + /** + * The `readline.moveCursor()` method moves the cursor _relative_ to its current + * position in a given `TTY` `stream`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function moveCursor(stream: NodeJS.WritableStream, dx: number, dy: number, callback?: () => void): boolean; +} +declare module "node:readline" { + export * from "readline"; +} diff --git a/dist/node_modules/@types/node/ts4.8/readline/promises.d.ts b/dist/node_modules/@types/node/ts4.8/readline/promises.d.ts new file mode 100644 index 0000000..73fb111 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/readline/promises.d.ts @@ -0,0 +1,150 @@ +/** + * @since v17.0.0 + * @experimental + */ +declare module "readline/promises" { + import { AsyncCompleter, Completer, Direction, Interface as _Interface, ReadLineOptions } from "node:readline"; + import { Abortable } from "node:events"; + /** + * Instances of the `readlinePromises.Interface` class are constructed using the`readlinePromises.createInterface()` method. Every instance is associated with a + * single `input` `Readable` stream and a single `output` `Writable` stream. + * The `output` stream is used to print prompts for user input that arrives on, + * and is read from, the `input` stream. + * @since v17.0.0 + */ + class Interface extends _Interface { + /** + * The `rl.question()` method displays the `query` by writing it to the `output`, + * waits for user input to be provided on `input`, then invokes the `callback`function passing the provided input as the first argument. + * + * When called, `rl.question()` will resume the `input` stream if it has been + * paused. + * + * If the `Interface` was created with `output` set to `null` or`undefined` the `query` is not written. + * + * If the question is called after `rl.close()`, it returns a rejected promise. + * + * Example usage: + * + * ```js + * const answer = await rl.question('What is your favorite food? '); + * console.log(`Oh, so your favorite food is ${answer}`); + * ``` + * + * Using an `AbortSignal` to cancel a question. + * + * ```js + * const signal = AbortSignal.timeout(10_000); + * + * signal.addEventListener('abort', () => { + * console.log('The food question timed out'); + * }, { once: true }); + * + * const answer = await rl.question('What is your favorite food? ', { signal }); + * console.log(`Oh, so your favorite food is ${answer}`); + * ``` + * @since v17.0.0 + * @param query A statement or query to write to `output`, prepended to the prompt. + * @return A promise that is fulfilled with the user's input in response to the `query`. + */ + question(query: string): Promise; + question(query: string, options: Abortable): Promise; + } + /** + * @since v17.0.0 + */ + class Readline { + /** + * @param stream A TTY stream. 
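+ *
+ * A minimal usage sketch (assuming `process.stdout` is a TTY): queue a few
+ * cursor actions and flush them with `commit()`:
+ *
+ * ```js
+ * import { Readline } from 'node:readline/promises';
+ *
+ * const rl = new Readline(process.stdout);
+ * rl.cursorTo(0, 0).clearScreenDown(); // actions are only queued here
+ * await rl.commit();                   // ...and applied on commit()
+ * ```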
+ */ + constructor( + stream: NodeJS.WritableStream, + options?: { + autoCommit?: boolean; + }, + ); + /** + * The `rl.clearLine()` method adds to the internal list of pending action an + * action that clears current line of the associated `stream` in a specified + * direction identified by `dir`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true`was passed to the constructor. + * @since v17.0.0 + * @return this + */ + clearLine(dir: Direction): this; + /** + * The `rl.clearScreenDown()` method adds to the internal list of pending action an + * action that clears the associated stream from the current position of the + * cursor down. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true`was passed to the constructor. + * @since v17.0.0 + * @return this + */ + clearScreenDown(): this; + /** + * The `rl.commit()` method sends all the pending actions to the associated`stream` and clears the internal list of pending actions. + * @since v17.0.0 + */ + commit(): Promise; + /** + * The `rl.cursorTo()` method adds to the internal list of pending action an action + * that moves cursor to the specified position in the associated `stream`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true`was passed to the constructor. + * @since v17.0.0 + * @return this + */ + cursorTo(x: number, y?: number): this; + /** + * The `rl.moveCursor()` method adds to the internal list of pending action an + * action that moves the cursor _relative_ to its current position in the + * associated `stream`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true`was passed to the constructor. + * @since v17.0.0 + * @return this + */ + moveCursor(dx: number, dy: number): this; + /** + * The `rl.rollback` methods clears the internal list of pending actions without + * sending it to the associated `stream`. + * @since v17.0.0 + * @return this + */ + rollback(): this; + } + /** + * The `readlinePromises.createInterface()` method creates a new `readlinePromises.Interface`instance. + * + * ```js + * const readlinePromises = require('node:readline/promises'); + * const rl = readlinePromises.createInterface({ + * input: process.stdin, + * output: process.stdout, + * }); + * ``` + * + * Once the `readlinePromises.Interface` instance is created, the most common case + * is to listen for the `'line'` event: + * + * ```js + * rl.on('line', (line) => { + * console.log(`Received: ${line}`); + * }); + * ``` + * + * If `terminal` is `true` for this instance then the `output` stream will get + * the best compatibility if it defines an `output.columns` property and emits + * a `'resize'` event on the `output` if or when the columns ever change + * (`process.stdout` does this automatically when it is a TTY). 
+ * @since v17.0.0 + */ + function createInterface( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer | AsyncCompleter, + terminal?: boolean, + ): Interface; + function createInterface(options: ReadLineOptions): Interface; +} +declare module "node:readline/promises" { + export * from "readline/promises"; +} diff --git a/dist/node_modules/@types/node/ts4.8/repl.d.ts b/dist/node_modules/@types/node/ts4.8/repl.d.ts new file mode 100644 index 0000000..6c5f81b --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/repl.d.ts @@ -0,0 +1,430 @@ +/** + * The `node:repl` module provides a Read-Eval-Print-Loop (REPL) implementation + * that is available both as a standalone program or includible in other + * applications. It can be accessed using: + * + * ```js + * const repl = require('node:repl'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/repl.js) + */ +declare module "repl" { + import { AsyncCompleter, Completer, Interface } from "node:readline"; + import { Context } from "node:vm"; + import { InspectOptions } from "node:util"; + interface ReplOptions { + /** + * The input prompt to display. + * @default "> " + */ + prompt?: string | undefined; + /** + * The `Readable` stream from which REPL input will be read. + * @default process.stdin + */ + input?: NodeJS.ReadableStream | undefined; + /** + * The `Writable` stream to which REPL output will be written. + * @default process.stdout + */ + output?: NodeJS.WritableStream | undefined; + /** + * If `true`, specifies that the output should be treated as a TTY terminal, and have + * ANSI/VT100 escape codes written to it. + * Default: checking the value of the `isTTY` property on the output stream upon + * instantiation. + */ + terminal?: boolean | undefined; + /** + * The function to be used when evaluating each given line of input. + * Default: an async wrapper for the JavaScript `eval()` function. An `eval` function can + * error with `repl.Recoverable` to indicate the input was incomplete and prompt for + * additional lines. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_default_evaluation + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_custom_evaluation_functions + */ + eval?: REPLEval | undefined; + /** + * Defines if the repl prints output previews or not. + * @default `true` Always `false` in case `terminal` is falsy. + */ + preview?: boolean | undefined; + /** + * If `true`, specifies that the default `writer` function should include ANSI color + * styling to REPL output. If a custom `writer` function is provided then this has no + * effect. + * Default: the REPL instance's `terminal` value. + */ + useColors?: boolean | undefined; + /** + * If `true`, specifies that the default evaluation function will use the JavaScript + * `global` as the context as opposed to creating a new separate context for the REPL + * instance. The node CLI REPL sets this value to `true`. + * Default: `false`. + */ + useGlobal?: boolean | undefined; + /** + * If `true`, specifies that the default writer will not output the return value of a + * command if it evaluates to `undefined`. + * Default: `false`. + */ + ignoreUndefined?: boolean | undefined; + /** + * The function to invoke to format the output of each command before writing to `output`. + * Default: a wrapper for `util.inspect`. 
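+ *
+ * For example, a minimal sketch of a custom writer (prompt text chosen for
+ * illustration) that upper-cases every evaluation result:
+ *
+ * ```js
+ * const repl = require('node:repl');
+ *
+ * repl.start({
+ *   prompt: 'upper> ',
+ *   writer: (output) => String(output).toUpperCase(),
+ * });
+ * ```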
+ * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_customizing_repl_output + */ + writer?: REPLWriter | undefined; + /** + * An optional function used for custom Tab auto completion. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/readline.html#readline_use_of_the_completer_function + */ + completer?: Completer | AsyncCompleter | undefined; + /** + * A flag that specifies whether the default evaluator executes all JavaScript commands in + * strict mode or default (sloppy) mode. + * Accepted values are: + * - `repl.REPL_MODE_SLOPPY` - evaluates expressions in sloppy mode. + * - `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is equivalent to + * prefacing every repl statement with `'use strict'`. + */ + replMode?: typeof REPL_MODE_SLOPPY | typeof REPL_MODE_STRICT | undefined; + /** + * Stop evaluating the current piece of code when `SIGINT` is received, i.e. `Ctrl+C` is + * pressed. This cannot be used together with a custom `eval` function. + * Default: `false`. + */ + breakEvalOnSigint?: boolean | undefined; + } + type REPLEval = ( + this: REPLServer, + evalCmd: string, + context: Context, + file: string, + cb: (err: Error | null, result: any) => void, + ) => void; + type REPLWriter = (this: REPLServer, obj: any) => string; + /** + * This is the default "writer" value, if none is passed in the REPL options, + * and it can be overridden by custom print functions. + */ + const writer: REPLWriter & { + options: InspectOptions; + }; + type REPLCommandAction = (this: REPLServer, text: string) => void; + interface REPLCommand { + /** + * Help text to be displayed when `.help` is entered. + */ + help?: string | undefined; + /** + * The function to execute, optionally accepting a single string argument. + */ + action: REPLCommandAction; + } + /** + * Instances of `repl.REPLServer` are created using the {@link start} method + * or directly using the JavaScript `new` keyword. + * + * ```js + * const repl = require('node:repl'); + * + * const options = { useColors: true }; + * + * const firstInstance = repl.start(options); + * const secondInstance = new repl.REPLServer(options); + * ``` + * @since v0.1.91 + */ + class REPLServer extends Interface { + /** + * The `vm.Context` provided to the `eval` function to be used for JavaScript + * evaluation. + */ + readonly context: Context; + /** + * @deprecated since v14.3.0 - Use `input` instead. + */ + readonly inputStream: NodeJS.ReadableStream; + /** + * @deprecated since v14.3.0 - Use `output` instead. + */ + readonly outputStream: NodeJS.WritableStream; + /** + * The `Readable` stream from which REPL input will be read. + */ + readonly input: NodeJS.ReadableStream; + /** + * The `Writable` stream to which REPL output will be written. + */ + readonly output: NodeJS.WritableStream; + /** + * The commands registered via `replServer.defineCommand()`. + */ + readonly commands: NodeJS.ReadOnlyDict; + /** + * A value indicating whether the REPL is currently in "editor mode". + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_commands_and_special_keys + */ + readonly editorMode: boolean; + /** + * A value indicating whether the `_` variable has been assigned. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly underscoreAssigned: boolean; + /** + * The last evaluation result from the REPL (assigned to the `_` variable inside of the REPL). 
+ * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly last: any; + /** + * A value indicating whether the `_error` variable has been assigned. + * + * @since v9.8.0 + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly underscoreErrAssigned: boolean; + /** + * The last error raised inside the REPL (assigned to the `_error` variable inside of the REPL). + * + * @since v9.8.0 + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly lastError: any; + /** + * Specified in the REPL options, this is the function to be used when evaluating each + * given line of input. If not specified in the REPL options, this is an async wrapper + * for the JavaScript `eval()` function. + */ + readonly eval: REPLEval; + /** + * Specified in the REPL options, this is a value indicating whether the default + * `writer` function should include ANSI color styling to REPL output. + */ + readonly useColors: boolean; + /** + * Specified in the REPL options, this is a value indicating whether the default `eval` + * function will use the JavaScript `global` as the context as opposed to creating a new + * separate context for the REPL instance. + */ + readonly useGlobal: boolean; + /** + * Specified in the REPL options, this is a value indicating whether the default `writer` + * function should output the result of a command if it evaluates to `undefined`. + */ + readonly ignoreUndefined: boolean; + /** + * Specified in the REPL options, this is the function to invoke to format the output of + * each command before writing to `outputStream`. If not specified in the REPL options, + * this will be a wrapper for `util.inspect`. + */ + readonly writer: REPLWriter; + /** + * Specified in the REPL options, this is the function to use for custom Tab auto-completion. + */ + readonly completer: Completer | AsyncCompleter; + /** + * Specified in the REPL options, this is a flag that specifies whether the default `eval` + * function should execute all JavaScript commands in strict mode or default (sloppy) mode. + * Possible values are: + * - `repl.REPL_MODE_SLOPPY` - evaluates expressions in sloppy mode. + * - `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is equivalent to + * prefacing every repl statement with `'use strict'`. + */ + readonly replMode: typeof REPL_MODE_SLOPPY | typeof REPL_MODE_STRICT; + /** + * NOTE: According to the documentation: + * + * > Instances of `repl.REPLServer` are created using the `repl.start()` method and + * > _should not_ be created directly using the JavaScript `new` keyword. + * + * `REPLServer` cannot be subclassed due to implementation specifics in NodeJS. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_class_replserver + */ + private constructor(); + /** + * The `replServer.defineCommand()` method is used to add new `.`\-prefixed commands + * to the REPL instance. Such commands are invoked by typing a `.` followed by the`keyword`. 
The `cmd` is either a `Function` or an `Object` with the following + * properties: + * + * The following example shows two new commands added to the REPL instance: + * + * ```js + * const repl = require('node:repl'); + * + * const replServer = repl.start({ prompt: '> ' }); + * replServer.defineCommand('sayhello', { + * help: 'Say hello', + * action(name) { + * this.clearBufferedCommand(); + * console.log(`Hello, ${name}!`); + * this.displayPrompt(); + * }, + * }); + * replServer.defineCommand('saybye', function saybye() { + * console.log('Goodbye!'); + * this.close(); + * }); + * ``` + * + * The new commands can then be used from within the REPL instance: + * + * ```console + * > .sayhello Node.js User + * Hello, Node.js User! + * > .saybye + * Goodbye! + * ``` + * @since v0.3.0 + * @param keyword The command keyword (_without_ a leading `.` character). + * @param cmd The function to invoke when the command is processed. + */ + defineCommand(keyword: string, cmd: REPLCommandAction | REPLCommand): void; + /** + * The `replServer.displayPrompt()` method readies the REPL instance for input + * from the user, printing the configured `prompt` to a new line in the `output`and resuming the `input` to accept new input. + * + * When multi-line input is being entered, an ellipsis is printed rather than the + * 'prompt'. + * + * When `preserveCursor` is `true`, the cursor placement will not be reset to `0`. + * + * The `replServer.displayPrompt` method is primarily intended to be called from + * within the action function for commands registered using the`replServer.defineCommand()` method. + * @since v0.1.91 + */ + displayPrompt(preserveCursor?: boolean): void; + /** + * The `replServer.clearBufferedCommand()` method clears any command that has been + * buffered but not yet executed. This method is primarily intended to be + * called from within the action function for commands registered using the`replServer.defineCommand()` method. + * @since v9.0.0 + */ + clearBufferedCommand(): void; + /** + * Initializes a history log file for the REPL instance. When executing the + * Node.js binary and using the command-line REPL, a history file is initialized + * by default. However, this is not the case when creating a REPL + * programmatically. Use this method to initialize a history log file when working + * with REPL instances programmatically. + * @since v11.10.0 + * @param historyPath the path to the history file + * @param callback called when history writes are ready or upon error + */ + setupHistory(path: string, callback: (err: Error | null, repl: this) => void): void; + /** + * events.EventEmitter + * 1. close - inherited from `readline.Interface` + * 2. line - inherited from `readline.Interface` + * 3. pause - inherited from `readline.Interface` + * 4. resume - inherited from `readline.Interface` + * 5. SIGCONT - inherited from `readline.Interface` + * 6. SIGINT - inherited from `readline.Interface` + * 7. SIGTSTP - inherited from `readline.Interface` + * 8. exit + * 9. 
reset + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "line", listener: (input: string) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: "SIGCONT", listener: () => void): this; + addListener(event: "SIGINT", listener: () => void): this; + addListener(event: "SIGTSTP", listener: () => void): this; + addListener(event: "exit", listener: () => void): this; + addListener(event: "reset", listener: (context: Context) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "close"): boolean; + emit(event: "line", input: string): boolean; + emit(event: "pause"): boolean; + emit(event: "resume"): boolean; + emit(event: "SIGCONT"): boolean; + emit(event: "SIGINT"): boolean; + emit(event: "SIGTSTP"): boolean; + emit(event: "exit"): boolean; + emit(event: "reset", context: Context): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "close", listener: () => void): this; + on(event: "line", listener: (input: string) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: "SIGCONT", listener: () => void): this; + on(event: "SIGINT", listener: () => void): this; + on(event: "SIGTSTP", listener: () => void): this; + on(event: "exit", listener: () => void): this; + on(event: "reset", listener: (context: Context) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "line", listener: (input: string) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: "SIGCONT", listener: () => void): this; + once(event: "SIGINT", listener: () => void): this; + once(event: "SIGTSTP", listener: () => void): this; + once(event: "exit", listener: () => void): this; + once(event: "reset", listener: (context: Context) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "line", listener: (input: string) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: "SIGCONT", listener: () => void): this; + prependListener(event: "SIGINT", listener: () => void): this; + prependListener(event: "SIGTSTP", listener: () => void): this; + prependListener(event: "exit", listener: () => void): this; + prependListener(event: "reset", listener: (context: Context) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "line", listener: (input: string) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: "SIGCONT", listener: () => void): this; + prependOnceListener(event: "SIGINT", listener: () => void): this; + prependOnceListener(event: "SIGTSTP", listener: () => void): this; + prependOnceListener(event: "exit", listener: () => void): this; + prependOnceListener(event: "reset", listener: (context: Context) => void): this; + } + /** + 
* A flag passed in the REPL options. Evaluates expressions in sloppy mode. + */ + const REPL_MODE_SLOPPY: unique symbol; + /** + * A flag passed in the REPL options. Evaluates expressions in strict mode. + * This is equivalent to prefacing every repl statement with `'use strict'`. + */ + const REPL_MODE_STRICT: unique symbol; + /** + * The `repl.start()` method creates and starts a {@link REPLServer} instance. + * + * If `options` is a string, then it specifies the input prompt: + * + * ```js + * const repl = require('node:repl'); + * + * // a Unix style prompt + * repl.start('$ '); + * ``` + * @since v0.1.91 + */ + function start(options?: string | ReplOptions): REPLServer; + /** + * Indicates a recoverable error that a `REPLServer` can use to support multi-line input. + * + * @see https://nodejs.org/dist/latest-v20.x/docs/api/repl.html#repl_recoverable_errors + */ + class Recoverable extends SyntaxError { + err: Error; + constructor(err: Error); + } +} +declare module "node:repl" { + export * from "repl"; +} diff --git a/dist/node_modules/@types/node/ts4.8/stream.d.ts b/dist/node_modules/@types/node/ts4.8/stream.d.ts new file mode 100644 index 0000000..947a019 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/stream.d.ts @@ -0,0 +1,1701 @@ +/** + * A stream is an abstract interface for working with streaming data in Node.js. + * The `node:stream` module provides an API for implementing the stream interface. + * + * There are many stream objects provided by Node.js. For instance, a `request to an HTTP server` and `process.stdout` are both stream instances. + * + * Streams can be readable, writable, or both. All streams are instances of `EventEmitter`. + * + * To access the `node:stream` module: + * + * ```js + * const stream = require('node:stream'); + * ``` + * + * The `node:stream` module is useful for creating new types of stream instances. + * It is usually not necessary to use the `node:stream` module to consume streams. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/stream.js) + */ +declare module "stream" { + import { Abortable, EventEmitter } from "node:events"; + import { Blob as NodeBlob } from "node:buffer"; + import * as streamPromises from "node:stream/promises"; + import * as streamConsumers from "node:stream/consumers"; + import * as streamWeb from "node:stream/web"; + + type ComposeFnParam = (source: any) => void; + + class internal extends EventEmitter { + pipe( + destination: T, + options?: { + end?: boolean | undefined; + }, + ): T; + compose( + stream: T | ComposeFnParam | Iterable | AsyncIterable, + options?: { signal: AbortSignal }, + ): T; + } + import Stream = internal.Stream; + import Readable = internal.Readable; + import ReadableOptions = internal.ReadableOptions; + interface ArrayOptions { + /** the maximum concurrent invocations of `fn` to call on the stream at once. **Default: 1**. */ + concurrency?: number; + /** allows destroying the stream if the signal is aborted. */ + signal?: AbortSignal; + } + class ReadableBase extends Stream implements NodeJS.ReadableStream { + /** + * A utility method for creating Readable Streams out of iterators. + */ + static from(iterable: Iterable | AsyncIterable, options?: ReadableOptions): Readable; + /** + * Returns whether the stream has been read from or cancelled. + * @since v16.8.0 + */ + static isDisturbed(stream: Readable | NodeJS.ReadableStream): boolean; + /** + * Returns whether the stream was destroyed or errored before emitting `'end'`. 
+ * @since v16.8.0 + * @experimental + */ + readonly readableAborted: boolean; + /** + * Is `true` if it is safe to call `readable.read()`, which means + * the stream has not been destroyed or emitted `'error'` or `'end'`. + * @since v11.4.0 + */ + readable: boolean; + /** + * Returns whether `'data'` has been emitted. + * @since v16.7.0, v14.18.0 + * @experimental + */ + readonly readableDidRead: boolean; + /** + * Getter for the property `encoding` of a given `Readable` stream. The `encoding`property can be set using the `readable.setEncoding()` method. + * @since v12.7.0 + */ + readonly readableEncoding: BufferEncoding | null; + /** + * Becomes `true` when `'end'` event is emitted. + * @since v12.9.0 + */ + readonly readableEnded: boolean; + /** + * This property reflects the current state of a `Readable` stream as described + * in the `Three states` section. + * @since v9.4.0 + */ + readonly readableFlowing: boolean | null; + /** + * Returns the value of `highWaterMark` passed when creating this `Readable`. + * @since v9.3.0 + */ + readonly readableHighWaterMark: number; + /** + * This property contains the number of bytes (or objects) in the queue + * ready to be read. The value provides introspection data regarding + * the status of the `highWaterMark`. + * @since v9.4.0 + */ + readonly readableLength: number; + /** + * Getter for the property `objectMode` of a given `Readable` stream. + * @since v12.3.0 + */ + readonly readableObjectMode: boolean; + /** + * Is `true` after `readable.destroy()` has been called. + * @since v8.0.0 + */ + destroyed: boolean; + /** + * Is `true` after `'close'` has been emitted. + * @since v18.0.0 + */ + readonly closed: boolean; + /** + * Returns error if the stream has been destroyed with an error. + * @since v18.0.0 + */ + readonly errored: Error | null; + constructor(opts?: ReadableOptions); + _construct?(callback: (error?: Error | null) => void): void; + _read(size: number): void; + /** + * The `readable.read()` method reads data out of the internal buffer and + * returns it. If no data is available to be read, `null` is returned. By default, + * the data is returned as a `Buffer` object unless an encoding has been + * specified using the `readable.setEncoding()` method or the stream is operating + * in object mode. + * + * The optional `size` argument specifies a specific number of bytes to read. If`size` bytes are not available to be read, `null` will be returned _unless_the stream has ended, in which + * case all of the data remaining in the internal + * buffer will be returned. + * + * If the `size` argument is not specified, all of the data contained in the + * internal buffer will be returned. + * + * The `size` argument must be less than or equal to 1 GiB. + * + * The `readable.read()` method should only be called on `Readable` streams + * operating in paused mode. In flowing mode, `readable.read()` is called + * automatically until the internal buffer is fully drained. 
+ * + * ```js + * const readable = getReadableStreamSomehow(); + * + * // 'readable' may be triggered multiple times as data is buffered in + * readable.on('readable', () => { + * let chunk; + * console.log('Stream is readable (new data received in buffer)'); + * // Use a loop to make sure we read all currently available data + * while (null !== (chunk = readable.read())) { + * console.log(`Read ${chunk.length} bytes of data...`); + * } + * }); + * + * // 'end' will be triggered once when there is no more data available + * readable.on('end', () => { + * console.log('Reached end of stream.'); + * }); + * ``` + * + * Each call to `readable.read()` returns a chunk of data, or `null`. The chunks + * are not concatenated. A `while` loop is necessary to consume all data + * currently in the buffer. When reading a large file `.read()` may return `null`, + * having consumed all buffered content so far, but there is still more data to + * come not yet buffered. In this case a new `'readable'` event will be emitted + * when there is more data in the buffer. Finally the `'end'` event will be + * emitted when there is no more data to come. + * + * Therefore to read a file's whole contents from a `readable`, it is necessary + * to collect chunks across multiple `'readable'` events: + * + * ```js + * const chunks = []; + * + * readable.on('readable', () => { + * let chunk; + * while (null !== (chunk = readable.read())) { + * chunks.push(chunk); + * } + * }); + * + * readable.on('end', () => { + * const content = chunks.join(''); + * }); + * ``` + * + * A `Readable` stream in object mode will always return a single item from + * a call to `readable.read(size)`, regardless of the value of the`size` argument. + * + * If the `readable.read()` method returns a chunk of data, a `'data'` event will + * also be emitted. + * + * Calling {@link read} after the `'end'` event has + * been emitted will return `null`. No runtime error will be raised. + * @since v0.9.4 + * @param size Optional argument to specify how much data to read. + */ + read(size?: number): any; + /** + * The `readable.setEncoding()` method sets the character encoding for + * data read from the `Readable` stream. + * + * By default, no encoding is assigned and stream data will be returned as`Buffer` objects. Setting an encoding causes the stream data + * to be returned as strings of the specified encoding rather than as `Buffer`objects. For instance, calling `readable.setEncoding('utf8')` will cause the + * output data to be interpreted as UTF-8 data, and passed as strings. Calling`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal + * string format. + * + * The `Readable` stream will properly handle multi-byte characters delivered + * through the stream that would otherwise become improperly decoded if simply + * pulled from the stream as `Buffer` objects. + * + * ```js + * const readable = getReadableStreamSomehow(); + * readable.setEncoding('utf8'); + * readable.on('data', (chunk) => { + * assert.equal(typeof chunk, 'string'); + * console.log('Got %d characters of string data:', chunk.length); + * }); + * ``` + * @since v0.9.4 + * @param encoding The encoding to use. + */ + setEncoding(encoding: BufferEncoding): this; + /** + * The `readable.pause()` method will cause a stream in flowing mode to stop + * emitting `'data'` events, switching out of flowing mode. Any data that + * becomes available will remain in the internal buffer. 
+ * + * ```js + * const readable = getReadableStreamSomehow(); + * readable.on('data', (chunk) => { + * console.log(`Received ${chunk.length} bytes of data.`); + * readable.pause(); + * console.log('There will be no additional data for 1 second.'); + * setTimeout(() => { + * console.log('Now data will start flowing again.'); + * readable.resume(); + * }, 1000); + * }); + * ``` + * + * The `readable.pause()` method has no effect if there is a `'readable'`event listener. + * @since v0.9.4 + */ + pause(): this; + /** + * The `readable.resume()` method causes an explicitly paused `Readable` stream to + * resume emitting `'data'` events, switching the stream into flowing mode. + * + * The `readable.resume()` method can be used to fully consume the data from a + * stream without actually processing any of that data: + * + * ```js + * getReadableStreamSomehow() + * .resume() + * .on('end', () => { + * console.log('Reached the end, but did not read anything.'); + * }); + * ``` + * + * The `readable.resume()` method has no effect if there is a `'readable'`event listener. + * @since v0.9.4 + */ + resume(): this; + /** + * The `readable.isPaused()` method returns the current operating state of the`Readable`. This is used primarily by the mechanism that underlies the`readable.pipe()` method. In most + * typical cases, there will be no reason to + * use this method directly. + * + * ```js + * const readable = new stream.Readable(); + * + * readable.isPaused(); // === false + * readable.pause(); + * readable.isPaused(); // === true + * readable.resume(); + * readable.isPaused(); // === false + * ``` + * @since v0.11.14 + */ + isPaused(): boolean; + /** + * The `readable.unpipe()` method detaches a `Writable` stream previously attached + * using the {@link pipe} method. + * + * If the `destination` is not specified, then _all_ pipes are detached. + * + * If the `destination` is specified, but no pipe is set up for it, then + * the method does nothing. + * + * ```js + * const fs = require('node:fs'); + * const readable = getReadableStreamSomehow(); + * const writable = fs.createWriteStream('file.txt'); + * // All the data from readable goes into 'file.txt', + * // but only for the first second. + * readable.pipe(writable); + * setTimeout(() => { + * console.log('Stop writing to file.txt.'); + * readable.unpipe(writable); + * console.log('Manually close the file stream.'); + * writable.end(); + * }, 1000); + * ``` + * @since v0.9.4 + * @param destination Optional specific stream to unpipe + */ + unpipe(destination?: NodeJS.WritableStream): this; + /** + * Passing `chunk` as `null` signals the end of the stream (EOF) and behaves the + * same as `readable.push(null)`, after which no more data can be written. The EOF + * signal is put at the end of the buffer and any buffered data will still be + * flushed. + * + * The `readable.unshift()` method pushes a chunk of data back into the internal + * buffer. This is useful in certain situations where a stream is being consumed by + * code that needs to "un-consume" some amount of data that it has optimistically + * pulled out of the source, so that the data can be passed on to some other party. + * + * The `stream.unshift(chunk)` method cannot be called after the `'end'` event + * has been emitted or a runtime error will be thrown. + * + * Developers using `stream.unshift()` often should consider switching to + * use of a `Transform` stream instead. See the `API for stream implementers` section for more information. 
+ * + * ```js + * // Pull off a header delimited by \n\n. + * // Use unshift() if we get too much. + * // Call the callback with (error, header, stream). + * const { StringDecoder } = require('node:string_decoder'); + * function parseHeader(stream, callback) { + * stream.on('error', callback); + * stream.on('readable', onReadable); + * const decoder = new StringDecoder('utf8'); + * let header = ''; + * function onReadable() { + * let chunk; + * while (null !== (chunk = stream.read())) { + * const str = decoder.write(chunk); + * if (str.includes('\n\n')) { + * // Found the header boundary. + * const split = str.split(/\n\n/); + * header += split.shift(); + * const remaining = split.join('\n\n'); + * const buf = Buffer.from(remaining, 'utf8'); + * stream.removeListener('error', callback); + * // Remove the 'readable' listener before unshifting. + * stream.removeListener('readable', onReadable); + * if (buf.length) + * stream.unshift(buf); + * // Now the body of the message can be read from the stream. + * callback(null, header, stream); + * return; + * } + * // Still reading the header. + * header += str; + * } + * } + * } + * ``` + * + * Unlike {@link push}, `stream.unshift(chunk)` will not + * end the reading process by resetting the internal reading state of the stream. + * This can cause unexpected results if `readable.unshift()` is called during a + * read (i.e. from within a {@link _read} implementation on a + * custom stream). Following the call to `readable.unshift()` with an immediate {@link push} will reset the reading state appropriately, + * however it is best to simply avoid calling `readable.unshift()` while in the + * process of performing a read. + * @since v0.9.11 + * @param chunk Chunk of data to unshift onto the read queue. For streams not operating in object mode, `chunk` must be a string, `Buffer`, `Uint8Array`, or `null`. For object mode + * streams, `chunk` may be any JavaScript value. + * @param encoding Encoding of string chunks. Must be a valid `Buffer` encoding, such as `'utf8'` or `'ascii'`. + */ + unshift(chunk: any, encoding?: BufferEncoding): void; + /** + * Prior to Node.js 0.10, streams did not implement the entire `node:stream`module API as it is currently defined. (See `Compatibility` for more + * information.) + * + * When using an older Node.js library that emits `'data'` events and has a {@link pause} method that is advisory only, the`readable.wrap()` method can be used to create a `Readable` + * stream that uses + * the old stream as its data source. + * + * It will rarely be necessary to use `readable.wrap()` but the method has been + * provided as a convenience for interacting with older Node.js applications and + * libraries. + * + * ```js + * const { OldReader } = require('./old-api-module.js'); + * const { Readable } = require('node:stream'); + * const oreader = new OldReader(); + * const myReader = new Readable().wrap(oreader); + * + * myReader.on('readable', () => { + * myReader.read(); // etc. + * }); + * ``` + * @since v0.9.4 + * @param stream An "old style" readable stream + */ + wrap(stream: NodeJS.ReadableStream): this; + push(chunk: any, encoding?: BufferEncoding): boolean; + /** + * The iterator created by this method gives users the option to cancel the destruction + * of the stream if the `for await...of` loop is exited by `return`, `break`, or `throw`, + * or if the iterator should destroy the stream if the stream emitted an error during iteration. 
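+         * A minimal sketch (the sample chunks are assumptions for illustration):
+         *
+         * ```js
+         * const { Readable } = require('node:stream');
+         *
+         * async function printFirstChunk(readable) {
+         *   // With destroyOnReturn: false, breaking out of the loop leaves the
+         *   // stream intact so it can be iterated again later.
+         *   for await (const chunk of readable.iterator({ destroyOnReturn: false })) {
+         *     console.log(chunk); // 'a'
+         *     break;
+         *   }
+         * }
+         *
+         * printFirstChunk(Readable.from(['a', 'b', 'c']));
+         * ```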
+ * @since v16.3.0 + * @param options.destroyOnReturn When set to `false`, calling `return` on the async iterator, + * or exiting a `for await...of` iteration using a `break`, `return`, or `throw` will not destroy the stream. + * **Default: `true`**. + */ + iterator(options?: { destroyOnReturn?: boolean }): AsyncIterableIterator; + /** + * This method allows mapping over the stream. The *fn* function will be called for every chunk in the stream. + * If the *fn* function returns a promise - that promise will be `await`ed before being passed to the result stream. + * @since v17.4.0, v16.14.0 + * @param fn a function to map over every chunk in the stream. Async or not. + * @returns a stream mapped with the function *fn*. + */ + map(fn: (data: any, options?: Pick) => any, options?: ArrayOptions): Readable; + /** + * This method allows filtering the stream. For each chunk in the stream the *fn* function will be called + * and if it returns a truthy value, the chunk will be passed to the result stream. + * If the *fn* function returns a promise - that promise will be `await`ed. + * @since v17.4.0, v16.14.0 + * @param fn a function to filter chunks from the stream. Async or not. + * @returns a stream filtered with the predicate *fn*. + */ + filter( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Readable; + /** + * This method allows iterating a stream. For each chunk in the stream the *fn* function will be called. + * If the *fn* function returns a promise - that promise will be `await`ed. + * + * This method is different from `for await...of` loops in that it can optionally process chunks concurrently. + * In addition, a `forEach` iteration can only be stopped by having passed a `signal` option + * and aborting the related AbortController while `for await...of` can be stopped with `break` or `return`. + * In either case the stream will be destroyed. + * + * This method is different from listening to the `'data'` event in that it uses the `readable` event + * in the underlying machinary and can limit the number of concurrent *fn* calls. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise for when the stream has finished. + */ + forEach( + fn: (data: any, options?: Pick) => void | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method allows easily obtaining the contents of a stream. + * + * As this method reads the entire stream into memory, it negates the benefits of streams. It's intended + * for interoperability and convenience, not as the primary way to consume streams. + * @since v17.5.0 + * @returns a promise containing an array with the contents of the stream. + */ + toArray(options?: Pick): Promise; + /** + * This method is similar to `Array.prototype.some` and calls *fn* on each chunk in the stream + * until the awaited return value is `true` (or any truthy value). Once an *fn* call on a chunk + * `await`ed return value is truthy, the stream is destroyed and the promise is fulfilled with `true`. + * If none of the *fn* calls on the chunks return a truthy value, the promise is fulfilled with `false`. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise evaluating to `true` if *fn* returned a truthy value for at least one of the chunks. 
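+         * A short sketch (the sample values are assumptions for illustration):
+         *
+         * ```js
+         * const { Readable } = require('node:stream');
+         *
+         * // Resolves to true because at least one chunk is greater than 2.
+         * Readable.from([1, 2, 3, 4])
+         *   .some((chunk) => chunk > 2)
+         *   .then((result) => console.log(result)); // true
+         * ```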
+ */ + some( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method is similar to `Array.prototype.find` and calls *fn* on each chunk in the stream + * to find a chunk with a truthy value for *fn*. Once an *fn* call's awaited return value is truthy, + * the stream is destroyed and the promise is fulfilled with value for which *fn* returned a truthy value. + * If all of the *fn* calls on the chunks return a falsy value, the promise is fulfilled with `undefined`. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise evaluating to the first chunk for which *fn* evaluated with a truthy value, + * or `undefined` if no element was found. + */ + find( + fn: (data: any, options?: Pick) => data is T, + options?: ArrayOptions, + ): Promise; + find( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method is similar to `Array.prototype.every` and calls *fn* on each chunk in the stream + * to check if all awaited return values are truthy value for *fn*. Once an *fn* call on a chunk + * `await`ed return value is falsy, the stream is destroyed and the promise is fulfilled with `false`. + * If all of the *fn* calls on the chunks return a truthy value, the promise is fulfilled with `true`. + * @since v17.5.0 + * @param fn a function to call on each chunk of the stream. Async or not. + * @returns a promise evaluating to `true` if *fn* returned a truthy value for every one of the chunks. + */ + every( + fn: (data: any, options?: Pick) => boolean | Promise, + options?: ArrayOptions, + ): Promise; + /** + * This method returns a new stream by applying the given callback to each chunk of the stream + * and then flattening the result. + * + * It is possible to return a stream or another iterable or async iterable from *fn* and the result streams + * will be merged (flattened) into the returned stream. + * @since v17.5.0 + * @param fn a function to map over every chunk in the stream. May be async. May be a stream or generator. + * @returns a stream flat-mapped with the function *fn*. + */ + flatMap(fn: (data: any, options?: Pick) => any, options?: ArrayOptions): Readable; + /** + * This method returns a new stream with the first *limit* chunks dropped from the start. + * @since v17.5.0 + * @param limit the number of chunks to drop from the readable. + * @returns a stream with *limit* chunks dropped from the start. + */ + drop(limit: number, options?: Pick): Readable; + /** + * This method returns a new stream with the first *limit* chunks. + * @since v17.5.0 + * @param limit the number of chunks to take from the readable. + * @returns a stream with *limit* chunks taken. + */ + take(limit: number, options?: Pick): Readable; + /** + * This method returns a new stream with chunks of the underlying stream paired with a counter + * in the form `[index, chunk]`. The first index value is `0` and it increases by 1 for each chunk produced. + * @since v17.5.0 + * @returns a stream of indexed pairs. + */ + asIndexedPairs(options?: Pick): Readable; + /** + * This method calls *fn* on each chunk of the stream in order, passing it the result from the calculation + * on the previous element. It returns a promise for the final value of the reduction. + * + * If no *initial* value is supplied the first chunk of the stream is used as the initial value. 
+ * If the stream is empty, the promise is rejected with a `TypeError` with the `ERR_INVALID_ARGS` code property. + * + * The reducer function iterates the stream element-by-element which means that there is no *concurrency* parameter + * or parallelism. To perform a reduce concurrently, you can extract the async function to `readable.map` method. + * @since v17.5.0 + * @param fn a reducer function to call over every chunk in the stream. Async or not. + * @param initial the initial value to use in the reduction. + * @returns a promise for the final value of the reduction. + */ + reduce( + fn: (previous: any, data: any, options?: Pick) => T, + initial?: undefined, + options?: Pick, + ): Promise; + reduce( + fn: (previous: T, data: any, options?: Pick) => T, + initial: T, + options?: Pick, + ): Promise; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + /** + * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the readable + * stream will release any internal resources and subsequent calls to `push()`will be ignored. + * + * Once `destroy()` has been called any further calls will be a no-op and no + * further errors except from `_destroy()` may be emitted as `'error'`. + * + * Implementors should not override this method, but instead implement `readable._destroy()`. + * @since v8.0.0 + * @param error Error which will be passed as payload in `'error'` event + */ + destroy(error?: Error): this; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. data + * 3. end + * 4. error + * 5. pause + * 6. readable + * 7. resume + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: any) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "data", chunk: any): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "pause"): boolean; + emit(event: "readable"): boolean; + emit(event: "resume"): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: any) => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "pause", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: any) => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "pause", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: any) => 
void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", listener: (chunk: any) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "data", listener: (chunk: any) => void): this; + removeListener(event: "end", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "pause", listener: () => void): this; + removeListener(event: "readable", listener: () => void): this; + removeListener(event: "resume", listener: () => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + /** + * Calls `readable.destroy()` with an `AbortError` and returns a promise that fulfills when the stream is finished. + * @since v20.4.0 + */ + [Symbol.asyncDispose](): Promise; + } + import WritableOptions = internal.WritableOptions; + class WritableBase extends Stream implements NodeJS.WritableStream { + /** + * Is `true` if it is safe to call `writable.write()`, which means + * the stream has not been destroyed, errored, or ended. + * @since v11.4.0 + */ + readonly writable: boolean; + /** + * Is `true` after `writable.end()` has been called. This property + * does not indicate whether the data has been flushed, for this use `writable.writableFinished` instead. + * @since v12.9.0 + */ + readonly writableEnded: boolean; + /** + * Is set to `true` immediately before the `'finish'` event is emitted. + * @since v12.6.0 + */ + readonly writableFinished: boolean; + /** + * Return the value of `highWaterMark` passed when creating this `Writable`. + * @since v9.3.0 + */ + readonly writableHighWaterMark: number; + /** + * This property contains the number of bytes (or objects) in the queue + * ready to be written. The value provides introspection data regarding + * the status of the `highWaterMark`. + * @since v9.4.0 + */ + readonly writableLength: number; + /** + * Getter for the property `objectMode` of a given `Writable` stream. + * @since v12.3.0 + */ + readonly writableObjectMode: boolean; + /** + * Number of times `writable.uncork()` needs to be + * called in order to fully uncork the stream. + * @since v13.2.0, v12.16.0 + */ + readonly writableCorked: number; + /** + * Is `true` after `writable.destroy()` has been called. + * @since v8.0.0 + */ + destroyed: boolean; + /** + * Is `true` after `'close'` has been emitted. + * @since v18.0.0 + */ + readonly closed: boolean; + /** + * Returns error if the stream has been destroyed with an error. 
+ * @since v18.0.0 + */ + readonly errored: Error | null; + /** + * Is `true` if the stream's buffer has been full and stream will emit `'drain'`. + * @since v15.2.0, v14.17.0 + */ + readonly writableNeedDrain: boolean; + constructor(opts?: WritableOptions); + _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + _writev?( + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + _construct?(callback: (error?: Error | null) => void): void; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + _final(callback: (error?: Error | null) => void): void; + /** + * The `writable.write()` method writes some data to the stream, and calls the + * supplied `callback` once the data has been fully handled. If an error + * occurs, the `callback` will be called with the error as its + * first argument. The `callback` is called asynchronously and before `'error'` is + * emitted. + * + * The return value is `true` if the internal buffer is less than the`highWaterMark` configured when the stream was created after admitting `chunk`. + * If `false` is returned, further attempts to write data to the stream should + * stop until the `'drain'` event is emitted. + * + * While a stream is not draining, calls to `write()` will buffer `chunk`, and + * return false. Once all currently buffered chunks are drained (accepted for + * delivery by the operating system), the `'drain'` event will be emitted. + * Once `write()` returns false, do not write more chunks + * until the `'drain'` event is emitted. While calling `write()` on a stream that + * is not draining is allowed, Node.js will buffer all written chunks until + * maximum memory usage occurs, at which point it will abort unconditionally. + * Even before it aborts, high memory usage will cause poor garbage collector + * performance and high RSS (which is not typically released back to the system, + * even after the memory is no longer required). Since TCP sockets may never + * drain if the remote peer does not read the data, writing a socket that is + * not draining may lead to a remotely exploitable vulnerability. + * + * Writing data while the stream is not draining is particularly + * problematic for a `Transform`, because the `Transform` streams are paused + * by default until they are piped or a `'data'` or `'readable'` event handler + * is added. + * + * If the data to be written can be generated or fetched on demand, it is + * recommended to encapsulate the logic into a `Readable` and use {@link pipe}. However, if calling `write()` is preferred, it is + * possible to respect backpressure and avoid memory issues using the `'drain'` event: + * + * ```js + * function write(data, cb) { + * if (!stream.write(data)) { + * stream.once('drain', cb); + * } else { + * process.nextTick(cb); + * } + * } + * + * // Wait for cb to be called before doing any other write. + * write('hello', () => { + * console.log('Write completed, do more writes now.'); + * }); + * ``` + * + * A `Writable` stream in object mode will always ignore the `encoding` argument. + * @since v0.9.4 + * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any + * JavaScript value other than `null`. + * @param [encoding='utf8'] The encoding, if `chunk` is a string. + * @param callback Callback for when this chunk of data is flushed. 
+ * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + write(chunk: any, callback?: (error: Error | null | undefined) => void): boolean; + write(chunk: any, encoding: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean; + /** + * The `writable.setDefaultEncoding()` method sets the default `encoding` for a `Writable` stream. + * @since v0.11.15 + * @param encoding The new default encoding + */ + setDefaultEncoding(encoding: BufferEncoding): this; + /** + * Calling the `writable.end()` method signals that no more data will be written + * to the `Writable`. The optional `chunk` and `encoding` arguments allow one + * final additional chunk of data to be written immediately before closing the + * stream. + * + * Calling the {@link write} method after calling {@link end} will raise an error. + * + * ```js + * // Write 'hello, ' and then end with 'world!'. + * const fs = require('node:fs'); + * const file = fs.createWriteStream('example.txt'); + * file.write('hello, '); + * file.end('world!'); + * // Writing more now is not allowed! + * ``` + * @since v0.9.4 + * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any + * JavaScript value other than `null`. + * @param encoding The encoding if `chunk` is a string + * @param callback Callback for when the stream is finished. + */ + end(cb?: () => void): this; + end(chunk: any, cb?: () => void): this; + end(chunk: any, encoding: BufferEncoding, cb?: () => void): this; + /** + * The `writable.cork()` method forces all written data to be buffered in memory. + * The buffered data will be flushed when either the {@link uncork} or {@link end} methods are called. + * + * The primary intent of `writable.cork()` is to accommodate a situation in which + * several small chunks are written to the stream in rapid succession. Instead of + * immediately forwarding them to the underlying destination, `writable.cork()`buffers all the chunks until `writable.uncork()` is called, which will pass them + * all to `writable._writev()`, if present. This prevents a head-of-line blocking + * situation where data is being buffered while waiting for the first small chunk + * to be processed. However, use of `writable.cork()` without implementing`writable._writev()` may have an adverse effect on throughput. + * + * See also: `writable.uncork()`, `writable._writev()`. + * @since v0.11.2 + */ + cork(): void; + /** + * The `writable.uncork()` method flushes all data buffered since {@link cork} was called. + * + * When using `writable.cork()` and `writable.uncork()` to manage the buffering + * of writes to a stream, defer calls to `writable.uncork()` using`process.nextTick()`. Doing so allows batching of all`writable.write()` calls that occur within a given Node.js event + * loop phase. + * + * ```js + * stream.cork(); + * stream.write('some '); + * stream.write('data '); + * process.nextTick(() => stream.uncork()); + * ``` + * + * If the `writable.cork()` method is called multiple times on a stream, the + * same number of calls to `writable.uncork()` must be called to flush the buffered + * data. 
+ * + * ```js + * stream.cork(); + * stream.write('some '); + * stream.cork(); + * stream.write('data '); + * process.nextTick(() => { + * stream.uncork(); + * // The data will not be flushed until uncork() is called a second time. + * stream.uncork(); + * }); + * ``` + * + * See also: `writable.cork()`. + * @since v0.11.2 + */ + uncork(): void; + /** + * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the writable + * stream has ended and subsequent calls to `write()` or `end()` will result in + * an `ERR_STREAM_DESTROYED` error. + * This is a destructive and immediate way to destroy a stream. Previous calls to`write()` may not have drained, and may trigger an `ERR_STREAM_DESTROYED` error. + * Use `end()` instead of destroy if data should flush before close, or wait for + * the `'drain'` event before destroying the stream. + * + * Once `destroy()` has been called any further calls will be a no-op and no + * further errors except from `_destroy()` may be emitted as `'error'`. + * + * Implementors should not override this method, + * but instead implement `writable._destroy()`. + * @since v8.0.0 + * @param error Optional, an error to emit with `'error'` event. + */ + destroy(error?: Error): this; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. drain + * 3. error + * 4. finish + * 5. pipe + * 6. unpipe + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pipe", listener: (src: Readable) => void): this; + addListener(event: "unpipe", listener: (src: Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "drain"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "pipe", src: Readable): boolean; + emit(event: "unpipe", src: Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pipe", listener: (src: Readable) => void): this; + on(event: "unpipe", listener: (src: Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pipe", listener: (src: Readable) => void): this; + once(event: "unpipe", listener: (src: Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pipe", listener: (src: Readable) => void): this; + prependListener(event: "unpipe", listener: (src: Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => 
void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "drain", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "finish", listener: () => void): this; + removeListener(event: "pipe", listener: (src: Readable) => void): this; + removeListener(event: "unpipe", listener: (src: Readable) => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + namespace internal { + class Stream extends internal { + constructor(opts?: ReadableOptions); + } + interface StreamOptions extends Abortable { + emitClose?: boolean | undefined; + highWaterMark?: number | undefined; + objectMode?: boolean | undefined; + construct?(this: T, callback: (error?: Error | null) => void): void; + destroy?(this: T, error: Error | null, callback: (error?: Error | null) => void): void; + autoDestroy?: boolean | undefined; + } + interface ReadableOptions extends StreamOptions { + encoding?: BufferEncoding | undefined; + read?(this: Readable, size: number): void; + } + /** + * @since v0.9.4 + */ + class Readable extends ReadableBase { + /** + * A utility method for creating a `Readable` from a web `ReadableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb( + readableStream: streamWeb.ReadableStream, + options?: Pick, + ): Readable; + /** + * A utility method for creating a web `ReadableStream` from a `Readable`. + * @since v17.0.0 + * @experimental + */ + static toWeb(streamReadable: Readable): streamWeb.ReadableStream; + } + interface WritableOptions extends StreamOptions { + decodeStrings?: boolean | undefined; + defaultEncoding?: BufferEncoding | undefined; + write?( + this: Writable, + chunk: any, + encoding: BufferEncoding, + callback: (error?: Error | null) => void, + ): void; + writev?( + this: Writable, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + final?(this: Writable, callback: (error?: Error | null) => void): void; + } + /** + * @since v0.9.4 + */ + class Writable extends WritableBase { + /** + * A utility method for creating a `Writable` from a web `WritableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb( + writableStream: streamWeb.WritableStream, + options?: Pick, + ): Writable; + /** + * A utility method for creating a web `WritableStream` from a `Writable`. 
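+             * An illustrative sketch (the output path is an assumption for
+             * demonstration only):
+             *
+             * ```js
+             * const { Writable } = require('node:stream');
+             * const fs = require('node:fs');
+             *
+             * // Expose a Node.js Writable as a WHATWG WritableStream.
+             * const webWritable = Writable.toWeb(fs.createWriteStream('/tmp/example.txt'));
+             * const writer = webWritable.getWriter();
+             * writer.write('hello').then(() => writer.close());
+             * ```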
+ * @since v17.0.0 + * @experimental + */ + static toWeb(streamWritable: Writable): streamWeb.WritableStream; + } + interface DuplexOptions extends ReadableOptions, WritableOptions { + allowHalfOpen?: boolean | undefined; + readableObjectMode?: boolean | undefined; + writableObjectMode?: boolean | undefined; + readableHighWaterMark?: number | undefined; + writableHighWaterMark?: number | undefined; + writableCorked?: number | undefined; + construct?(this: Duplex, callback: (error?: Error | null) => void): void; + read?(this: Duplex, size: number): void; + write?(this: Duplex, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + writev?( + this: Duplex, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + final?(this: Duplex, callback: (error?: Error | null) => void): void; + destroy?(this: Duplex, error: Error | null, callback: (error?: Error | null) => void): void; + } + /** + * Duplex streams are streams that implement both the `Readable` and `Writable` interfaces. + * + * Examples of `Duplex` streams include: + * + * * `TCP sockets` + * * `zlib streams` + * * `crypto streams` + * @since v0.9.4 + */ + class Duplex extends ReadableBase implements WritableBase { + readonly writable: boolean; + readonly writableEnded: boolean; + readonly writableFinished: boolean; + readonly writableHighWaterMark: number; + readonly writableLength: number; + readonly writableObjectMode: boolean; + readonly writableCorked: number; + readonly writableNeedDrain: boolean; + readonly closed: boolean; + readonly errored: Error | null; + /** + * If `false` then the stream will automatically end the writable side when the + * readable side ends. Set initially by the `allowHalfOpen` constructor option, + * which defaults to `true`. + * + * This can be changed manually to change the half-open behavior of an existing`Duplex` stream instance, but must be changed before the `'end'` event is + * emitted. + * @since v0.9.4 + */ + allowHalfOpen: boolean; + constructor(opts?: DuplexOptions); + /** + * A utility method for creating duplex streams. + * + * - `Stream` converts writable stream into writable `Duplex` and readable stream + * to `Duplex`. + * - `Blob` converts into readable `Duplex`. + * - `string` converts into readable `Duplex`. + * - `ArrayBuffer` converts into readable `Duplex`. + * - `AsyncIterable` converts into a readable `Duplex`. Cannot yield `null`. + * - `AsyncGeneratorFunction` converts into a readable/writable transform + * `Duplex`. Must take a source `AsyncIterable` as first parameter. Cannot yield + * `null`. + * - `AsyncFunction` converts into a writable `Duplex`. Must return + * either `null` or `undefined` + * - `Object ({ writable, readable })` converts `readable` and + * `writable` into `Stream` and then combines them into `Duplex` where the + * `Duplex` will write to the `writable` and read from the `readable`. + * - `Promise` converts into readable `Duplex`. Value `null` is ignored. 
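+             * An illustrative sketch: building a transform-style `Duplex` from an
+             * async generator function (the upper-casing behavior is an assumption
+             * for demonstration only).
+             *
+             * ```js
+             * const { Duplex } = require('node:stream');
+             *
+             * // The async generator receives the writable side as `source`, and
+             * // whatever it yields becomes the readable side.
+             * const upperCase = Duplex.from(async function* (source) {
+             *   for await (const chunk of source) {
+             *     yield String(chunk).toUpperCase();
+             *   }
+             * });
+             *
+             * upperCase.on('data', (chunk) => console.log(String(chunk))); // 'HELLO'
+             * upperCase.end('hello');
+             * ```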
+ * + * @since v16.8.0 + */ + static from( + src: + | Stream + | NodeBlob + | ArrayBuffer + | string + | Iterable + | AsyncIterable + | AsyncGeneratorFunction + | Promise + | Object, + ): Duplex; + _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + _writev?( + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + _final(callback: (error?: Error | null) => void): void; + write(chunk: any, encoding?: BufferEncoding, cb?: (error: Error | null | undefined) => void): boolean; + write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean; + setDefaultEncoding(encoding: BufferEncoding): this; + end(cb?: () => void): this; + end(chunk: any, cb?: () => void): this; + end(chunk: any, encoding?: BufferEncoding, cb?: () => void): this; + cork(): void; + uncork(): void; + /** + * A utility method for creating a web `ReadableStream` and `WritableStream` from a `Duplex`. + * @since v17.0.0 + * @experimental + */ + static toWeb(streamDuplex: Duplex): { + readable: streamWeb.ReadableStream; + writable: streamWeb.WritableStream; + }; + /** + * A utility method for creating a `Duplex` from a web `ReadableStream` and `WritableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb( + duplexStream: { + readable: streamWeb.ReadableStream; + writable: streamWeb.WritableStream; + }, + options?: Pick< + DuplexOptions, + "allowHalfOpen" | "decodeStrings" | "encoding" | "highWaterMark" | "objectMode" | "signal" + >, + ): Duplex; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. data + * 3. drain + * 4. end + * 5. error + * 6. finish + * 7. pause + * 8. pipe + * 9. readable + * 10. resume + * 11. 
unpipe + */ + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: any) => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "finish", listener: () => void): this; + addListener(event: "pause", listener: () => void): this; + addListener(event: "pipe", listener: (src: Readable) => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "resume", listener: () => void): this; + addListener(event: "unpipe", listener: (src: Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: "close"): boolean; + emit(event: "data", chunk: any): boolean; + emit(event: "drain"): boolean; + emit(event: "end"): boolean; + emit(event: "error", err: Error): boolean; + emit(event: "finish"): boolean; + emit(event: "pause"): boolean; + emit(event: "pipe", src: Readable): boolean; + emit(event: "readable"): boolean; + emit(event: "resume"): boolean; + emit(event: "unpipe", src: Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: any) => void): this; + on(event: "drain", listener: () => void): this; + on(event: "end", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "pause", listener: () => void): this; + on(event: "pipe", listener: (src: Readable) => void): this; + on(event: "readable", listener: () => void): this; + on(event: "resume", listener: () => void): this; + on(event: "unpipe", listener: (src: Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: any) => void): this; + once(event: "drain", listener: () => void): this; + once(event: "end", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "pause", listener: () => void): this; + once(event: "pipe", listener: (src: Readable) => void): this; + once(event: "readable", listener: () => void): this; + once(event: "resume", listener: () => void): this; + once(event: "unpipe", listener: (src: Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: any) => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "pause", listener: () => void): this; + prependListener(event: "pipe", listener: (src: Readable) => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "resume", listener: () => void): this; + prependListener(event: "unpipe", listener: (src: Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: 
"data", listener: (chunk: any) => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "pause", listener: () => void): this; + prependOnceListener(event: "pipe", listener: (src: Readable) => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "resume", listener: () => void): this; + prependOnceListener(event: "unpipe", listener: (src: Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "data", listener: (chunk: any) => void): this; + removeListener(event: "drain", listener: () => void): this; + removeListener(event: "end", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "finish", listener: () => void): this; + removeListener(event: "pause", listener: () => void): this; + removeListener(event: "pipe", listener: (src: Readable) => void): this; + removeListener(event: "readable", listener: () => void): this; + removeListener(event: "resume", listener: () => void): this; + removeListener(event: "unpipe", listener: (src: Readable) => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + type TransformCallback = (error?: Error | null, data?: any) => void; + interface TransformOptions extends DuplexOptions { + construct?(this: Transform, callback: (error?: Error | null) => void): void; + read?(this: Transform, size: number): void; + write?( + this: Transform, + chunk: any, + encoding: BufferEncoding, + callback: (error?: Error | null) => void, + ): void; + writev?( + this: Transform, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void, + ): void; + final?(this: Transform, callback: (error?: Error | null) => void): void; + destroy?(this: Transform, error: Error | null, callback: (error?: Error | null) => void): void; + transform?(this: Transform, chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; + flush?(this: Transform, callback: TransformCallback): void; + } + /** + * Transform streams are `Duplex` streams where the output is in some way + * related to the input. Like all `Duplex` streams, `Transform` streams + * implement both the `Readable` and `Writable` interfaces. + * + * Examples of `Transform` streams include: + * + * * `zlib streams` + * * `crypto streams` + * @since v0.9.4 + */ + class Transform extends Duplex { + constructor(opts?: TransformOptions); + _transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; + _flush(callback: TransformCallback): void; + } + /** + * The `stream.PassThrough` class is a trivial implementation of a `Transform` stream that simply passes the input bytes across to the output. Its purpose is + * primarily for examples and testing, but there are some use cases where`stream.PassThrough` is useful as a building block for novel sorts of streams. + */ + class PassThrough extends Transform {} + /** + * A stream to attach a signal to. + * + * Attaches an AbortSignal to a readable or writeable stream. 
This lets code + * control stream destruction using an `AbortController`. + * + * Calling `abort` on the `AbortController` corresponding to the passed`AbortSignal` will behave the same way as calling `.destroy(new AbortError())`on the stream, and `controller.error(new + * AbortError())` for webstreams. + * + * ```js + * const fs = require('node:fs'); + * + * const controller = new AbortController(); + * const read = addAbortSignal( + * controller.signal, + * fs.createReadStream(('object.json')), + * ); + * // Later, abort the operation closing the stream + * controller.abort(); + * ``` + * + * Or using an `AbortSignal` with a readable stream as an async iterable: + * + * ```js + * const controller = new AbortController(); + * setTimeout(() => controller.abort(), 10_000); // set a timeout + * const stream = addAbortSignal( + * controller.signal, + * fs.createReadStream(('object.json')), + * ); + * (async () => { + * try { + * for await (const chunk of stream) { + * await process(chunk); + * } + * } catch (e) { + * if (e.name === 'AbortError') { + * // The operation was cancelled + * } else { + * throw e; + * } + * } + * })(); + * ``` + * + * Or using an `AbortSignal` with a ReadableStream: + * + * ```js + * const controller = new AbortController(); + * const rs = new ReadableStream({ + * start(controller) { + * controller.enqueue('hello'); + * controller.enqueue('world'); + * controller.close(); + * }, + * }); + * + * addAbortSignal(controller.signal, rs); + * + * finished(rs, (err) => { + * if (err) { + * if (err.name === 'AbortError') { + * // The operation was cancelled + * } + * } + * }); + * + * const reader = rs.getReader(); + * + * reader.read().then(({ value, done }) => { + * console.log(value); // hello + * console.log(done); // false + * controller.abort(); + * }); + * ``` + * @since v15.4.0 + * @param signal A signal representing possible cancellation + * @param stream a stream to attach a signal to + */ + function addAbortSignal(signal: AbortSignal, stream: T): T; + /** + * Returns the default highWaterMark used by streams. + * Defaults to `16384` (16 KiB), or `16` for `objectMode`. + * @since v19.9.0 + * @param objectMode + */ + function getDefaultHighWaterMark(objectMode: boolean): number; + /** + * Sets the default highWaterMark used by streams. + * @since v19.9.0 + * @param objectMode + * @param value highWaterMark value + */ + function setDefaultHighWaterMark(objectMode: boolean, value: number): void; + interface FinishedOptions extends Abortable { + error?: boolean | undefined; + readable?: boolean | undefined; + writable?: boolean | undefined; + } + /** + * A readable and/or writable stream/webstream. + * + * A function to get notified when a stream is no longer readable, writable + * or has experienced an error or a premature close event. + * + * ```js + * const { finished } = require('node:stream'); + * const fs = require('node:fs'); + * + * const rs = fs.createReadStream('archive.tar'); + * + * finished(rs, (err) => { + * if (err) { + * console.error('Stream failed.', err); + * } else { + * console.log('Stream is done reading.'); + * } + * }); + * + * rs.resume(); // Drain the stream. + * ``` + * + * Especially useful in error handling scenarios where a stream is destroyed + * prematurely (like an aborted HTTP request), and will not emit `'end'`or `'finish'`. + * + * The `finished` API provides `promise version`. + * + * `stream.finished()` leaves dangling event listeners (in particular`'error'`, `'end'`, `'finish'` and `'close'`) after `callback` has been + * invoked. 
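// Quick sketch of the default high-water-mark helpers declared above
// (available since Node 19.9 per the @since tags; values are the documented defaults).
import { getDefaultHighWaterMark, setDefaultHighWaterMark } from "node:stream";

getDefaultHighWaterMark(false);    // 16384 bytes for ordinary streams
getDefaultHighWaterMark(true);     // 16 items for objectMode streams
setDefaultHighWaterMark(true, 32); // raise the objectMode default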
The reason for this is so that unexpected `'error'` events (due to + * incorrect stream implementations) do not cause unexpected crashes. + * If this is unwanted behavior then the returned cleanup function needs to be + * invoked in the callback: + * + * ```js + * const cleanup = finished(rs, (err) => { + * cleanup(); + * // ... + * }); + * ``` + * @since v10.0.0 + * @param stream A readable and/or writable stream. + * @param callback A callback function that takes an optional error argument. + * @return A cleanup function which removes all registered listeners. + */ + function finished( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + options: FinishedOptions, + callback: (err?: NodeJS.ErrnoException | null) => void, + ): () => void; + function finished( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + callback: (err?: NodeJS.ErrnoException | null) => void, + ): () => void; + namespace finished { + function __promisify__( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + options?: FinishedOptions, + ): Promise; + } + type PipelineSourceFunction = () => Iterable | AsyncIterable; + type PipelineSource = Iterable | AsyncIterable | NodeJS.ReadableStream | PipelineSourceFunction; + type PipelineTransform, U> = + | NodeJS.ReadWriteStream + | (( + source: S extends (...args: any[]) => Iterable | AsyncIterable ? AsyncIterable + : S, + ) => AsyncIterable); + type PipelineTransformSource = PipelineSource | PipelineTransform; + type PipelineDestinationIterableFunction = (source: AsyncIterable) => AsyncIterable; + type PipelineDestinationPromiseFunction = (source: AsyncIterable) => Promise
; + type PipelineDestination, P> = S extends + PipelineTransformSource ? + | NodeJS.WritableStream + | PipelineDestinationIterableFunction + | PipelineDestinationPromiseFunction + : never; + type PipelineCallback> = S extends + PipelineDestinationPromiseFunction ? (err: NodeJS.ErrnoException | null, value: P) => void + : (err: NodeJS.ErrnoException | null) => void; + type PipelinePromise> = S extends + PipelineDestinationPromiseFunction ? Promise
: Promise; + interface PipelineOptions { + signal?: AbortSignal | undefined; + end?: boolean | undefined; + } + /** + * A module method to pipe between streams and generators forwarding errors and + * properly cleaning up and provide a callback when the pipeline is complete. + * + * ```js + * const { pipeline } = require('node:stream'); + * const fs = require('node:fs'); + * const zlib = require('node:zlib'); + * + * // Use the pipeline API to easily pipe a series of streams + * // together and get notified when the pipeline is fully done. + * + * // A pipeline to gzip a potentially huge tar file efficiently: + * + * pipeline( + * fs.createReadStream('archive.tar'), + * zlib.createGzip(), + * fs.createWriteStream('archive.tar.gz'), + * (err) => { + * if (err) { + * console.error('Pipeline failed.', err); + * } else { + * console.log('Pipeline succeeded.'); + * } + * }, + * ); + * ``` + * + * The `pipeline` API provides a `promise version`. + * + * `stream.pipeline()` will call `stream.destroy(err)` on all streams except: + * + * * `Readable` streams which have emitted `'end'` or `'close'`. + * * `Writable` streams which have emitted `'finish'` or `'close'`. + * + * `stream.pipeline()` leaves dangling event listeners on the streams + * after the `callback` has been invoked. In the case of reuse of streams after + * failure, this can cause event listener leaks and swallowed errors. If the last + * stream is readable, dangling event listeners will be removed so that the last + * stream can be consumed later. + * + * `stream.pipeline()` closes all the streams when an error is raised. + * The `IncomingRequest` usage with `pipeline` could lead to an unexpected behavior + * once it would destroy the socket without sending the expected response. + * See the example below: + * + * ```js + * const fs = require('node:fs'); + * const http = require('node:http'); + * const { pipeline } = require('node:stream'); + * + * const server = http.createServer((req, res) => { + * const fileStream = fs.createReadStream('./fileNotExist.txt'); + * pipeline(fileStream, res, (err) => { + * if (err) { + * console.log(err); // No such file + * // this message can't be sent once `pipeline` already destroyed the socket + * return res.end('error!!!'); + * } + * }); + * }); + * ``` + * @since v10.0.0 + * @param callback Called when the pipeline is fully done. + */ + function pipeline, B extends PipelineDestination>( + source: A, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? 
B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + transform4: T4, + destination: B, + callback?: PipelineCallback, + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline( + streams: ReadonlyArray, + callback?: (err: NodeJS.ErrnoException | null) => void, + ): NodeJS.WritableStream; + function pipeline( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array< + NodeJS.ReadWriteStream | NodeJS.WritableStream | ((err: NodeJS.ErrnoException | null) => void) + > + ): NodeJS.WritableStream; + namespace pipeline { + function __promisify__, B extends PipelineDestination>( + source: A, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + transform4: T4, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function __promisify__( + streams: ReadonlyArray, + options?: PipelineOptions, + ): Promise; + function __promisify__( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array + ): Promise; + } + interface Pipe { + close(): void; + hasRef(): boolean; + ref(): void; + unref(): void; + } + /** + * Returns whether the stream has encountered an error. + * @since v17.3.0, v16.14.0 + * @experimental + */ + function isErrored(stream: Readable | Writable | NodeJS.ReadableStream | NodeJS.WritableStream): boolean; + /** + * Returns whether the stream is readable. 
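// Sketch of the promise form referred to above ("The pipeline API provides a
// promise version"), which lives in stream/promises (declared a little further
// below). It reuses the gzip example from the callback docs.
import { pipeline } from "node:stream/promises";
import { createReadStream, createWriteStream } from "node:fs";
import { createGzip } from "node:zlib";

(async () => {
  await pipeline(
    createReadStream("archive.tar"),
    createGzip(),
    createWriteStream("archive.tar.gz"),
  );
  console.log("Pipeline succeeded.");
})().catch((err) => console.error("Pipeline failed.", err));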
+ * @since v17.4.0, v16.14.0 + * @experimental + */ + function isReadable(stream: Readable | NodeJS.ReadableStream): boolean; + const promises: typeof streamPromises; + const consumers: typeof streamConsumers; + } + export = internal; +} +declare module "node:stream" { + import stream = require("stream"); + export = stream; +} diff --git a/dist/node_modules/@types/node/ts4.8/stream/consumers.d.ts b/dist/node_modules/@types/node/ts4.8/stream/consumers.d.ts new file mode 100644 index 0000000..5ad9cba --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/stream/consumers.d.ts @@ -0,0 +1,12 @@ +declare module "stream/consumers" { + import { Blob as NodeBlob } from "node:buffer"; + import { Readable } from "node:stream"; + function buffer(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function text(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function arrayBuffer(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function blob(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; + function json(stream: NodeJS.ReadableStream | Readable | AsyncIterable): Promise; +} +declare module "node:stream/consumers" { + export * from "stream/consumers"; +} diff --git a/dist/node_modules/@types/node/ts4.8/stream/promises.d.ts b/dist/node_modules/@types/node/ts4.8/stream/promises.d.ts new file mode 100644 index 0000000..6eac5b7 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/stream/promises.d.ts @@ -0,0 +1,83 @@ +declare module "stream/promises" { + import { + FinishedOptions, + PipelineDestination, + PipelineOptions, + PipelinePromise, + PipelineSource, + PipelineTransform, + } from "node:stream"; + function finished( + stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, + options?: FinishedOptions, + ): Promise; + function pipeline, B extends PipelineDestination>( + source: A, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination, + >( + source: A, + transform1: T1, + transform2: T2, + transform3: T3, + transform4: T4, + destination: B, + options?: PipelineOptions, + ): PipelinePromise; + function pipeline( + streams: ReadonlyArray, + options?: PipelineOptions, + ): Promise; + function pipeline( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array + ): Promise; +} +declare module "node:stream/promises" { + export * from "stream/promises"; +} diff --git a/dist/node_modules/@types/node/ts4.8/stream/web.d.ts 
b/dist/node_modules/@types/node/ts4.8/stream/web.d.ts new file mode 100644 index 0000000..361594d --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/stream/web.d.ts @@ -0,0 +1,366 @@ +declare module "stream/web" { + // stub module, pending copy&paste from .d.ts or manual impl + // copy from lib.dom.d.ts + interface ReadableWritablePair { + readable: ReadableStream; + /** + * Provides a convenient, chainable way of piping this readable stream + * through a transform stream (or any other { writable, readable } + * pair). It simply pipes the stream into the writable side of the + * supplied pair, and returns the readable side for further use. + * + * Piping a stream will lock it for the duration of the pipe, preventing + * any other consumer from acquiring a reader. + */ + writable: WritableStream; + } + interface StreamPipeOptions { + preventAbort?: boolean; + preventCancel?: boolean; + /** + * Pipes this readable stream to a given writable stream destination. + * The way in which the piping process behaves under various error + * conditions can be customized with a number of passed options. It + * returns a promise that fulfills when the piping process completes + * successfully, or rejects if any errors were encountered. + * + * Piping a stream will lock it for the duration of the pipe, preventing + * any other consumer from acquiring a reader. + * + * Errors and closures of the source and destination streams propagate + * as follows: + * + * An error in this source readable stream will abort destination, + * unless preventAbort is truthy. The returned promise will be rejected + * with the source's error, or with any error that occurs during + * aborting the destination. + * + * An error in destination will cancel this source readable stream, + * unless preventCancel is truthy. The returned promise will be rejected + * with the destination's error, or with any error that occurs during + * canceling the source. + * + * When this source readable stream closes, destination will be closed, + * unless preventClose is truthy. The returned promise will be fulfilled + * once this process completes, unless an error is encountered while + * closing the destination, in which case it will be rejected with that + * error. + * + * If destination starts out closed or closing, this source readable + * stream will be canceled, unless preventCancel is true. The returned + * promise will be rejected with an error indicating piping to a closed + * stream failed, or with any error that occurs during canceling the + * source. + * + * The signal option can be set to an AbortSignal to allow aborting an + * ongoing pipe operation via the corresponding AbortController. In this + * case, this source readable stream will be canceled, and destination + * aborted, unless the respective options preventCancel or preventAbort + * are set. 
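// Hedged sketch of the piping behaviour described above: pipeTo() with
// StreamPipeOptions, cancellable through an AbortSignal. The ReadableStream
// and WritableStream classes used here are declared just below; the source
// and sink are illustrative.
import { ReadableStream, WritableStream } from "node:stream/web";

const source = new ReadableStream<string>({
  start(controller) {
    controller.enqueue("hello");
    controller.close();
  },
});
const sink = new WritableStream<string>({
  write(chunk) { console.log("received:", chunk); },
});

const ac = new AbortController();
source
  .pipeTo(sink, { signal: ac.signal, preventAbort: false })
  .catch((err) => console.error("pipe failed:", err));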
+ */ + preventClose?: boolean; + signal?: AbortSignal; + } + interface ReadableStreamGenericReader { + readonly closed: Promise; + cancel(reason?: any): Promise; + } + interface ReadableStreamDefaultReadValueResult { + done: false; + value: T; + } + interface ReadableStreamDefaultReadDoneResult { + done: true; + value?: undefined; + } + type ReadableStreamController = ReadableStreamDefaultController; + type ReadableStreamDefaultReadResult = + | ReadableStreamDefaultReadValueResult + | ReadableStreamDefaultReadDoneResult; + interface ReadableStreamReadValueResult { + done: false; + value: T; + } + interface ReadableStreamReadDoneResult { + done: true; + value?: T; + } + type ReadableStreamReadResult = ReadableStreamReadValueResult | ReadableStreamReadDoneResult; + interface ReadableByteStreamControllerCallback { + (controller: ReadableByteStreamController): void | PromiseLike; + } + interface UnderlyingSinkAbortCallback { + (reason?: any): void | PromiseLike; + } + interface UnderlyingSinkCloseCallback { + (): void | PromiseLike; + } + interface UnderlyingSinkStartCallback { + (controller: WritableStreamDefaultController): any; + } + interface UnderlyingSinkWriteCallback { + (chunk: W, controller: WritableStreamDefaultController): void | PromiseLike; + } + interface UnderlyingSourceCancelCallback { + (reason?: any): void | PromiseLike; + } + interface UnderlyingSourcePullCallback { + (controller: ReadableStreamController): void | PromiseLike; + } + interface UnderlyingSourceStartCallback { + (controller: ReadableStreamController): any; + } + interface TransformerFlushCallback { + (controller: TransformStreamDefaultController): void | PromiseLike; + } + interface TransformerStartCallback { + (controller: TransformStreamDefaultController): any; + } + interface TransformerTransformCallback { + (chunk: I, controller: TransformStreamDefaultController): void | PromiseLike; + } + interface UnderlyingByteSource { + autoAllocateChunkSize?: number; + cancel?: ReadableStreamErrorCallback; + pull?: ReadableByteStreamControllerCallback; + start?: ReadableByteStreamControllerCallback; + type: "bytes"; + } + interface UnderlyingSource { + cancel?: UnderlyingSourceCancelCallback; + pull?: UnderlyingSourcePullCallback; + start?: UnderlyingSourceStartCallback; + type?: undefined; + } + interface UnderlyingSink { + abort?: UnderlyingSinkAbortCallback; + close?: UnderlyingSinkCloseCallback; + start?: UnderlyingSinkStartCallback; + type?: undefined; + write?: UnderlyingSinkWriteCallback; + } + interface ReadableStreamErrorCallback { + (reason: any): void | PromiseLike; + } + /** This Streams API interface represents a readable stream of byte data. 
*/ + interface ReadableStream { + readonly locked: boolean; + cancel(reason?: any): Promise; + getReader(): ReadableStreamDefaultReader; + getReader(options: { mode: "byob" }): ReadableStreamBYOBReader; + pipeThrough(transform: ReadableWritablePair, options?: StreamPipeOptions): ReadableStream; + pipeTo(destination: WritableStream, options?: StreamPipeOptions): Promise; + tee(): [ReadableStream, ReadableStream]; + values(options?: { preventCancel?: boolean }): AsyncIterableIterator; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + const ReadableStream: { + prototype: ReadableStream; + new(underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy): ReadableStream; + new(underlyingSource?: UnderlyingSource, strategy?: QueuingStrategy): ReadableStream; + }; + interface ReadableStreamDefaultReader extends ReadableStreamGenericReader { + read(): Promise>; + releaseLock(): void; + } + interface ReadableStreamBYOBReader extends ReadableStreamGenericReader { + read(view: T): Promise>; + releaseLock(): void; + } + const ReadableStreamDefaultReader: { + prototype: ReadableStreamDefaultReader; + new(stream: ReadableStream): ReadableStreamDefaultReader; + }; + const ReadableStreamBYOBReader: any; + const ReadableStreamBYOBRequest: any; + interface ReadableByteStreamController { + readonly byobRequest: undefined; + readonly desiredSize: number | null; + close(): void; + enqueue(chunk: ArrayBufferView): void; + error(error?: any): void; + } + const ReadableByteStreamController: { + prototype: ReadableByteStreamController; + new(): ReadableByteStreamController; + }; + interface ReadableStreamDefaultController { + readonly desiredSize: number | null; + close(): void; + enqueue(chunk?: R): void; + error(e?: any): void; + } + const ReadableStreamDefaultController: { + prototype: ReadableStreamDefaultController; + new(): ReadableStreamDefaultController; + }; + interface Transformer { + flush?: TransformerFlushCallback; + readableType?: undefined; + start?: TransformerStartCallback; + transform?: TransformerTransformCallback; + writableType?: undefined; + } + interface TransformStream { + readonly readable: ReadableStream; + readonly writable: WritableStream; + } + const TransformStream: { + prototype: TransformStream; + new( + transformer?: Transformer, + writableStrategy?: QueuingStrategy, + readableStrategy?: QueuingStrategy, + ): TransformStream; + }; + interface TransformStreamDefaultController { + readonly desiredSize: number | null; + enqueue(chunk?: O): void; + error(reason?: any): void; + terminate(): void; + } + const TransformStreamDefaultController: { + prototype: TransformStreamDefaultController; + new(): TransformStreamDefaultController; + }; + /** + * This Streams API interface provides a standard abstraction for writing + * streaming data to a destination, known as a sink. This object comes with + * built-in back pressure and queuing. + */ + interface WritableStream { + readonly locked: boolean; + abort(reason?: any): Promise; + close(): Promise; + getWriter(): WritableStreamDefaultWriter; + } + const WritableStream: { + prototype: WritableStream; + new(underlyingSink?: UnderlyingSink, strategy?: QueuingStrategy): WritableStream; + }; + /** + * This Streams API interface is the object returned by + * WritableStream.getWriter() and once created locks the < writer to the + * WritableStream ensuring that no other streams can write to the underlying + * sink. 
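// Sketch: consuming a ReadableStream with the async iterator declared above
// ([Symbol.asyncIterator] / values()) rather than an explicit reader.
import { ReadableStream } from "node:stream/web";

const rs = new ReadableStream<number>({
  start(controller) {
    controller.enqueue(1);
    controller.enqueue(2);
    controller.close();
  },
});

(async () => {
  for await (const value of rs) {
    console.log(value); // 1, then 2
  }
})();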
+ */ + interface WritableStreamDefaultWriter { + readonly closed: Promise; + readonly desiredSize: number | null; + readonly ready: Promise; + abort(reason?: any): Promise; + close(): Promise; + releaseLock(): void; + write(chunk?: W): Promise; + } + const WritableStreamDefaultWriter: { + prototype: WritableStreamDefaultWriter; + new(stream: WritableStream): WritableStreamDefaultWriter; + }; + /** + * This Streams API interface represents a controller allowing control of a + * WritableStream's state. When constructing a WritableStream, the + * underlying sink is given a corresponding WritableStreamDefaultController + * instance to manipulate. + */ + interface WritableStreamDefaultController { + error(e?: any): void; + } + const WritableStreamDefaultController: { + prototype: WritableStreamDefaultController; + new(): WritableStreamDefaultController; + }; + interface QueuingStrategy { + highWaterMark?: number; + size?: QueuingStrategySize; + } + interface QueuingStrategySize { + (chunk?: T): number; + } + interface QueuingStrategyInit { + /** + * Creates a new ByteLengthQueuingStrategy with the provided high water + * mark. + * + * Note that the provided high water mark will not be validated ahead of + * time. Instead, if it is negative, NaN, or not a number, the resulting + * ByteLengthQueuingStrategy will cause the corresponding stream + * constructor to throw. + */ + highWaterMark: number; + } + /** + * This Streams API interface provides a built-in byte length queuing + * strategy that can be used when constructing streams. + */ + interface ByteLengthQueuingStrategy extends QueuingStrategy { + readonly highWaterMark: number; + readonly size: QueuingStrategySize; + } + const ByteLengthQueuingStrategy: { + prototype: ByteLengthQueuingStrategy; + new(init: QueuingStrategyInit): ByteLengthQueuingStrategy; + }; + /** + * This Streams API interface provides a built-in byte length queuing + * strategy that can be used when constructing streams. + */ + interface CountQueuingStrategy extends QueuingStrategy { + readonly highWaterMark: number; + readonly size: QueuingStrategySize; + } + const CountQueuingStrategy: { + prototype: CountQueuingStrategy; + new(init: QueuingStrategyInit): CountQueuingStrategy; + }; + interface TextEncoderStream { + /** Returns "utf-8". */ + readonly encoding: "utf-8"; + readonly readable: ReadableStream; + readonly writable: WritableStream; + readonly [Symbol.toStringTag]: string; + } + const TextEncoderStream: { + prototype: TextEncoderStream; + new(): TextEncoderStream; + }; + interface TextDecoderOptions { + fatal?: boolean; + ignoreBOM?: boolean; + } + type BufferSource = ArrayBufferView | ArrayBuffer; + interface TextDecoderStream { + /** Returns encoding's name, lower cased. */ + readonly encoding: string; + /** Returns `true` if error mode is "fatal", and `false` otherwise. */ + readonly fatal: boolean; + /** Returns `true` if ignore BOM flag is set, and `false` otherwise. 
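// Sketch of getWriter() and a queuing strategy using the declarations above:
// the writer locks the stream, and CountQueuingStrategy counts chunks against
// the configured highWaterMark. The sink is illustrative.
import { WritableStream, CountQueuingStrategy } from "node:stream/web";

const ws = new WritableStream<string>(
  { write(chunk) { console.log("sink received:", chunk); } },
  new CountQueuingStrategy({ highWaterMark: 4 }),
);

(async () => {
  const writer = ws.getWriter(); // no other writer can be acquired until releaseLock()
  await writer.ready;
  await writer.write("hello");
  await writer.close();
})();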
*/ + readonly ignoreBOM: boolean; + readonly readable: ReadableStream; + readonly writable: WritableStream; + readonly [Symbol.toStringTag]: string; + } + const TextDecoderStream: { + prototype: TextDecoderStream; + new(encoding?: string, options?: TextDecoderOptions): TextDecoderStream; + }; + interface CompressionStream { + readonly readable: ReadableStream; + readonly writable: WritableStream; + } + const CompressionStream: { + prototype: CompressionStream; + new(format: string): CompressionStream; + }; + interface DecompressionStream { + readonly readable: ReadableStream; + readonly writable: WritableStream; + } + const DecompressionStream: { + prototype: DecompressionStream; + new(format: string): DecompressionStream; + }; +} +declare module "node:stream/web" { + export * from "stream/web"; +} diff --git a/dist/node_modules/@types/node/ts4.8/string_decoder.d.ts b/dist/node_modules/@types/node/ts4.8/string_decoder.d.ts new file mode 100644 index 0000000..cd76bf8 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/string_decoder.d.ts @@ -0,0 +1,67 @@ +/** + * The `node:string_decoder` module provides an API for decoding `Buffer` objects + * into strings in a manner that preserves encoded multi-byte UTF-8 and UTF-16 + * characters. It can be accessed using: + * + * ```js + * const { StringDecoder } = require('node:string_decoder'); + * ``` + * + * The following example shows the basic use of the `StringDecoder` class. + * + * ```js + * const { StringDecoder } = require('node:string_decoder'); + * const decoder = new StringDecoder('utf8'); + * + * const cent = Buffer.from([0xC2, 0xA2]); + * console.log(decoder.write(cent)); // Prints: ¢ + * + * const euro = Buffer.from([0xE2, 0x82, 0xAC]); + * console.log(decoder.write(euro)); // Prints: € + * ``` + * + * When a `Buffer` instance is written to the `StringDecoder` instance, an + * internal buffer is used to ensure that the decoded string does not contain + * any incomplete multibyte characters. These are held in the buffer until the + * next call to `stringDecoder.write()` or until `stringDecoder.end()` is called. + * + * In the following example, the three UTF-8 encoded bytes of the European Euro + * symbol (`€`) are written over three separate operations: + * + * ```js + * const { StringDecoder } = require('node:string_decoder'); + * const decoder = new StringDecoder('utf8'); + * + * decoder.write(Buffer.from([0xE2])); + * decoder.write(Buffer.from([0x82])); + * console.log(decoder.end(Buffer.from([0xAC]))); // Prints: € + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/string_decoder.js) + */ +declare module "string_decoder" { + class StringDecoder { + constructor(encoding?: BufferEncoding); + /** + * Returns a decoded string, ensuring that any incomplete multibyte characters at + * the end of the `Buffer`, or `TypedArray`, or `DataView` are omitted from the + * returned string and stored in an internal buffer for the next call to`stringDecoder.write()` or `stringDecoder.end()`. + * @since v0.1.99 + * @param buffer The bytes to decode. + */ + write(buffer: string | Buffer | NodeJS.ArrayBufferView): string; + /** + * Returns any remaining input stored in the internal buffer as a string. Bytes + * representing incomplete UTF-8 and UTF-16 characters will be replaced with + * substitution characters appropriate for the character encoding. + * + * If the `buffer` argument is provided, one final call to `stringDecoder.write()`is performed before returning the remaining input. 
+ * After `end()` is called, the `stringDecoder` object can be reused for new input. + * @since v0.9.3 + * @param buffer The bytes to decode. + */ + end(buffer?: string | Buffer | NodeJS.ArrayBufferView): string; + } +} +declare module "node:string_decoder" { + export * from "string_decoder"; +} diff --git a/dist/node_modules/@types/node/ts4.8/test.d.ts b/dist/node_modules/@types/node/ts4.8/test.d.ts new file mode 100644 index 0000000..149c9e0 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/test.d.ts @@ -0,0 +1,1465 @@ +/** + * The `node:test` module facilitates the creation of JavaScript tests. + * To access it: + * + * ```js + * import test from 'node:test'; + * ``` + * + * This module is only available under the `node:` scheme. The following will not + * work: + * + * ```js + * import test from 'test'; + * ``` + * + * Tests created via the `test` module consist of a single function that is + * processed in one of three ways: + * + * 1. A synchronous function that is considered failing if it throws an exception, + * and is considered passing otherwise. + * 2. A function that returns a `Promise` that is considered failing if the`Promise` rejects, and is considered passing if the `Promise` fulfills. + * 3. A function that receives a callback function. If the callback receives any + * truthy value as its first argument, the test is considered failing. If a + * falsy value is passed as the first argument to the callback, the test is + * considered passing. If the test function receives a callback function and + * also returns a `Promise`, the test will fail. + * + * The following example illustrates how tests are written using the`test` module. + * + * ```js + * test('synchronous passing test', (t) => { + * // This test passes because it does not throw an exception. + * assert.strictEqual(1, 1); + * }); + * + * test('synchronous failing test', (t) => { + * // This test fails because it throws an exception. + * assert.strictEqual(1, 2); + * }); + * + * test('asynchronous passing test', async (t) => { + * // This test passes because the Promise returned by the async + * // function is settled and not rejected. + * assert.strictEqual(1, 1); + * }); + * + * test('asynchronous failing test', async (t) => { + * // This test fails because the Promise returned by the async + * // function is rejected. + * assert.strictEqual(1, 2); + * }); + * + * test('failing test using Promises', (t) => { + * // Promises can be used directly as well. + * return new Promise((resolve, reject) => { + * setImmediate(() => { + * reject(new Error('this will cause the test to fail')); + * }); + * }); + * }); + * + * test('callback passing test', (t, done) => { + * // done() is the callback function. When the setImmediate() runs, it invokes + * // done() with no arguments. + * setImmediate(done); + * }); + * + * test('callback failing test', (t, done) => { + * // When the setImmediate() runs, done() is invoked with an Error object and + * // the test fails. + * setImmediate(() => { + * done(new Error('callback failure')); + * }); + * }); + * ``` + * + * If any tests fail, the process exit code is set to `1`. + * @since v18.0.0, v16.17.0 + * @see [source](https://github.com/nodejs/node/blob/v20.4.0/lib/test.js) + */ +declare module "node:test" { + import { Readable } from "node:stream"; + import { AsyncResource } from "node:async_hooks"; + /** + * **Note:**`shard` is used to horizontally parallelize test running across + * machines or processes, ideal for large-scale executions across varied + * environments. 
It's incompatible with `watch` mode, tailored for rapid + * code iteration by automatically rerunning tests on file changes. + * + * ```js + * import { tap } from 'node:test/reporters'; + * import { run } from 'node:test'; + * import process from 'node:process'; + * import path from 'node:path'; + * + * run({ files: [path.resolve('./tests/test.js')] }) + * .compose(tap) + * .pipe(process.stdout); + * ``` + * @since v18.9.0, v16.19.0 + * @param options Configuration options for running tests. The following properties are supported: + */ + function run(options?: RunOptions): TestsStream; + /** + * The `test()` function is the value imported from the `test` module. Each + * invocation of this function results in reporting the test to the `TestsStream`. + * + * The `TestContext` object passed to the `fn` argument can be used to perform + * actions related to the current test. Examples include skipping the test, adding + * additional diagnostic information, or creating subtests. + * + * `test()` returns a `Promise` that fulfills once the test completes. + * if `test()` is called within a `describe()` block, it fulfills immediately. + * The return value can usually be discarded for top level tests. + * However, the return value from subtests should be used to prevent the parent + * test from finishing first and cancelling the subtest + * as shown in the following example. + * + * ```js + * test('top level test', async (t) => { + * // The setTimeout() in the following subtest would cause it to outlive its + * // parent test if 'await' is removed on the next line. Once the parent test + * // completes, it will cancel any outstanding subtests. + * await t.test('longer running subtest', async (t) => { + * return new Promise((resolve, reject) => { + * setTimeout(resolve, 1000); + * }); + * }); + * }); + * ``` + * + * The `timeout` option can be used to fail the test if it takes longer than`timeout` milliseconds to complete. However, it is not a reliable mechanism for + * canceling tests because a running test might block the application thread and + * thus prevent the scheduled cancellation. + * @since v18.0.0, v16.17.0 + * @param [name='The name'] The name of the test, which is displayed when reporting test results. + * @param options Configuration options for the test. The following properties are supported: + * @param [fn='A no-op function'] The function under test. The first argument to this function is a {@link TestContext} object. If the test uses callbacks, the callback function is passed as the + * second argument. + * @return Fulfilled with `undefined` once the test completes, or immediately if the test runs within {@link describe}. + */ + function test(name?: string, fn?: TestFn): Promise; + function test(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function test(options?: TestOptions, fn?: TestFn): Promise; + function test(fn?: TestFn): Promise; + namespace test { + export { after, afterEach, before, beforeEach, describe, it, mock, only, run, skip, test, todo }; + } + /** + * The `describe()` function imported from the `node:test` module. Each + * invocation of this function results in the creation of a Subtest. + * After invocation of top level `describe` functions, + * all top level tests and suites will execute. + * @param [name='The name'] The name of the suite, which is displayed when reporting test results. + * @param options Configuration options for the suite. supports the same options as `test([name][, options][, fn])`. 
+ * @param [fn='A no-op function'] The function under suite declaring all subtests and subsuites. The first argument to this function is a {@link SuiteContext} object. + * @return Immediately fulfilled with `undefined`. + */ + function describe(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function describe(name?: string, fn?: SuiteFn): Promise; + function describe(options?: TestOptions, fn?: SuiteFn): Promise; + function describe(fn?: SuiteFn): Promise; + namespace describe { + /** + * Shorthand for skipping a suite, same as `describe([name], { skip: true }[, fn])`. + */ + function skip(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function skip(name?: string, fn?: SuiteFn): Promise; + function skip(options?: TestOptions, fn?: SuiteFn): Promise; + function skip(fn?: SuiteFn): Promise; + /** + * Shorthand for marking a suite as `TODO`, same as `describe([name], { todo: true }[, fn])`. + */ + function todo(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function todo(name?: string, fn?: SuiteFn): Promise; + function todo(options?: TestOptions, fn?: SuiteFn): Promise; + function todo(fn?: SuiteFn): Promise; + /** + * Shorthand for marking a suite as `only`, same as `describe([name], { only: true }[, fn])`. + * @since v18.15.0 + */ + function only(name?: string, options?: TestOptions, fn?: SuiteFn): Promise; + function only(name?: string, fn?: SuiteFn): Promise; + function only(options?: TestOptions, fn?: SuiteFn): Promise; + function only(fn?: SuiteFn): Promise; + } + /** + * Shorthand for `test()`. + * + * The `it()` function is imported from the `node:test` module. + * @since v18.6.0, v16.17.0 + */ + function it(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function it(name?: string, fn?: TestFn): Promise; + function it(options?: TestOptions, fn?: TestFn): Promise; + function it(fn?: TestFn): Promise; + namespace it { + /** + * Shorthand for skipping a test, same as `it([name], { skip: true }[, fn])`. + */ + function skip(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function skip(name?: string, fn?: TestFn): Promise; + function skip(options?: TestOptions, fn?: TestFn): Promise; + function skip(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `TODO`, same as `it([name], { todo: true }[, fn])`. + */ + function todo(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function todo(name?: string, fn?: TestFn): Promise; + function todo(options?: TestOptions, fn?: TestFn): Promise; + function todo(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `only`, same as `it([name], { only: true }[, fn])`. + * @since v18.15.0 + */ + function only(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function only(name?: string, fn?: TestFn): Promise; + function only(options?: TestOptions, fn?: TestFn): Promise; + function only(fn?: TestFn): Promise; + } + /** + * Shorthand for skipping a test, same as `test([name], { skip: true }[, fn])`. + * @since v20.2.0 + */ + function skip(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function skip(name?: string, fn?: TestFn): Promise; + function skip(options?: TestOptions, fn?: TestFn): Promise; + function skip(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `TODO`, same as `test([name], { todo: true }[, fn])`. 
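// Short sketch of the describe()/it() shorthands declared above, including the
// skip and todo variants (test names are illustrative).
import { describe, it } from "node:test";
import assert from "node:assert";

describe("math suite", () => {
  it("adds", () => {
    assert.strictEqual(1 + 1, 2);
  });
  it.todo("multiplies large numbers"); // marked TODO, still reported
  it.skip("flaky network test");       // skipped
});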
+ * @since v20.2.0 + */ + function todo(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function todo(name?: string, fn?: TestFn): Promise; + function todo(options?: TestOptions, fn?: TestFn): Promise; + function todo(fn?: TestFn): Promise; + /** + * Shorthand for marking a test as `only`, same as `test([name], { only: true }[, fn])`. + * @since v20.2.0 + */ + function only(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function only(name?: string, fn?: TestFn): Promise; + function only(options?: TestOptions, fn?: TestFn): Promise; + function only(fn?: TestFn): Promise; + /** + * The type of a function under test. The first argument to this function is a + * {@link TestContext} object. If the test uses callbacks, the callback function is passed as + * the second argument. + */ + type TestFn = (t: TestContext, done: (result?: any) => void) => void | Promise; + /** + * The type of a function under Suite. + */ + type SuiteFn = (s: SuiteContext) => void | Promise; + interface TestShard { + /** + * A positive integer between 1 and `` that specifies the index of the shard to run. + */ + index: number; + /** + * A positive integer that specifies the total number of shards to split the test files to. + */ + total: number; + } + interface RunOptions { + /** + * If a number is provided, then that many files would run in parallel. + * If truthy, it would run (number of cpu cores - 1) files in parallel. + * If falsy, it would only run one file at a time. + * If unspecified, subtests inherit this value from their parent. + * @default true + */ + concurrency?: number | boolean | undefined; + /** + * An array containing the list of files to run. + * If unspecified, the test runner execution model will be used. + */ + files?: readonly string[] | undefined; + /** + * Allows aborting an in-progress test execution. + * @default undefined + */ + signal?: AbortSignal | undefined; + /** + * A number of milliseconds the test will fail after. + * If unspecified, subtests inherit this value from their parent. + * @default Infinity + */ + timeout?: number | undefined; + /** + * Sets inspector port of test child process. + * If a nullish value is provided, each process gets its own port, + * incremented from the primary's `process.debugPort`. + */ + inspectPort?: number | (() => number) | undefined; + /** + * That can be used to only run tests whose name matches the provided pattern. + * Test name patterns are interpreted as JavaScript regular expressions. + * For each test that is executed, any corresponding test hooks, such as `beforeEach()`, are also run. + */ + testNamePatterns?: string | RegExp | string[] | RegExp[]; + /** + * If truthy, the test context will only run tests that have the `only` option set + */ + only?: boolean; + /** + * A function that accepts the TestsStream instance and can be used to setup listeners before any tests are run. + */ + setup?: (root: Test) => void | Promise; + /** + * Whether to run in watch mode or not. + * @default false + */ + watch?: boolean | undefined; + /** + * Running tests in a specific shard. 
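// Sketch: calling run() with the RunOptions declared above, including a
// TestShard, and streaming results through the tap reporter as in the run()
// example earlier. File paths and values are illustrative.
import { run } from "node:test";
import { tap } from "node:test/reporters";
import path from "node:path";
import process from "node:process";

run({
  files: [path.resolve("./tests/test.js")],
  concurrency: true,             // roughly (number of CPU cores - 1) files in parallel
  timeout: 60_000,               // fail after 60 seconds
  shard: { index: 1, total: 2 }, // run the first of two shards
})
  .compose(tap)
  .pipe(process.stdout);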
+ * @default undefined + */ + shard?: TestShard | undefined; + } + class Test extends AsyncResource { + concurrency: number; + nesting: number; + only: boolean; + reporter: TestsStream; + runOnlySubtests: boolean; + testNumber: number; + timeout: number | null; + } + /** + * A successful call to `run()` method will return a new `TestsStream` object, streaming a series of events representing the execution of the tests.`TestsStream` will emit events, in the + * order of the tests definition + * @since v18.9.0, v16.19.0 + */ + class TestsStream extends Readable implements NodeJS.ReadableStream { + addListener(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + addListener(event: "test:fail", listener: (data: TestFail) => void): this; + addListener(event: "test:pass", listener: (data: TestPass) => void): this; + addListener(event: "test:plan", listener: (data: TestPlan) => void): this; + addListener(event: "test:start", listener: (data: TestStart) => void): this; + addListener(event: "test:stderr", listener: (data: TestStderr) => void): this; + addListener(event: "test:stdout", listener: (data: TestStdout) => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + emit(event: "test:diagnostic", data: DiagnosticData): boolean; + emit(event: "test:fail", data: TestFail): boolean; + emit(event: "test:pass", data: TestPass): boolean; + emit(event: "test:plan", data: TestPlan): boolean; + emit(event: "test:start", data: TestStart): boolean; + emit(event: "test:stderr", data: TestStderr): boolean; + emit(event: "test:stdout", data: TestStdout): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + on(event: "test:fail", listener: (data: TestFail) => void): this; + on(event: "test:pass", listener: (data: TestPass) => void): this; + on(event: "test:plan", listener: (data: TestPlan) => void): this; + on(event: "test:start", listener: (data: TestStart) => void): this; + on(event: "test:stderr", listener: (data: TestStderr) => void): this; + on(event: "test:stdout", listener: (data: TestStdout) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + once(event: "test:fail", listener: (data: TestFail) => void): this; + once(event: "test:pass", listener: (data: TestPass) => void): this; + once(event: "test:plan", listener: (data: TestPlan) => void): this; + once(event: "test:start", listener: (data: TestStart) => void): this; + once(event: "test:stderr", listener: (data: TestStderr) => void): this; + once(event: "test:stdout", listener: (data: TestStdout) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "test:diagnostic", listener: (data: DiagnosticData) => void): this; + prependListener(event: "test:fail", listener: (data: TestFail) => void): this; + prependListener(event: "test:pass", listener: (data: TestPass) => void): this; + prependListener(event: "test:plan", listener: (data: TestPlan) => void): this; + prependListener(event: "test:start", listener: (data: TestStart) => void): this; + prependListener(event: "test:stderr", listener: (data: TestStderr) => void): this; + prependListener(event: "test:stdout", listener: (data: TestStdout) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "test:diagnostic", listener: (data: 
DiagnosticData) => void): this; + prependOnceListener(event: "test:fail", listener: (data: TestFail) => void): this; + prependOnceListener(event: "test:pass", listener: (data: TestPass) => void): this; + prependOnceListener(event: "test:plan", listener: (data: TestPlan) => void): this; + prependOnceListener(event: "test:start", listener: (data: TestStart) => void): this; + prependOnceListener(event: "test:stderr", listener: (data: TestStderr) => void): this; + prependOnceListener(event: "test:stdout", listener: (data: TestStdout) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + } + /** + * An instance of `TestContext` is passed to each test function in order to + * interact with the test runner. However, the `TestContext` constructor is not + * exposed as part of the API. + * @since v18.0.0, v16.17.0 + */ + class TestContext { + /** + * This function is used to create a hook running before subtest of the current test. + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as + * the second argument. Default: A no-op function. + * @param options Configuration options for the hook. + * @since v20.1.0 + */ + before: typeof before; + /** + * This function is used to create a hook running before each subtest of the current test. + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as + * the second argument. Default: A no-op function. + * @param options Configuration options for the hook. + * @since v18.8.0 + */ + beforeEach: typeof beforeEach; + /** + * This function is used to create a hook that runs after the current test finishes. + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as + * the second argument. Default: A no-op function. + * @param options Configuration options for the hook. + * @since v18.13.0 + */ + after: typeof after; + /** + * This function is used to create a hook running after each subtest of the current test. + * @param fn The hook function. If the hook uses callbacks, the callback function is passed as + * the second argument. Default: A no-op function. + * @param options Configuration options for the hook. + * @since v18.8.0 + */ + afterEach: typeof afterEach; + /** + * This function is used to write diagnostics to the output. Any diagnostic + * information is included at the end of the test's results. This function does + * not return a value. + * + * ```js + * test('top level test', (t) => { + * t.diagnostic('A diagnostic message'); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param message Message to be reported. + */ + diagnostic(message: string): void; + /** + * The name of the test. + * @since v18.8.0, v16.18.0 + */ + readonly name: string; + /** + * If `shouldRunOnlyTests` is truthy, the test context will only run tests that + * have the `only` option set. Otherwise, all tests are run. If Node.js was not + * started with the `--test-only` command-line option, this function is a + * no-op. + * + * ```js + * test('top level test', (t) => { + * // The test context can be set to run subtests with the 'only' option. + * t.runOnly(true); + * return Promise.all([ + * t.test('this subtest is now skipped'), + * t.test('this subtest is run', { only: true }), + * ]); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param shouldRunOnlyTests Whether or not to run `only` tests. 
+ */ + runOnly(shouldRunOnlyTests: boolean): void; + /** + * ```js + * test('top level test', async (t) => { + * await fetch('some/uri', { signal: t.signal }); + * }); + * ``` + * @since v18.7.0, v16.17.0 + */ + readonly signal: AbortSignal; + /** + * This function causes the test's output to indicate the test as skipped. If`message` is provided, it is included in the output. Calling `skip()` does + * not terminate execution of the test function. This function does not return a + * value. + * + * ```js + * test('top level test', (t) => { + * // Make sure to return here as well if the test contains additional logic. + * t.skip('this is skipped'); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param message Optional skip message. + */ + skip(message?: string): void; + /** + * This function adds a `TODO` directive to the test's output. If `message` is + * provided, it is included in the output. Calling `todo()` does not terminate + * execution of the test function. This function does not return a value. + * + * ```js + * test('top level test', (t) => { + * // This test is marked as `TODO` + * t.todo('this is a todo'); + * }); + * ``` + * @since v18.0.0, v16.17.0 + * @param message Optional `TODO` message. + */ + todo(message?: string): void; + /** + * This function is used to create subtests under the current test. This function behaves in + * the same fashion as the top level {@link test} function. + * @since v18.0.0 + * @param name The name of the test, which is displayed when reporting test results. + * Default: The `name` property of fn, or `''` if `fn` does not have a name. + * @param options Configuration options for the test + * @param fn The function under test. This first argument to this function is a + * {@link TestContext} object. If the test uses callbacks, the callback function is + * passed as the second argument. Default: A no-op function. + * @returns A {@link Promise} resolved with `undefined` once the test completes. + */ + test: typeof test; + /** + * Each test provides its own MockTracker instance. + */ + readonly mock: MockTracker; + } + /** + * An instance of `SuiteContext` is passed to each suite function in order to + * interact with the test runner. However, the `SuiteContext` constructor is not + * exposed as part of the API. + * @since v18.7.0, v16.17.0 + */ + class SuiteContext { + /** + * The name of the suite. + * @since v18.8.0, v16.18.0 + */ + readonly name: string; + /** + * Can be used to abort test subtasks when the test has been aborted. + * @since v18.7.0, v16.17.0 + */ + readonly signal: AbortSignal; + } + interface TestOptions { + /** + * If a number is provided, then that many tests would run in parallel. + * If truthy, it would run (number of cpu cores - 1) tests in parallel. + * For subtests, it will be `Infinity` tests in parallel. + * If falsy, it would only run one test at a time. + * If unspecified, subtests inherit this value from their parent. + * @default false + */ + concurrency?: number | boolean | undefined; + /** + * If truthy, and the test context is configured to run `only` tests, then this test will be + * run. Otherwise, the test is skipped. + * @default false + */ + only?: boolean | undefined; + /** + * Allows aborting an in-progress test. + * @since v18.8.0 + */ + signal?: AbortSignal | undefined; + /** + * If truthy, the test is skipped. If a string is provided, that string is displayed in the + * test results as the reason for skipping the test. 
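+ *
+ * For example, a test can be skipped with a reason string (a minimal sketch):
+ *
+ * ```js
+ * test('feature under construction', { skip: 'awaiting upstream fix' }, () => {
+ *   // Never executed; the reason string is shown in the test output.
+ * });
+ * ```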
+ * @default false + */ + skip?: boolean | string | undefined; + /** + * A number of milliseconds the test will fail after. If unspecified, subtests inherit this + * value from their parent. + * @default Infinity + * @since v18.7.0 + */ + timeout?: number | undefined; + /** + * If truthy, the test marked as `TODO`. If a string is provided, that string is displayed in + * the test results as the reason why the test is `TODO`. + * @default false + */ + todo?: boolean | string | undefined; + } + /** + * This function is used to create a hook running before running a suite. + * + * ```js + * describe('tests', async () => { + * before(() => console.log('about to run some test')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param [fn='A no-op function'] The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. The following properties are supported: + */ + function before(fn?: HookFn, options?: HookOptions): void; + /** + * This function is used to create a hook running after running a suite. + * + * ```js + * describe('tests', async () => { + * after(() => console.log('finished running tests')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param [fn='A no-op function'] The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. The following properties are supported: + */ + function after(fn?: HookFn, options?: HookOptions): void; + /** + * This function is used to create a hook running + * before each subtest of the current suite. + * + * ```js + * describe('tests', async () => { + * beforeEach(() => console.log('about to run a test')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param [fn='A no-op function'] The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. The following properties are supported: + */ + function beforeEach(fn?: HookFn, options?: HookOptions): void; + /** + * This function is used to create a hook running + * after each subtest of the current test. + * + * ```js + * describe('tests', async () => { + * afterEach(() => console.log('finished running a test')); + * it('is a subtest', () => { + * assert.ok('some relevant assertion here'); + * }); + * }); + * ``` + * @since v18.8.0, v16.18.0 + * @param [fn='A no-op function'] The hook function. If the hook uses callbacks, the callback function is passed as the second argument. + * @param options Configuration options for the hook. The following properties are supported: + */ + function afterEach(fn?: HookFn, options?: HookOptions): void; + /** + * The hook function. If the hook uses callbacks, the callback function is passed as the + * second argument. + */ + type HookFn = (s: SuiteContext, done: (result?: any) => void) => any; + /** + * Configuration options for hooks. + * @since v18.8.0 + */ + interface HookOptions { + /** + * Allows aborting an in-progress hook. + */ + signal?: AbortSignal | undefined; + /** + * A number of milliseconds the hook will fail after. If unspecified, subtests inherit this + * value from their parent. 
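+ *
+ * For example, a timeout can be passed alongside the hook function
+ * (a sketch; `seedDatabase()` is a hypothetical helper):
+ *
+ * ```js
+ * beforeEach(async () => {
+ *   await seedDatabase(); // hypothetical setup work
+ * }, { timeout: 500 });   // the hook fails if it takes longer than 500 ms
+ * ```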
+ * @default Infinity + */ + timeout?: number | undefined; + } + interface MockFunctionOptions { + /** + * The number of times that the mock will use the behavior of `implementation`. + * Once the mock function has been called `times` times, + * it will automatically restore the behavior of `original`. + * This value must be an integer greater than zero. + * @default Infinity + */ + times?: number | undefined; + } + interface MockMethodOptions extends MockFunctionOptions { + /** + * If `true`, `object[methodName]` is treated as a getter. + * This option cannot be used with the `setter` option. + */ + getter?: boolean | undefined; + /** + * If `true`, `object[methodName]` is treated as a setter. + * This option cannot be used with the `getter` option. + */ + setter?: boolean | undefined; + } + type Mock = F & { + mock: MockFunctionContext; + }; + type NoOpFunction = (...args: any[]) => undefined; + type FunctionPropertyNames = { + [K in keyof T]: T[K] extends Function ? K : never; + }[keyof T]; + /** + * The `MockTracker` class is used to manage mocking functionality. The test runner + * module provides a top level `mock` export which is a `MockTracker` instance. + * Each test also provides its own `MockTracker` instance via the test context's`mock` property. + * @since v19.1.0, v18.13.0 + */ + class MockTracker { + /** + * This function is used to create a mock function. + * + * The following example creates a mock function that increments a counter by one + * on each invocation. The `times` option is used to modify the mock behavior such + * that the first two invocations add two to the counter instead of one. + * + * ```js + * test('mocks a counting function', (t) => { + * let cnt = 0; + * + * function addOne() { + * cnt++; + * return cnt; + * } + * + * function addTwo() { + * cnt += 2; + * return cnt; + * } + * + * const fn = t.mock.fn(addOne, addTwo, { times: 2 }); + * + * assert.strictEqual(fn(), 2); + * assert.strictEqual(fn(), 4); + * assert.strictEqual(fn(), 5); + * assert.strictEqual(fn(), 6); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param [original='A no-op function'] An optional function to create a mock on. + * @param implementation An optional function used as the mock implementation for `original`. This is useful for creating mocks that exhibit one behavior for a specified number of calls and + * then restore the behavior of `original`. + * @param options Optional configuration options for the mock function. The following properties are supported: + * @return The mocked function. The mocked function contains a special `mock` property, which is an instance of {@link MockFunctionContext}, and can be used for inspecting and changing the + * behavior of the mocked function. + */ + fn(original?: F, options?: MockFunctionOptions): Mock; + fn( + original?: F, + implementation?: Implementation, + options?: MockFunctionOptions, + ): Mock; + /** + * This function is used to create a mock on an existing object method. The + * following example demonstrates how a mock is created on an existing object + * method. 
+ * + * ```js + * test('spies on an object method', (t) => { + * const number = { + * value: 5, + * subtract(a) { + * return this.value - a; + * }, + * }; + * + * t.mock.method(number, 'subtract'); + * assert.strictEqual(number.subtract.mock.calls.length, 0); + * assert.strictEqual(number.subtract(3), 2); + * assert.strictEqual(number.subtract.mock.calls.length, 1); + * + * const call = number.subtract.mock.calls[0]; + * + * assert.deepStrictEqual(call.arguments, [3]); + * assert.strictEqual(call.result, 2); + * assert.strictEqual(call.error, undefined); + * assert.strictEqual(call.target, undefined); + * assert.strictEqual(call.this, number); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param object The object whose method is being mocked. + * @param methodName The identifier of the method on `object` to mock. If `object[methodName]` is not a function, an error is thrown. + * @param implementation An optional function used as the mock implementation for `object[methodName]`. + * @param options Optional configuration options for the mock method. The following properties are supported: + * @return The mocked method. The mocked method contains a special `mock` property, which is an instance of {@link MockFunctionContext}, and can be used for inspecting and changing the + * behavior of the mocked method. + */ + method< + MockedObject extends object, + MethodName extends FunctionPropertyNames, + >( + object: MockedObject, + methodName: MethodName, + options?: MockFunctionOptions, + ): MockedObject[MethodName] extends Function ? Mock + : never; + method< + MockedObject extends object, + MethodName extends FunctionPropertyNames, + Implementation extends Function, + >( + object: MockedObject, + methodName: MethodName, + implementation: Implementation, + options?: MockFunctionOptions, + ): MockedObject[MethodName] extends Function ? Mock + : never; + method( + object: MockedObject, + methodName: keyof MockedObject, + options: MockMethodOptions, + ): Mock; + method( + object: MockedObject, + methodName: keyof MockedObject, + implementation: Function, + options: MockMethodOptions, + ): Mock; + + /** + * This function is syntax sugar for `MockTracker.method` with `options.getter`set to `true`. + * @since v19.3.0, v18.13.0 + */ + getter< + MockedObject extends object, + MethodName extends keyof MockedObject, + >( + object: MockedObject, + methodName: MethodName, + options?: MockFunctionOptions, + ): Mock<() => MockedObject[MethodName]>; + getter< + MockedObject extends object, + MethodName extends keyof MockedObject, + Implementation extends Function, + >( + object: MockedObject, + methodName: MethodName, + implementation?: Implementation, + options?: MockFunctionOptions, + ): Mock<(() => MockedObject[MethodName]) | Implementation>; + /** + * This function is syntax sugar for `MockTracker.method` with `options.setter`set to `true`. 
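+ *
+ * A minimal sketch of mocking a setter (the object and values are only
+ * illustrative):
+ *
+ * ```js
+ * test('mocks a setter', (t) => {
+ *   const obj = {
+ *     _value: 0,
+ *     set value(v) { this._value = v; },
+ *   };
+ *
+ *   const setterMock = t.mock.setter(obj, 'value', (v) => { obj._value = v * 2; });
+ *
+ *   obj.value = 5;
+ *   assert.strictEqual(obj._value, 10);
+ *   assert.strictEqual(setterMock.mock.callCount(), 1);
+ * });
+ * ```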
+ * @since v19.3.0, v18.13.0 + */ + setter< + MockedObject extends object, + MethodName extends keyof MockedObject, + >( + object: MockedObject, + methodName: MethodName, + options?: MockFunctionOptions, + ): Mock<(value: MockedObject[MethodName]) => void>; + setter< + MockedObject extends object, + MethodName extends keyof MockedObject, + Implementation extends Function, + >( + object: MockedObject, + methodName: MethodName, + implementation?: Implementation, + options?: MockFunctionOptions, + ): Mock<((value: MockedObject[MethodName]) => void) | Implementation>; + /** + * This function restores the default behavior of all mocks that were previously + * created by this `MockTracker` and disassociates the mocks from the`MockTracker` instance. Once disassociated, the mocks can still be used, but the`MockTracker` instance can no longer be + * used to reset their behavior or + * otherwise interact with them. + * + * After each test completes, this function is called on the test context's`MockTracker`. If the global `MockTracker` is used extensively, calling this + * function manually is recommended. + * @since v19.1.0, v18.13.0 + */ + reset(): void; + /** + * This function restores the default behavior of all mocks that were previously + * created by this `MockTracker`. Unlike `mock.reset()`, `mock.restoreAll()` does + * not disassociate the mocks from the `MockTracker` instance. + * @since v19.1.0, v18.13.0 + */ + restoreAll(): void; + timers: MockTimers; + } + const mock: MockTracker; + interface MockFunctionCall< + F extends Function, + ReturnType = F extends (...args: any) => infer T ? T + : F extends abstract new(...args: any) => infer T ? T + : unknown, + Args = F extends (...args: infer Y) => any ? Y + : F extends abstract new(...args: infer Y) => any ? Y + : unknown[], + > { + /** + * An array of the arguments passed to the mock function. + */ + arguments: Args; + /** + * If the mocked function threw then this property contains the thrown value. + */ + error: unknown | undefined; + /** + * The value returned by the mocked function. + * + * If the mocked function threw, it will be `undefined`. + */ + result: ReturnType | undefined; + /** + * An `Error` object whose stack can be used to determine the callsite of the mocked function invocation. + */ + stack: Error; + /** + * If the mocked function is a constructor, this field contains the class being constructed. + * Otherwise this will be `undefined`. + */ + target: F extends abstract new(...args: any) => any ? F : undefined; + /** + * The mocked function's `this` value. + */ + this: unknown; + } + /** + * The `MockFunctionContext` class is used to inspect or manipulate the behavior of + * mocks created via the `MockTracker` APIs. + * @since v19.1.0, v18.13.0 + */ + class MockFunctionContext { + /** + * A getter that returns a copy of the internal array used to track calls to the + * mock. Each entry in the array is an object with the following properties. + * @since v19.1.0, v18.13.0 + */ + readonly calls: Array>; + /** + * This function returns the number of times that this mock has been invoked. This + * function is more efficient than checking `ctx.calls.length` because `ctx.calls`is a getter that creates a copy of the internal call tracking array. + * @since v19.1.0, v18.13.0 + * @return The number of times that this mock has been invoked. + */ + callCount(): number; + /** + * This function is used to change the behavior of an existing mock. 
+ * + * The following example creates a mock function using `t.mock.fn()`, calls the + * mock function, and then changes the mock implementation to a different function. + * + * ```js + * test('changes a mock behavior', (t) => { + * let cnt = 0; + * + * function addOne() { + * cnt++; + * return cnt; + * } + * + * function addTwo() { + * cnt += 2; + * return cnt; + * } + * + * const fn = t.mock.fn(addOne); + * + * assert.strictEqual(fn(), 1); + * fn.mock.mockImplementation(addTwo); + * assert.strictEqual(fn(), 3); + * assert.strictEqual(fn(), 5); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param implementation The function to be used as the mock's new implementation. + */ + mockImplementation(implementation: Function): void; + /** + * This function is used to change the behavior of an existing mock for a single + * invocation. Once invocation `onCall` has occurred, the mock will revert to + * whatever behavior it would have used had `mockImplementationOnce()` not been + * called. + * + * The following example creates a mock function using `t.mock.fn()`, calls the + * mock function, changes the mock implementation to a different function for the + * next invocation, and then resumes its previous behavior. + * + * ```js + * test('changes a mock behavior once', (t) => { + * let cnt = 0; + * + * function addOne() { + * cnt++; + * return cnt; + * } + * + * function addTwo() { + * cnt += 2; + * return cnt; + * } + * + * const fn = t.mock.fn(addOne); + * + * assert.strictEqual(fn(), 1); + * fn.mock.mockImplementationOnce(addTwo); + * assert.strictEqual(fn(), 3); + * assert.strictEqual(fn(), 4); + * }); + * ``` + * @since v19.1.0, v18.13.0 + * @param implementation The function to be used as the mock's implementation for the invocation number specified by `onCall`. + * @param onCall The invocation number that will use `implementation`. If the specified invocation has already occurred then an exception is thrown. + */ + mockImplementationOnce(implementation: Function, onCall?: number): void; + /** + * Resets the call history of the mock function. + * @since v19.3.0, v18.13.0 + */ + resetCalls(): void; + /** + * Resets the implementation of the mock function to its original behavior. The + * mock can still be used after calling this function. + * @since v19.1.0, v18.13.0 + */ + restore(): void; + } + type Timer = "setInterval" | "setTimeout" | "setImmediate" | "Date"; + + interface MockTimersOptions { + apis: Timer[]; + now?: number | Date; + } + /** + * Mocking timers is a technique commonly used in software testing to simulate and + * control the behavior of timers, such as `setInterval` and `setTimeout`, + * without actually waiting for the specified time intervals. + * + * The MockTimers API also allows for mocking of the `Date` constructor and + * `setImmediate`/`clearImmediate` functions. + * + * The `MockTracker` provides a top-level `timers` export + * which is a `MockTimers` instance. + * @since v20.4.0 + * @experimental + */ + class MockTimers { + /** + * Enables timer mocking for the specified timers. + * + * **Note:** When you enable mocking for a specific timer, its associated + * clear function will also be implicitly mocked. + * + * **Note:** Mocking `Date` will affect the behavior of the mocked timers + * as they use the same internal clock. 
+ * + * Example usage without setting initial time: + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.enable({ apis: ['setInterval', 'Date'], now: 1234 }); + * ``` + * + * The above example enables mocking for the `Date` constructor, `setInterval` timer and + * implicitly mocks the `clearInterval` function. Only the `Date` constructor from `globalThis`, + * `setInterval` and `clearInterval` functions from `node:timers`,`node:timers/promises`, and `globalThis` will be mocked. + * + * Example usage with initial time set + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.enable({ apis: ['Date'], now: 1000 }); + * ``` + * + * Example usage with initial Date object as time set + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.enable({ apis: ['Date'], now: new Date() }); + * ``` + * + * Alternatively, if you call `mock.timers.enable()` without any parameters: + * + * All timers (`'setInterval'`, `'clearInterval'`, `'Date'`, `'setImmediate'`, `'clearImmediate'`, `'setTimeout'`, and `'clearTimeout'`) + * will be mocked. + * + * The `setInterval`, `clearInterval`, `setTimeout`, and `clearTimeout` functions from `node:timers`, `node:timers/promises`, + * and `globalThis` will be mocked. + * The `Date` constructor from `globalThis` will be mocked. + * + * If there is no initial epoch set, the initial date will be based on 0 in the Unix epoch. This is `January 1st, 1970, 00:00:00 UTC`. You can set an initial date by passing a now property to the `.enable()` method. This value will be used as the initial date for the mocked Date object. It can either be a positive integer, or another Date object. + * @since v20.4.0 + */ + enable(options?: MockTimersOptions): void; + /** + * You can use the `.setTime()` method to manually move the mocked date to another time. This method only accepts a positive integer. + * Note: This method will execute any mocked timers that are in the past from the new time. + * In the below example we are setting a new time for the mocked date. + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * test('sets the time of a date object', (context) => { + * // Optionally choose what to mock + * context.mock.timers.enable({ apis: ['Date'], now: 100 }); + * assert.strictEqual(Date.now(), 100); + * // Advance in time will also advance the date + * context.mock.timers.setTime(1000); + * context.mock.timers.tick(200); + * assert.strictEqual(Date.now(), 1200); + * }); + * ``` + */ + setTime(time: number): void; + /** + * This function restores the default behavior of all mocks that were previously + * created by this `MockTimers` instance and disassociates the mocks + * from the `MockTracker` instance. + * + * **Note:** After each test completes, this function is called on + * the test context's `MockTracker`. + * + * ```js + * import { mock } from 'node:test'; + * mock.timers.reset(); + * ``` + * @since v20.4.0 + */ + reset(): void; + /** + * Advances time for all mocked timers. + * + * **Note:** This diverges from how `setTimeout` in Node.js behaves and accepts + * only positive numbers. In Node.js, `setTimeout` with negative numbers is + * only supported for web compatibility reasons. + * + * The following example mocks a `setTimeout` function and + * by using `.tick` advances in + * time triggering all pending timers. 
+ * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('mocks setTimeout to be executed synchronously without having to actually wait for it', (context) => { + * const fn = context.mock.fn(); + * + * context.mock.timers.enable({ apis: ['setTimeout'] }); + * + * setTimeout(fn, 9999); + * + * assert.strictEqual(fn.mock.callCount(), 0); + * + * // Advance in time + * context.mock.timers.tick(9999); + * + * assert.strictEqual(fn.mock.callCount(), 1); + * }); + * ``` + * + * Alternativelly, the `.tick` function can be called many times + * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('mocks setTimeout to be executed synchronously without having to actually wait for it', (context) => { + * const fn = context.mock.fn(); + * context.mock.timers.enable({ apis: ['setTimeout'] }); + * const nineSecs = 9000; + * setTimeout(fn, nineSecs); + * + * const twoSeconds = 3000; + * context.mock.timers.tick(twoSeconds); + * context.mock.timers.tick(twoSeconds); + * context.mock.timers.tick(twoSeconds); + * + * assert.strictEqual(fn.mock.callCount(), 1); + * }); + * ``` + * + * Advancing time using `.tick` will also advance the time for any `Date` object + * created after the mock was enabled (if `Date` was also set to be mocked). + * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('mocks setTimeout to be executed synchronously without having to actually wait for it', (context) => { + * const fn = context.mock.fn(); + * + * context.mock.timers.enable({ apis: ['setTimeout', 'Date'] }); + * setTimeout(fn, 9999); + * + * assert.strictEqual(fn.mock.callCount(), 0); + * assert.strictEqual(Date.now(), 0); + * + * // Advance in time + * context.mock.timers.tick(9999); + * assert.strictEqual(fn.mock.callCount(), 1); + * assert.strictEqual(Date.now(), 9999); + * }); + * ``` + * @since v20.4.0 + */ + tick(milliseconds: number): void; + /** + * Triggers all pending mocked timers immediately. If the `Date` object is also + * mocked, it will also advance the `Date` object to the furthest timer's time. + * + * The example below triggers all pending timers immediately, + * causing them to execute without any delay. + * + * ```js + * import assert from 'node:assert'; + * import { test } from 'node:test'; + * + * test('runAll functions following the given order', (context) => { + * context.mock.timers.enable({ apis: ['setTimeout', 'Date'] }); + * const results = []; + * setTimeout(() => results.push(1), 9999); + * + * // Notice that if both timers have the same timeout, + * // the order of execution is guaranteed + * setTimeout(() => results.push(3), 8888); + * setTimeout(() => results.push(2), 8888); + * + * assert.deepStrictEqual(results, []); + * + * context.mock.timers.runAll(); + * assert.deepStrictEqual(results, [3, 2, 1]); + * // The Date object is also advanced to the furthest timer's time + * assert.strictEqual(Date.now(), 9999); + * }); + * ``` + * + * **Note:** The `runAll()` function is specifically designed for + * triggering timers in the context of timer mocking. + * It does not have any effect on real-time system + * clocks or actual timers outside of the mocking environment. + * @since v20.4.0 + */ + runAll(): void; + /** + * Calls {@link MockTimers.reset()}. 
+ */ + [Symbol.dispose](): void; + } + export { + after, + afterEach, + before, + beforeEach, + describe, + it, + Mock, + mock, + only, + run, + skip, + test, + test as default, + todo, + }; +} + +interface TestLocationInfo { + /** + * The column number where the test is defined, or + * `undefined` if the test was run through the REPL. + */ + column?: number; + /** + * The path of the test file, `undefined` if test is not ran through a file. + */ + file?: string; + /** + * The line number where the test is defined, or + * `undefined` if the test was run through the REPL. + */ + line?: number; +} +interface DiagnosticData extends TestLocationInfo { + /** + * The diagnostic message. + */ + message: string; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestFail extends TestLocationInfo { + /** + * Additional execution metadata. + */ + details: { + /** + * The duration of the test in milliseconds. + */ + duration_ms: number; + /** + * The error thrown by the test. + */ + error: Error; + /** + * The type of the test, used to denote whether this is a suite. + * @since 20.0.0, 19.9.0, 18.17.0 + */ + type?: "suite"; + }; + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; + /** + * The ordinal number of the test. + */ + testNumber: number; + /** + * Present if `context.todo` is called. + */ + todo?: string | boolean; + /** + * Present if `context.skip` is called. + */ + skip?: string | boolean; +} +interface TestPass extends TestLocationInfo { + /** + * Additional execution metadata. + */ + details: { + /** + * The duration of the test in milliseconds. + */ + duration_ms: number; + /** + * The type of the test, used to denote whether this is a suite. + * @since 20.0.0, 19.9.0, 18.17.0 + */ + type?: "suite"; + }; + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; + /** + * The ordinal number of the test. + */ + testNumber: number; + /** + * Present if `context.todo` is called. + */ + todo?: string | boolean; + /** + * Present if `context.skip` is called. + */ + skip?: string | boolean; +} +interface TestPlan extends TestLocationInfo { + /** + * The nesting level of the test. + */ + nesting: number; + /** + * The number of subtests that have ran. + */ + count: number; +} +interface TestStart extends TestLocationInfo { + /** + * The test name. + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestStderr extends TestLocationInfo { + /** + * The message written to `stderr` + */ + message: string; +} +interface TestStdout extends TestLocationInfo { + /** + * The message written to `stdout` + */ + message: string; +} +interface TestEnqueue extends TestLocationInfo { + /** + * The test name + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; +} +interface TestDequeue extends TestLocationInfo { + /** + * The test name + */ + name: string; + /** + * The nesting level of the test. + */ + nesting: number; +} + +/** + * The `node:test/reporters` module exposes the builtin-reporters for `node:test`. + * To access it: + * + * ```js + * import test from 'node:test/reporters'; + * ``` + * + * This module is only available under the `node:` scheme. 
The following will not + * work: + * + * ```js + * import test from 'test/reporters'; + * ``` + * @since v19.9.0 + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/test/reporters.js) + */ +declare module "node:test/reporters" { + import { Transform, TransformOptions } from "node:stream"; + + type TestEvent = + | { type: "test:diagnostic"; data: DiagnosticData } + | { type: "test:fail"; data: TestFail } + | { type: "test:pass"; data: TestPass } + | { type: "test:plan"; data: TestPlan } + | { type: "test:start"; data: TestStart } + | { type: "test:stderr"; data: TestStderr } + | { type: "test:stdout"; data: TestStdout } + | { type: "test:enqueue"; data: TestEnqueue } + | { type: "test:dequeue"; data: TestDequeue } + | { type: "test:watch:drained" }; + type TestEventGenerator = AsyncGenerator; + + /** + * The `dot` reporter outputs the test results in a compact format, + * where each passing test is represented by a `.`, + * and each failing test is represented by a `X`. + */ + function dot(source: TestEventGenerator): AsyncGenerator<"\n" | "." | "X", void>; + /** + * The `tap` reporter outputs the test results in the [TAP](https://testanything.org/) format. + */ + function tap(source: TestEventGenerator): AsyncGenerator; + /** + * The `spec` reporter outputs the test results in a human-readable format. + */ + class Spec extends Transform { + constructor(); + } + /** + * The `junit` reporter outputs test results in a jUnit XML format + */ + function junit(source: TestEventGenerator): AsyncGenerator; + class Lcov extends Transform { + constructor(opts?: TransformOptions); + } + export { dot, junit, Lcov as lcov, Spec as spec, tap, TestEvent }; +} diff --git a/dist/node_modules/@types/node/ts4.8/timers.d.ts b/dist/node_modules/@types/node/ts4.8/timers.d.ts new file mode 100644 index 0000000..039f31f --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/timers.d.ts @@ -0,0 +1,240 @@ +/** + * The `timer` module exposes a global API for scheduling functions to + * be called at some future period of time. Because the timer functions are + * globals, there is no need to call `require('node:timers')` to use the API. + * + * The timer functions within Node.js implement a similar API as the timers API + * provided by Web Browsers but use a different internal implementation that is + * built around the Node.js [Event Loop](https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick/#setimmediate-vs-settimeout). + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/timers.js) + */ +declare module "timers" { + import { Abortable } from "node:events"; + import { + setImmediate as setImmediatePromise, + setInterval as setIntervalPromise, + setTimeout as setTimeoutPromise, + } from "node:timers/promises"; + interface TimerOptions extends Abortable { + /** + * Set to `false` to indicate that the scheduled `Timeout` + * should not require the Node.js event loop to remain active. 
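+ *
+ * For example, with the promise-based timers a sketch might look like this
+ * (the delay is arbitrary):
+ *
+ * ```js
+ * import { setTimeout } from 'node:timers/promises';
+ *
+ * // The pending timer will not, by itself, keep the process alive.
+ * await setTimeout(100, undefined, { ref: false });
+ * ```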
+ * @default true + */ + ref?: boolean | undefined; + } + let setTimeout: typeof global.setTimeout; + let clearTimeout: typeof global.clearTimeout; + let setInterval: typeof global.setInterval; + let clearInterval: typeof global.clearInterval; + let setImmediate: typeof global.setImmediate; + let clearImmediate: typeof global.clearImmediate; + global { + namespace NodeJS { + // compatibility with older typings + interface Timer extends RefCounted { + hasRef(): boolean; + refresh(): this; + [Symbol.toPrimitive](): number; + } + /** + * This object is created internally and is returned from `setImmediate()`. It + * can be passed to `clearImmediate()` in order to cancel the scheduled + * actions. + * + * By default, when an immediate is scheduled, the Node.js event loop will continue + * running as long as the immediate is active. The `Immediate` object returned by `setImmediate()` exports both `immediate.ref()` and `immediate.unref()`functions that can be used to + * control this default behavior. + */ + class Immediate implements RefCounted { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the`Immediate` is active. Calling `immediate.ref()` multiple times will have no + * effect. + * + * By default, all `Immediate` objects are "ref'ed", making it normally unnecessary + * to call `immediate.ref()` unless `immediate.unref()` had been called previously. + * @since v9.7.0 + * @return a reference to `immediate` + */ + ref(): this; + /** + * When called, the active `Immediate` object will not require the Node.js event + * loop to remain active. If there is no other activity keeping the event loop + * running, the process may exit before the `Immediate` object's callback is + * invoked. Calling `immediate.unref()` multiple times will have no effect. + * @since v9.7.0 + * @return a reference to `immediate` + */ + unref(): this; + /** + * If true, the `Immediate` object will keep the Node.js event loop active. + * @since v11.0.0 + */ + hasRef(): boolean; + _onImmediate: Function; // to distinguish it from the Timeout class + /** + * Cancels the immediate. This is similar to calling `clearImmediate()`. + * @since v20.5.0 + */ + [Symbol.dispose](): void; + } + /** + * This object is created internally and is returned from `setTimeout()` and `setInterval()`. It can be passed to either `clearTimeout()` or `clearInterval()` in order to cancel the + * scheduled actions. + * + * By default, when a timer is scheduled using either `setTimeout()` or `setInterval()`, the Node.js event loop will continue running as long as the + * timer is active. Each of the `Timeout` objects returned by these functions + * export both `timeout.ref()` and `timeout.unref()` functions that can be used to + * control this default behavior. + */ + class Timeout implements Timer { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the`Timeout` is active. Calling `timeout.ref()` multiple times will have no effect. + * + * By default, all `Timeout` objects are "ref'ed", making it normally unnecessary + * to call `timeout.ref()` unless `timeout.unref()` had been called previously. + * @since v0.9.1 + * @return a reference to `timeout` + */ + ref(): this; + /** + * When called, the active `Timeout` object will not require the Node.js event loop + * to remain active. If there is no other activity keeping the event loop running, + * the process may exit before the `Timeout` object's callback is invoked. Calling`timeout.unref()` multiple times will have no effect. 
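+ *
+ * A minimal sketch:
+ *
+ * ```js
+ * const timeout = setTimeout(() => console.log('may never run'), 60_000);
+ * // Allow the process to exit even though this timer is still pending.
+ * timeout.unref();
+ * ```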
+ * @since v0.9.1 + * @return a reference to `timeout` + */ + unref(): this; + /** + * If true, the `Timeout` object will keep the Node.js event loop active. + * @since v11.0.0 + */ + hasRef(): boolean; + /** + * Sets the timer's start time to the current time, and reschedules the timer to + * call its callback at the previously specified duration adjusted to the current + * time. This is useful for refreshing a timer without allocating a new + * JavaScript object. + * + * Using this on a timer that has already called its callback will reactivate the + * timer. + * @since v10.2.0 + * @return a reference to `timeout` + */ + refresh(): this; + [Symbol.toPrimitive](): number; + /** + * Cancels the timeout. + * @since v20.5.0 + */ + [Symbol.dispose](): void; + } + } + /** + * Schedules execution of a one-time `callback` after `delay` milliseconds. + * + * The `callback` will likely not be invoked in precisely `delay` milliseconds. + * Node.js makes no guarantees about the exact timing of when callbacks will fire, + * nor of their ordering. The callback will be called as close as possible to the + * time specified. + * + * When `delay` is larger than `2147483647` or less than `1`, the `delay`will be set to `1`. Non-integer delays are truncated to an integer. + * + * If `callback` is not a function, a `TypeError` will be thrown. + * + * This method has a custom variant for promises that is available using `timersPromises.setTimeout()`. + * @since v0.0.1 + * @param callback The function to call when the timer elapses. + * @param [delay=1] The number of milliseconds to wait before calling the `callback`. + * @param args Optional arguments to pass when the `callback` is called. + * @return for use with {@link clearTimeout} + */ + function setTimeout( + callback: (...args: TArgs) => void, + ms?: number, + ...args: TArgs + ): NodeJS.Timeout; + // util.promisify no rest args compability + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + function setTimeout(callback: (args: void) => void, ms?: number): NodeJS.Timeout; + namespace setTimeout { + const __promisify__: typeof setTimeoutPromise; + } + /** + * Cancels a `Timeout` object created by `setTimeout()`. + * @since v0.0.1 + * @param timeout A `Timeout` object as returned by {@link setTimeout} or the `primitive` of the `Timeout` object as a string or a number. + */ + function clearTimeout(timeoutId: NodeJS.Timeout | string | number | undefined): void; + /** + * Schedules repeated execution of `callback` every `delay` milliseconds. + * + * When `delay` is larger than `2147483647` or less than `1`, the `delay` will be + * set to `1`. Non-integer delays are truncated to an integer. + * + * If `callback` is not a function, a `TypeError` will be thrown. + * + * This method has a custom variant for promises that is available using `timersPromises.setInterval()`. + * @since v0.0.1 + * @param callback The function to call when the timer elapses. + * @param [delay=1] The number of milliseconds to wait before calling the `callback`. + * @param args Optional arguments to pass when the `callback` is called. 
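+ *
+ * A minimal sketch (the intervals are illustrative):
+ *
+ * ```js
+ * const timer = setInterval(() => console.log('tick'), 1000);
+ *
+ * // Stop ticking after roughly five seconds.
+ * setTimeout(() => clearInterval(timer), 5000);
+ * ```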
+ * @return for use with {@link clearInterval} + */ + function setInterval( + callback: (...args: TArgs) => void, + ms?: number, + ...args: TArgs + ): NodeJS.Timeout; + // util.promisify no rest args compability + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + function setInterval(callback: (args: void) => void, ms?: number): NodeJS.Timeout; + namespace setInterval { + const __promisify__: typeof setIntervalPromise; + } + /** + * Cancels a `Timeout` object created by `setInterval()`. + * @since v0.0.1 + * @param timeout A `Timeout` object as returned by {@link setInterval} or the `primitive` of the `Timeout` object as a string or a number. + */ + function clearInterval(intervalId: NodeJS.Timeout | string | number | undefined): void; + /** + * Schedules the "immediate" execution of the `callback` after I/O events' + * callbacks. + * + * When multiple calls to `setImmediate()` are made, the `callback` functions are + * queued for execution in the order in which they are created. The entire callback + * queue is processed every event loop iteration. If an immediate timer is queued + * from inside an executing callback, that timer will not be triggered until the + * next event loop iteration. + * + * If `callback` is not a function, a `TypeError` will be thrown. + * + * This method has a custom variant for promises that is available using `timersPromises.setImmediate()`. + * @since v0.9.1 + * @param callback The function to call at the end of this turn of the Node.js `Event Loop` + * @param args Optional arguments to pass when the `callback` is called. + * @return for use with {@link clearImmediate} + */ + function setImmediate( + callback: (...args: TArgs) => void, + ...args: TArgs + ): NodeJS.Immediate; + // util.promisify no rest args compability + // eslint-disable-next-line @typescript-eslint/no-invalid-void-type + function setImmediate(callback: (args: void) => void): NodeJS.Immediate; + namespace setImmediate { + const __promisify__: typeof setImmediatePromise; + } + /** + * Cancels an `Immediate` object created by `setImmediate()`. + * @since v0.9.1 + * @param immediate An `Immediate` object as returned by {@link setImmediate}. + */ + function clearImmediate(immediateId: NodeJS.Immediate | undefined): void; + function queueMicrotask(callback: () => void): void; + } +} +declare module "node:timers" { + export * from "timers"; +} diff --git a/dist/node_modules/@types/node/ts4.8/timers/promises.d.ts b/dist/node_modules/@types/node/ts4.8/timers/promises.d.ts new file mode 100644 index 0000000..5a54dc7 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/timers/promises.d.ts @@ -0,0 +1,93 @@ +/** + * The `timers/promises` API provides an alternative set of timer functions + * that return `Promise` objects. The API is accessible via`require('node:timers/promises')`. + * + * ```js + * import { + * setTimeout, + * setImmediate, + * setInterval, + * } from 'timers/promises'; + * ``` + * @since v15.0.0 + */ +declare module "timers/promises" { + import { TimerOptions } from "node:timers"; + /** + * ```js + * import { + * setTimeout, + * } from 'timers/promises'; + * + * const res = await setTimeout(100, 'result'); + * + * console.log(res); // Prints 'result' + * ``` + * @since v15.0.0 + * @param [delay=1] The number of milliseconds to wait before fulfilling the promise. + * @param value A value with which the promise is fulfilled. 
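+ *
+ * Because `TimerOptions` extends `Abortable`, the pending timeout can be
+ * cancelled with an `AbortSignal` (a minimal sketch):
+ *
+ * ```js
+ * import { setTimeout } from 'timers/promises';
+ *
+ * const ac = new AbortController();
+ * const pending = setTimeout(5000, 'too late', { signal: ac.signal });
+ *
+ * ac.abort();
+ * await pending.catch((err) => console.log(err.name)); // Prints 'AbortError'
+ * ```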
+ */ + function setTimeout(delay?: number, value?: T, options?: TimerOptions): Promise; + /** + * ```js + * import { + * setImmediate, + * } from 'timers/promises'; + * + * const res = await setImmediate('result'); + * + * console.log(res); // Prints 'result' + * ``` + * @since v15.0.0 + * @param value A value with which the promise is fulfilled. + */ + function setImmediate(value?: T, options?: TimerOptions): Promise; + /** + * Returns an async iterator that generates values in an interval of `delay` ms. + * If `ref` is `true`, you need to call `next()` of async iterator explicitly + * or implicitly to keep the event loop alive. + * + * ```js + * import { + * setInterval, + * } from 'timers/promises'; + * + * const interval = 100; + * for await (const startTime of setInterval(interval, Date.now())) { + * const now = Date.now(); + * console.log(now); + * if ((now - startTime) > 1000) + * break; + * } + * console.log(Date.now()); + * ``` + * @since v15.9.0 + */ + function setInterval(delay?: number, value?: T, options?: TimerOptions): AsyncIterable; + interface Scheduler { + /** + * ```js + * import { scheduler } from 'node:timers/promises'; + * + * await scheduler.wait(1000); // Wait one second before continuing + * ``` + * An experimental API defined by the Scheduling APIs draft specification being developed as a standard Web Platform API. + * Calling timersPromises.scheduler.wait(delay, options) is roughly equivalent to calling timersPromises.setTimeout(delay, undefined, options) except that the ref option is not supported. + * @since v16.14.0 + * @experimental + * @param [delay=1] The number of milliseconds to wait before fulfilling the promise. + */ + wait: (delay?: number, options?: TimerOptions) => Promise; + /** + * An experimental API defined by the Scheduling APIs draft specification being developed as a standard Web Platform API. + * Calling timersPromises.scheduler.yield() is equivalent to calling timersPromises.setImmediate() with no arguments. + * @since v16.14.0 + * @experimental + */ + yield: () => Promise; + } + const scheduler: Scheduler; +} +declare module "node:timers/promises" { + export * from "timers/promises"; +} diff --git a/dist/node_modules/@types/node/ts4.8/tls.d.ts b/dist/node_modules/@types/node/ts4.8/tls.d.ts new file mode 100644 index 0000000..9f6d6be --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/tls.d.ts @@ -0,0 +1,1210 @@ +/** + * The `node:tls` module provides an implementation of the Transport Layer Security + * (TLS) and Secure Socket Layer (SSL) protocols that is built on top of OpenSSL. + * The module can be accessed using: + * + * ```js + * const tls = require('node:tls'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/tls.js) + */ +declare module "tls" { + import { X509Certificate } from "node:crypto"; + import * as net from "node:net"; + import * as stream from "stream"; + const CLIENT_RENEG_LIMIT: number; + const CLIENT_RENEG_WINDOW: number; + interface Certificate { + /** + * Country code. + */ + C: string; + /** + * Street. + */ + ST: string; + /** + * Locality. + */ + L: string; + /** + * Organization. + */ + O: string; + /** + * Organizational unit. + */ + OU: string; + /** + * Common name. + */ + CN: string; + } + interface PeerCertificate { + /** + * `true` if a Certificate Authority (CA), `false` otherwise. + * @since v18.13.0 + */ + ca: boolean; + /** + * The DER encoded X.509 certificate data. + */ + raw: Buffer; + /** + * The certificate subject. 
+ */ + subject: Certificate; + /** + * The certificate issuer, described in the same terms as the `subject`. + */ + issuer: Certificate; + /** + * The date-time the certificate is valid from. + */ + valid_from: string; + /** + * The date-time the certificate is valid to. + */ + valid_to: string; + /** + * The certificate serial number, as a hex string. + */ + serialNumber: string; + /** + * The SHA-1 digest of the DER encoded certificate. + * It is returned as a `:` separated hexadecimal string. + */ + fingerprint: string; + /** + * The SHA-256 digest of the DER encoded certificate. + * It is returned as a `:` separated hexadecimal string. + */ + fingerprint256: string; + /** + * The SHA-512 digest of the DER encoded certificate. + * It is returned as a `:` separated hexadecimal string. + */ + fingerprint512: string; + /** + * The extended key usage, a set of OIDs. + */ + ext_key_usage?: string[]; + /** + * A string containing concatenated names for the subject, + * an alternative to the `subject` names. + */ + subjectaltname?: string; + /** + * An array describing the AuthorityInfoAccess, used with OCSP. + */ + infoAccess?: NodeJS.Dict; + /** + * For RSA keys: The RSA bit size. + * + * For EC keys: The key size in bits. + */ + bits?: number; + /** + * The RSA exponent, as a string in hexadecimal number notation. + */ + exponent?: string; + /** + * The RSA modulus, as a hexadecimal string. + */ + modulus?: string; + /** + * The public key. + */ + pubkey?: Buffer; + /** + * The ASN.1 name of the OID of the elliptic curve. + * Well-known curves are identified by an OID. + * While it is unusual, it is possible that the curve + * is identified by its mathematical properties, + * in which case it will not have an OID. + */ + asn1Curve?: string; + /** + * The NIST name for the elliptic curve,if it has one + * (not all well-known curves have been assigned names by NIST). + */ + nistCurve?: string; + } + interface DetailedPeerCertificate extends PeerCertificate { + /** + * The issuer certificate object. + * For self-signed certificates, this may be a circular reference. + */ + issuerCertificate: DetailedPeerCertificate; + } + interface CipherNameAndProtocol { + /** + * The cipher name. + */ + name: string; + /** + * SSL/TLS protocol version. + */ + version: string; + /** + * IETF name for the cipher suite. + */ + standardName: string; + } + interface EphemeralKeyInfo { + /** + * The supported types are 'DH' and 'ECDH'. + */ + type: string; + /** + * The name property is available only when type is 'ECDH'. + */ + name?: string | undefined; + /** + * The size of parameter of an ephemeral key exchange. + */ + size: number; + } + interface KeyObject { + /** + * Private keys in PEM format. + */ + pem: string | Buffer; + /** + * Optional passphrase. + */ + passphrase?: string | undefined; + } + interface PxfObject { + /** + * PFX or PKCS12 encoded private key and certificate chain. + */ + buf: string | Buffer; + /** + * Optional passphrase. + */ + passphrase?: string | undefined; + } + interface TLSSocketOptions extends SecureContextOptions, CommonConnectionOptions { + /** + * If true the TLS socket will be instantiated in server-mode. + * Defaults to false. + */ + isServer?: boolean | undefined; + /** + * An optional net.Server instance. + */ + server?: net.Server | undefined; + /** + * An optional Buffer instance containing a TLS session. 
+ */ + session?: Buffer | undefined; + /** + * If true, specifies that the OCSP status request extension will be + * added to the client hello and an 'OCSPResponse' event will be + * emitted on the socket before establishing a secure communication + */ + requestOCSP?: boolean | undefined; + } + /** + * Performs transparent encryption of written data and all required TLS + * negotiation. + * + * Instances of `tls.TLSSocket` implement the duplex `Stream` interface. + * + * Methods that return TLS connection metadata (e.g.{@link TLSSocket.getPeerCertificate}) will only return data while the + * connection is open. + * @since v0.11.4 + */ + class TLSSocket extends net.Socket { + /** + * Construct a new tls.TLSSocket object from an existing TCP socket. + */ + constructor(socket: net.Socket | stream.Duplex, options?: TLSSocketOptions); + /** + * This property is `true` if the peer certificate was signed by one of the CAs + * specified when creating the `tls.TLSSocket` instance, otherwise `false`. + * @since v0.11.4 + */ + authorized: boolean; + /** + * Returns the reason why the peer's certificate was not been verified. This + * property is set only when `tlsSocket.authorized === false`. + * @since v0.11.4 + */ + authorizationError: Error; + /** + * Always returns `true`. This may be used to distinguish TLS sockets from regular`net.Socket` instances. + * @since v0.11.4 + */ + encrypted: true; + /** + * String containing the selected ALPN protocol. + * Before a handshake has completed, this value is always null. + * When a handshake is completed but not ALPN protocol was selected, tlsSocket.alpnProtocol equals false. + */ + alpnProtocol: string | false | null; + /** + * Returns an object representing the local certificate. The returned object has + * some properties corresponding to the fields of the certificate. + * + * See {@link TLSSocket.getPeerCertificate} for an example of the certificate + * structure. + * + * If there is no local certificate, an empty object will be returned. If the + * socket has been destroyed, `null` will be returned. + * @since v11.2.0 + */ + getCertificate(): PeerCertificate | object | null; + /** + * Returns an object containing information on the negotiated cipher suite. + * + * For example, a TLSv1.2 protocol with AES256-SHA cipher: + * + * ```json + * { + * "name": "AES256-SHA", + * "standardName": "TLS_RSA_WITH_AES_256_CBC_SHA", + * "version": "SSLv3" + * } + * ``` + * + * See [SSL\_CIPHER\_get\_name](https://www.openssl.org/docs/man1.1.1/man3/SSL_CIPHER_get_name.html) for more information. + * @since v0.11.4 + */ + getCipher(): CipherNameAndProtocol; + /** + * Returns an object representing the type, name, and size of parameter of + * an ephemeral key exchange in `perfect forward secrecy` on a client + * connection. It returns an empty object when the key exchange is not + * ephemeral. As this is only supported on a client socket; `null` is returned + * if called on a server socket. The supported types are `'DH'` and `'ECDH'`. The`name` property is available only when type is `'ECDH'`. + * + * For example: `{ type: 'ECDH', name: 'prime256v1', size: 256 }`. + * @since v5.0.0 + */ + getEphemeralKeyInfo(): EphemeralKeyInfo | object | null; + /** + * As the `Finished` messages are message digests of the complete handshake + * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can + * be used for external authentication procedures when the authentication + * provided by SSL/TLS is not desired or is not enough. 
+ * + * Corresponds to the `SSL_get_finished` routine in OpenSSL and may be used + * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929). + * @since v9.9.0 + * @return The latest `Finished` message that has been sent to the socket as part of a SSL/TLS handshake, or `undefined` if no `Finished` message has been sent yet. + */ + getFinished(): Buffer | undefined; + /** + * Returns an object representing the peer's certificate. If the peer does not + * provide a certificate, an empty object will be returned. If the socket has been + * destroyed, `null` will be returned. + * + * If the full certificate chain was requested, each certificate will include an`issuerCertificate` property containing an object representing its issuer's + * certificate. + * @since v0.11.4 + * @param detailed Include the full certificate chain if `true`, otherwise include just the peer's certificate. + * @return A certificate object. + */ + getPeerCertificate(detailed: true): DetailedPeerCertificate; + getPeerCertificate(detailed?: false): PeerCertificate; + getPeerCertificate(detailed?: boolean): PeerCertificate | DetailedPeerCertificate; + /** + * As the `Finished` messages are message digests of the complete handshake + * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can + * be used for external authentication procedures when the authentication + * provided by SSL/TLS is not desired or is not enough. + * + * Corresponds to the `SSL_get_peer_finished` routine in OpenSSL and may be used + * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929). + * @since v9.9.0 + * @return The latest `Finished` message that is expected or has actually been received from the socket as part of a SSL/TLS handshake, or `undefined` if there is no `Finished` message so + * far. + */ + getPeerFinished(): Buffer | undefined; + /** + * Returns a string containing the negotiated SSL/TLS protocol version of the + * current connection. The value `'unknown'` will be returned for connected + * sockets that have not completed the handshaking process. The value `null` will + * be returned for server sockets or disconnected client sockets. + * + * Protocol versions are: + * + * * `'SSLv3'` + * * `'TLSv1'` + * * `'TLSv1.1'` + * * `'TLSv1.2'` + * * `'TLSv1.3'` + * + * See the OpenSSL [`SSL_get_version`](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_version.html) documentation for more information. + * @since v5.7.0 + */ + getProtocol(): string | null; + /** + * Returns the TLS session data or `undefined` if no session was + * negotiated. On the client, the data can be provided to the `session` option of {@link connect} to resume the connection. On the server, it may be useful + * for debugging. + * + * See `Session Resumption` for more information. + * + * Note: `getSession()` works only for TLSv1.2 and below. For TLSv1.3, applications + * must use the `'session'` event (it also works for TLSv1.2 and below). + * @since v0.11.4 + */ + getSession(): Buffer | undefined; + /** + * See [SSL\_get\_shared\_sigalgs](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_shared_sigalgs.html) for more information. + * @since v12.11.0 + * @return List of signature algorithms shared between the server and the client in the order of decreasing preference. + */ + getSharedSigalgs(): string[]; + /** + * For a client, returns the TLS session ticket if one is available, or`undefined`. For a server, always returns `undefined`. 
+ * + * It may be useful for debugging. + * + * See `Session Resumption` for more information. + * @since v0.11.4 + */ + getTLSTicket(): Buffer | undefined; + /** + * See `Session Resumption` for more information. + * @since v0.5.6 + * @return `true` if the session was reused, `false` otherwise. + */ + isSessionReused(): boolean; + /** + * The `tlsSocket.renegotiate()` method initiates a TLS renegotiation process. + * Upon completion, the `callback` function will be passed a single argument + * that is either an `Error` (if the request failed) or `null`. + * + * This method can be used to request a peer's certificate after the secure + * connection has been established. + * + * When running as the server, the socket will be destroyed with an error after`handshakeTimeout` timeout. + * + * For TLSv1.3, renegotiation cannot be initiated, it is not supported by the + * protocol. + * @since v0.11.8 + * @param callback If `renegotiate()` returned `true`, callback is attached once to the `'secure'` event. If `renegotiate()` returned `false`, `callback` will be called in the next tick with + * an error, unless the `tlsSocket` has been destroyed, in which case `callback` will not be called at all. + * @return `true` if renegotiation was initiated, `false` otherwise. + */ + renegotiate( + options: { + rejectUnauthorized?: boolean | undefined; + requestCert?: boolean | undefined; + }, + callback: (err: Error | null) => void, + ): undefined | boolean; + /** + * The `tlsSocket.setMaxSendFragment()` method sets the maximum TLS fragment size. + * Returns `true` if setting the limit succeeded; `false` otherwise. + * + * Smaller fragment sizes decrease the buffering latency on the client: larger + * fragments are buffered by the TLS layer until the entire fragment is received + * and its integrity is verified; large fragments can span multiple roundtrips + * and their processing can be delayed due to packet loss or reordering. However, + * smaller fragments add extra TLS framing bytes and CPU overhead, which may + * decrease overall server throughput. + * @since v0.11.11 + * @param [size=16384] The maximum TLS fragment size. The maximum value is `16384`. + */ + setMaxSendFragment(size: number): boolean; + /** + * Disables TLS renegotiation for this `TLSSocket` instance. Once called, attempts + * to renegotiate will trigger an `'error'` event on the `TLSSocket`. + * @since v8.4.0 + */ + disableRenegotiation(): void; + /** + * When enabled, TLS packet trace information is written to `stderr`. This can be + * used to debug TLS connection problems. + * + * The format of the output is identical to the output of`openssl s_client -trace` or `openssl s_server -trace`. While it is produced by + * OpenSSL's `SSL_trace()` function, the format is undocumented, can change + * without notice, and should not be relied on. + * @since v12.2.0 + */ + enableTrace(): void; + /** + * Returns the peer certificate as an `X509Certificate` object. + * + * If there is no peer certificate, or the socket has been destroyed,`undefined` will be returned. + * @since v15.9.0 + */ + getPeerX509Certificate(): X509Certificate | undefined; + /** + * Returns the local certificate as an `X509Certificate` object. + * + * If there is no local certificate, or the socket has been destroyed,`undefined` will be returned. 
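+         *
+         * A brief sketch of inspecting the local certificate, if one is present
+         * (the `tlsSocket` name is illustrative):
+         *
+         * ```js
+         * const cert = tlsSocket.getX509Certificate();
+         * if (cert) console.log(cert.subject, cert.validTo);
+         * ```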
+ * @since v15.9.0 + */ + getX509Certificate(): X509Certificate | undefined; + /** + * Keying material is used for validations to prevent different kind of attacks in + * network protocols, for example in the specifications of IEEE 802.1X. + * + * Example + * + * ```js + * const keyingMaterial = tlsSocket.exportKeyingMaterial( + * 128, + * 'client finished'); + * + * /* + * Example return value of keyingMaterial: + * + * + * ``` + * + * See the OpenSSL [`SSL_export_keying_material`](https://www.openssl.org/docs/man1.1.1/man3/SSL_export_keying_material.html) documentation for more + * information. + * @since v13.10.0, v12.17.0 + * @param length number of bytes to retrieve from keying material + * @param label an application specific label, typically this will be a value from the [IANA Exporter Label + * Registry](https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#exporter-labels). + * @param context Optionally provide a context. + * @return requested bytes of the keying material + */ + exportKeyingMaterial(length: number, label: string, context: Buffer): Buffer; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; + addListener(event: "secureConnect", listener: () => void): this; + addListener(event: "session", listener: (session: Buffer) => void): this; + addListener(event: "keylog", listener: (line: Buffer) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "OCSPResponse", response: Buffer): boolean; + emit(event: "secureConnect"): boolean; + emit(event: "session", session: Buffer): boolean; + emit(event: "keylog", line: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "OCSPResponse", listener: (response: Buffer) => void): this; + on(event: "secureConnect", listener: () => void): this; + on(event: "session", listener: (session: Buffer) => void): this; + on(event: "keylog", listener: (line: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "OCSPResponse", listener: (response: Buffer) => void): this; + once(event: "secureConnect", listener: () => void): this; + once(event: "session", listener: (session: Buffer) => void): this; + once(event: "keylog", listener: (line: Buffer) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; + prependListener(event: "secureConnect", listener: () => void): this; + prependListener(event: "session", listener: (session: Buffer) => void): this; + prependListener(event: "keylog", listener: (line: Buffer) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; + prependOnceListener(event: "secureConnect", listener: () => void): this; + prependOnceListener(event: "session", listener: (session: Buffer) => void): this; + prependOnceListener(event: "keylog", listener: (line: Buffer) => void): this; + } + interface CommonConnectionOptions { + /** + * An optional TLS context object from tls.createSecureContext() + */ + secureContext?: SecureContext | undefined; + /** + * When enabled, TLS packet trace information is written to `stderr`. This can be + * used to debug TLS connection problems. 
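+         *
+         * For example, a client might turn tracing on up front (host and port are
+         * placeholders, and `tls` is assumed to be required):
+         *
+         * ```js
+         * tls.connect({ host: 'example.org', port: 443, enableTrace: true });
+         * ```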
+ * @default false + */ + enableTrace?: boolean | undefined; + /** + * If true the server will request a certificate from clients that + * connect and attempt to verify that certificate. Defaults to + * false. + */ + requestCert?: boolean | undefined; + /** + * An array of strings or a Buffer naming possible ALPN protocols. + * (Protocols should be ordered by their priority.) + */ + ALPNProtocols?: string[] | Uint8Array[] | Uint8Array | undefined; + /** + * SNICallback(servername, cb) A function that will be + * called if the client supports SNI TLS extension. Two arguments + * will be passed when called: servername and cb. SNICallback should + * invoke cb(null, ctx), where ctx is a SecureContext instance. + * (tls.createSecureContext(...) can be used to get a proper + * SecureContext.) If SNICallback wasn't provided the default callback + * with high-level API will be used (see below). + */ + SNICallback?: ((servername: string, cb: (err: Error | null, ctx?: SecureContext) => void) => void) | undefined; + /** + * If true the server will reject any connection which is not + * authorized with the list of supplied CAs. This option only has an + * effect if requestCert is true. + * @default true + */ + rejectUnauthorized?: boolean | undefined; + } + interface TlsOptions extends SecureContextOptions, CommonConnectionOptions, net.ServerOpts { + /** + * Abort the connection if the SSL/TLS handshake does not finish in the + * specified number of milliseconds. A 'tlsClientError' is emitted on + * the tls.Server object whenever a handshake times out. Default: + * 120000 (120 seconds). + */ + handshakeTimeout?: number | undefined; + /** + * The number of seconds after which a TLS session created by the + * server will no longer be resumable. See Session Resumption for more + * information. Default: 300. + */ + sessionTimeout?: number | undefined; + /** + * 48-bytes of cryptographically strong pseudo-random data. + */ + ticketKeys?: Buffer | undefined; + /** + * @param socket + * @param identity identity parameter sent from the client. + * @return pre-shared key that must either be + * a buffer or `null` to stop the negotiation process. Returned PSK must be + * compatible with the selected cipher's digest. + * + * When negotiating TLS-PSK (pre-shared keys), this function is called + * with the identity provided by the client. + * If the return value is `null` the negotiation process will stop and an + * "unknown_psk_identity" alert message will be sent to the other party. + * If the server wishes to hide the fact that the PSK identity was not known, + * the callback must provide some random data as `psk` to make the connection + * fail with "decrypt_error" before negotiation is finished. + * PSK ciphers are disabled by default, and using TLS-PSK thus + * requires explicitly specifying a cipher suite with the `ciphers` option. + * More information can be found in the RFC 4279. + */ + pskCallback?(socket: TLSSocket, identity: string): DataView | NodeJS.TypedArray | null; + /** + * hint to send to a client to help + * with selecting the identity during TLS-PSK negotiation. Will be ignored + * in TLS 1.3. Upon failing to set pskIdentityHint `tlsClientError` will be + * emitted with `ERR_TLS_PSK_SET_IDENTIY_HINT_FAILED` code. 
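+         *
+         * A rough server-side sketch combining `pskCallback` and `pskIdentityHint`
+         * (the cipher, hint, identity, and key bytes are illustrative, and `tls` is
+         * assumed to be required):
+         *
+         * ```js
+         * const server = tls.createServer({
+         *   ciphers: 'PSK-AES128-GCM-SHA256',
+         *   pskIdentityHint: 'example-hint',
+         *   pskCallback(socket, identity) {
+         *     // Look the identity up in an application-managed key table.
+         *     return identity === 'client1' ? Buffer.from('736563726574', 'hex') : null;
+         *   },
+         * });
+         * ```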
+ */ + pskIdentityHint?: string | undefined; + } + interface PSKCallbackNegotation { + psk: DataView | NodeJS.TypedArray; + identity: string; + } + interface ConnectionOptions extends SecureContextOptions, CommonConnectionOptions { + host?: string | undefined; + port?: number | undefined; + path?: string | undefined; // Creates unix socket connection to path. If this option is specified, `host` and `port` are ignored. + socket?: stream.Duplex | undefined; // Establish secure connection on a given socket rather than creating a new socket + checkServerIdentity?: typeof checkServerIdentity | undefined; + servername?: string | undefined; // SNI TLS Extension + session?: Buffer | undefined; + minDHSize?: number | undefined; + lookup?: net.LookupFunction | undefined; + timeout?: number | undefined; + /** + * When negotiating TLS-PSK (pre-shared keys), this function is called + * with optional identity `hint` provided by the server or `null` + * in case of TLS 1.3 where `hint` was removed. + * It will be necessary to provide a custom `tls.checkServerIdentity()` + * for the connection as the default one will try to check hostname/IP + * of the server against the certificate but that's not applicable for PSK + * because there won't be a certificate present. + * More information can be found in the RFC 4279. + * + * @param hint message sent from the server to help client + * decide which identity to use during negotiation. + * Always `null` if TLS 1.3 is used. + * @returns Return `null` to stop the negotiation process. `psk` must be + * compatible with the selected cipher's digest. + * `identity` must use UTF-8 encoding. + */ + pskCallback?(hint: string | null): PSKCallbackNegotation | null; + } + /** + * Accepts encrypted connections using TLS or SSL. + * @since v0.3.2 + */ + class Server extends net.Server { + constructor(secureConnectionListener?: (socket: TLSSocket) => void); + constructor(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void); + /** + * The `server.addContext()` method adds a secure context that will be used if + * the client request's SNI name matches the supplied `hostname` (or wildcard). + * + * When there are multiple matching contexts, the most recently added one is + * used. + * @since v0.5.3 + * @param hostname A SNI host name or wildcard (e.g. `'*'`) + * @param context An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc), or a TLS context object created + * with {@link createSecureContext} itself. + */ + addContext(hostname: string, context: SecureContextOptions): void; + /** + * Returns the session ticket keys. + * + * See `Session Resumption` for more information. + * @since v3.0.0 + * @return A 48-byte buffer containing the session ticket keys. + */ + getTicketKeys(): Buffer; + /** + * The `server.setSecureContext()` method replaces the secure context of an + * existing server. Existing connections to the server are not interrupted. + * @since v11.0.0 + * @param options An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc). + */ + setSecureContext(options: SecureContextOptions): void; + /** + * Sets the session ticket keys. + * + * Changes to the ticket keys are effective only for future server connections. + * Existing or currently pending server connections will use the previous keys. + * + * See `Session Resumption` for more information. 
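+         *
+         * For example, several server instances might share one freshly generated key
+         * set so sessions can resume across them (a sketch; `serverA` and `serverB` are
+         * assumed to exist):
+         *
+         * ```js
+         * const keys = require('node:crypto').randomBytes(48);
+         * serverA.setTicketKeys(keys);
+         * serverB.setTicketKeys(keys);
+         * ```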
+ * @since v3.0.0 + * @param keys A 48-byte buffer containing the session ticket keys. + */ + setTicketKeys(keys: Buffer): void; + /** + * events.EventEmitter + * 1. tlsClientError + * 2. newSession + * 3. OCSPRequest + * 4. resumeSession + * 5. secureConnection + * 6. keylog + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + addListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + addListener( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + addListener( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + addListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + addListener(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "tlsClientError", err: Error, tlsSocket: TLSSocket): boolean; + emit(event: "newSession", sessionId: Buffer, sessionData: Buffer, callback: () => void): boolean; + emit( + event: "OCSPRequest", + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ): boolean; + emit( + event: "resumeSession", + sessionId: Buffer, + callback: (err: Error | null, sessionData: Buffer | null) => void, + ): boolean; + emit(event: "secureConnection", tlsSocket: TLSSocket): boolean; + emit(event: "keylog", line: Buffer, tlsSocket: TLSSocket): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + on(event: "newSession", listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + on( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + on( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + on(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + on(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + once( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void, + ): this; + once( + event: "OCSPRequest", + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + once( + event: "resumeSession", + listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void, + ): this; + once(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; + once(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; + prependListener( + event: "newSession", + listener: (sessionId: Buffer, sessionData: Buffer, 
            callback: () => void) => void,
+        ): this;
+        prependListener(
+            event: "OCSPRequest",
+            listener: (
+                certificate: Buffer,
+                issuer: Buffer,
+                callback: (err: Error | null, resp: Buffer) => void,
+            ) => void,
+        ): this;
+        prependListener(
+            event: "resumeSession",
+            listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void,
+        ): this;
+        prependListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this;
+        prependListener(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this;
+        prependOnceListener(event: string, listener: (...args: any[]) => void): this;
+        prependOnceListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this;
+        prependOnceListener(
+            event: "newSession",
+            listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void,
+        ): this;
+        prependOnceListener(
+            event: "OCSPRequest",
+            listener: (
+                certificate: Buffer,
+                issuer: Buffer,
+                callback: (err: Error | null, resp: Buffer) => void,
+            ) => void,
+        ): this;
+        prependOnceListener(
+            event: "resumeSession",
+            listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void,
+        ): this;
+        prependOnceListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this;
+        prependOnceListener(event: "keylog", listener: (line: Buffer, tlsSocket: TLSSocket) => void): this;
+    }
+    /**
+     * @deprecated since v0.11.3 Use `tls.TLSSocket` instead.
+     */
+    interface SecurePair {
+        encrypted: TLSSocket;
+        cleartext: TLSSocket;
+    }
+    type SecureVersion = "TLSv1.3" | "TLSv1.2" | "TLSv1.1" | "TLSv1";
+    interface SecureContextOptions {
+        /**
+         * If set, this will be called when a client opens a connection using the ALPN extension.
+         * One argument will be passed to the callback: an object containing `servername` and `protocols` fields,
+         * respectively containing the server name from the SNI extension (if any) and an array of
+         * ALPN protocol name strings. The callback must return either one of the strings listed in `protocols`,
+         * which will be returned to the client as the selected ALPN protocol, or `undefined`,
+         * to reject the connection with a fatal alert. If a string is returned that does not match one of
+         * the client's ALPN protocols, an error will be thrown.
+         * This option cannot be used with the `ALPNProtocols` option, and setting both options will throw an error.
+         */
+        ALPNCallback?: ((arg: { servername: string; protocols: string[] }) => string | undefined) | undefined;
+        /**
+         * Optionally override the trusted CA certificates. Default is to trust
+         * the well-known CAs curated by Mozilla. Mozilla's CAs are completely
+         * replaced when CAs are explicitly specified using this option.
+         */
+        ca?: string | Buffer | Array<string | Buffer> | undefined;
+        /**
+         * Cert chains in PEM format. One cert chain should be provided per
+         * private key. Each cert chain should consist of the PEM formatted
+         * certificate for a provided private key, followed by the PEM
+         * formatted intermediate certificates (if any), in order, and not
+         * including the root CA (the root CA must be pre-known to the peer,
+         * see ca). When providing multiple cert chains, they do not have to
+         * be in the same order as their private keys in key. If the
+         * intermediate certificates are not provided, the peer will not be
+         * able to validate the certificate, and the handshake will fail.
+         */
+        cert?: string | Buffer | Array<string | Buffer> | undefined;
+        /**
+         * Colon-separated list of supported signature algorithms. The list
+         * can contain digest algorithms (SHA256, MD5 etc.), public key
+         * algorithms (RSA-PSS, ECDSA etc.), combination of both (e.g.
+         * 'RSA+SHA384') or TLS v1.3 scheme names (e.g. rsa_pss_pss_sha512).
+         */
+        sigalgs?: string | undefined;
+        /**
+         * Cipher suite specification, replacing the default. For more
+         * information, see modifying the default cipher suite. Permitted
+         * ciphers can be obtained via tls.getCiphers(). Cipher names must be
+         * uppercased in order for OpenSSL to accept them.
+         */
+        ciphers?: string | undefined;
+        /**
+         * Name of an OpenSSL engine which can provide the client certificate.
+         */
+        clientCertEngine?: string | undefined;
+        /**
+         * PEM formatted CRLs (Certificate Revocation Lists).
+         */
+        crl?: string | Buffer | Array<string | Buffer> | undefined;
+        /**
+         * `'auto'` or custom Diffie-Hellman parameters, required for non-ECDHE perfect forward secrecy.
+         * If omitted or invalid, the parameters are silently discarded and DHE ciphers will not be available.
+         * ECDHE-based perfect forward secrecy will still be available.
+         */
+        dhparam?: string | Buffer | undefined;
+        /**
+         * A string describing a named curve or a colon separated list of curve
+         * NIDs or names, for example P-521:P-384:P-256, to use for ECDH key
+         * agreement. Set to auto to select the curve automatically. Use
+         * crypto.getCurves() to obtain a list of available curve names. On
+         * recent releases, openssl ecparam -list_curves will also display the
+         * name and description of each available elliptic curve. Default:
+         * tls.DEFAULT_ECDH_CURVE.
+         */
+        ecdhCurve?: string | undefined;
+        /**
+         * Attempt to use the server's cipher suite preferences instead of the
+         * client's. When true, causes SSL_OP_CIPHER_SERVER_PREFERENCE to be
+         * set in secureOptions.
+         */
+        honorCipherOrder?: boolean | undefined;
+        /**
+         * Private keys in PEM format. PEM allows the option of private keys
+         * being encrypted. Encrypted keys will be decrypted with
+         * options.passphrase. Multiple keys using different algorithms can be
+         * provided either as an array of unencrypted key strings or buffers,
+         * or an array of objects in the form {pem: <string|buffer>[,
+         * passphrase: <string>]}. The object form can only occur in an array.
+         * object.passphrase is optional. Encrypted keys will be decrypted with
+         * object.passphrase if provided, or options.passphrase if it is not.
+         */
+        key?: string | Buffer | Array<Buffer | KeyObject> | undefined;
+        /**
+         * Name of an OpenSSL engine to get private key from. Should be used
+         * together with privateKeyIdentifier.
+         */
+        privateKeyEngine?: string | undefined;
+        /**
+         * Identifier of a private key managed by an OpenSSL engine. Should be
+         * used together with privateKeyEngine. Should not be set together with
+         * key, because both options define a private key in different ways.
+         */
+        privateKeyIdentifier?: string | undefined;
+        /**
+         * Optionally set the maximum TLS version to allow. One
+         * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the
+         * `secureProtocol` option, use one or the other.
+         * **Default:** `'TLSv1.3'`, unless changed using CLI options. Using
+         * `--tls-max-v1.2` sets the default to `'TLSv1.2'`. Using `--tls-max-v1.3` sets the default to
+         * `'TLSv1.3'`. If multiple of the options are provided, the highest maximum is used.
+         */
+        maxVersion?: SecureVersion | undefined;
+        /**
+         * Optionally set the minimum TLS version to allow. One
+         * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the
+         * `secureProtocol` option, use one or the other. It is not recommended to use
+         * less than TLSv1.2, but it may be required for interoperability.
+         * **Default:** `'TLSv1.2'`, unless changed using CLI options. Using
+         * `--tls-min-v1.0` sets the default to `'TLSv1'`. Using `--tls-min-v1.1` sets the default to
+         * `'TLSv1.1'`. Using `--tls-min-v1.3` sets the default to
+         * 'TLSv1.3'. If multiple of the options are provided, the lowest minimum is used.
+         */
+        minVersion?: SecureVersion | undefined;
+        /**
+         * Shared passphrase used for a single private key and/or a PFX.
+         */
+        passphrase?: string | undefined;
+        /**
+         * PFX or PKCS12 encoded private key and certificate chain. pfx is an
+         * alternative to providing key and cert individually. PFX is usually
+         * encrypted, if it is, passphrase will be used to decrypt it. Multiple
+         * PFX can be provided either as an array of unencrypted PFX buffers,
+         * or an array of objects in the form {buf: <string|buffer>[,
+         * passphrase: <string>]}. The object form can only occur in an array.
+         * object.passphrase is optional. Encrypted PFX will be decrypted with
+         * object.passphrase if provided, or options.passphrase if it is not.
+         */
+        pfx?: string | Buffer | Array<string | Buffer | PxfObject> | undefined;
+        /**
+         * Optionally affect the OpenSSL protocol behavior, which is not
+         * usually necessary. This should be used carefully if at all! Value is
+         * a numeric bitmask of the SSL_OP_* options from OpenSSL Options
+         */
+        secureOptions?: number | undefined; // Value is a numeric bitmask of the `SSL_OP_*` options
+        /**
+         * Legacy mechanism to select the TLS protocol version to use, it does
+         * not support independent control of the minimum and maximum version,
+         * and does not support limiting the protocol to TLSv1.3. Use
+         * minVersion and maxVersion instead. The possible values are listed as
+         * SSL_METHODS, use the function names as strings. For example, use
+         * 'TLSv1_1_method' to force TLS version 1.1, or 'TLS_method' to allow
+         * any TLS protocol version up to TLSv1.3. It is not recommended to use
+         * TLS versions less than 1.2, but it may be required for
+         * interoperability. Default: none, see minVersion.
+         */
+        secureProtocol?: string | undefined;
+        /**
+         * Opaque identifier used by servers to ensure session state is not
+         * shared between applications. Unused by clients.
+         */
+        sessionIdContext?: string | undefined;
+        /**
+         * 48-bytes of cryptographically strong pseudo-random data.
+         * See Session Resumption for more information.
+         */
+        ticketKeys?: Buffer | undefined;
+        /**
+         * The number of seconds after which a TLS session created by the
+         * server will no longer be resumable. See Session Resumption for more
+         * information. Default: 300.
+         */
+        sessionTimeout?: number | undefined;
+    }
+    interface SecureContext {
+        context: any;
+    }
+    /**
+     * Verifies the certificate `cert` is issued to `hostname`.
+     *
+     * Returns an [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error) object, populating it with `reason`, `host`, and `cert` on
+     * failure. On success, returns [undefined](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Undefined_type).
+     *
+     * This function is intended to be used in combination with the `checkServerIdentity` option that can be passed to {@link connect} and as
+     * such operates on a `certificate object`. For other purposes, consider using `x509.checkHost()` instead.
+ * + * This function can be overwritten by providing an alternative function as the`options.checkServerIdentity` option that is passed to `tls.connect()`. The + * overwriting function can call `tls.checkServerIdentity()` of course, to augment + * the checks done with additional verification. + * + * This function is only called if the certificate passed all other checks, such as + * being issued by trusted CA (`options.ca`). + * + * Earlier versions of Node.js incorrectly accepted certificates for a given`hostname` if a matching `uniformResourceIdentifier` subject alternative name + * was present (see [CVE-2021-44531](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44531)). Applications that wish to accept`uniformResourceIdentifier` subject alternative names can use + * a custom`options.checkServerIdentity` function that implements the desired behavior. + * @since v0.8.4 + * @param hostname The host name or IP address to verify the certificate against. + * @param cert A `certificate object` representing the peer's certificate. + */ + function checkServerIdentity(hostname: string, cert: PeerCertificate): Error | undefined; + /** + * Creates a new {@link Server}. The `secureConnectionListener`, if provided, is + * automatically set as a listener for the `'secureConnection'` event. + * + * The `ticketKeys` options is automatically shared between `node:cluster` module + * workers. + * + * The following illustrates a simple echo server: + * + * ```js + * const tls = require('node:tls'); + * const fs = require('node:fs'); + * + * const options = { + * key: fs.readFileSync('server-key.pem'), + * cert: fs.readFileSync('server-cert.pem'), + * + * // This is necessary only if using client certificate authentication. + * requestCert: true, + * + * // This is necessary only if the client uses a self-signed certificate. + * ca: [ fs.readFileSync('client-cert.pem') ], + * }; + * + * const server = tls.createServer(options, (socket) => { + * console.log('server connected', + * socket.authorized ? 'authorized' : 'unauthorized'); + * socket.write('welcome!\n'); + * socket.setEncoding('utf8'); + * socket.pipe(socket); + * }); + * server.listen(8000, () => { + * console.log('server bound'); + * }); + * ``` + * + * The server can be tested by connecting to it using the example client from {@link connect}. + * @since v0.3.2 + */ + function createServer(secureConnectionListener?: (socket: TLSSocket) => void): Server; + function createServer(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void): Server; + /** + * The `callback` function, if specified, will be added as a listener for the `'secureConnect'` event. + * + * `tls.connect()` returns a {@link TLSSocket} object. + * + * Unlike the `https` API, `tls.connect()` does not enable the + * SNI (Server Name Indication) extension by default, which may cause some + * servers to return an incorrect certificate or reject the connection + * altogether. To enable SNI, set the `servername` option in addition + * to `host`. + * + * The following illustrates a client for the echo server example from {@link createServer}: + * + * ```js + * // Assumes an echo server that is listening on port 8000. + * const tls = require('node:tls'); + * const fs = require('node:fs'); + * + * const options = { + * // Necessary only if the server requires client certificate authentication. + * key: fs.readFileSync('client-key.pem'), + * cert: fs.readFileSync('client-cert.pem'), + * + * // Necessary only if the server uses a self-signed certificate. 
+ * ca: [ fs.readFileSync('server-cert.pem') ], + * + * // Necessary only if the server's cert isn't for "localhost". + * checkServerIdentity: () => { return null; }, + * }; + * + * const socket = tls.connect(8000, options, () => { + * console.log('client connected', + * socket.authorized ? 'authorized' : 'unauthorized'); + * process.stdin.pipe(socket); + * process.stdin.resume(); + * }); + * socket.setEncoding('utf8'); + * socket.on('data', (data) => { + * console.log(data); + * }); + * socket.on('end', () => { + * console.log('server ends connection'); + * }); + * ``` + * @since v0.11.3 + */ + function connect(options: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + function connect( + port: number, + host?: string, + options?: ConnectionOptions, + secureConnectListener?: () => void, + ): TLSSocket; + function connect(port: number, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + /** + * Creates a new secure pair object with two streams, one of which reads and writes + * the encrypted data and the other of which reads and writes the cleartext data. + * Generally, the encrypted stream is piped to/from an incoming encrypted data + * stream and the cleartext one is used as a replacement for the initial encrypted + * stream. + * + * `tls.createSecurePair()` returns a `tls.SecurePair` object with `cleartext` and`encrypted` stream properties. + * + * Using `cleartext` has the same API as {@link TLSSocket}. + * + * The `tls.createSecurePair()` method is now deprecated in favor of`tls.TLSSocket()`. For example, the code: + * + * ```js + * pair = tls.createSecurePair(// ... ); + * pair.encrypted.pipe(socket); + * socket.pipe(pair.encrypted); + * ``` + * + * can be replaced by: + * + * ```js + * secureSocket = tls.TLSSocket(socket, options); + * ``` + * + * where `secureSocket` has the same API as `pair.cleartext`. + * @since v0.3.2 + * @deprecated Since v0.11.3 - Use {@link TLSSocket} instead. + * @param context A secure context object as returned by `tls.createSecureContext()` + * @param isServer `true` to specify that this TLS connection should be opened as a server. + * @param requestCert `true` to specify whether a server should request a certificate from a connecting client. Only applies when `isServer` is `true`. + * @param rejectUnauthorized If not `false` a server automatically reject clients with invalid certificates. Only applies when `isServer` is `true`. + */ + function createSecurePair( + context?: SecureContext, + isServer?: boolean, + requestCert?: boolean, + rejectUnauthorized?: boolean, + ): SecurePair; + /** + * {@link createServer} sets the default value of the `honorCipherOrder` option + * to `true`, other APIs that create secure contexts leave it unset. + * + * {@link createServer} uses a 128 bit truncated SHA1 hash value generated + * from `process.argv` as the default value of the `sessionIdContext` option, other + * APIs that create secure contexts have no default value. + * + * The `tls.createSecureContext()` method creates a `SecureContext` object. It is + * usable as an argument to several `tls` APIs, such as `server.addContext()`, + * but has no public methods. The {@link Server} constructor and the {@link createServer} method do not support the `secureContext` option. + * + * A key is _required_ for ciphers that use certificates. Either `key` or`pfx` can be used to provide it. 
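+     *
+     * A minimal sketch, assuming `fs` has been required and an existing `server`
+     * (file names and the host name are placeholders):
+     *
+     * ```js
+     * const secureContext = tls.createSecureContext({
+     *   key: fs.readFileSync('server-key.pem'),
+     *   cert: fs.readFileSync('server-cert.pem'),
+     * });
+     * server.addContext('example.org', secureContext);
+     * ```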
+ * + * If the `ca` option is not given, then Node.js will default to using [Mozilla's publicly trusted list of + * CAs](https://hg.mozilla.org/mozilla-central/raw-file/tip/security/nss/lib/ckfw/builtins/certdata.txt). + * + * Custom DHE parameters are discouraged in favor of the new `dhparam: 'auto'`option. When set to `'auto'`, well-known DHE parameters of sufficient strength + * will be selected automatically. Otherwise, if necessary, `openssl dhparam` can + * be used to create custom parameters. The key length must be greater than or + * equal to 1024 bits or else an error will be thrown. Although 1024 bits is + * permissible, use 2048 bits or larger for stronger security. + * @since v0.11.13 + */ + function createSecureContext(options?: SecureContextOptions): SecureContext; + /** + * Returns an array with the names of the supported TLS ciphers. The names are + * lower-case for historical reasons, but must be uppercased to be used in + * the `ciphers` option of {@link createSecureContext}. + * + * Not all supported ciphers are enabled by default. See `Modifying the default TLS cipher suite`. + * + * Cipher names that start with `'tls_'` are for TLSv1.3, all the others are for + * TLSv1.2 and below. + * + * ```js + * console.log(tls.getCiphers()); // ['aes128-gcm-sha256', 'aes128-sha', ...] + * ``` + * @since v0.10.2 + */ + function getCiphers(): string[]; + /** + * The default curve name to use for ECDH key agreement in a tls server. + * The default value is 'auto'. See tls.createSecureContext() for further + * information. + */ + let DEFAULT_ECDH_CURVE: string; + /** + * The default value of the maxVersion option of + * tls.createSecureContext(). It can be assigned any of the supported TLS + * protocol versions, 'TLSv1.3', 'TLSv1.2', 'TLSv1.1', or 'TLSv1'. Default: + * 'TLSv1.3', unless changed using CLI options. Using --tls-max-v1.2 sets + * the default to 'TLSv1.2'. Using --tls-max-v1.3 sets the default to + * 'TLSv1.3'. If multiple of the options are provided, the highest maximum + * is used. + */ + let DEFAULT_MAX_VERSION: SecureVersion; + /** + * The default value of the minVersion option of tls.createSecureContext(). + * It can be assigned any of the supported TLS protocol versions, + * 'TLSv1.3', 'TLSv1.2', 'TLSv1.1', or 'TLSv1'. Default: 'TLSv1.2', unless + * changed using CLI options. Using --tls-min-v1.0 sets the default to + * 'TLSv1'. Using --tls-min-v1.1 sets the default to 'TLSv1.1'. Using + * --tls-min-v1.3 sets the default to 'TLSv1.3'. If multiple of the options + * are provided, the lowest minimum is used. + */ + let DEFAULT_MIN_VERSION: SecureVersion; + /** + * The default value of the ciphers option of tls.createSecureContext(). + * It can be assigned any of the supported OpenSSL ciphers. + * Defaults to the content of crypto.constants.defaultCoreCipherList, unless + * changed using CLI options using --tls-default-ciphers. + */ + let DEFAULT_CIPHERS: string; + /** + * An immutable array of strings representing the root certificates (in PEM + * format) used for verifying peer certificates. This is the default value + * of the ca option to tls.createSecureContext(). 
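+     *
+     * A common pattern is to extend, rather than replace, the bundled roots
+     * (a sketch; the extra CA path and host are placeholders, and `fs` is assumed
+     * to be required):
+     *
+     * ```js
+     * const ca = [...tls.rootCertificates, fs.readFileSync('private-ca.pem', 'utf8')];
+     * tls.connect({ host: 'internal.example', port: 443, ca });
+     * ```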
+ */ + const rootCertificates: readonly string[]; +} +declare module "node:tls" { + export * from "tls"; +} diff --git a/dist/node_modules/@types/node/ts4.8/trace_events.d.ts b/dist/node_modules/@types/node/ts4.8/trace_events.d.ts new file mode 100644 index 0000000..3361359 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/trace_events.d.ts @@ -0,0 +1,182 @@ +/** + * The `node:trace_events` module provides a mechanism to centralize tracing + * information generated by V8, Node.js core, and userspace code. + * + * Tracing can be enabled with the `--trace-event-categories` command-line flag + * or by using the `node:trace_events` module. The `--trace-event-categories` flag + * accepts a list of comma-separated category names. + * + * The available categories are: + * + * * `node`: An empty placeholder. + * * `node.async_hooks`: Enables capture of detailed `async_hooks` trace data. + * The `async_hooks` events have a unique `asyncId` and a special `triggerId` `triggerAsyncId` property. + * * `node.bootstrap`: Enables capture of Node.js bootstrap milestones. + * * `node.console`: Enables capture of `console.time()` and `console.count()`output. + * * `node.threadpoolwork.sync`: Enables capture of trace data for threadpool + * synchronous operations, such as `blob`, `zlib`, `crypto` and `node_api`. + * * `node.threadpoolwork.async`: Enables capture of trace data for threadpool + * asynchronous operations, such as `blob`, `zlib`, `crypto` and `node_api`. + * * `node.dns.native`: Enables capture of trace data for DNS queries. + * * `node.net.native`: Enables capture of trace data for network. + * * `node.environment`: Enables capture of Node.js Environment milestones. + * * `node.fs.sync`: Enables capture of trace data for file system sync methods. + * * `node.fs_dir.sync`: Enables capture of trace data for file system sync + * directory methods. + * * `node.fs.async`: Enables capture of trace data for file system async methods. + * * `node.fs_dir.async`: Enables capture of trace data for file system async + * directory methods. + * * `node.perf`: Enables capture of `Performance API` measurements. + * * `node.perf.usertiming`: Enables capture of only Performance API User Timing + * measures and marks. + * * `node.perf.timerify`: Enables capture of only Performance API timerify + * measurements. + * * `node.promises.rejections`: Enables capture of trace data tracking the number + * of unhandled Promise rejections and handled-after-rejections. + * * `node.vm.script`: Enables capture of trace data for the `node:vm` module's`runInNewContext()`, `runInContext()`, and `runInThisContext()` methods. + * * `v8`: The `V8` events are GC, compiling, and execution related. + * * `node.http`: Enables capture of trace data for http request / response. + * + * By default the `node`, `node.async_hooks`, and `v8` categories are enabled. + * + * ```bash + * node --trace-event-categories v8,node,node.async_hooks server.js + * ``` + * + * Prior versions of Node.js required the use of the `--trace-events-enabled`flag to enable trace events. This requirement has been removed. However, the`--trace-events-enabled` flag _may_ still be + * used and will enable the`node`, `node.async_hooks`, and `v8` trace event categories by default. 
+ * + * ```bash + * node --trace-events-enabled + * + * # is equivalent to + * + * node --trace-event-categories v8,node,node.async_hooks + * ``` + * + * Alternatively, trace events may be enabled using the `node:trace_events` module: + * + * ```js + * const trace_events = require('node:trace_events'); + * const tracing = trace_events.createTracing({ categories: ['node.perf'] }); + * tracing.enable(); // Enable trace event capture for the 'node.perf' category + * + * // do work + * + * tracing.disable(); // Disable trace event capture for the 'node.perf' category + * ``` + * + * Running Node.js with tracing enabled will produce log files that can be opened + * in the [`chrome://tracing`](https://www.chromium.org/developers/how-tos/trace-event-profiling-tool) tab of Chrome. + * + * The logging file is by default called `node_trace.${rotation}.log`, where`${rotation}` is an incrementing log-rotation id. The filepath pattern can + * be specified with `--trace-event-file-pattern` that accepts a template + * string that supports `${rotation}` and `${pid}`: + * + * ```bash + * node --trace-event-categories v8 --trace-event-file-pattern '${pid}-${rotation}.log' server.js + * ``` + * + * To guarantee that the log file is properly generated after signal events like`SIGINT`, `SIGTERM`, or `SIGBREAK`, make sure to have the appropriate handlers + * in your code, such as: + * + * ```js + * process.on('SIGINT', function onSigint() { + * console.info('Received SIGINT.'); + * process.exit(130); // Or applicable exit code depending on OS and signal + * }); + * ``` + * + * The tracing system uses the same time source + * as the one used by `process.hrtime()`. + * However the trace-event timestamps are expressed in microseconds, + * unlike `process.hrtime()` which returns nanoseconds. + * + * The features from this module are not available in `Worker` threads. + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/trace_events.js) + */ +declare module "trace_events" { + /** + * The `Tracing` object is used to enable or disable tracing for sets of + * categories. Instances are created using the + * `trace_events.createTracing()` method. + * + * When created, the `Tracing` object is disabled. Calling the + * `tracing.enable()` method adds the categories to the set of enabled trace + * event categories. Calling `tracing.disable()` will remove the categories + * from the set of enabled trace event categories. + */ + interface Tracing { + /** + * A comma-separated list of the trace event categories covered by this + * `Tracing` object. + */ + readonly categories: string; + /** + * Disables this `Tracing` object. + * + * Only trace event categories _not_ covered by other enabled `Tracing` + * objects and _not_ specified by the `--trace-event-categories` flag + * will be disabled. + */ + disable(): void; + /** + * Enables this `Tracing` object for the set of categories covered by + * the `Tracing` object. + */ + enable(): void; + /** + * `true` only if the `Tracing` object has been enabled. + */ + readonly enabled: boolean; + } + interface CreateTracingOptions { + /** + * An array of trace category names. Values included in the array are + * coerced to a string when possible. An error will be thrown if the + * value cannot be coerced. + */ + categories: string[]; + } + /** + * Creates and returns a `Tracing` object for the given set of `categories`. 
+ * + * ```js + * const trace_events = require('node:trace_events'); + * const categories = ['node.perf', 'node.async_hooks']; + * const tracing = trace_events.createTracing({ categories }); + * tracing.enable(); + * // do stuff + * tracing.disable(); + * ``` + * @since v10.0.0 + * @return . + */ + function createTracing(options: CreateTracingOptions): Tracing; + /** + * Returns a comma-separated list of all currently-enabled trace event + * categories. The current set of enabled trace event categories is determined + * by the _union_ of all currently-enabled `Tracing` objects and any categories + * enabled using the `--trace-event-categories` flag. + * + * Given the file `test.js` below, the command`node --trace-event-categories node.perf test.js` will print`'node.async_hooks,node.perf'` to the console. + * + * ```js + * const trace_events = require('node:trace_events'); + * const t1 = trace_events.createTracing({ categories: ['node.async_hooks'] }); + * const t2 = trace_events.createTracing({ categories: ['node.perf'] }); + * const t3 = trace_events.createTracing({ categories: ['v8'] }); + * + * t1.enable(); + * t2.enable(); + * + * console.log(trace_events.getEnabledCategories()); + * ``` + * @since v10.0.0 + */ + function getEnabledCategories(): string | undefined; +} +declare module "node:trace_events" { + export * from "trace_events"; +} diff --git a/dist/node_modules/@types/node/ts4.8/tty.d.ts b/dist/node_modules/@types/node/ts4.8/tty.d.ts new file mode 100644 index 0000000..1c0dafd --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/tty.d.ts @@ -0,0 +1,208 @@ +/** + * The `node:tty` module provides the `tty.ReadStream` and `tty.WriteStream`classes. In most cases, it will not be necessary or possible to use this module + * directly. However, it can be accessed using: + * + * ```js + * const tty = require('node:tty'); + * ``` + * + * When Node.js detects that it is being run with a text terminal ("TTY") + * attached, `process.stdin` will, by default, be initialized as an instance of`tty.ReadStream` and both `process.stdout` and `process.stderr` will, by + * default, be instances of `tty.WriteStream`. The preferred method of determining + * whether Node.js is being run within a TTY context is to check that the value of + * the `process.stdout.isTTY` property is `true`: + * + * ```console + * $ node -p -e "Boolean(process.stdout.isTTY)" + * true + * $ node -p -e "Boolean(process.stdout.isTTY)" | cat + * false + * ``` + * + * In most cases, there should be little to no reason for an application to + * manually create instances of the `tty.ReadStream` and `tty.WriteStream`classes. + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/tty.js) + */ +declare module "tty" { + import * as net from "node:net"; + /** + * The `tty.isatty()` method returns `true` if the given `fd` is associated with + * a TTY and `false` if it is not, including whenever `fd` is not a non-negative + * integer. + * @since v0.5.8 + * @param fd A numeric file descriptor + */ + function isatty(fd: number): boolean; + /** + * Represents the readable side of a TTY. In normal circumstances `process.stdin` will be the only `tty.ReadStream` instance in a Node.js + * process and there should be no reason to create additional instances. + * @since v0.5.8 + */ + class ReadStream extends net.Socket { + constructor(fd: number, options?: net.SocketConstructorOpts); + /** + * A `boolean` that is `true` if the TTY is currently configured to operate as a + * raw device. 
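+         *
+         * For example, a program might check the flag before toggling raw mode (a sketch):
+         *
+         * ```js
+         * if (process.stdin.isTTY && !process.stdin.isRaw) {
+         *   process.stdin.setRawMode(true);
+         * }
+         * ```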
+ * + * This flag is always `false` when a process starts, even if the terminal is + * operating in raw mode. Its value will change with subsequent calls to`setRawMode`. + * @since v0.7.7 + */ + isRaw: boolean; + /** + * Allows configuration of `tty.ReadStream` so that it operates as a raw device. + * + * When in raw mode, input is always available character-by-character, not + * including modifiers. Additionally, all special processing of characters by the + * terminal is disabled, including echoing input + * characters. Ctrl+C will no longer cause a `SIGINT` when + * in this mode. + * @since v0.7.7 + * @param mode If `true`, configures the `tty.ReadStream` to operate as a raw device. If `false`, configures the `tty.ReadStream` to operate in its default mode. The `readStream.isRaw` + * property will be set to the resulting mode. + * @return The read stream instance. + */ + setRawMode(mode: boolean): this; + /** + * A `boolean` that is always `true` for `tty.ReadStream` instances. + * @since v0.5.8 + */ + isTTY: boolean; + } + /** + * -1 - to the left from cursor + * 0 - the entire line + * 1 - to the right from cursor + */ + type Direction = -1 | 0 | 1; + /** + * Represents the writable side of a TTY. In normal circumstances,`process.stdout` and `process.stderr` will be the only`tty.WriteStream` instances created for a Node.js process and there + * should be no reason to create additional instances. + * @since v0.5.8 + */ + class WriteStream extends net.Socket { + constructor(fd: number); + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: "resize", listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: "resize"): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: "resize", listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: "resize", listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "resize", listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "resize", listener: () => void): this; + /** + * `writeStream.clearLine()` clears the current line of this `WriteStream` in a + * direction identified by `dir`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + clearLine(dir: Direction, callback?: () => void): boolean; + /** + * `writeStream.clearScreenDown()` clears this `WriteStream` from the current + * cursor down. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + clearScreenDown(callback?: () => void): boolean; + /** + * `writeStream.cursorTo()` moves this `WriteStream`'s cursor to the specified + * position. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. 
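+         *
+         * A small sketch that repaints a status line at the bottom of the screen
+         * (the text written is arbitrary):
+         *
+         * ```js
+         * process.stdout.cursorTo(0, process.stdout.rows - 1, () => {
+         *   process.stdout.clearLine(0);
+         *   process.stdout.write('ready');
+         * });
+         * ```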
+ */ + cursorTo(x: number, y?: number, callback?: () => void): boolean; + cursorTo(x: number, callback: () => void): boolean; + /** + * `writeStream.moveCursor()` moves this `WriteStream`'s cursor _relative_ to its + * current position. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + moveCursor(dx: number, dy: number, callback?: () => void): boolean; + /** + * Returns: + * + * * `1` for 2, + * * `4` for 16, + * * `8` for 256, + * * `24` for 16,777,216 colors supported. + * + * Use this to determine what colors the terminal supports. Due to the nature of + * colors in terminals it is possible to either have false positives or false + * negatives. It depends on process information and the environment variables that + * may lie about what terminal is used. + * It is possible to pass in an `env` object to simulate the usage of a specific + * terminal. This can be useful to check how specific environment settings behave. + * + * To enforce a specific color support, use one of the below environment settings. + * + * * 2 colors: `FORCE_COLOR = 0` (Disables colors) + * * 16 colors: `FORCE_COLOR = 1` + * * 256 colors: `FORCE_COLOR = 2` + * * 16,777,216 colors: `FORCE_COLOR = 3` + * + * Disabling color support is also possible by using the `NO_COLOR` and`NODE_DISABLE_COLORS` environment variables. + * @since v9.9.0 + * @param [env=process.env] An object containing the environment variables to check. This enables simulating the usage of a specific terminal. + */ + getColorDepth(env?: object): number; + /** + * Returns `true` if the `writeStream` supports at least as many colors as provided + * in `count`. Minimum support is 2 (black and white). + * + * This has the same false positives and negatives as described in `writeStream.getColorDepth()`. + * + * ```js + * process.stdout.hasColors(); + * // Returns true or false depending on if `stdout` supports at least 16 colors. + * process.stdout.hasColors(256); + * // Returns true or false depending on if `stdout` supports at least 256 colors. + * process.stdout.hasColors({ TMUX: '1' }); + * // Returns true. + * process.stdout.hasColors(2 ** 24, { TMUX: '1' }); + * // Returns false (the environment setting pretends to support 2 ** 8 colors). + * ``` + * @since v11.13.0, v10.16.0 + * @param [count=16] The number of colors that are requested (minimum 2). + * @param [env=process.env] An object containing the environment variables to check. This enables simulating the usage of a specific terminal. + */ + hasColors(count?: number): boolean; + hasColors(env?: object): boolean; + hasColors(count: number, env?: object): boolean; + /** + * `writeStream.getWindowSize()` returns the size of the TTY + * corresponding to this `WriteStream`. The array is of the type`[numColumns, numRows]` where `numColumns` and `numRows` represent the number + * of columns and rows in the corresponding TTY. + * @since v0.7.7 + */ + getWindowSize(): [number, number]; + /** + * A `number` specifying the number of columns the TTY currently has. This property + * is updated whenever the `'resize'` event is emitted. + * @since v0.7.7 + */ + columns: number; + /** + * A `number` specifying the number of rows the TTY currently has. This property + * is updated whenever the `'resize'` event is emitted. + * @since v0.7.7 + */ + rows: number; + /** + * A `boolean` that is always `true`. 
+ * @since v0.5.8 + */ + isTTY: boolean; + } +} +declare module "node:tty" { + export * from "tty"; +} diff --git a/dist/node_modules/@types/node/ts4.8/url.d.ts b/dist/node_modules/@types/node/ts4.8/url.d.ts new file mode 100644 index 0000000..dc2d0a0 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/url.d.ts @@ -0,0 +1,944 @@ +/** + * The `node:url` module provides utilities for URL resolution and parsing. It can + * be accessed using: + * + * ```js + * import url from 'node:url'; + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.11.0/lib/url.js) + */ +declare module "url" { + import { Blob as NodeBlob } from "node:buffer"; + import { ClientRequestArgs } from "node:http"; + import { ParsedUrlQuery, ParsedUrlQueryInput } from "node:querystring"; + // Input to `url.format` + interface UrlObject { + auth?: string | null | undefined; + hash?: string | null | undefined; + host?: string | null | undefined; + hostname?: string | null | undefined; + href?: string | null | undefined; + pathname?: string | null | undefined; + protocol?: string | null | undefined; + search?: string | null | undefined; + slashes?: boolean | null | undefined; + port?: string | number | null | undefined; + query?: string | null | ParsedUrlQueryInput | undefined; + } + // Output of `url.parse` + interface Url { + auth: string | null; + hash: string | null; + host: string | null; + hostname: string | null; + href: string; + path: string | null; + pathname: string | null; + protocol: string | null; + search: string | null; + slashes: boolean | null; + port: string | null; + query: string | null | ParsedUrlQuery; + } + interface UrlWithParsedQuery extends Url { + query: ParsedUrlQuery; + } + interface UrlWithStringQuery extends Url { + query: string | null; + } + /** + * The `url.parse()` method takes a URL string, parses it, and returns a URL + * object. + * + * A `TypeError` is thrown if `urlString` is not a string. + * + * A `URIError` is thrown if the `auth` property is present but cannot be decoded. + * + * `url.parse()` uses a lenient, non-standard algorithm for parsing URL + * strings. It is prone to security issues such as [host name spoofing](https://hackerone.com/reports/678487) and incorrect handling of usernames and passwords. Do not use with untrusted + * input. CVEs are not issued for `url.parse()` vulnerabilities. Use the `WHATWG URL` API instead. + * @since v0.1.25 + * @deprecated Use the WHATWG URL API instead. + * @param urlString The URL string to parse. + * @param [parseQueryString=false] If `true`, the `query` property will always be set to an object returned by the {@link querystring} module's `parse()` method. If `false`, the `query` property + * on the returned URL object will be an unparsed, undecoded string. + * @param [slashesDenoteHost=false] If `true`, the first token after the literal string `//` and preceding the next `/` will be interpreted as the `host`. For instance, given `//foo/bar`, the + * result would be `{host: 'foo', pathname: '/bar'}` rather than `{pathname: '//foo/bar'}`. + */ + function parse(urlString: string): UrlWithStringQuery; + function parse( + urlString: string, + parseQueryString: false | undefined, + slashesDenoteHost?: boolean, + ): UrlWithStringQuery; + function parse(urlString: string, parseQueryString: true, slashesDenoteHost?: boolean): UrlWithParsedQuery; + function parse(urlString: string, parseQueryString: boolean, slashesDenoteHost?: boolean): Url; + /** + * The `url.format()` method returns a formatted URL string derived from `urlObject`. 
+ * + * ```js + * const url = require('node:url'); + * url.format({ + * protocol: 'https', + * hostname: 'example.com', + * pathname: '/some/path', + * query: { + * page: 1, + * format: 'json', + * }, + * }); + * + * // => 'https://example.com/some/path?page=1&format=json' + * ``` + * + * If `urlObject` is not an object or a string, `url.format()` will throw a `TypeError`. + * + * The formatting process operates as follows: + * + * * A new empty string `result` is created. + * * If `urlObject.protocol` is a string, it is appended as-is to `result`. + * * Otherwise, if `urlObject.protocol` is not `undefined` and is not a string, an `Error` is thrown. + * * For all string values of `urlObject.protocol` that _do not end_ with an ASCII + * colon (`:`) character, the literal string `:` will be appended to `result`. + * * If either of the following conditions is true, then the literal string `//` will be appended to `result`: + * * `urlObject.slashes` property is true; + * * `urlObject.protocol` begins with `http`, `https`, `ftp`, `gopher`, or `file`; + * * If the value of the `urlObject.auth` property is truthy, and either `urlObject.host` or `urlObject.hostname` are not `undefined`, the value of `urlObject.auth` will be coerced into a string + * and appended to `result` followed by the literal string `@`. + * * If the `urlObject.host` property is `undefined` then: + * * If the `urlObject.hostname` is a string, it is appended to `result`. + * * Otherwise, if `urlObject.hostname` is not `undefined` and is not a string, + * an `Error` is thrown. + * * If the `urlObject.port` property value is truthy, and `urlObject.hostname` is not `undefined`: + * * The literal string `:` is appended to `result`, and + * * The value of `urlObject.port` is coerced to a string and appended to `result`. + * * Otherwise, if the `urlObject.host` property value is truthy, the value of `urlObject.host` is coerced to a string and appended to `result`. + * * If the `urlObject.pathname` property is a string that is not an empty string: + * * If the `urlObject.pathname` _does not start_ with an ASCII forward slash + * (`/`), then the literal string `'/'` is appended to `result`. + * * The value of `urlObject.pathname` is appended to `result`. + * * Otherwise, if `urlObject.pathname` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.search` property is `undefined` and if the `urlObject.query`property is an `Object`, the literal string `?` is appended to `result` followed by the output of calling the + * `querystring` module's `stringify()` method passing the value of `urlObject.query`. + * * Otherwise, if `urlObject.search` is a string: + * * If the value of `urlObject.search` _does not start_ with the ASCII question + * mark (`?`) character, the literal string `?` is appended to `result`. + * * The value of `urlObject.search` is appended to `result`. + * * Otherwise, if `urlObject.search` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.hash` property is a string: + * * If the value of `urlObject.hash` _does not start_ with the ASCII hash (`#`) + * character, the literal string `#` is appended to `result`. + * * The value of `urlObject.hash` is appended to `result`. + * * Otherwise, if the `urlObject.hash` property is not `undefined` and is not a + * string, an `Error` is thrown. + * * `result` is returned. + * @since v0.1.25 + * @legacy Use the WHATWG URL API instead. 
+ * @param urlObject A URL object (as returned by `url.parse()` or constructed otherwise). If a string, it is converted to an object by passing it to `url.parse()`. + */ + function format(urlObject: URL, options?: URLFormatOptions): string; + /** + * The `url.format()` method returns a formatted URL string derived from `urlObject`. + * + * ```js + * const url = require('node:url'); + * url.format({ + * protocol: 'https', + * hostname: 'example.com', + * pathname: '/some/path', + * query: { + * page: 1, + * format: 'json', + * }, + * }); + * + * // => 'https://example.com/some/path?page=1&format=json' + * ``` + * + * If `urlObject` is not an object or a string, `url.format()` will throw a `TypeError`. + * + * The formatting process operates as follows: + * + * * A new empty string `result` is created. + * * If `urlObject.protocol` is a string, it is appended as-is to `result`. + * * Otherwise, if `urlObject.protocol` is not `undefined` and is not a string, an `Error` is thrown. + * * For all string values of `urlObject.protocol` that _do not end_ with an ASCII + * colon (`:`) character, the literal string `:` will be appended to `result`. + * * If either of the following conditions is true, then the literal string `//` will be appended to `result`: + * * `urlObject.slashes` property is true; + * * `urlObject.protocol` begins with `http`, `https`, `ftp`, `gopher`, or `file`; + * * If the value of the `urlObject.auth` property is truthy, and either `urlObject.host` or `urlObject.hostname` are not `undefined`, the value of `urlObject.auth` will be coerced into a string + * and appended to `result` followed by the literal string `@`. + * * If the `urlObject.host` property is `undefined` then: + * * If the `urlObject.hostname` is a string, it is appended to `result`. + * * Otherwise, if `urlObject.hostname` is not `undefined` and is not a string, + * an `Error` is thrown. + * * If the `urlObject.port` property value is truthy, and `urlObject.hostname` is not `undefined`: + * * The literal string `:` is appended to `result`, and + * * The value of `urlObject.port` is coerced to a string and appended to `result`. + * * Otherwise, if the `urlObject.host` property value is truthy, the value of `urlObject.host` is coerced to a string and appended to `result`. + * * If the `urlObject.pathname` property is a string that is not an empty string: + * * If the `urlObject.pathname` _does not start_ with an ASCII forward slash + * (`/`), then the literal string `'/'` is appended to `result`. + * * The value of `urlObject.pathname` is appended to `result`. + * * Otherwise, if `urlObject.pathname` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.search` property is `undefined` and if the `urlObject.query`property is an `Object`, the literal string `?` is appended to `result` followed by the output of calling the + * `querystring` module's `stringify()` method passing the value of `urlObject.query`. + * * Otherwise, if `urlObject.search` is a string: + * * If the value of `urlObject.search` _does not start_ with the ASCII question + * mark (`?`) character, the literal string `?` is appended to `result`. + * * The value of `urlObject.search` is appended to `result`. + * * Otherwise, if `urlObject.search` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.hash` property is a string: + * * If the value of `urlObject.hash` _does not start_ with the ASCII hash (`#`) + * character, the literal string `#` is appended to `result`. 
+ * * The value of `urlObject.hash` is appended to `result`. + * * Otherwise, if the `urlObject.hash` property is not `undefined` and is not a + * string, an `Error` is thrown. + * * `result` is returned. + * @since v0.1.25 + * @legacy Use the WHATWG URL API instead. + * @param urlObject A URL object (as returned by `url.parse()` or constructed otherwise). If a string, it is converted to an object by passing it to `url.parse()`. + */ + function format(urlObject: UrlObject | string): string; + /** + * The `url.resolve()` method resolves a target URL relative to a base URL in a + * manner similar to that of a web browser resolving an anchor tag. + * + * ```js + * const url = require('node:url'); + * url.resolve('/one/two/three', 'four'); // '/one/two/four' + * url.resolve('http://example.com/', '/one'); // 'http://example.com/one' + * url.resolve('http://example.com/one', '/two'); // 'http://example.com/two' + * ``` + * + * To achieve the same result using the WHATWG URL API: + * + * ```js + * function resolve(from, to) { + * const resolvedUrl = new URL(to, new URL(from, 'resolve://')); + * if (resolvedUrl.protocol === 'resolve:') { + * // `from` is a relative URL. + * const { pathname, search, hash } = resolvedUrl; + * return pathname + search + hash; + * } + * return resolvedUrl.toString(); + * } + * + * resolve('/one/two/three', 'four'); // '/one/two/four' + * resolve('http://example.com/', '/one'); // 'http://example.com/one' + * resolve('http://example.com/one', '/two'); // 'http://example.com/two' + * ``` + * @since v0.1.25 + * @legacy Use the WHATWG URL API instead. + * @param from The base URL to use if `to` is a relative URL. + * @param to The target URL to resolve. + */ + function resolve(from: string, to: string): string; + /** + * Returns the [Punycode](https://tools.ietf.org/html/rfc5891#section-4.4) ASCII serialization of the `domain`. If `domain` is an + * invalid domain, the empty string is returned. + * + * It performs the inverse operation to {@link domainToUnicode}. + * + * ```js + * import url from 'node:url'; + * + * console.log(url.domainToASCII('español.com')); + * // Prints xn--espaol-zwa.com + * console.log(url.domainToASCII('中文.com')); + * // Prints xn--fiq228c.com + * console.log(url.domainToASCII('xn--iñvalid.com')); + * // Prints an empty string + * ``` + * @since v7.4.0, v6.13.0 + */ + function domainToASCII(domain: string): string; + /** + * Returns the Unicode serialization of the `domain`. If `domain` is an invalid + * domain, the empty string is returned. + * + * It performs the inverse operation to {@link domainToASCII}. + * + * ```js + * import url from 'node:url'; + * + * console.log(url.domainToUnicode('xn--espaol-zwa.com')); + * // Prints español.com + * console.log(url.domainToUnicode('xn--fiq228c.com')); + * // Prints 中文.com + * console.log(url.domainToUnicode('xn--iñvalid.com')); + * // Prints an empty string + * ``` + * @since v7.4.0, v6.13.0 + */ + function domainToUnicode(domain: string): string; + /** + * This function ensures the correct decodings of percent-encoded characters as + * well as ensuring a cross-platform valid absolute path string. 
+ * + * ```js + * import { fileURLToPath } from 'node:url'; + * + * const __filename = fileURLToPath(import.meta.url); + * + * new URL('file:///C:/path/').pathname; // Incorrect: /C:/path/ + * fileURLToPath('file:///C:/path/'); // Correct: C:\path\ (Windows) + * + * new URL('file://nas/foo.txt').pathname; // Incorrect: /foo.txt + * fileURLToPath('file://nas/foo.txt'); // Correct: \\nas\foo.txt (Windows) + * + * new URL('file:///你好.txt').pathname; // Incorrect: /%E4%BD%A0%E5%A5%BD.txt + * fileURLToPath('file:///你好.txt'); // Correct: /你好.txt (POSIX) + * + * new URL('file:///hello world').pathname; // Incorrect: /hello%20world + * fileURLToPath('file:///hello world'); // Correct: /hello world (POSIX) + * ``` + * @since v10.12.0 + * @param url The file URL string or URL object to convert to a path. + * @return The fully-resolved platform-specific Node.js file path. + */ + function fileURLToPath(url: string | URL): string; + /** + * This function ensures that `path` is resolved absolutely, and that the URL + * control characters are correctly encoded when converting into a File URL. + * + * ```js + * import { pathToFileURL } from 'node:url'; + * + * new URL('/foo#1', 'file:'); // Incorrect: file:///foo#1 + * pathToFileURL('/foo#1'); // Correct: file:///foo%231 (POSIX) + * + * new URL('/some/path%.c', 'file:'); // Incorrect: file:///some/path%.c + * pathToFileURL('/some/path%.c'); // Correct: file:///some/path%25.c (POSIX) + * ``` + * @since v10.12.0 + * @param path The path to convert to a File URL. + * @return The file URL object. + */ + function pathToFileURL(path: string): URL; + /** + * This utility function converts a URL object into an ordinary options object as + * expected by the `http.request()` and `https.request()` APIs. + * + * ```js + * import { urlToHttpOptions } from 'node:url'; + * const myURL = new URL('https://a:b@測試?abc#foo'); + * + * console.log(urlToHttpOptions(myURL)); + * /* + * { + * protocol: 'https:', + * hostname: 'xn--g6w251d', + * hash: '#foo', + * search: '?abc', + * pathname: '/', + * path: '/?abc', + * href: 'https://a:b@xn--g6w251d/?abc#foo', + * auth: 'a:b' + * } + * + * ``` + * @since v15.7.0, v14.18.0 + * @param url The `WHATWG URL` object to convert to an options object. + * @return Options object + */ + function urlToHttpOptions(url: URL): ClientRequestArgs; + interface URLFormatOptions { + /** + * `true` if the serialized URL string should include the username and password, `false` otherwise. + * @default true + */ + auth?: boolean | undefined; + /** + * `true` if the serialized URL string should include the fragment, `false` otherwise. + * @default true + */ + fragment?: boolean | undefined; + /** + * `true` if the serialized URL string should include the search query, `false` otherwise. + * @default true + */ + search?: boolean | undefined; + /** + * `true` if Unicode characters appearing in the host component of the URL string should be encoded directly as opposed to + * being Punycode encoded. + * @default false + */ + unicode?: boolean | undefined; + } + /** + * Browser-compatible `URL` class, implemented by following the WHATWG URL + * Standard. [Examples of parsed URLs](https://url.spec.whatwg.org/#example-url-parsing) may be found in the Standard itself. + * The `URL` class is also available on the global object. + * + * In accordance with browser conventions, all properties of `URL` objects + * are implemented as getters and setters on the class prototype, rather than as + * data properties on the object itself. 
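The `URLFormatOptions` interface above has no inline example; here is a hedged sketch of how it combines with the WHATWG overload of `format()` (the expected output is taken from the upstream Node.js docs, so treat it as indicative):

```ts
import { format } from "node:url";

const myURL = new URL("https://a:b@測試?abc#foo");

// Drop the credentials and fragment, and keep the host in Unicode rather than Punycode.
console.log(format(myURL, { auth: false, fragment: false, unicode: true }));
// Expected (per the Node.js docs): 'https://測試/?abc'
```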
Thus, unlike `legacy urlObject`s, + * using the `delete` keyword on any properties of `URL` objects (e.g. `delete myURL.protocol`, `delete myURL.pathname`, etc) has no effect but will still + * return `true`. + * @since v7.0.0, v6.13.0 + */ + class URL { + /** + * Creates a `'blob:nodedata:...'` URL string that represents the given `Blob` object and can be used to retrieve the `Blob` later. + * + * ```js + * const { + * Blob, + * resolveObjectURL, + * } = require('node:buffer'); + * + * const blob = new Blob(['hello']); + * const id = URL.createObjectURL(blob); + * + * // later... + * + * const otherBlob = resolveObjectURL(id); + * console.log(otherBlob.size); + * ``` + * + * The data stored by the registered `Blob` will be retained in memory until `URL.revokeObjectURL()` is called to remove it. + * + * `Blob` objects are registered within the current thread. If using Worker + * Threads, `Blob` objects registered within one Worker will not be available + * to other workers or the main thread. + * @since v16.7.0 + * @experimental + */ + static createObjectURL(blob: NodeBlob): string; + /** + * Removes the stored `Blob` identified by the given ID. Attempting to revoke a + * ID that isn't registered will silently fail. + * @since v16.7.0 + * @experimental + * @param id A `'blob:nodedata:...` URL string returned by a prior call to `URL.createObjectURL()`. + */ + static revokeObjectURL(id: string): void; + /** + * Checks if an `input` relative to the `base` can be parsed to a `URL`. + * + * ```js + * const isValid = URL.canParse('/foo', 'https://example.org/'); // true + * + * const isNotValid = URL.canParse('/foo'); // false + * ``` + * @since v19.9.0 + * @param input The absolute or relative input URL to parse. If `input` is relative, then `base` is required. If `input` is absolute, the `base` is ignored. If `input` is not a string, it is + * `converted to a string` first. + * @param base The base URL to resolve against if the `input` is not absolute. If `base` is not a string, it is `converted to a string` first. + */ + static canParse(input: string, base?: string): boolean; + constructor(input: string | { toString: () => string }, base?: string | URL); + /** + * Gets and sets the fragment portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org/foo#bar'); + * console.log(myURL.hash); + * // Prints #bar + * + * myURL.hash = 'baz'; + * console.log(myURL.href); + * // Prints https://example.org/foo#baz + * ``` + * + * Invalid URL characters included in the value assigned to the `hash` property + * are `percent-encoded`. The selection of which characters to + * percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + hash: string; + /** + * Gets and sets the host portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org:81/foo'); + * console.log(myURL.host); + * // Prints example.org:81 + * + * myURL.host = 'example.com:82'; + * console.log(myURL.href); + * // Prints https://example.com:82/foo + * ``` + * + * Invalid host values assigned to the `host` property are ignored. + */ + host: string; + /** + * Gets and sets the host name portion of the URL. The key difference between`url.host` and `url.hostname` is that `url.hostname` does _not_ include the + * port. 
+ * + * ```js + * const myURL = new URL('https://example.org:81/foo'); + * console.log(myURL.hostname); + * // Prints example.org + * + * // Setting the hostname does not change the port + * myURL.hostname = 'example.com'; + * console.log(myURL.href); + * // Prints https://example.com:81/foo + * + * // Use myURL.host to change the hostname and port + * myURL.host = 'example.org:82'; + * console.log(myURL.href); + * // Prints https://example.org:82/foo + * ``` + * + * Invalid host name values assigned to the `hostname` property are ignored. + */ + hostname: string; + /** + * Gets and sets the serialized URL. + * + * ```js + * const myURL = new URL('https://example.org/foo'); + * console.log(myURL.href); + * // Prints https://example.org/foo + * + * myURL.href = 'https://example.com/bar'; + * console.log(myURL.href); + * // Prints https://example.com/bar + * ``` + * + * Getting the value of the `href` property is equivalent to calling {@link toString}. + * + * Setting the value of this property to a new value is equivalent to creating a + * new `URL` object using `new URL(value)`. Each of the `URL`object's properties will be modified. + * + * If the value assigned to the `href` property is not a valid URL, a `TypeError`will be thrown. + */ + href: string; + /** + * Gets the read-only serialization of the URL's origin. + * + * ```js + * const myURL = new URL('https://example.org/foo/bar?baz'); + * console.log(myURL.origin); + * // Prints https://example.org + * ``` + * + * ```js + * const idnURL = new URL('https://測試'); + * console.log(idnURL.origin); + * // Prints https://xn--g6w251d + * + * console.log(idnURL.hostname); + * // Prints xn--g6w251d + * ``` + */ + readonly origin: string; + /** + * Gets and sets the password portion of the URL. + * + * ```js + * const myURL = new URL('https://abc:xyz@example.com'); + * console.log(myURL.password); + * // Prints xyz + * + * myURL.password = '123'; + * console.log(myURL.href); + * // Prints https://abc:123@example.com/ + * ``` + * + * Invalid URL characters included in the value assigned to the `password` property + * are `percent-encoded`. The selection of which characters to + * percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + password: string; + /** + * Gets and sets the path portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org/abc/xyz?123'); + * console.log(myURL.pathname); + * // Prints /abc/xyz + * + * myURL.pathname = '/abcdef'; + * console.log(myURL.href); + * // Prints https://example.org/abcdef?123 + * ``` + * + * Invalid URL characters included in the value assigned to the `pathname` property are `percent-encoded`. The selection of which characters + * to percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + pathname: string; + /** + * Gets and sets the port portion of the URL. + * + * The port value may be a number or a string containing a number in the range `0` to `65535` (inclusive). Setting the value to the default port of the `URL` objects given `protocol` will + * result in the `port` value becoming + * the empty string (`''`). + * + * The port value can be an empty string in which case the port depends on + * the protocol/scheme: + * + * + * + * Upon assigning a value to the port, the value will first be converted to a + * string using `.toString()`. + * + * If that string is invalid but it begins with a number, the leading number is + * assigned to `port`. 
+ * If the number lies outside the range denoted above, it is ignored. + * + * ```js + * const myURL = new URL('https://example.org:8888'); + * console.log(myURL.port); + * // Prints 8888 + * + * // Default ports are automatically transformed to the empty string + * // (HTTPS protocol's default port is 443) + * myURL.port = '443'; + * console.log(myURL.port); + * // Prints the empty string + * console.log(myURL.href); + * // Prints https://example.org/ + * + * myURL.port = 1234; + * console.log(myURL.port); + * // Prints 1234 + * console.log(myURL.href); + * // Prints https://example.org:1234/ + * + * // Completely invalid port strings are ignored + * myURL.port = 'abcd'; + * console.log(myURL.port); + * // Prints 1234 + * + * // Leading numbers are treated as a port number + * myURL.port = '5678abcd'; + * console.log(myURL.port); + * // Prints 5678 + * + * // Non-integers are truncated + * myURL.port = 1234.5678; + * console.log(myURL.port); + * // Prints 1234 + * + * // Out-of-range numbers which are not represented in scientific notation + * // will be ignored. + * myURL.port = 1e10; // 10000000000, will be range-checked as described below + * console.log(myURL.port); + * // Prints 1234 + * ``` + * + * Numbers which contain a decimal point, + * such as floating-point numbers or numbers in scientific notation, + * are not an exception to this rule. + * Leading numbers up to the decimal point will be set as the URL's port, + * assuming they are valid: + * + * ```js + * myURL.port = 4.567e21; + * console.log(myURL.port); + * // Prints 4 (because it is the leading number in the string '4.567e21') + * ``` + */ + port: string; + /** + * Gets and sets the protocol portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org'); + * console.log(myURL.protocol); + * // Prints https: + * + * myURL.protocol = 'ftp'; + * console.log(myURL.href); + * // Prints ftp://example.org/ + * ``` + * + * Invalid URL protocol values assigned to the `protocol` property are ignored. + */ + protocol: string; + /** + * Gets and sets the serialized query portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org/abc?123'); + * console.log(myURL.search); + * // Prints ?123 + * + * myURL.search = 'abc=xyz'; + * console.log(myURL.href); + * // Prints https://example.org/abc?abc=xyz + * ``` + * + * Any invalid URL characters appearing in the value assigned the `search`property will be `percent-encoded`. The selection of which + * characters to percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + search: string; + /** + * Gets the `URLSearchParams` object representing the query parameters of the + * URL. This property is read-only but the `URLSearchParams` object it provides + * can be used to mutate the URL instance; to replace the entirety of query + * parameters of the URL, use the {@link search} setter. See `URLSearchParams` documentation for details. + * + * Use care when using `.searchParams` to modify the `URL` because, + * per the WHATWG specification, the `URLSearchParams` object uses + * different rules to determine which characters to percent-encode. For + * instance, the `URL` object will not percent encode the ASCII tilde (`~`) + * character, while `URLSearchParams` will always encode it: + * + * ```js + * const myURL = new URL('https://example.org/abc?foo=~bar'); + * + * console.log(myURL.search); // prints ?foo=~bar + * + * // Modify the URL via searchParams... 
+ * myURL.searchParams.sort(); + * + * console.log(myURL.search); // prints ?foo=%7Ebar + * ``` + */ + readonly searchParams: URLSearchParams; + /** + * Gets and sets the username portion of the URL. + * + * ```js + * const myURL = new URL('https://abc:xyz@example.com'); + * console.log(myURL.username); + * // Prints abc + * + * myURL.username = '123'; + * console.log(myURL.href); + * // Prints https://123:xyz@example.com/ + * ``` + * + * Any invalid URL characters appearing in the value assigned the `username` property will be `percent-encoded`. The selection of which + * characters to percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + username: string; + /** + * The `toString()` method on the `URL` object returns the serialized URL. The + * value returned is equivalent to that of {@link href} and {@link toJSON}. + */ + toString(): string; + /** + * The `toJSON()` method on the `URL` object returns the serialized URL. The + * value returned is equivalent to that of {@link href} and {@link toString}. + * + * This method is automatically called when an `URL` object is serialized + * with [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). + * + * ```js + * const myURLs = [ + * new URL('https://www.example.com'), + * new URL('https://test.example.org'), + * ]; + * console.log(JSON.stringify(myURLs)); + * // Prints ["https://www.example.com/","https://test.example.org/"] + * ``` + */ + toJSON(): string; + } + /** + * The `URLSearchParams` API provides read and write access to the query of a `URL`. The `URLSearchParams` class can also be used standalone with one of the + * four following constructors. + * The `URLSearchParams` class is also available on the global object. + * + * The WHATWG `URLSearchParams` interface and the `querystring` module have + * similar purpose, but the purpose of the `querystring` module is more + * general, as it allows the customization of delimiter characters (`&` and `=`). + * On the other hand, this API is designed purely for URL query strings. + * + * ```js + * const myURL = new URL('https://example.org/?abc=123'); + * console.log(myURL.searchParams.get('abc')); + * // Prints 123 + * + * myURL.searchParams.append('abc', 'xyz'); + * console.log(myURL.href); + * // Prints https://example.org/?abc=123&abc=xyz + * + * myURL.searchParams.delete('abc'); + * myURL.searchParams.set('a', 'b'); + * console.log(myURL.href); + * // Prints https://example.org/?a=b + * + * const newSearchParams = new URLSearchParams(myURL.searchParams); + * // The above is equivalent to + * // const newSearchParams = new URLSearchParams(myURL.search); + * + * newSearchParams.append('a', 'c'); + * console.log(myURL.href); + * // Prints https://example.org/?a=b + * console.log(newSearchParams.toString()); + * // Prints a=b&a=c + * + * // newSearchParams.toString() is implicitly called + * myURL.search = newSearchParams; + * console.log(myURL.href); + * // Prints https://example.org/?a=b&a=c + * newSearchParams.delete('a'); + * console.log(myURL.href); + * // Prints https://example.org/?a=b&a=c + * ``` + * @since v7.5.0, v6.13.0 + */ + class URLSearchParams implements Iterable<[string, string]> { + constructor( + init?: + | URLSearchParams + | string + | Record<string, string | readonly string[]> + | Iterable<[string, string]> + | ReadonlyArray<[string, string]>, + ); + /** + * Append a new name-value pair to the query string.
+ */ + append(name: string, value: string): void; + /** + * If `value` is provided, removes all name-value pairs + * where name is `name` and value is `value`. + * + * If `value` is not provided, removes all name-value pairs whose name is `name`. + */ + delete(name: string, value?: string): void; + /** + * Returns an ES6 `Iterator` over each of the name-value pairs in the query. + * Each item of the iterator is a JavaScript `Array`. The first item of the `Array` is the `name`, the second item of the `Array` is the `value`. + * + * Alias for `urlSearchParams[@@iterator]()`. + */ + entries(): IterableIterator<[string, string]>; + /** + * Iterates over each name-value pair in the query and invokes the given function. + * + * ```js + * const myURL = new URL('https://example.org/?a=b&c=d'); + * myURL.searchParams.forEach((value, name, searchParams) => { + * console.log(name, value, myURL.searchParams === searchParams); + * }); + * // Prints: + * // a b true + * // c d true + * ``` + * @param fn Invoked for each name-value pair in the query + * @param thisArg To be used as `this` value for when `fn` is called + */ + forEach<TThis = this>( + fn: (this: TThis, value: string, name: string, searchParams: URLSearchParams) => void, + thisArg?: TThis, + ): void; + /** + * Returns the value of the first name-value pair whose name is `name`. If there + * are no such pairs, `null` is returned. + * @return or `null` if there is no name-value pair with the given `name`. + */ + get(name: string): string | null; + /** + * Returns the values of all name-value pairs whose name is `name`. If there are + * no such pairs, an empty array is returned. + */ + getAll(name: string): string[]; + /** + * Checks if the `URLSearchParams` object contains key-value pair(s) based on `name` and an optional `value` argument. + * + * If `value` is provided, returns `true` when name-value pair with + * same `name` and `value` exists. + * + * If `value` is not provided, returns `true` if there is at least one name-value + * pair whose name is `name`. + */ + has(name: string, value?: string): boolean; + /** + * Returns an ES6 `Iterator` over the names of each name-value pair. + * + * ```js + * const params = new URLSearchParams('foo=bar&foo=baz'); + * for (const name of params.keys()) { + * console.log(name); + * } + * // Prints: + * // foo + * // foo + * ``` + */ + keys(): IterableIterator<string>; + /** + * Sets the value in the `URLSearchParams` object associated with `name` to`value`. If there are any pre-existing name-value pairs whose names are `name`, + * set the first such pair's value to `value` and remove all others. If not, + * append the name-value pair to the query string. + * + * ```js + * const params = new URLSearchParams(); + * params.append('foo', 'bar'); + * params.append('foo', 'baz'); + * params.append('abc', 'def'); + * console.log(params.toString()); + * // Prints foo=bar&foo=baz&abc=def + * + * params.set('foo', 'def'); + * params.set('xyz', 'opq'); + * console.log(params.toString()); + * // Prints foo=def&abc=def&xyz=opq + * ``` + */ + set(name: string, value: string): void; + /** + * The total number of parameter entries. + * @since v19.8.0 + */ + readonly size: number; + /** + * Sort all existing name-value pairs in-place by their names. Sorting is done + * with a [stable sorting algorithm](https://en.wikipedia.org/wiki/Sorting_algorithm#Stability), so relative order between name-value pairs + * with the same name is preserved. + * + * This method can be used, in particular, to increase cache hits.
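A hedged sketch of the optional `value` argument on `has()` and `delete()` documented above; it assumes a Node.js release new enough to accept the second argument (older releases ignore it):

```ts
const params = new URLSearchParams("foo=bar&foo=baz&abc=def");

console.log(params.has("foo", "baz")); // true  (that exact name/value pair exists)
console.log(params.has("foo", "qux")); // false

params.delete("foo", "baz");           // removes only the foo=baz pair
console.log(params.toString());        // 'foo=bar&abc=def'

params.delete("foo");                  // removes every remaining foo pair
console.log(params.toString());        // 'abc=def'
```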
+ * + * ```js + * const params = new URLSearchParams('query[]=abc&type=search&query[]=123'); + * params.sort(); + * console.log(params.toString()); + * // Prints query%5B%5D=abc&query%5B%5D=123&type=search + * ``` + * @since v7.7.0, v6.13.0 + */ + sort(): void; + /** + * Returns the search parameters serialized as a string, with characters + * percent-encoded where necessary. + */ + toString(): string; + /** + * Returns an ES6 `Iterator` over the values of each name-value pair. + */ + values(): IterableIterator; + [Symbol.iterator](): IterableIterator<[string, string]>; + } + import { URL as _URL, URLSearchParams as _URLSearchParams } from "url"; + global { + interface URLSearchParams extends _URLSearchParams {} + interface URL extends _URL {} + interface Global { + URL: typeof _URL; + URLSearchParams: typeof _URLSearchParams; + } + /** + * `URL` class is a global reference for `require('url').URL` + * https://nodejs.org/api/url.html#the-whatwg-url-api + * @since v10.0.0 + */ + var URL: typeof globalThis extends { + onmessage: any; + URL: infer T; + } ? T + : typeof _URL; + /** + * `URLSearchParams` class is a global reference for `require('url').URLSearchParams` + * https://nodejs.org/api/url.html#class-urlsearchparams + * @since v10.0.0 + */ + var URLSearchParams: typeof globalThis extends { + onmessage: any; + URLSearchParams: infer T; + } ? T + : typeof _URLSearchParams; + } +} +declare module "node:url" { + export * from "url"; +} diff --git a/dist/node_modules/@types/node/ts4.8/util.d.ts b/dist/node_modules/@types/node/ts4.8/util.d.ts new file mode 100644 index 0000000..08712f5 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/util.d.ts @@ -0,0 +1,2183 @@ +/** + * The `node:util` module supports the needs of Node.js internal APIs. Many of the + * utilities are useful for application and module developers as well. To access + * it: + * + * ```js + * const util = require('node:util'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/util.js) + */ +declare module "util" { + import * as types from "node:util/types"; + export interface InspectOptions { + /** + * If `true`, object's non-enumerable symbols and properties are included in the formatted result. + * `WeakMap` and `WeakSet` entries are also included as well as user defined prototype properties (excluding method properties). + * @default false + */ + showHidden?: boolean | undefined; + /** + * Specifies the number of times to recurse while formatting object. + * This is useful for inspecting large objects. + * To recurse up to the maximum call stack size pass `Infinity` or `null`. + * @default 2 + */ + depth?: number | null | undefined; + /** + * If `true`, the output is styled with ANSI color codes. Colors are customizable. + */ + colors?: boolean | undefined; + /** + * If `false`, `[util.inspect.custom](depth, opts, inspect)` functions are not invoked. + * @default true + */ + customInspect?: boolean | undefined; + /** + * If `true`, `Proxy` inspection includes the target and handler objects. + * @default false + */ + showProxy?: boolean | undefined; + /** + * Specifies the maximum number of `Array`, `TypedArray`, `WeakMap`, and `WeakSet` elements + * to include when formatting. Set to `null` or `Infinity` to show all elements. + * Set to `0` or negative to show no elements. + * @default 100 + */ + maxArrayLength?: number | null | undefined; + /** + * Specifies the maximum number of characters to + * include when formatting. Set to `null` or `Infinity` to show all elements. 
+ * Set to `0` or negative to show no characters. + * @default 10000 + */ + maxStringLength?: number | null | undefined; + /** + * The length at which input values are split across multiple lines. + * Set to `Infinity` to format the input as a single line + * (in combination with `compact` set to `true` or any number >= `1`). + * @default 80 + */ + breakLength?: number | undefined; + /** + * Setting this to `false` causes each object key + * to be displayed on a new line. It will also add new lines to text that is + * longer than `breakLength`. If set to a number, the most `n` inner elements + * are united on a single line as long as all properties fit into + * `breakLength`. Short array elements are also grouped together. Note that no + * text will be reduced below 16 characters, no matter the `breakLength` size. + * For more information, see the example below. + * @default true + */ + compact?: boolean | number | undefined; + /** + * If set to `true` or a function, all properties of an object, and `Set` and `Map` + * entries are sorted in the resulting string. + * If set to `true` the default sort is used. + * If set to a function, it is used as a compare function. + */ + sorted?: boolean | ((a: string, b: string) => number) | undefined; + /** + * If set to `true`, getters are going to be + * inspected as well. If set to `'get'` only getters without setter are going + * to be inspected. If set to `'set'` only getters having a corresponding + * setter are going to be inspected. This might cause side effects depending on + * the getter function. + * @default false + */ + getters?: "get" | "set" | boolean | undefined; + /** + * If set to `true`, an underscore is used to separate every three digits in all bigints and numbers. + * @default false + */ + numericSeparator?: boolean | undefined; + } + export type Style = + | "special" + | "number" + | "bigint" + | "boolean" + | "undefined" + | "null" + | "string" + | "symbol" + | "date" + | "regexp" + | "module"; + export type CustomInspectFunction = (depth: number, options: InspectOptionsStylized) => any; // TODO: , inspect: inspect + export interface InspectOptionsStylized extends InspectOptions { + stylize(text: string, styleType: Style): string; + } + /** + * The `util.format()` method returns a formatted string using the first argument + * as a `printf`\-like format string which can contain zero or more format + * specifiers. Each specifier is replaced with the converted value from the + * corresponding argument. Supported specifiers are: + * + * If a specifier does not have a corresponding argument, it is not replaced: + * + * ```js + * util.format('%s:%s', 'foo'); + * // Returns: 'foo:%s' + * ``` + * + * Values that are not part of the format string are formatted using`util.inspect()` if their type is not `string`. 
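A small, hedged sketch of the truncation-related `InspectOptions` fields above (`maxArrayLength`, `maxStringLength`); the exact wording of the truncation notice may differ between Node.js versions:

```ts
import { inspect } from "node:util";

console.log(inspect([1, 2, 3, 4, 5], { maxArrayLength: 2 }));
// e.g. [ 1, 2, ... 3 more items ]

console.log(inspect("a".repeat(50), { maxStringLength: 10 }));
// e.g. 'aaaaaaaaaa'... 40 more characters
```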
+ * + * If there are more arguments passed to the `util.format()` method than the + * number of specifiers, the extra arguments are concatenated to the returned + * string, separated by spaces: + * + * ```js + * util.format('%s:%s', 'foo', 'bar', 'baz'); + * // Returns: 'foo:bar baz' + * ``` + * + * If the first argument does not contain a valid format specifier, `util.format()`returns a string that is the concatenation of all arguments separated by spaces: + * + * ```js + * util.format(1, 2, 3); + * // Returns: '1 2 3' + * ``` + * + * If only one argument is passed to `util.format()`, it is returned as it is + * without any formatting: + * + * ```js + * util.format('%% %s'); + * // Returns: '%% %s' + * ``` + * + * `util.format()` is a synchronous method that is intended as a debugging tool. + * Some input values can have a significant performance overhead that can block the + * event loop. Use this function with care and never in a hot code path. + * @since v0.5.3 + * @param format A `printf`-like format string. + */ + export function format(format?: any, ...param: any[]): string; + /** + * This function is identical to {@link format}, except in that it takes + * an `inspectOptions` argument which specifies options that are passed along to {@link inspect}. + * + * ```js + * util.formatWithOptions({ colors: true }, 'See object %O', { foo: 42 }); + * // Returns 'See object { foo: 42 }', where `42` is colored as a number + * // when printed to a terminal. + * ``` + * @since v10.0.0 + */ + export function formatWithOptions(inspectOptions: InspectOptions, format?: any, ...param: any[]): string; + /** + * Returns the string name for a numeric error code that comes from a Node.js API. + * The mapping between error codes and error names is platform-dependent. + * See `Common System Errors` for the names of common errors. + * + * ```js + * fs.access('file/that/does/not/exist', (err) => { + * const name = util.getSystemErrorName(err.errno); + * console.error(name); // ENOENT + * }); + * ``` + * @since v9.7.0 + */ + export function getSystemErrorName(err: number): string; + /** + * Returns a Map of all system error codes available from the Node.js API. + * The mapping between error codes and error names is platform-dependent. + * See `Common System Errors` for the names of common errors. + * + * ```js + * fs.access('file/that/does/not/exist', (err) => { + * const errorMap = util.getSystemErrorMap(); + * const name = errorMap.get(err.errno); + * console.error(name); // ENOENT + * }); + * ``` + * @since v16.0.0, v14.17.0 + */ + export function getSystemErrorMap(): Map; + /** + * The `util.log()` method prints the given `string` to `stdout` with an included + * timestamp. + * + * ```js + * const util = require('node:util'); + * + * util.log('Timestamped message.'); + * ``` + * @since v0.3.0 + * @deprecated Since v6.0.0 - Use a third party module instead. + */ + export function log(string: string): void; + /** + * Returns the `string` after replacing any surrogate code points + * (or equivalently, any unpaired surrogate code units) with the + * Unicode "replacement character" U+FFFD. + * @since v16.8.0, v14.18.0 + */ + export function toUSVString(string: string): string; + /** + * Creates and returns an `AbortController` instance whose `AbortSignal` is marked + * as transferable and can be used with `structuredClone()` or `postMessage()`. 
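A hedged sketch of `toUSVString()` declared above: lone surrogate code units are replaced with U+FFFD so the result is always a well-formed Unicode string:

```ts
import { toUSVString } from "node:util";

const withLoneSurrogate = "ab\uD800cd"; // '\uD800' has no matching low surrogate
console.log(toUSVString(withLoneSurrogate)); // 'ab\uFFFDcd'
console.log(toUSVString("plain text"));      // unchanged
```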
+ * @since v18.11.0 + * @experimental + * @returns A transferable AbortController + */ + export function transferableAbortController(): AbortController; + /** + * Marks the given `AbortSignal` as transferable so that it can be used with`structuredClone()` and `postMessage()`. + * + * ```js + * const signal = transferableAbortSignal(AbortSignal.timeout(100)); + * const channel = new MessageChannel(); + * channel.port2.postMessage(signal, [signal]); + * ``` + * @since v18.11.0 + * @experimental + * @param signal The AbortSignal + * @returns The same AbortSignal + */ + export function transferableAbortSignal(signal: AbortSignal): AbortSignal; + /** + * Listens to abort event on the provided `signal` and + * returns a promise that is fulfilled when the `signal` is + * aborted. If the passed `resource` is garbage collected before the `signal` is + * aborted, the returned promise shall remain pending indefinitely. + * + * ```js + * import { aborted } from 'node:util'; + * + * const dependent = obtainSomethingAbortable(); + * + * aborted(dependent.signal, dependent).then(() => { + * // Do something when dependent is aborted. + * }); + * + * dependent.on('event', () => { + * dependent.abort(); + * }); + * ``` + * @since v19.7.0 + * @experimental + * @param resource Any non-null entity, reference to which is held weakly. + */ + export function aborted(signal: AbortSignal, resource: any): Promise; + /** + * The `util.inspect()` method returns a string representation of `object` that is + * intended for debugging. The output of `util.inspect` may change at any time + * and should not be depended upon programmatically. Additional `options` may be + * passed that alter the result.`util.inspect()` will use the constructor's name and/or `@@toStringTag` to make + * an identifiable tag for an inspected value. + * + * ```js + * class Foo { + * get [Symbol.toStringTag]() { + * return 'bar'; + * } + * } + * + * class Bar {} + * + * const baz = Object.create(null, { [Symbol.toStringTag]: { value: 'foo' } }); + * + * util.inspect(new Foo()); // 'Foo [bar] {}' + * util.inspect(new Bar()); // 'Bar {}' + * util.inspect(baz); // '[foo] {}' + * ``` + * + * Circular references point to their anchor by using a reference index: + * + * ```js + * const { inspect } = require('node:util'); + * + * const obj = {}; + * obj.a = [obj]; + * obj.b = {}; + * obj.b.inner = obj.b; + * obj.b.obj = obj; + * + * console.log(inspect(obj)); + * // { + * // a: [ [Circular *1] ], + * // b: { inner: [Circular *2], obj: [Circular *1] } + * // } + * ``` + * + * The following example inspects all properties of the `util` object: + * + * ```js + * const util = require('node:util'); + * + * console.log(util.inspect(util, { showHidden: true, depth: null })); + * ``` + * + * The following example highlights the effect of the `compact` option: + * + * ```js + * const util = require('node:util'); + * + * const o = { + * a: [1, 2, [[ + * 'Lorem ipsum dolor sit amet,\nconsectetur adipiscing elit, sed do ' + + * 'eiusmod \ntempor incididunt ut labore et dolore magna aliqua.', + * 'test', + * 'foo']], 4], + * b: new Map([['za', 1], ['zb', 'test']]), + * }; + * console.log(util.inspect(o, { compact: true, depth: 5, breakLength: 80 })); + * + * // { a: + * // [ 1, + * // 2, + * // [ [ 'Lorem ipsum dolor sit amet,\nconsectetur [...]', // A long line + * // 'test', + * // 'foo' ] ], + * // 4 ], + * // b: Map(2) { 'za' => 1, 'zb' => 'test' } } + * + * // Setting `compact` to false or an integer creates more reader friendly output. 
+ * console.log(util.inspect(o, { compact: false, depth: 5, breakLength: 80 })); + * + * // { + * // a: [ + * // 1, + * // 2, + * // [ + * // [ + * // 'Lorem ipsum dolor sit amet,\n' + + * // 'consectetur adipiscing elit, sed do eiusmod \n' + + * // 'tempor incididunt ut labore et dolore magna aliqua.', + * // 'test', + * // 'foo' + * // ] + * // ], + * // 4 + * // ], + * // b: Map(2) { + * // 'za' => 1, + * // 'zb' => 'test' + * // } + * // } + * + * // Setting `breakLength` to e.g. 150 will print the "Lorem ipsum" text in a + * // single line. + * ``` + * + * The `showHidden` option allows [`WeakMap`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakMap) and + * [`WeakSet`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet) entries to be + * inspected. If there are more entries than `maxArrayLength`, there is no + * guarantee which entries are displayed. That means retrieving the same [`WeakSet`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet) entries twice may + * result in different output. Furthermore, entries + * with no remaining strong references may be garbage collected at any time. + * + * ```js + * const { inspect } = require('node:util'); + * + * const obj = { a: 1 }; + * const obj2 = { b: 2 }; + * const weakSet = new WeakSet([obj, obj2]); + * + * console.log(inspect(weakSet, { showHidden: true })); + * // WeakSet { { a: 1 }, { b: 2 } } + * ``` + * + * The `sorted` option ensures that an object's property insertion order does not + * impact the result of `util.inspect()`. + * + * ```js + * const { inspect } = require('node:util'); + * const assert = require('node:assert'); + * + * const o1 = { + * b: [2, 3, 1], + * a: '`a` comes before `b`', + * c: new Set([2, 3, 1]), + * }; + * console.log(inspect(o1, { sorted: true })); + * // { a: '`a` comes before `b`', b: [ 2, 3, 1 ], c: Set(3) { 1, 2, 3 } } + * console.log(inspect(o1, { sorted: (a, b) => b.localeCompare(a) })); + * // { c: Set(3) { 3, 2, 1 }, b: [ 2, 3, 1 ], a: '`a` comes before `b`' } + * + * const o2 = { + * c: new Set([2, 1, 3]), + * a: '`a` comes before `b`', + * b: [2, 3, 1], + * }; + * assert.strict.equal( + * inspect(o1, { sorted: true }), + * inspect(o2, { sorted: true }), + * ); + * ``` + * + * The `numericSeparator` option adds an underscore every three digits to all + * numbers. + * + * ```js + * const { inspect } = require('node:util'); + * + * const thousand = 1_000; + * const million = 1_000_000; + * const bigNumber = 123_456_789n; + * const bigDecimal = 1_234.123_45; + * + * console.log(inspect(thousand, { numericSeparator: true })); + * // 1_000 + * console.log(inspect(million, { numericSeparator: true })); + * // 1_000_000 + * console.log(inspect(bigNumber, { numericSeparator: true })); + * // 123_456_789n + * console.log(inspect(bigDecimal, { numericSeparator: true })); + * // 1_234.123_45 + * ``` + * + * `util.inspect()` is a synchronous method intended for debugging. Its maximum + * output length is approximately 128 MiB. Inputs that result in longer output will + * be truncated. + * @since v0.3.0 + * @param object Any JavaScript primitive or `Object`. + * @return The representation of `object`. 
+ */ + export function inspect(object: any, showHidden?: boolean, depth?: number | null, color?: boolean): string; + export function inspect(object: any, options?: InspectOptions): string; + export namespace inspect { + let colors: NodeJS.Dict<[number, number]>; + let styles: { + [K in Style]: string; + }; + let defaultOptions: InspectOptions; + /** + * Allows changing inspect settings from the repl. + */ + let replDefaults: InspectOptions; + /** + * That can be used to declare custom inspect functions. + */ + const custom: unique symbol; + } + /** + * Alias for [`Array.isArray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/isArray). + * + * Returns `true` if the given `object` is an `Array`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isArray([]); + * // Returns: true + * util.isArray(new Array()); + * // Returns: true + * util.isArray({}); + * // Returns: false + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Use `isArray` instead. + */ + export function isArray(object: unknown): object is unknown[]; + /** + * Returns `true` if the given `object` is a `RegExp`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isRegExp(/some regexp/); + * // Returns: true + * util.isRegExp(new RegExp('another regexp')); + * // Returns: true + * util.isRegExp({}); + * // Returns: false + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Deprecated + */ + export function isRegExp(object: unknown): object is RegExp; + /** + * Returns `true` if the given `object` is a `Date`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isDate(new Date()); + * // Returns: true + * util.isDate(Date()); + * // false (without 'new' returns a String) + * util.isDate({}); + * // Returns: false + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Use {@link types.isDate} instead. + */ + export function isDate(object: unknown): object is Date; + /** + * Returns `true` if the given `object` is an `Error`. Otherwise, returns`false`. + * + * ```js + * const util = require('node:util'); + * + * util.isError(new Error()); + * // Returns: true + * util.isError(new TypeError()); + * // Returns: true + * util.isError({ name: 'Error', message: 'an error occurred' }); + * // Returns: false + * ``` + * + * This method relies on `Object.prototype.toString()` behavior. It is + * possible to obtain an incorrect result when the `object` argument manipulates`@@toStringTag`. + * + * ```js + * const util = require('node:util'); + * const obj = { name: 'Error', message: 'an error occurred' }; + * + * util.isError(obj); + * // Returns: false + * obj[Symbol.toStringTag] = 'Error'; + * util.isError(obj); + * // Returns: true + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Use {@link types.isNativeError} instead. + */ + export function isError(object: unknown): object is Error; + /** + * Usage of `util.inherits()` is discouraged. Please use the ES6 `class` and`extends` keywords to get language level inheritance support. Also note + * that the two styles are [semantically incompatible](https://github.com/nodejs/node/issues/4179). + * + * Inherit the prototype methods from one [constructor](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/constructor) into another. The + * prototype of `constructor` will be set to a new object created from`superConstructor`. 
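The `inspect.custom` symbol exported from the namespace above can be attached to a class to override how `util.inspect()` (and therefore `console.log()`) renders instances; a minimal sketch using a made-up `Box` class:

```ts
import { inspect } from "node:util";

class Box {
  constructor(private value: number) {}
  // util.inspect() calls this instead of the default object formatter.
  [inspect.custom]() {
    return `Box(${this.value})`;
  }
}

console.log(inspect(new Box(42))); // 'Box(42)'
```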
+ * + * This mainly adds some input validation on top of`Object.setPrototypeOf(constructor.prototype, superConstructor.prototype)`. + * As an additional convenience, `superConstructor` will be accessible + * through the `constructor.super_` property. + * + * ```js + * const util = require('node:util'); + * const EventEmitter = require('node:events'); + * + * function MyStream() { + * EventEmitter.call(this); + * } + * + * util.inherits(MyStream, EventEmitter); + * + * MyStream.prototype.write = function(data) { + * this.emit('data', data); + * }; + * + * const stream = new MyStream(); + * + * console.log(stream instanceof EventEmitter); // true + * console.log(MyStream.super_ === EventEmitter); // true + * + * stream.on('data', (data) => { + * console.log(`Received data: "${data}"`); + * }); + * stream.write('It works!'); // Received data: "It works!" + * ``` + * + * ES6 example using `class` and `extends`: + * + * ```js + * const EventEmitter = require('node:events'); + * + * class MyStream extends EventEmitter { + * write(data) { + * this.emit('data', data); + * } + * } + * + * const stream = new MyStream(); + * + * stream.on('data', (data) => { + * console.log(`Received data: "${data}"`); + * }); + * stream.write('With ES6'); + * ``` + * @since v0.3.0 + * @legacy Use ES2015 class syntax and `extends` keyword instead. + */ + export function inherits(constructor: unknown, superConstructor: unknown): void; + export type DebugLoggerFunction = (msg: string, ...param: unknown[]) => void; + export interface DebugLogger extends DebugLoggerFunction { + enabled: boolean; + } + /** + * The `util.debuglog()` method is used to create a function that conditionally + * writes debug messages to `stderr` based on the existence of the `NODE_DEBUG`environment variable. If the `section` name appears within the value of that + * environment variable, then the returned function operates similar to `console.error()`. If not, then the returned function is a no-op. + * + * ```js + * const util = require('node:util'); + * const debuglog = util.debuglog('foo'); + * + * debuglog('hello from foo [%d]', 123); + * ``` + * + * If this program is run with `NODE_DEBUG=foo` in the environment, then + * it will output something like: + * + * ```console + * FOO 3245: hello from foo [123] + * ``` + * + * where `3245` is the process id. If it is not run with that + * environment variable set, then it will not print anything. + * + * The `section` supports wildcard also: + * + * ```js + * const util = require('node:util'); + * const debuglog = util.debuglog('foo-bar'); + * + * debuglog('hi there, it\'s foo-bar [%d]', 2333); + * ``` + * + * if it is run with `NODE_DEBUG=foo*` in the environment, then it will output + * something like: + * + * ```console + * FOO-BAR 3257: hi there, it's foo-bar [2333] + * ``` + * + * Multiple comma-separated `section` names may be specified in the `NODE_DEBUG`environment variable: `NODE_DEBUG=fs,net,tls`. + * + * The optional `callback` argument can be used to replace the logging function + * with a different function that doesn't have any initialization or + * unnecessary wrapping. + * + * ```js + * const util = require('node:util'); + * let debuglog = util.debuglog('internals', (debug) => { + * // Replace with a logging function that optimizes out + * // testing if the section is enabled + * debuglog = debug; + * }); + * ``` + * @since v0.11.3 + * @param section A string identifying the portion of the application for which the `debuglog` function is being created. 
+ * @param callback A callback invoked the first time the logging function is called with a function argument that is a more optimized logging function. + * @return The logging function + */ + export function debuglog(section: string, callback?: (fn: DebugLoggerFunction) => void): DebugLogger; + export const debug: typeof debuglog; + /** + * Returns `true` if the given `object` is a `Boolean`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isBoolean(1); + * // Returns: false + * util.isBoolean(0); + * // Returns: false + * util.isBoolean(false); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'boolean'` instead. + */ + export function isBoolean(object: unknown): object is boolean; + /** + * Returns `true` if the given `object` is a `Buffer`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isBuffer({ length: 0 }); + * // Returns: false + * util.isBuffer([]); + * // Returns: false + * util.isBuffer(Buffer.from('hello world')); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `isBuffer` instead. + */ + export function isBuffer(object: unknown): object is Buffer; + /** + * Returns `true` if the given `object` is a `Function`. Otherwise, returns`false`. + * + * ```js + * const util = require('node:util'); + * + * function Foo() {} + * const Bar = () => {}; + * + * util.isFunction({}); + * // Returns: false + * util.isFunction(Foo); + * // Returns: true + * util.isFunction(Bar); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'function'` instead. + */ + export function isFunction(object: unknown): boolean; + /** + * Returns `true` if the given `object` is strictly `null`. Otherwise, returns`false`. + * + * ```js + * const util = require('node:util'); + * + * util.isNull(0); + * // Returns: false + * util.isNull(undefined); + * // Returns: false + * util.isNull(null); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value === null` instead. + */ + export function isNull(object: unknown): object is null; + /** + * Returns `true` if the given `object` is `null` or `undefined`. Otherwise, + * returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isNullOrUndefined(0); + * // Returns: false + * util.isNullOrUndefined(undefined); + * // Returns: true + * util.isNullOrUndefined(null); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value === undefined || value === null` instead. + */ + export function isNullOrUndefined(object: unknown): object is null | undefined; + /** + * Returns `true` if the given `object` is a `Number`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isNumber(false); + * // Returns: false + * util.isNumber(Infinity); + * // Returns: true + * util.isNumber(0); + * // Returns: true + * util.isNumber(NaN); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'number'` instead. + */ + export function isNumber(object: unknown): object is number; + /** + * Returns `true` if the given `object` is strictly an `Object`**and** not a`Function` (even though functions are objects in JavaScript). + * Otherwise, returns `false`. 
+ * + * ```js + * const util = require('node:util'); + * + * util.isObject(5); + * // Returns: false + * util.isObject(null); + * // Returns: false + * util.isObject({}); + * // Returns: true + * util.isObject(() => {}); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value !== null && typeof value === 'object'` instead. + */ + export function isObject(object: unknown): boolean; + /** + * Returns `true` if the given `object` is a primitive type. Otherwise, returns`false`. + * + * ```js + * const util = require('node:util'); + * + * util.isPrimitive(5); + * // Returns: true + * util.isPrimitive('foo'); + * // Returns: true + * util.isPrimitive(false); + * // Returns: true + * util.isPrimitive(null); + * // Returns: true + * util.isPrimitive(undefined); + * // Returns: true + * util.isPrimitive({}); + * // Returns: false + * util.isPrimitive(() => {}); + * // Returns: false + * util.isPrimitive(/^$/); + * // Returns: false + * util.isPrimitive(new Date()); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `(typeof value !== 'object' && typeof value !== 'function') || value === null` instead. + */ + export function isPrimitive(object: unknown): boolean; + /** + * Returns `true` if the given `object` is a `string`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isString(''); + * // Returns: true + * util.isString('foo'); + * // Returns: true + * util.isString(String('foo')); + * // Returns: true + * util.isString(5); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'string'` instead. + */ + export function isString(object: unknown): object is string; + /** + * Returns `true` if the given `object` is a `Symbol`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * util.isSymbol(5); + * // Returns: false + * util.isSymbol('foo'); + * // Returns: false + * util.isSymbol(Symbol('foo')); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'symbol'` instead. + */ + export function isSymbol(object: unknown): object is symbol; + /** + * Returns `true` if the given `object` is `undefined`. Otherwise, returns `false`. + * + * ```js + * const util = require('node:util'); + * + * const foo = undefined; + * util.isUndefined(5); + * // Returns: false + * util.isUndefined(foo); + * // Returns: true + * util.isUndefined(null); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value === undefined` instead. + */ + export function isUndefined(object: unknown): object is undefined; + /** + * The `util.deprecate()` method wraps `fn` (which may be a function or class) in + * such a way that it is marked as deprecated. + * + * ```js + * const util = require('node:util'); + * + * exports.obsoleteFunction = util.deprecate(() => { + * // Do something here. + * }, 'obsoleteFunction() is deprecated. Use newShinyFunction() instead.'); + * ``` + * + * When called, `util.deprecate()` will return a function that will emit a`DeprecationWarning` using the `'warning'` event. The warning will + * be emitted and printed to `stderr` the first time the returned function is + * called. After the warning is emitted, the wrapped function is called without + * emitting a warning. + * + * If the same optional `code` is supplied in multiple calls to `util.deprecate()`, + * the warning will be emitted only once for that `code`. 
+ * + * ```js + * const util = require('node:util'); + * + * const fn1 = util.deprecate(someFunction, someMessage, 'DEP0001'); + * const fn2 = util.deprecate(someOtherFunction, someOtherMessage, 'DEP0001'); + * fn1(); // Emits a deprecation warning with code DEP0001 + * fn2(); // Does not emit a deprecation warning because it has the same code + * ``` + * + * If either the `--no-deprecation` or `--no-warnings` command-line flags are + * used, or if the `process.noDeprecation` property is set to `true`_prior_ to + * the first deprecation warning, the `util.deprecate()` method does nothing. + * + * If the `--trace-deprecation` or `--trace-warnings` command-line flags are set, + * or the `process.traceDeprecation` property is set to `true`, a warning and a + * stack trace are printed to `stderr` the first time the deprecated function is + * called. + * + * If the `--throw-deprecation` command-line flag is set, or the`process.throwDeprecation` property is set to `true`, then an exception will be + * thrown when the deprecated function is called. + * + * The `--throw-deprecation` command-line flag and `process.throwDeprecation`property take precedence over `--trace-deprecation` and`process.traceDeprecation`. + * @since v0.8.0 + * @param fn The function that is being deprecated. + * @param msg A warning message to display when the deprecated function is invoked. + * @param code A deprecation code. See the `list of deprecated APIs` for a list of codes. + * @return The deprecated function wrapped to emit a warning. + */ + export function deprecate(fn: T, msg: string, code?: string): T; + /** + * Returns `true` if there is deep strict equality between `val1` and `val2`. + * Otherwise, returns `false`. + * + * See `assert.deepStrictEqual()` for more information about deep strict + * equality. + * @since v9.0.0 + */ + export function isDeepStrictEqual(val1: unknown, val2: unknown): boolean; + /** + * Returns `str` with any ANSI escape codes removed. + * + * ```js + * console.log(util.stripVTControlCharacters('\u001B[4mvalue\u001B[0m')); + * // Prints "value" + * ``` + * @since v16.11.0 + */ + export function stripVTControlCharacters(str: string): string; + /** + * Takes an `async` function (or a function that returns a `Promise`) and returns a + * function following the error-first callback style, i.e. taking + * an `(err, value) => ...` callback as the last argument. In the callback, the + * first argument will be the rejection reason (or `null` if the `Promise`resolved), and the second argument will be the resolved value. + * + * ```js + * const util = require('node:util'); + * + * async function fn() { + * return 'hello world'; + * } + * const callbackFunction = util.callbackify(fn); + * + * callbackFunction((err, ret) => { + * if (err) throw err; + * console.log(ret); + * }); + * ``` + * + * Will print: + * + * ```text + * hello world + * ``` + * + * The callback is executed asynchronously, and will have a limited stack trace. + * If the callback throws, the process will emit an `'uncaughtException'` event, and if not handled will exit. + * + * Since `null` has a special meaning as the first argument to a callback, if a + * wrapped function rejects a `Promise` with a falsy value as a reason, the value + * is wrapped in an `Error` with the original value stored in a field named`reason`. 
+ *
+ * ```js
+ * function fn() {
+ *   return Promise.reject(null);
+ * }
+ * const callbackFunction = util.callbackify(fn);
+ *
+ * callbackFunction((err, ret) => {
+ *   // When the Promise was rejected with `null` it is wrapped with an Error and
+ *   // the original value is stored in `reason`.
+ *   err && Object.hasOwn(err, 'reason') && err.reason === null; // true
+ * });
+ * ```
+ * @since v8.2.0
+ * @param fn An `async` function
+ * @return a callback style function
+ */
+ export function callbackify(fn: () => Promise<void>): (callback: (err: NodeJS.ErrnoException) => void) => void;
+ export function callbackify<TResult>(
+ fn: () => Promise<TResult>,
+ ): (callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void;
+ export function callbackify<T1>(
+ fn: (arg1: T1) => Promise<void>,
+ ): (arg1: T1, callback: (err: NodeJS.ErrnoException) => void) => void;
+ export function callbackify<T1, TResult>(
+ fn: (arg1: T1) => Promise<TResult>,
+ ): (arg1: T1, callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void;
+ export function callbackify<T1, T2>(
+ fn: (arg1: T1, arg2: T2) => Promise<void>,
+ ): (arg1: T1, arg2: T2, callback: (err: NodeJS.ErrnoException) => void) => void;
+ export function callbackify<T1, T2, TResult>(
+ fn: (arg1: T1, arg2: T2) => Promise<TResult>,
+ ): (arg1: T1, arg2: T2, callback: (err: NodeJS.ErrnoException | null, result: TResult) => void) => void;
+ export function callbackify<T1, T2, T3>(
+ fn: (arg1: T1, arg2: T2, arg3: T3) => Promise<void>,
+ ): (arg1: T1, arg2: T2, arg3: T3, callback: (err: NodeJS.ErrnoException) => void) => void;
+ export function callbackify<T1, T2, T3, TResult>(
+ fn: (arg1: T1, arg2: T2, arg3: T3) => Promise<TResult>,
+ ): (arg1: T1, arg2: T2, arg3: T3, callback: (err: NodeJS.ErrnoException | null, result: TResult) => void) => void;
+ export function callbackify<T1, T2, T3, T4>(
+ fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise<void>,
+ ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err: NodeJS.ErrnoException) => void) => void;
+ export function callbackify<T1, T2, T3, T4, TResult>(
+ fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise<TResult>,
+ ): (
+ arg1: T1,
+ arg2: T2,
+ arg3: T3,
+ arg4: T4,
+ callback: (err: NodeJS.ErrnoException | null, result: TResult) => void,
+ ) => void;
+ export function callbackify<T1, T2, T3, T4, T5>(
+ fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise<void>,
+ ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err: NodeJS.ErrnoException) => void) => void;
+ export function callbackify<T1, T2, T3, T4, T5, TResult>(
+ fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise<TResult>,
+ ): (
+ arg1: T1,
+ arg2: T2,
+ arg3: T3,
+ arg4: T4,
+ arg5: T5,
+ callback: (err: NodeJS.ErrnoException | null, result: TResult) => void,
+ ) => void;
+ export function callbackify<T1, T2, T3, T4, T5, T6>(
+ fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6) => Promise<void>,
+ ): (
+ arg1: T1,
+ arg2: T2,
+ arg3: T3,
+ arg4: T4,
+ arg5: T5,
+ arg6: T6,
+ callback: (err: NodeJS.ErrnoException) => void,
+ ) => void;
+ export function callbackify<T1, T2, T3, T4, T5, T6, TResult>(
+ fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6) => Promise<TResult>,
+ ): (
+ arg1: T1,
+ arg2: T2,
+ arg3: T3,
+ arg4: T4,
+ arg5: T5,
+ arg6: T6,
+ callback: (err: NodeJS.ErrnoException | null, result: TResult) => void,
+ ) => void;
+ export interface CustomPromisifyLegacy<TCustom extends Function> extends Function {
+ __promisify__: TCustom;
+ }
+ export interface CustomPromisifySymbol<TCustom extends Function> extends Function {
+ [promisify.custom]: TCustom;
+ }
+ export type CustomPromisify<TCustom extends Function> =
+ | CustomPromisifySymbol<TCustom>
+ | CustomPromisifyLegacy<TCustom>;
+ /**
+ * Takes a function following the common error-first callback style, i.e. taking
+ * an `(err, value) => ...` callback as the last argument, and returns a version
+ * that returns promises.
+ *
+ * ```js
+ * const util = require('node:util');
+ * const fs = require('node:fs');
+ *
+ * const stat = util.promisify(fs.stat);
+ * stat('.').then((stats) => {
+ *   // Do something with `stats`
+ * }).catch((error) => {
+ *   // Handle the error.
+ * });
+ * ```
+ *
+ * Or, equivalently using `async function`s:
+ *
+ * ```js
+ * const util = require('node:util');
+ * const fs = require('node:fs');
+ *
+ * const stat = util.promisify(fs.stat);
+ *
+ * async function callStat() {
+ *   const stats = await stat('.');
+ *   console.log(`This directory is owned by ${stats.uid}`);
+ * }
+ *
+ * callStat();
+ * ```
+ *
+ * If there is an `original[util.promisify.custom]` property present, `promisify` will return its value, see `Custom promisified functions`.
+ *
+ * `promisify()` assumes that `original` is a function taking a callback as its
+ * final argument in all cases. If `original` is not a function, `promisify()` will throw an error. If `original` is a function but its last argument is not
+ * an error-first callback, it will still be passed an error-first
+ * callback as its last argument.
+ *
+ * Using `promisify()` on class methods or other methods that use `this` may not
+ * work as expected unless handled specially:
+ *
+ * ```js
+ * const util = require('node:util');
+ *
+ * class Foo {
+ *   constructor() {
+ *     this.a = 42;
+ *   }
+ *
+ *   bar(callback) {
+ *     callback(null, this.a);
+ *   }
+ * }
+ *
+ * const foo = new Foo();
+ *
+ * const naiveBar = util.promisify(foo.bar);
+ * // TypeError: Cannot read property 'a' of undefined
+ * // naiveBar().then(a => console.log(a));
+ *
+ * naiveBar.call(foo).then((a) => console.log(a)); // '42'
+ *
+ * const bindBar = naiveBar.bind(foo);
+ * bindBar().then((a) => console.log(a)); // '42'
+ * ```
+ * @since v8.0.0
+ */
+ export function promisify<TCustom extends Function>(fn: CustomPromisify<TCustom>): TCustom;
+ export function promisify<TResult>(
+ fn: (callback: (err: any, result: TResult) => void) => void,
+ ): () => Promise<TResult>;
+ export function promisify(fn: (callback: (err?: any) => void) => void): () => Promise<void>;
+ export function promisify<T1, TResult>(
+ fn: (arg1: T1, callback: (err: any, result: TResult) => void) => void,
+ ): (arg1: T1) => Promise<TResult>;
+ export function promisify<T1>(fn: (arg1: T1, callback: (err?: any) => void) => void): (arg1: T1) => Promise<void>;
+ export function promisify<T1, T2, TResult>(
+ fn: (arg1: T1, arg2: T2, callback: (err: any, result: TResult) => void) => void,
+ ): (arg1: T1, arg2: T2) => Promise<TResult>;
+ export function promisify<T1, T2>(
+ fn: (arg1: T1, arg2: T2, callback: (err?: any) => void) => void,
+ ): (arg1: T1, arg2: T2) => Promise<void>;
+ export function promisify<T1, T2, T3, TResult>(
+ fn: (arg1: T1, arg2: T2, arg3: T3, callback: (err: any, result: TResult) => void) => void,
+ ): (arg1: T1, arg2: T2, arg3: T3) => Promise<TResult>;
+ export function promisify<T1, T2, T3>(
+ fn: (arg1: T1, arg2: T2, arg3: T3, callback: (err?: any) => void) => void,
+ ): (arg1: T1, arg2: T2, arg3: T3) => Promise<void>;
+ export function promisify<T1, T2, T3, T4, TResult>(
+ fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err: any, result: TResult) => void) => void,
+ ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise<TResult>;
+ export function promisify<T1, T2, T3, T4>(
+ fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err?: any) => void) => void,
+ ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise<void>;
+ export function promisify<T1, T2, T3, T4, T5, TResult>(
+ fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err: any, result: TResult) => void) => void,
+ ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise<TResult>;
+ export function promisify<T1, T2, T3, T4, T5>(
+ fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err?: any) => void) => void,
+ ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise<void>;
+ export function promisify(fn: Function): Function;
+ export namespace promisify {
+ /**
+ * A symbol that can be used to declare custom promisified variants of functions.
+ */
+ const custom: unique symbol;
+ }
+ /**
+ * An implementation of the [WHATWG Encoding Standard](https://encoding.spec.whatwg.org/) `TextDecoder` API.
+ *
+ * ```js
+ * const decoder = new TextDecoder();
+ * const u8arr = new Uint8Array([72, 101, 108, 108, 111]);
+ * console.log(decoder.decode(u8arr)); // Hello
+ * ```
+ * @since v8.3.0
+ */
+ export class TextDecoder {
+ /**
+ * The encoding supported by the `TextDecoder` instance.
+ */
+ readonly encoding: string;
+ /**
+ * The value will be `true` if decoding errors result in a `TypeError` being
+ * thrown.
+ */
+ readonly fatal: boolean;
+ /**
+ * The value will be `true` if the decoding result will include the byte order
+ * mark.
+ */
+ readonly ignoreBOM: boolean;
+ constructor(
+ encoding?: string,
+ options?: {
+ fatal?: boolean | undefined;
+ ignoreBOM?: boolean | undefined;
+ },
+ );
+ /**
+ * Decodes the `input` and returns a string. If `options.stream` is `true`, any
+ * incomplete byte sequences occurring at the end of the `input` are buffered
+ * internally and emitted after the next call to `textDecoder.decode()`.
+ *
+ * If `textDecoder.fatal` is `true`, decoding errors that occur will result in a `TypeError` being thrown.
+ * @param input An `ArrayBuffer`, `DataView`, or `TypedArray` instance containing the encoded data.
+ */
+ decode(
+ input?: NodeJS.ArrayBufferView | ArrayBuffer | null,
+ options?: {
+ stream?: boolean | undefined;
+ },
+ ): string;
+ }
+ export interface EncodeIntoResult {
+ /**
+ * The read Unicode code units of input.
+ */
+ read: number;
+ /**
+ * The written UTF-8 bytes of output.
+ */
+ written: number;
+ }
+ export { types };
+
+ //// TextEncoder/Decoder
+ /**
+ * An implementation of the [WHATWG Encoding Standard](https://encoding.spec.whatwg.org/) `TextEncoder` API. All
+ * instances of `TextEncoder` only support UTF-8 encoding.
+ *
+ * ```js
+ * const encoder = new TextEncoder();
+ * const uint8array = encoder.encode('this is some data');
+ * ```
+ *
+ * The `TextEncoder` class is also available on the global object.
+ * @since v8.3.0
+ */
+ export class TextEncoder {
+ /**
+ * The encoding supported by the `TextEncoder` instance. Always set to `'utf-8'`.
+ */
+ readonly encoding: string;
+ /**
+ * UTF-8 encodes the `input` string and returns a `Uint8Array` containing the
+ * encoded bytes.
+ * @param [input='an empty string'] The text to encode.
+ */
+ encode(input?: string): Uint8Array;
+ /**
+ * UTF-8 encodes the `src` string to the `dest` Uint8Array and returns an object
+ * containing the read Unicode code units and written UTF-8 bytes.
+ *
+ * ```js
+ * const encoder = new TextEncoder();
+ * const src = 'this is some data';
+ * const dest = new Uint8Array(10);
+ * const { read, written } = encoder.encodeInto(src, dest);
+ * ```
+ * @param src The text to encode.
+ * @param dest The array to hold the encode result.
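+ *
+ * If `dest` is too small to hold the entire encoding, the encoder stops early
+ * and `read`/`written` report how much was consumed and produced. The following
+ * retry is an editorial sketch, not part of the upstream text; it relies on the
+ * fact that three bytes per UTF-16 code unit is always a sufficient bound:
+ *
+ * ```js
+ * const encoder = new TextEncoder();
+ * const src = 'this is some data';
+ * let dest = new Uint8Array(4); // deliberately too small
+ * let { read, written } = encoder.encodeInto(src, dest);
+ * if (read < src.length) {
+ *   // Retry with a buffer that is guaranteed to be large enough.
+ *   dest = new Uint8Array(src.length * 3);
+ *   ({ read, written } = encoder.encodeInto(src, dest));
+ * }
+ * ```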
+ */ + encodeInto(src: string, dest: Uint8Array): EncodeIntoResult; + } + import { TextDecoder as _TextDecoder, TextEncoder as _TextEncoder } from "util"; + global { + /** + * `TextDecoder` class is a global reference for `require('util').TextDecoder` + * https://nodejs.org/api/globals.html#textdecoder + * @since v11.0.0 + */ + var TextDecoder: typeof globalThis extends { + onmessage: any; + TextDecoder: infer TextDecoder; + } ? TextDecoder + : typeof _TextDecoder; + /** + * `TextEncoder` class is a global reference for `require('util').TextEncoder` + * https://nodejs.org/api/globals.html#textencoder + * @since v11.0.0 + */ + var TextEncoder: typeof globalThis extends { + onmessage: any; + TextEncoder: infer TextEncoder; + } ? TextEncoder + : typeof _TextEncoder; + } + + //// parseArgs + /** + * Provides a higher level API for command-line argument parsing than interacting + * with `process.argv` directly. Takes a specification for the expected arguments + * and returns a structured object with the parsed options and positionals. + * + * ```js + * import { parseArgs } from 'node:util'; + * const args = ['-f', '--bar', 'b']; + * const options = { + * foo: { + * type: 'boolean', + * short: 'f', + * }, + * bar: { + * type: 'string', + * }, + * }; + * const { + * values, + * positionals, + * } = parseArgs({ args, options }); + * console.log(values, positionals); + * // Prints: [Object: null prototype] { foo: true, bar: 'b' } [] + * ``` + * @since v18.3.0, v16.17.0 + * @param config Used to provide arguments for parsing and to configure the parser. `config` supports the following properties: + * @return The parsed command line arguments: + */ + export function parseArgs(config?: T): ParsedResults; + interface ParseArgsOptionConfig { + /** + * Type of argument. + */ + type: "string" | "boolean"; + /** + * Whether this option can be provided multiple times. + * If `true`, all values will be collected in an array. + * If `false`, values for the option are last-wins. + * @default false. + */ + multiple?: boolean | undefined; + /** + * A single character alias for the option. + */ + short?: string | undefined; + /** + * The default option value when it is not set by args. + * It must be of the same type as the the `type` property. + * When `multiple` is `true`, it must be an array. + * @since v18.11.0 + */ + default?: string | boolean | string[] | boolean[] | undefined; + } + interface ParseArgsOptionsConfig { + [longOption: string]: ParseArgsOptionConfig; + } + export interface ParseArgsConfig { + /** + * Array of argument strings. + */ + args?: string[] | undefined; + /** + * Used to describe arguments known to the parser. + */ + options?: ParseArgsOptionsConfig | undefined; + /** + * Should an error be thrown when unknown arguments are encountered, + * or when arguments are passed that do not match the `type` configured in `options`. + * @default true + */ + strict?: boolean | undefined; + /** + * Whether this command accepts positional arguments. + */ + allowPositionals?: boolean | undefined; + /** + * Return the parsed tokens. This is useful for extending the built-in behavior, + * from adding additional checks through to reprocessing the tokens in different ways. + * @default false + */ + tokens?: boolean | undefined; + } + /* + IfDefaultsTrue and IfDefaultsFalse are helpers to handle default values for missing boolean properties. 
+ TypeScript does not have exact types for objects: https://github.com/microsoft/TypeScript/issues/12936 + This means it is impossible to distinguish between "field X is definitely not present" and "field X may or may not be present". + But we expect users to generally provide their config inline or `as const`, which means TS will always know whether a given field is present. + So this helper treats "not definitely present" (i.e., not `extends boolean`) as being "definitely not present", i.e. it should have its default value. + This is technically incorrect but is a much nicer UX for the common case. + The IfDefaultsTrue version is for things which default to true; the IfDefaultsFalse version is for things which default to false. + */ + type IfDefaultsTrue = T extends true ? IfTrue + : T extends false ? IfFalse + : IfTrue; + + // we put the `extends false` condition first here because `undefined` compares like `any` when `strictNullChecks: false` + type IfDefaultsFalse = T extends false ? IfFalse + : T extends true ? IfTrue + : IfFalse; + + type ExtractOptionValue = IfDefaultsTrue< + T["strict"], + O["type"] extends "string" ? string : O["type"] extends "boolean" ? boolean : string | boolean, + string | boolean + >; + + type ParsedValues = + & IfDefaultsTrue + & (T["options"] extends ParseArgsOptionsConfig ? { + -readonly [LongOption in keyof T["options"]]: IfDefaultsFalse< + T["options"][LongOption]["multiple"], + undefined | Array>, + undefined | ExtractOptionValue + >; + } + : {}); + + type ParsedPositionals = IfDefaultsTrue< + T["strict"], + IfDefaultsFalse, + IfDefaultsTrue + >; + + type PreciseTokenForOptions< + K extends string, + O extends ParseArgsOptionConfig, + > = O["type"] extends "string" ? { + kind: "option"; + index: number; + name: K; + rawName: string; + value: string; + inlineValue: boolean; + } + : O["type"] extends "boolean" ? { + kind: "option"; + index: number; + name: K; + rawName: string; + value: undefined; + inlineValue: undefined; + } + : OptionToken & { name: K }; + + type TokenForOptions< + T extends ParseArgsConfig, + K extends keyof T["options"] = keyof T["options"], + > = K extends unknown + ? T["options"] extends ParseArgsOptionsConfig ? PreciseTokenForOptions + : OptionToken + : never; + + type ParsedOptionToken = IfDefaultsTrue, OptionToken>; + + type ParsedPositionalToken = IfDefaultsTrue< + T["strict"], + IfDefaultsFalse, + IfDefaultsTrue + >; + + type ParsedTokens = Array< + ParsedOptionToken | ParsedPositionalToken | { kind: "option-terminator"; index: number } + >; + + type PreciseParsedResults = IfDefaultsFalse< + T["tokens"], + { + values: ParsedValues; + positionals: ParsedPositionals; + tokens: ParsedTokens; + }, + { + values: ParsedValues; + positionals: ParsedPositionals; + } + >; + + type OptionToken = + | { kind: "option"; index: number; name: string; rawName: string; value: string; inlineValue: boolean } + | { + kind: "option"; + index: number; + name: string; + rawName: string; + value: undefined; + inlineValue: undefined; + }; + + type Token = + | OptionToken + | { kind: "positional"; index: number; value: string } + | { kind: "option-terminator"; index: number }; + + // If ParseArgsConfig extends T, then the user passed config constructed elsewhere. + // So we can't rely on the `"not definitely present" implies "definitely not present"` assumption mentioned above. + type ParsedResults = ParseArgsConfig extends T ? 
{ + values: { + [longOption: string]: undefined | string | boolean | Array; + }; + positionals: string[]; + tokens?: Token[]; + } + : PreciseParsedResults; + + /** + * An implementation of [the MIMEType class](https://bmeck.github.io/node-proposal-mime-api/). + * + * In accordance with browser conventions, all properties of `MIMEType` objects + * are implemented as getters and setters on the class prototype, rather than as + * data properties on the object itself. + * + * A MIME string is a structured string containing multiple meaningful + * components. When parsed, a `MIMEType` object is returned containing + * properties for each of these components. + * @since v19.1.0, v18.13.0 + * @experimental + */ + export class MIMEType { + /** + * Creates a new MIMEType object by parsing the input. + * + * A `TypeError` will be thrown if the `input` is not a valid MIME. + * Note that an effort will be made to coerce the given values into strings. + * @param input The input MIME to parse. + */ + constructor(input: string | { toString: () => string }); + + /** + * Gets and sets the type portion of the MIME. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const myMIME = new MIMEType('text/javascript'); + * console.log(myMIME.type); + * // Prints: text + * myMIME.type = 'application'; + * console.log(myMIME.type); + * // Prints: application + * console.log(String(myMIME)); + * // Prints: application/javascript + * ``` + */ + type: string; + /** + * Gets and sets the subtype portion of the MIME. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const myMIME = new MIMEType('text/ecmascript'); + * console.log(myMIME.subtype); + * // Prints: ecmascript + * myMIME.subtype = 'javascript'; + * console.log(myMIME.subtype); + * // Prints: javascript + * console.log(String(myMIME)); + * // Prints: text/javascript + * ``` + */ + subtype: string; + /** + * Gets the essence of the MIME. This property is read only. + * Use `mime.type` or `mime.subtype` to alter the MIME. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const myMIME = new MIMEType('text/javascript;key=value'); + * console.log(myMIME.essence); + * // Prints: text/javascript + * myMIME.type = 'application'; + * console.log(myMIME.essence); + * // Prints: application/javascript + * console.log(String(myMIME)); + * // Prints: application/javascript;key=value + * ``` + */ + readonly essence: string; + /** + * Gets the `MIMEParams` object representing the + * parameters of the MIME. This property is read-only. See `MIMEParams` documentation for details. + */ + readonly params: MIMEParams; + /** + * The `toString()` method on the `MIMEType` object returns the serialized MIME. + * + * Because of the need for standard compliance, this method does not allow users + * to customize the serialization process of the MIME. + */ + toString(): string; + } + /** + * The `MIMEParams` API provides read and write access to the parameters of a`MIMEType`. + * @since v19.1.0, v18.13.0 + */ + export class MIMEParams { + /** + * Remove all name-value pairs whose name is `name`. + */ + delete(name: string): void; + /** + * Returns an iterator over each of the name-value pairs in the parameters. + * Each item of the iterator is a JavaScript `Array`. The first item of the array + * is the `name`, the second item of the array is the `value`. + */ + entries(): IterableIterator<[name: string, value: string]>; + /** + * Returns the value of the first name-value pair whose name is `name`. 
If there + * are no such pairs, `null` is returned. + * @return or `null` if there is no name-value pair with the given `name`. + */ + get(name: string): string | null; + /** + * Returns `true` if there is at least one name-value pair whose name is `name`. + */ + has(name: string): boolean; + /** + * Returns an iterator over the names of each name-value pair. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const { params } = new MIMEType('text/plain;foo=0;bar=1'); + * for (const name of params.keys()) { + * console.log(name); + * } + * // Prints: + * // foo + * // bar + * ``` + */ + keys(): IterableIterator; + /** + * Sets the value in the `MIMEParams` object associated with `name` to`value`. If there are any pre-existing name-value pairs whose names are `name`, + * set the first such pair's value to `value`. + * + * ```js + * import { MIMEType } from 'node:util'; + * + * const { params } = new MIMEType('text/plain;foo=0;bar=1'); + * params.set('foo', 'def'); + * params.set('baz', 'xyz'); + * console.log(params.toString()); + * // Prints: foo=def;bar=1;baz=xyz + * ``` + */ + set(name: string, value: string): void; + /** + * Returns an iterator over the values of each name-value pair. + */ + values(): IterableIterator; + /** + * Returns an iterator over each of the name-value pairs in the parameters. + */ + [Symbol.iterator]: typeof MIMEParams.prototype.entries; + } +} +declare module "util/types" { + import { KeyObject, webcrypto } from "node:crypto"; + /** + * Returns `true` if the value is a built-in [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) or + * [`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) instance. + * + * See also `util.types.isArrayBuffer()` and `util.types.isSharedArrayBuffer()`. + * + * ```js + * util.types.isAnyArrayBuffer(new ArrayBuffer()); // Returns true + * util.types.isAnyArrayBuffer(new SharedArrayBuffer()); // Returns true + * ``` + * @since v10.0.0 + */ + function isAnyArrayBuffer(object: unknown): object is ArrayBufferLike; + /** + * Returns `true` if the value is an `arguments` object. + * + * ```js + * function foo() { + * util.types.isArgumentsObject(arguments); // Returns true + * } + * ``` + * @since v10.0.0 + */ + function isArgumentsObject(object: unknown): object is IArguments; + /** + * Returns `true` if the value is a built-in [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) instance. + * This does _not_ include [`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) instances. Usually, it is + * desirable to test for both; See `util.types.isAnyArrayBuffer()` for that. + * + * ```js + * util.types.isArrayBuffer(new ArrayBuffer()); // Returns true + * util.types.isArrayBuffer(new SharedArrayBuffer()); // Returns false + * ``` + * @since v10.0.0 + */ + function isArrayBuffer(object: unknown): object is ArrayBuffer; + /** + * Returns `true` if the value is an instance of one of the [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) views, such as typed + * array objects or [`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView). Equivalent to + * [`ArrayBuffer.isView()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView). 
+ * + * ```js + * util.types.isArrayBufferView(new Int8Array()); // true + * util.types.isArrayBufferView(Buffer.from('hello world')); // true + * util.types.isArrayBufferView(new DataView(new ArrayBuffer(16))); // true + * util.types.isArrayBufferView(new ArrayBuffer()); // false + * ``` + * @since v10.0.0 + */ + function isArrayBufferView(object: unknown): object is NodeJS.ArrayBufferView; + /** + * Returns `true` if the value is an [async function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function). + * This only reports back what the JavaScript engine is seeing; + * in particular, the return value may not match the original source code if + * a transpilation tool was used. + * + * ```js + * util.types.isAsyncFunction(function foo() {}); // Returns false + * util.types.isAsyncFunction(async function foo() {}); // Returns true + * ``` + * @since v10.0.0 + */ + function isAsyncFunction(object: unknown): boolean; + /** + * Returns `true` if the value is a `BigInt64Array` instance. + * + * ```js + * util.types.isBigInt64Array(new BigInt64Array()); // Returns true + * util.types.isBigInt64Array(new BigUint64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isBigInt64Array(value: unknown): value is BigInt64Array; + /** + * Returns `true` if the value is a `BigUint64Array` instance. + * + * ```js + * util.types.isBigUint64Array(new BigInt64Array()); // Returns false + * util.types.isBigUint64Array(new BigUint64Array()); // Returns true + * ``` + * @since v10.0.0 + */ + function isBigUint64Array(value: unknown): value is BigUint64Array; + /** + * Returns `true` if the value is a boolean object, e.g. created + * by `new Boolean()`. + * + * ```js + * util.types.isBooleanObject(false); // Returns false + * util.types.isBooleanObject(true); // Returns false + * util.types.isBooleanObject(new Boolean(false)); // Returns true + * util.types.isBooleanObject(new Boolean(true)); // Returns true + * util.types.isBooleanObject(Boolean(false)); // Returns false + * util.types.isBooleanObject(Boolean(true)); // Returns false + * ``` + * @since v10.0.0 + */ + function isBooleanObject(object: unknown): object is Boolean; + /** + * Returns `true` if the value is any boxed primitive object, e.g. created + * by `new Boolean()`, `new String()` or `Object(Symbol())`. + * + * For example: + * + * ```js + * util.types.isBoxedPrimitive(false); // Returns false + * util.types.isBoxedPrimitive(new Boolean(false)); // Returns true + * util.types.isBoxedPrimitive(Symbol('foo')); // Returns false + * util.types.isBoxedPrimitive(Object(Symbol('foo'))); // Returns true + * util.types.isBoxedPrimitive(Object(BigInt(5))); // Returns true + * ``` + * @since v10.11.0 + */ + function isBoxedPrimitive(object: unknown): object is String | Number | BigInt | Boolean | Symbol; + /** + * Returns `true` if the value is a built-in [`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView) instance. + * + * ```js + * const ab = new ArrayBuffer(20); + * util.types.isDataView(new DataView(ab)); // Returns true + * util.types.isDataView(new Float64Array()); // Returns false + * ``` + * + * See also [`ArrayBuffer.isView()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView). 
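+ *
+ * As an editorial illustration (not part of the upstream text) of how this
+ * relates to `util.types.isArrayBufferView()`: every `DataView` is an
+ * `ArrayBuffer` view, but typed arrays are views without being `DataView`s.
+ *
+ * ```js
+ * const view = new DataView(new ArrayBuffer(8));
+ * util.types.isDataView(view); // Returns true
+ * util.types.isArrayBufferView(view); // Returns true
+ * util.types.isDataView(new Uint8Array(8)); // Returns false
+ * ```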
+ * @since v10.0.0 + */ + function isDataView(object: unknown): object is DataView; + /** + * Returns `true` if the value is a built-in [`Date`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date) instance. + * + * ```js + * util.types.isDate(new Date()); // Returns true + * ``` + * @since v10.0.0 + */ + function isDate(object: unknown): object is Date; + /** + * Returns `true` if the value is a native `External` value. + * + * A native `External` value is a special type of object that contains a + * raw C++ pointer (`void*`) for access from native code, and has no other + * properties. Such objects are created either by Node.js internals or native + * addons. In JavaScript, they are [frozen](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze) objects with a`null` prototype. + * + * ```c + * #include + * #include + * napi_value result; + * static napi_value MyNapi(napi_env env, napi_callback_info info) { + * int* raw = (int*) malloc(1024); + * napi_status status = napi_create_external(env, (void*) raw, NULL, NULL, &result); + * if (status != napi_ok) { + * napi_throw_error(env, NULL, "napi_create_external failed"); + * return NULL; + * } + * return result; + * } + * ... + * DECLARE_NAPI_PROPERTY("myNapi", MyNapi) + * ... + * ``` + * + * ```js + * const native = require('napi_addon.node'); + * const data = native.myNapi(); + * util.types.isExternal(data); // returns true + * util.types.isExternal(0); // returns false + * util.types.isExternal(new String('foo')); // returns false + * ``` + * + * For further information on `napi_create_external`, refer to `napi_create_external()`. + * @since v10.0.0 + */ + function isExternal(object: unknown): boolean; + /** + * Returns `true` if the value is a built-in [`Float32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float32Array) instance. + * + * ```js + * util.types.isFloat32Array(new ArrayBuffer()); // Returns false + * util.types.isFloat32Array(new Float32Array()); // Returns true + * util.types.isFloat32Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isFloat32Array(object: unknown): object is Float32Array; + /** + * Returns `true` if the value is a built-in [`Float64Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float64Array) instance. + * + * ```js + * util.types.isFloat64Array(new ArrayBuffer()); // Returns false + * util.types.isFloat64Array(new Uint8Array()); // Returns false + * util.types.isFloat64Array(new Float64Array()); // Returns true + * ``` + * @since v10.0.0 + */ + function isFloat64Array(object: unknown): object is Float64Array; + /** + * Returns `true` if the value is a generator function. + * This only reports back what the JavaScript engine is seeing; + * in particular, the return value may not match the original source code if + * a transpilation tool was used. + * + * ```js + * util.types.isGeneratorFunction(function foo() {}); // Returns false + * util.types.isGeneratorFunction(function* foo() {}); // Returns true + * ``` + * @since v10.0.0 + */ + function isGeneratorFunction(object: unknown): object is GeneratorFunction; + /** + * Returns `true` if the value is a generator object as returned from a + * built-in generator function. + * This only reports back what the JavaScript engine is seeing; + * in particular, the return value may not match the original source code if + * a transpilation tool was used. 
+ * + * ```js + * function* foo() {} + * const generator = foo(); + * util.types.isGeneratorObject(generator); // Returns true + * ``` + * @since v10.0.0 + */ + function isGeneratorObject(object: unknown): object is Generator; + /** + * Returns `true` if the value is a built-in [`Int8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int8Array) instance. + * + * ```js + * util.types.isInt8Array(new ArrayBuffer()); // Returns false + * util.types.isInt8Array(new Int8Array()); // Returns true + * util.types.isInt8Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isInt8Array(object: unknown): object is Int8Array; + /** + * Returns `true` if the value is a built-in [`Int16Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int16Array) instance. + * + * ```js + * util.types.isInt16Array(new ArrayBuffer()); // Returns false + * util.types.isInt16Array(new Int16Array()); // Returns true + * util.types.isInt16Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isInt16Array(object: unknown): object is Int16Array; + /** + * Returns `true` if the value is a built-in [`Int32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int32Array) instance. + * + * ```js + * util.types.isInt32Array(new ArrayBuffer()); // Returns false + * util.types.isInt32Array(new Int32Array()); // Returns true + * util.types.isInt32Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isInt32Array(object: unknown): object is Int32Array; + /** + * Returns `true` if the value is a built-in [`Map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map) instance. + * + * ```js + * util.types.isMap(new Map()); // Returns true + * ``` + * @since v10.0.0 + */ + function isMap( + object: T | {}, + ): object is T extends ReadonlyMap ? (unknown extends T ? never : ReadonlyMap) + : Map; + /** + * Returns `true` if the value is an iterator returned for a built-in [`Map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map) instance. + * + * ```js + * const map = new Map(); + * util.types.isMapIterator(map.keys()); // Returns true + * util.types.isMapIterator(map.values()); // Returns true + * util.types.isMapIterator(map.entries()); // Returns true + * util.types.isMapIterator(map[Symbol.iterator]()); // Returns true + * ``` + * @since v10.0.0 + */ + function isMapIterator(object: unknown): boolean; + /** + * Returns `true` if the value is an instance of a [Module Namespace Object](https://tc39.github.io/ecma262/#sec-module-namespace-exotic-objects). + * + * ```js + * import * as ns from './a.js'; + * + * util.types.isModuleNamespaceObject(ns); // Returns true + * ``` + * @since v10.0.0 + */ + function isModuleNamespaceObject(value: unknown): boolean; + /** + * Returns `true` if the value was returned by the constructor of a [built-in `Error` type](https://tc39.es/ecma262/#sec-error-objects). 
+ * + * ```js + * console.log(util.types.isNativeError(new Error())); // true + * console.log(util.types.isNativeError(new TypeError())); // true + * console.log(util.types.isNativeError(new RangeError())); // true + * ``` + * + * Subclasses of the native error types are also native errors: + * + * ```js + * class MyError extends Error {} + * console.log(util.types.isNativeError(new MyError())); // true + * ``` + * + * A value being `instanceof` a native error class is not equivalent to `isNativeError()`returning `true` for that value. `isNativeError()` returns `true` for errors + * which come from a different [realm](https://tc39.es/ecma262/#realm) while `instanceof Error` returns `false`for these errors: + * + * ```js + * const vm = require('node:vm'); + * const context = vm.createContext({}); + * const myError = vm.runInContext('new Error()', context); + * console.log(util.types.isNativeError(myError)); // true + * console.log(myError instanceof Error); // false + * ``` + * + * Conversely, `isNativeError()` returns `false` for all objects which were not + * returned by the constructor of a native error. That includes values + * which are `instanceof` native errors: + * + * ```js + * const myError = { __proto__: Error.prototype }; + * console.log(util.types.isNativeError(myError)); // false + * console.log(myError instanceof Error); // true + * ``` + * @since v10.0.0 + */ + function isNativeError(object: unknown): object is Error; + /** + * Returns `true` if the value is a number object, e.g. created + * by `new Number()`. + * + * ```js + * util.types.isNumberObject(0); // Returns false + * util.types.isNumberObject(new Number(0)); // Returns true + * ``` + * @since v10.0.0 + */ + function isNumberObject(object: unknown): object is Number; + /** + * Returns `true` if the value is a built-in [`Promise`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise). + * + * ```js + * util.types.isPromise(Promise.resolve(42)); // Returns true + * ``` + * @since v10.0.0 + */ + function isPromise(object: unknown): object is Promise; + /** + * Returns `true` if the value is a [`Proxy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy) instance. + * + * ```js + * const target = {}; + * const proxy = new Proxy(target, {}); + * util.types.isProxy(target); // Returns false + * util.types.isProxy(proxy); // Returns true + * ``` + * @since v10.0.0 + */ + function isProxy(object: unknown): boolean; + /** + * Returns `true` if the value is a regular expression object. + * + * ```js + * util.types.isRegExp(/abc/); // Returns true + * util.types.isRegExp(new RegExp('abc')); // Returns true + * ``` + * @since v10.0.0 + */ + function isRegExp(object: unknown): object is RegExp; + /** + * Returns `true` if the value is a built-in [`Set`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set) instance. + * + * ```js + * util.types.isSet(new Set()); // Returns true + * ``` + * @since v10.0.0 + */ + function isSet( + object: T | {}, + ): object is T extends ReadonlySet ? (unknown extends T ? never : ReadonlySet) : Set; + /** + * Returns `true` if the value is an iterator returned for a built-in [`Set`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set) instance. 
+ * + * ```js + * const set = new Set(); + * util.types.isSetIterator(set.keys()); // Returns true + * util.types.isSetIterator(set.values()); // Returns true + * util.types.isSetIterator(set.entries()); // Returns true + * util.types.isSetIterator(set[Symbol.iterator]()); // Returns true + * ``` + * @since v10.0.0 + */ + function isSetIterator(object: unknown): boolean; + /** + * Returns `true` if the value is a built-in [`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) instance. + * This does _not_ include [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) instances. Usually, it is + * desirable to test for both; See `util.types.isAnyArrayBuffer()` for that. + * + * ```js + * util.types.isSharedArrayBuffer(new ArrayBuffer()); // Returns false + * util.types.isSharedArrayBuffer(new SharedArrayBuffer()); // Returns true + * ``` + * @since v10.0.0 + */ + function isSharedArrayBuffer(object: unknown): object is SharedArrayBuffer; + /** + * Returns `true` if the value is a string object, e.g. created + * by `new String()`. + * + * ```js + * util.types.isStringObject('foo'); // Returns false + * util.types.isStringObject(new String('foo')); // Returns true + * ``` + * @since v10.0.0 + */ + function isStringObject(object: unknown): object is String; + /** + * Returns `true` if the value is a symbol object, created + * by calling `Object()` on a `Symbol` primitive. + * + * ```js + * const symbol = Symbol('foo'); + * util.types.isSymbolObject(symbol); // Returns false + * util.types.isSymbolObject(Object(symbol)); // Returns true + * ``` + * @since v10.0.0 + */ + function isSymbolObject(object: unknown): object is Symbol; + /** + * Returns `true` if the value is a built-in [`TypedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray) instance. + * + * ```js + * util.types.isTypedArray(new ArrayBuffer()); // Returns false + * util.types.isTypedArray(new Uint8Array()); // Returns true + * util.types.isTypedArray(new Float64Array()); // Returns true + * ``` + * + * See also [`ArrayBuffer.isView()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView). + * @since v10.0.0 + */ + function isTypedArray(object: unknown): object is NodeJS.TypedArray; + /** + * Returns `true` if the value is a built-in [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) instance. + * + * ```js + * util.types.isUint8Array(new ArrayBuffer()); // Returns false + * util.types.isUint8Array(new Uint8Array()); // Returns true + * util.types.isUint8Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint8Array(object: unknown): object is Uint8Array; + /** + * Returns `true` if the value is a built-in [`Uint8ClampedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8ClampedArray) instance. 
+ * + * ```js + * util.types.isUint8ClampedArray(new ArrayBuffer()); // Returns false + * util.types.isUint8ClampedArray(new Uint8ClampedArray()); // Returns true + * util.types.isUint8ClampedArray(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint8ClampedArray(object: unknown): object is Uint8ClampedArray; + /** + * Returns `true` if the value is a built-in [`Uint16Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint16Array) instance. + * + * ```js + * util.types.isUint16Array(new ArrayBuffer()); // Returns false + * util.types.isUint16Array(new Uint16Array()); // Returns true + * util.types.isUint16Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint16Array(object: unknown): object is Uint16Array; + /** + * Returns `true` if the value is a built-in [`Uint32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint32Array) instance. + * + * ```js + * util.types.isUint32Array(new ArrayBuffer()); // Returns false + * util.types.isUint32Array(new Uint32Array()); // Returns true + * util.types.isUint32Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint32Array(object: unknown): object is Uint32Array; + /** + * Returns `true` if the value is a built-in [`WeakMap`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakMap) instance. + * + * ```js + * util.types.isWeakMap(new WeakMap()); // Returns true + * ``` + * @since v10.0.0 + */ + function isWeakMap(object: unknown): object is WeakMap; + /** + * Returns `true` if the value is a built-in [`WeakSet`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet) instance. + * + * ```js + * util.types.isWeakSet(new WeakSet()); // Returns true + * ``` + * @since v10.0.0 + */ + function isWeakSet(object: unknown): object is WeakSet; + /** + * Returns `true` if `value` is a `KeyObject`, `false` otherwise. + * @since v16.2.0 + */ + function isKeyObject(object: unknown): object is KeyObject; + /** + * Returns `true` if `value` is a `CryptoKey`, `false` otherwise. + * @since v16.2.0 + */ + function isCryptoKey(object: unknown): object is webcrypto.CryptoKey; +} +declare module "node:util" { + export * from "util"; +} +declare module "node:util/types" { + export * from "util/types"; +} diff --git a/dist/node_modules/@types/node/ts4.8/v8.d.ts b/dist/node_modules/@types/node/ts4.8/v8.d.ts new file mode 100644 index 0000000..b2aeee2 --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/v8.d.ts @@ -0,0 +1,764 @@ +/** + * The `node:v8` module exposes APIs that are specific to the version of [V8](https://developers.google.com/v8/) built into the Node.js binary. It can be accessed using: + * + * ```js + * const v8 = require('node:v8'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.11.0/lib/v8.js) + */ +declare module "v8" { + import { Readable } from "node:stream"; + interface HeapSpaceInfo { + space_name: string; + space_size: number; + space_used_size: number; + space_available_size: number; + physical_space_size: number; + } + // ** Signifies if the --zap_code_space option is enabled or not. 1 == enabled, 0 == disabled. 
*/ + type DoesZapCodeSpaceFlag = 0 | 1; + interface HeapInfo { + total_heap_size: number; + total_heap_size_executable: number; + total_physical_size: number; + total_available_size: number; + used_heap_size: number; + heap_size_limit: number; + malloced_memory: number; + peak_malloced_memory: number; + does_zap_garbage: DoesZapCodeSpaceFlag; + number_of_native_contexts: number; + number_of_detached_contexts: number; + total_global_handles_size: number; + used_global_handles_size: number; + external_memory: number; + } + interface HeapCodeStatistics { + code_and_metadata_size: number; + bytecode_and_metadata_size: number; + external_script_source_size: number; + } + interface HeapSnapshotOptions { + /** + * If true, expose internals in the heap snapshot. + * @default false + */ + exposeInternals?: boolean; + /** + * If true, expose numeric values in artificial fields. + * @default false + */ + exposeNumericValues?: boolean; + } + /** + * Returns an integer representing a version tag derived from the V8 version, + * command-line flags, and detected CPU features. This is useful for determining + * whether a `vm.Script` `cachedData` buffer is compatible with this instance + * of V8. + * + * ```js + * console.log(v8.cachedDataVersionTag()); // 3947234607 + * // The value returned by v8.cachedDataVersionTag() is derived from the V8 + * // version, command-line flags, and detected CPU features. Test that the value + * // does indeed update when flags are toggled. + * v8.setFlagsFromString('--allow_natives_syntax'); + * console.log(v8.cachedDataVersionTag()); // 183726201 + * ``` + * @since v8.0.0 + */ + function cachedDataVersionTag(): number; + /** + * Returns an object with the following properties: + * + * `does_zap_garbage` is a 0/1 boolean, which signifies whether the`--zap_code_space` option is enabled or not. This makes V8 overwrite heap + * garbage with a bit pattern. The RSS footprint (resident set size) gets bigger + * because it continuously touches all heap pages and that makes them less likely + * to get swapped out by the operating system. + * + * `number_of_native_contexts` The value of native\_context is the number of the + * top-level contexts currently active. Increase of this number over time indicates + * a memory leak. + * + * `number_of_detached_contexts` The value of detached\_context is the number + * of contexts that were detached and not yet garbage collected. This number + * being non-zero indicates a potential memory leak. + * + * `total_global_handles_size` The value of total\_global\_handles\_size is the + * total memory size of V8 global handles. + * + * `used_global_handles_size` The value of used\_global\_handles\_size is the + * used memory size of V8 global handles. + * + * `external_memory` The value of external\_memory is the memory size of array + * buffers and external strings. + * + * ```js + * { + * total_heap_size: 7326976, + * total_heap_size_executable: 4194304, + * total_physical_size: 7326976, + * total_available_size: 1152656, + * used_heap_size: 3476208, + * heap_size_limit: 1535115264, + * malloced_memory: 16384, + * peak_malloced_memory: 1127496, + * does_zap_garbage: 0, + * number_of_native_contexts: 1, + * number_of_detached_contexts: 0, + * total_global_handles_size: 8192, + * used_global_handles_size: 3296, + * external_memory: 318824 + * } + * ``` + * @since v1.0.0 + */ + function getHeapStatistics(): HeapInfo; + /** + * Returns statistics about the V8 heap spaces, i.e. the segments which make up + * the V8 heap. 
Neither the ordering of heap spaces, nor the availability of a + * heap space can be guaranteed as the statistics are provided via the + * V8 [`GetHeapSpaceStatistics`](https://v8docs.nodesource.com/node-13.2/d5/dda/classv8_1_1_isolate.html#ac673576f24fdc7a33378f8f57e1d13a4) function and may change from one V8 version to the + * next. + * + * The value returned is an array of objects containing the following properties: + * + * ```json + * [ + * { + * "space_name": "new_space", + * "space_size": 2063872, + * "space_used_size": 951112, + * "space_available_size": 80824, + * "physical_space_size": 2063872 + * }, + * { + * "space_name": "old_space", + * "space_size": 3090560, + * "space_used_size": 2493792, + * "space_available_size": 0, + * "physical_space_size": 3090560 + * }, + * { + * "space_name": "code_space", + * "space_size": 1260160, + * "space_used_size": 644256, + * "space_available_size": 960, + * "physical_space_size": 1260160 + * }, + * { + * "space_name": "map_space", + * "space_size": 1094160, + * "space_used_size": 201608, + * "space_available_size": 0, + * "physical_space_size": 1094160 + * }, + * { + * "space_name": "large_object_space", + * "space_size": 0, + * "space_used_size": 0, + * "space_available_size": 1490980608, + * "physical_space_size": 0 + * } + * ] + * ``` + * @since v6.0.0 + */ + function getHeapSpaceStatistics(): HeapSpaceInfo[]; + /** + * The `v8.setFlagsFromString()` method can be used to programmatically set + * V8 command-line flags. This method should be used with care. Changing settings + * after the VM has started may result in unpredictable behavior, including + * crashes and data loss; or it may simply do nothing. + * + * The V8 options available for a version of Node.js may be determined by running`node --v8-options`. + * + * Usage: + * + * ```js + * // Print GC events to stdout for one minute. + * const v8 = require('node:v8'); + * v8.setFlagsFromString('--trace_gc'); + * setTimeout(() => { v8.setFlagsFromString('--notrace_gc'); }, 60e3); + * ``` + * @since v1.0.0 + */ + function setFlagsFromString(flags: string): void; + /** + * Generates a snapshot of the current V8 heap and returns a Readable + * Stream that may be used to read the JSON serialized representation. + * This JSON stream format is intended to be used with tools such as + * Chrome DevTools. The JSON schema is undocumented and specific to the + * V8 engine. Therefore, the schema may change from one version of V8 to the next. + * + * Creating a heap snapshot requires memory about twice the size of the heap at + * the time the snapshot is created. This results in the risk of OOM killers + * terminating the process. + * + * Generating a snapshot is a synchronous operation which blocks the event loop + * for a duration depending on the heap size. + * + * ```js + * // Print heap snapshot to the console + * const v8 = require('node:v8'); + * const stream = v8.getHeapSnapshot(); + * stream.pipe(process.stdout); + * ``` + * @since v11.13.0 + * @return A Readable containing the V8 heap snapshot. + */ + function getHeapSnapshot(options?: HeapSnapshotOptions): Readable; + /** + * Generates a snapshot of the current V8 heap and writes it to a JSON + * file. This file is intended to be used with tools such as Chrome + * DevTools. The JSON schema is undocumented and specific to the V8 + * engine, and may change from one version of V8 to the next. + * + * A heap snapshot is specific to a single V8 isolate. 
When using `worker threads`, a heap snapshot generated from the main thread will + * not contain any information about the workers, and vice versa. + * + * Creating a heap snapshot requires memory about twice the size of the heap at + * the time the snapshot is created. This results in the risk of OOM killers + * terminating the process. + * + * Generating a snapshot is a synchronous operation which blocks the event loop + * for a duration depending on the heap size. + * + * ```js + * const { writeHeapSnapshot } = require('node:v8'); + * const { + * Worker, + * isMainThread, + * parentPort, + * } = require('node:worker_threads'); + * + * if (isMainThread) { + * const worker = new Worker(__filename); + * + * worker.once('message', (filename) => { + * console.log(`worker heapdump: ${filename}`); + * // Now get a heapdump for the main thread. + * console.log(`main thread heapdump: ${writeHeapSnapshot()}`); + * }); + * + * // Tell the worker to create a heapdump. + * worker.postMessage('heapdump'); + * } else { + * parentPort.once('message', (message) => { + * if (message === 'heapdump') { + * // Generate a heapdump for the worker + * // and return the filename to the parent. + * parentPort.postMessage(writeHeapSnapshot()); + * } + * }); + * } + * ``` + * @since v11.13.0 + * @param filename The file path where the V8 heap snapshot is to be saved. If not specified, a file name with the pattern `'Heap-${yyyymmdd}-${hhmmss}-${pid}-${thread_id}.heapsnapshot'` will be + * generated, where `{pid}` will be the PID of the Node.js process, `{thread_id}` will be `0` when `writeHeapSnapshot()` is called from the main Node.js thread or the id of a + * worker thread. + * @return The filename where the snapshot was saved. + */ + function writeHeapSnapshot(filename?: string, options?: HeapSnapshotOptions): string; + /** + * Get statistics about code and its metadata in the heap, see + * V8 [`GetHeapCodeAndMetadataStatistics`](https://v8docs.nodesource.com/node-13.2/d5/dda/classv8_1_1_isolate.html#a6079122af17612ef54ef3348ce170866) API. Returns an object with the + * following properties: + * + * ```js + * { + * code_and_metadata_size: 212208, + * bytecode_and_metadata_size: 161368, + * external_script_source_size: 1410794, + * cpu_profiler_metadata_size: 0, + * } + * ``` + * @since v12.8.0 + */ + function getHeapCodeStatistics(): HeapCodeStatistics; + /** + * @since v8.0.0 + */ + class Serializer { + /** + * Writes out a header, which includes the serialization format version. + */ + writeHeader(): void; + /** + * Serializes a JavaScript value and adds the serialized representation to the + * internal buffer. + * + * This throws an error if `value` cannot be serialized. + */ + writeValue(val: any): boolean; + /** + * Returns the stored internal buffer. This serializer should not be used once + * the buffer is released. Calling this method results in undefined behavior + * if a previous write has failed. + */ + releaseBuffer(): Buffer; + /** + * Marks an `ArrayBuffer` as having its contents transferred out of band. + * Pass the corresponding `ArrayBuffer` in the deserializing context to `deserializer.transferArrayBuffer()`. + * @param id A 32-bit unsigned integer. + * @param arrayBuffer An `ArrayBuffer` instance. + */ + transferArrayBuffer(id: number, arrayBuffer: ArrayBuffer): void; + /** + * Write a raw 32-bit unsigned integer. + * For use inside of a custom `serializer._writeHostObject()`. 
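 + *
 + * A minimal sketch (an illustration added here, not taken from the Node.js docs) of a
 + * hypothetical `Serializer` subclass whose `_writeHostObject()` override uses this
 + * method to persist a 32-bit tag for a host object:
 + *
 + * ```js
 + * const { Serializer } = require('node:v8');
 + *
 + * class TagSerializer extends Serializer {
 + *   _writeHostObject(object) {
 + *     // `object.tag` is assumed to hold a 32-bit unsigned integer.
 + *     this.writeUint32(object.tag >>> 0);
 + *   }
 + * }
 + * ```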
+ */ + writeUint32(value: number): void; + /** + * Write a raw 64-bit unsigned integer, split into high and low 32-bit parts. + * For use inside of a custom `serializer._writeHostObject()`. + */ + writeUint64(hi: number, lo: number): void; + /** + * Write a JS `number` value. + * For use inside of a custom `serializer._writeHostObject()`. + */ + writeDouble(value: number): void; + /** + * Write raw bytes into the serializer's internal buffer. The deserializer + * will require a way to compute the length of the buffer. + * For use inside of a custom `serializer._writeHostObject()`. + */ + writeRawBytes(buffer: NodeJS.TypedArray): void; + } + /** + * A subclass of `Serializer` that serializes `TypedArray`(in particular `Buffer`) and `DataView` objects as host objects, and only + * stores the part of their underlying `ArrayBuffer`s that they are referring to. + * @since v8.0.0 + */ + class DefaultSerializer extends Serializer {} + /** + * @since v8.0.0 + */ + class Deserializer { + constructor(data: NodeJS.TypedArray); + /** + * Reads and validates a header (including the format version). + * May, for example, reject an invalid or unsupported wire format. In that case, + * an `Error` is thrown. + */ + readHeader(): boolean; + /** + * Deserializes a JavaScript value from the buffer and returns it. + */ + readValue(): any; + /** + * Marks an `ArrayBuffer` as having its contents transferred out of band. + * Pass the corresponding `ArrayBuffer` in the serializing context to `serializer.transferArrayBuffer()` (or return the `id` from `serializer._getSharedArrayBufferId()` in the case of + * `SharedArrayBuffer`s). + * @param id A 32-bit unsigned integer. + * @param arrayBuffer An `ArrayBuffer` instance. + */ + transferArrayBuffer(id: number, arrayBuffer: ArrayBuffer): void; + /** + * Reads the underlying wire format version. Likely mostly to be useful to + * legacy code reading old wire format versions. May not be called before`.readHeader()`. + */ + getWireFormatVersion(): number; + /** + * Read a raw 32-bit unsigned integer and return it. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readUint32(): number; + /** + * Read a raw 64-bit unsigned integer and return it as an array `[hi, lo]`with two 32-bit unsigned integer entries. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readUint64(): [number, number]; + /** + * Read a JS `number` value. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readDouble(): number; + /** + * Read raw bytes from the deserializer's internal buffer. The `length` parameter + * must correspond to the length of the buffer that was passed to `serializer.writeRawBytes()`. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readRawBytes(length: number): Buffer; + } + /** + * A subclass of `Deserializer` corresponding to the format written by `DefaultSerializer`. + * @since v8.0.0 + */ + class DefaultDeserializer extends Deserializer {} + /** + * Uses a `DefaultSerializer` to serialize `value` into a buffer. + * + * `ERR_BUFFER_TOO_LARGE` will be thrown when trying to + * serialize a huge object which requires buffer + * larger than `buffer.constants.MAX_LENGTH`. + * @since v8.0.0 + */ + function serialize(value: any): Buffer; + /** + * Uses a `DefaultDeserializer` with default options to read a JS value + * from a buffer. + * @since v8.0.0 + * @param buffer A buffer returned by {@link serialize}. 
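 + *
 + * A round-trip sketch (an illustration added here, not taken from the Node.js docs)
 + * pairing {@link serialize} with this function:
 + *
 + * ```js
 + * const v8 = require('node:v8');
 + *
 + * const buffer = v8.serialize({ answer: 42, created: new Date() });
 + * const clone = v8.deserialize(buffer);
 + * console.log(clone.answer); // 42
 + * console.log(clone.created instanceof Date); // true
 + * ```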
+ */ + function deserialize(buffer: NodeJS.TypedArray): any; + /** + * The `v8.takeCoverage()` method allows the user to write the coverage started by `NODE_V8_COVERAGE` to disk on demand. This method can be invoked multiple + * times during the lifetime of the process. Each time the execution counter will + * be reset and a new coverage report will be written to the directory specified + * by `NODE_V8_COVERAGE`. + * + * When the process is about to exit, one last coverage will still be written to + * disk unless {@link stopCoverage} is invoked before the process exits. + * @since v15.1.0, v14.18.0, v12.22.0 + */ + function takeCoverage(): void; + /** + * The `v8.stopCoverage()` method allows the user to stop the coverage collection + * started by `NODE_V8_COVERAGE`, so that V8 can release the execution count + * records and optimize code. This can be used in conjunction with {@link takeCoverage} if the user wants to collect the coverage on demand. + * @since v15.1.0, v14.18.0, v12.22.0 + */ + function stopCoverage(): void; + /** + * The API is a no-op if `--heapsnapshot-near-heap-limit` is already set from the command line or the API is called more than once. + * `limit` must be a positive integer. See [`--heapsnapshot-near-heap-limit`](https://nodejs.org/docs/latest-v20.x/api/cli.html#--heapsnapshot-near-heap-limitmax_count) for more information. + * @experimental + * @since v18.10.0, v16.18.0 + */ + function setHeapSnapshotNearHeapLimit(limit: number): void; + /** + * This API collects GC data in current thread. + * @since v19.6.0, v18.15.0 + */ + class GCProfiler { + /** + * Start collecting GC data. + * @since v19.6.0, v18.15.0 + */ + start(): void; + /** + * Stop collecting GC data and return an object.The content of object + * is as follows. + * + * ```json + * { + * "version": 1, + * "startTime": 1674059033862, + * "statistics": [ + * { + * "gcType": "Scavenge", + * "beforeGC": { + * "heapStatistics": { + * "totalHeapSize": 5005312, + * "totalHeapSizeExecutable": 524288, + * "totalPhysicalSize": 5226496, + * "totalAvailableSize": 4341325216, + * "totalGlobalHandlesSize": 8192, + * "usedGlobalHandlesSize": 2112, + * "usedHeapSize": 4883840, + * "heapSizeLimit": 4345298944, + * "mallocedMemory": 254128, + * "externalMemory": 225138, + * "peakMallocedMemory": 181760 + * }, + * "heapSpaceStatistics": [ + * { + * "spaceName": "read_only_space", + * "spaceSize": 0, + * "spaceUsedSize": 0, + * "spaceAvailableSize": 0, + * "physicalSpaceSize": 0 + * } + * ] + * }, + * "cost": 1574.14, + * "afterGC": { + * "heapStatistics": { + * "totalHeapSize": 6053888, + * "totalHeapSizeExecutable": 524288, + * "totalPhysicalSize": 5500928, + * "totalAvailableSize": 4341101384, + * "totalGlobalHandlesSize": 8192, + * "usedGlobalHandlesSize": 2112, + * "usedHeapSize": 4059096, + * "heapSizeLimit": 4345298944, + * "mallocedMemory": 254128, + * "externalMemory": 225138, + * "peakMallocedMemory": 181760 + * }, + * "heapSpaceStatistics": [ + * { + * "spaceName": "read_only_space", + * "spaceSize": 0, + * "spaceUsedSize": 0, + * "spaceAvailableSize": 0, + * "physicalSpaceSize": 0 + * } + * ] + * } + * } + * ], + * "endTime": 1674059036865 + * } + * ``` + * + * Here's an example. 
+ * + * ```js + * const { GCProfiler } = require('v8'); + * const profiler = new GCProfiler(); + * profiler.start(); + * setTimeout(() => { + * console.log(profiler.stop()); + * }, 1000); + * ``` + * @since v19.6.0, v18.15.0 + */ + stop(): GCProfilerResult; + } + interface GCProfilerResult { + version: number; + startTime: number; + endTime: number; + statistics: Array<{ + gcType: string; + cost: number; + beforeGC: { + heapStatistics: HeapStatistics; + heapSpaceStatistics: HeapSpaceStatistics[]; + }; + afterGC: { + heapStatistics: HeapStatistics; + heapSpaceStatistics: HeapSpaceStatistics[]; + }; + }>; + } + interface HeapStatistics { + totalHeapSize: number; + totalHeapSizeExecutable: number; + totalPhysicalSize: number; + totalAvailableSize: number; + totalGlobalHandlesSize: number; + usedGlobalHandlesSize: number; + usedHeapSize: number; + heapSizeLimit: number; + mallocedMemory: number; + externalMemory: number; + peakMallocedMemory: number; + } + interface HeapSpaceStatistics { + spaceName: string; + spaceSize: number; + spaceUsedSize: number; + spaceAvailableSize: number; + physicalSpaceSize: number; + } + /** + * Called when a promise is constructed. This does not mean that corresponding before/after events will occur, only that the possibility exists. This will + * happen if a promise is created without ever getting a continuation. + * @since v17.1.0, v16.14.0 + * @param promise The promise being created. + * @param parent The promise continued from, if applicable. + */ + interface Init { + (promise: Promise, parent: Promise): void; + } + /** + * Called before a promise continuation executes. This can be in the form of `then()`, `catch()`, or `finally()` handlers or an await resuming. + * + * The before callback will be called 0 to N times. The before callback will typically be called 0 times if no continuation was ever made for the promise. + * The before callback may be called many times in the case where many continuations have been made from the same promise. + * @since v17.1.0, v16.14.0 + */ + interface Before { + (promise: Promise): void; + } + /** + * Called immediately after a promise continuation executes. This may be after a `then()`, `catch()`, or `finally()` handler or before an await after another await. + * @since v17.1.0, v16.14.0 + */ + interface After { + (promise: Promise): void; + } + /** + * Called when the promise receives a resolution or rejection value. This may occur synchronously in the case of {@link Promise.resolve()} or + * {@link Promise.reject()}. + * @since v17.1.0, v16.14.0 + */ + interface Settled { + (promise: Promise): void; + } + /** + * Key events in the lifetime of a promise have been categorized into four areas: creation of a promise, before/after a continuation handler is called or + * around an await, and when the promise resolves or rejects. + * + * Because promises are asynchronous resources whose lifecycle is tracked via the promise hooks mechanism, the `init()`, `before()`, `after()`, and + * `settled()` callbacks must not be async functions as they create more promises which would produce an infinite loop. + * @since v17.1.0, v16.14.0 + */ + interface HookCallbacks { + init?: Init; + before?: Before; + after?: After; + settled?: Settled; + } + interface PromiseHooks { + /** + * The `init` hook must be a plain function. Providing an async function will throw as it would produce an infinite microtask loop. + * @since v17.1.0, v16.14.0 + * @param init The {@link Init | `init` callback} to call when a promise is created. 
+ * @return Call to stop the hook. + */ + onInit: (init: Init) => Function; + /** + * The `settled` hook must be a plain function. Providing an async function will throw as it would produce an infinite microtask loop. + * @since v17.1.0, v16.14.0 + * @param settled The {@link Settled | `settled` callback} to call when a promise is created. + * @return Call to stop the hook. + */ + onSettled: (settled: Settled) => Function; + /** + * The `before` hook must be a plain function. Providing an async function will throw as it would produce an infinite microtask loop. + * @since v17.1.0, v16.14.0 + * @param before The {@link Before | `before` callback} to call before a promise continuation executes. + * @return Call to stop the hook. + */ + onBefore: (before: Before) => Function; + /** + * The `after` hook must be a plain function. Providing an async function will throw as it would produce an infinite microtask loop. + * @since v17.1.0, v16.14.0 + * @param after The {@link After | `after` callback} to call after a promise continuation executes. + * @return Call to stop the hook. + */ + onAfter: (after: After) => Function; + /** + * Registers functions to be called for different lifetime events of each promise. + * The callbacks `init()`/`before()`/`after()`/`settled()` are called for the respective events during a promise's lifetime. + * All callbacks are optional. For example, if only promise creation needs to be tracked, then only the init callback needs to be passed. + * The hook callbacks must be plain functions. Providing async functions will throw as it would produce an infinite microtask loop. + * @since v17.1.0, v16.14.0 + * @param callbacks The {@link HookCallbacks | Hook Callbacks} to register + * @return Used for disabling hooks + */ + createHook: (callbacks: HookCallbacks) => Function; + } + /** + * The `promiseHooks` interface can be used to track promise lifecycle events. + * @since v17.1.0, v16.14.0 + */ + const promiseHooks: PromiseHooks; + type StartupSnapshotCallbackFn = (args: any) => any; + interface StartupSnapshot { + /** + * Add a callback that will be called when the Node.js instance is about to get serialized into a snapshot and exit. + * This can be used to release resources that should not or cannot be serialized or to convert user data into a form more suitable for serialization. + * @since v18.6.0, v16.17.0 + */ + addSerializeCallback(callback: StartupSnapshotCallbackFn, data?: any): void; + /** + * Add a callback that will be called when the Node.js instance is deserialized from a snapshot. + * The `callback` and the `data` (if provided) will be serialized into the snapshot, they can be used to re-initialize the state of the application or + * to re-acquire resources that the application needs when the application is restarted from the snapshot. + * @since v18.6.0, v16.17.0 + */ + addDeserializeCallback(callback: StartupSnapshotCallbackFn, data?: any): void; + /** + * This sets the entry point of the Node.js application when it is deserialized from a snapshot. This can be called only once in the snapshot building script. + * If called, the deserialized application no longer needs an additional entry point script to start up and will simply invoke the callback along with the deserialized + * data (if provided), otherwise an entry point script still needs to be provided to the deserialized application. 
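 + *
 + * A minimal sketch (an illustration added here, not taken from the Node.js docs) of a
 + * hypothetical snapshot entry script, built with
 + * `node --snapshot-blob snapshot.blob --build-snapshot entry.js`:
 + *
 + * ```js
 + * const v8 = require('node:v8');
 + *
 + * if (v8.startupSnapshot.isBuildingSnapshot()) {
 + *   v8.startupSnapshot.setDeserializeMainFunction((data) => {
 + *     console.log(data.message);
 + *   }, { message: 'Started from the snapshot' });
 + * }
 + * ```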
+ * @since v18.6.0, v16.17.0 + */ + setDeserializeMainFunction(callback: StartupSnapshotCallbackFn, data?: any): void; + /** + * Returns true if the Node.js instance is run to build a snapshot. + * @since v18.6.0, v16.17.0 + */ + isBuildingSnapshot(): boolean; + } + /** + * The `v8.startupSnapshot` interface can be used to add serialization and deserialization hooks for custom startup snapshots. + * + * ```bash + * $ node --snapshot-blob snapshot.blob --build-snapshot entry.js + * # This launches a process with the snapshot + * $ node --snapshot-blob snapshot.blob + * ``` + * + * In the example above, `entry.js` can use methods from the `v8.startupSnapshot` interface to specify how to save information for custom objects + * in the snapshot during serialization and how the information can be used to synchronize these objects during deserialization of the snapshot. + * For example, if the `entry.js` contains the following script: + * + * ```js + * 'use strict'; + * + * const fs = require('node:fs'); + * const zlib = require('node:zlib'); + * const path = require('node:path'); + * const assert = require('node:assert'); + * + * const v8 = require('node:v8'); + * + * class BookShelf { + * storage = new Map(); + * + * // Reading a series of files from directory and store them into storage. + * constructor(directory, books) { + * for (const book of books) { + * this.storage.set(book, fs.readFileSync(path.join(directory, book))); + * } + * } + * + * static compressAll(shelf) { + * for (const [ book, content ] of shelf.storage) { + * shelf.storage.set(book, zlib.gzipSync(content)); + * } + * } + * + * static decompressAll(shelf) { + * for (const [ book, content ] of shelf.storage) { + * shelf.storage.set(book, zlib.gunzipSync(content)); + * } + * } + * } + * + * // __dirname here is where the snapshot script is placed + * // during snapshot building time. + * const shelf = new BookShelf(__dirname, [ + * 'book1.en_US.txt', + * 'book1.es_ES.txt', + * 'book2.zh_CN.txt', + * ]); + * + * assert(v8.startupSnapshot.isBuildingSnapshot()); + * // On snapshot serialization, compress the books to reduce size. + * v8.startupSnapshot.addSerializeCallback(BookShelf.compressAll, shelf); + * // On snapshot deserialization, decompress the books. + * v8.startupSnapshot.addDeserializeCallback(BookShelf.decompressAll, shelf); + * v8.startupSnapshot.setDeserializeMainFunction((shelf) => { + * // process.env and process.argv are refreshed during snapshot + * // deserialization. + * const lang = process.env.BOOK_LANG || 'en_US'; + * const book = process.argv[1]; + * const name = `${book}.${lang}.txt`; + * console.log(shelf.storage.get(name)); + * }, shelf); + * ``` + * + * The resulted binary will get print the data deserialized from the snapshot during start up, using the refreshed `process.env` and `process.argv` of the launched process: + * + * ```bash + * $ BOOK_LANG=es_ES node --snapshot-blob snapshot.blob book1 + * # Prints content of book1.es_ES.txt deserialized from the snapshot. + * ``` + * + * Currently the application deserialized from a user-land snapshot cannot be snapshotted again, so these APIs are only available to applications that are not deserialized from a user-land snapshot. 
+ * + * @experimental + * @since v18.6.0, v16.17.0 + */ + const startupSnapshot: StartupSnapshot; +} +declare module "node:v8" { + export * from "v8"; +} diff --git a/dist/node_modules/@types/node/ts4.8/vm.d.ts b/dist/node_modules/@types/node/ts4.8/vm.d.ts new file mode 100644 index 0000000..3a310cc --- /dev/null +++ b/dist/node_modules/@types/node/ts4.8/vm.d.ts @@ -0,0 +1,903 @@ +/** + * The `node:vm` module enables compiling and running code within V8 Virtual + * Machine contexts. + * + * **The `node:vm` module is not a security** + * **mechanism. Do not use it to run untrusted code.** + * + * JavaScript code can be compiled and run immediately or + * compiled, saved, and run later. + * + * A common use case is to run the code in a different V8 Context. This means + * invoked code has a different global object than the invoking code. + * + * One can provide the context by `contextifying` an + * object. The invoked code treats any property in the context like a + * global variable. Any changes to global variables caused by the invoked + * code are reflected in the context object. + * + * ```js + * const vm = require('node:vm'); + * + * const x = 1; + * + * const context = { x: 2 }; + * vm.createContext(context); // Contextify the object. + * + * const code = 'x += 40; var y = 17;'; + * // `x` and `y` are global variables in the context. + * // Initially, x has the value 2 because that is the value of context.x. + * vm.runInContext(code, context); + * + * console.log(context.x); // 42 + * console.log(context.y); // 17 + * + * console.log(x); // 1; y is not defined. + * ``` + * @see [source](https://github.com/nodejs/node/blob/v20.2.0/lib/vm.js) + */ +declare module "vm" { + import { ImportAttributes } from "node:module"; + interface Context extends NodeJS.Dict {} + interface BaseOptions { + /** + * Specifies the filename used in stack traces produced by this script. + * Default: `''`. + */ + filename?: string | undefined; + /** + * Specifies the line number offset that is displayed in stack traces produced by this script. + * Default: `0`. + */ + lineOffset?: number | undefined; + /** + * Specifies the column number offset that is displayed in stack traces produced by this script. + * @default 0 + */ + columnOffset?: number | undefined; + } + interface ScriptOptions extends BaseOptions { + /** + * V8's code cache data for the supplied source. + */ + cachedData?: Buffer | NodeJS.ArrayBufferView | undefined; + /** @deprecated in favor of `script.createCachedData()` */ + produceCachedData?: boolean | undefined; + /** + * Called during evaluation of this module when `import()` is called. + * If this option is not specified, calls to `import()` will reject with `ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING`. + */ + importModuleDynamically?: + | ((specifier: string, script: Script, importAttributes: ImportAttributes) => Module) + | undefined; + } + interface RunningScriptOptions extends BaseOptions { + /** + * When `true`, if an `Error` occurs while compiling the `code`, the line of code causing the error is attached to the stack trace. + * Default: `true`. + */ + displayErrors?: boolean | undefined; + /** + * Specifies the number of milliseconds to execute code before terminating execution. + * If execution is terminated, an `Error` will be thrown. This value must be a strictly positive integer. + */ + timeout?: number | undefined; + /** + * If `true`, the execution will be terminated when `SIGINT` (Ctrl+C) is received. 
+ * Existing handlers for the event that have been attached via `process.on('SIGINT')` will be disabled during script execution, but will continue to work after that. + * If execution is terminated, an `Error` will be thrown. + * Default: `false`. + */ + breakOnSigint?: boolean | undefined; + } + interface RunningScriptInNewContextOptions extends RunningScriptOptions { + /** + * Human-readable name of the newly created context. + */ + contextName?: CreateContextOptions["name"]; + /** + * Origin corresponding to the newly created context for display purposes. The origin should be formatted like a URL, + * but with only the scheme, host, and port (if necessary), like the value of the `url.origin` property of a `URL` object. + * Most notably, this string should omit the trailing slash, as that denotes a path. + */ + contextOrigin?: CreateContextOptions["origin"]; + contextCodeGeneration?: CreateContextOptions["codeGeneration"]; + /** + * If set to `afterEvaluate`, microtasks will be run immediately after the script has run. + */ + microtaskMode?: CreateContextOptions["microtaskMode"]; + } + interface RunningCodeOptions extends RunningScriptOptions { + cachedData?: ScriptOptions["cachedData"]; + importModuleDynamically?: ScriptOptions["importModuleDynamically"]; + } + interface RunningCodeInNewContextOptions extends RunningScriptInNewContextOptions { + cachedData?: ScriptOptions["cachedData"]; + importModuleDynamically?: ScriptOptions["importModuleDynamically"]; + } + interface CompileFunctionOptions extends BaseOptions { + /** + * Provides an optional data with V8's code cache data for the supplied source. + */ + cachedData?: Buffer | undefined; + /** + * Specifies whether to produce new cache data. + * Default: `false`, + */ + produceCachedData?: boolean | undefined; + /** + * The sandbox/context in which the said function should be compiled in. + */ + parsingContext?: Context | undefined; + /** + * An array containing a collection of context extensions (objects wrapping the current scope) to be applied while compiling + */ + contextExtensions?: Object[] | undefined; + } + interface CreateContextOptions { + /** + * Human-readable name of the newly created context. + * @default 'VM Context i' Where i is an ascending numerical index of the created context. + */ + name?: string | undefined; + /** + * Corresponds to the newly created context for display purposes. + * The origin should be formatted like a `URL`, but with only the scheme, host, and port (if necessary), + * like the value of the `url.origin` property of a URL object. + * Most notably, this string should omit the trailing slash, as that denotes a path. + * @default '' + */ + origin?: string | undefined; + codeGeneration?: + | { + /** + * If set to false any calls to eval or function constructors (Function, GeneratorFunction, etc) + * will throw an EvalError. + * @default true + */ + strings?: boolean | undefined; + /** + * If set to false any attempt to compile a WebAssembly module will throw a WebAssembly.CompileError. + * @default true + */ + wasm?: boolean | undefined; + } + | undefined; + /** + * If set to `afterEvaluate`, microtasks will be run immediately after the script has run. 
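 + *
 + * A minimal sketch (an illustration added here, not taken from the Node.js docs): with
 + * `microtaskMode: 'afterEvaluate'`, the promise continuation scheduled inside the
 + * context has already run by the time `vm.runInContext()` returns:
 + *
 + * ```js
 + * const vm = require('node:vm');
 + *
 + * const context = vm.createContext({ done: false }, { microtaskMode: 'afterEvaluate' });
 + * vm.runInContext('Promise.resolve().then(() => { done = true; });', context);
 + * console.log(context.done); // true
 + * ```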
+ */ + microtaskMode?: "afterEvaluate" | undefined; + } + type MeasureMemoryMode = "summary" | "detailed"; + interface MeasureMemoryOptions { + /** + * @default 'summary' + */ + mode?: MeasureMemoryMode | undefined; + /** + * @default 'default' + */ + execution?: "default" | "eager" | undefined; + } + interface MemoryMeasurement { + total: { + jsMemoryEstimate: number; + jsMemoryRange: [number, number]; + }; + } + /** + * Instances of the `vm.Script` class contain precompiled scripts that can be + * executed in specific contexts. + * @since v0.3.1 + */ + class Script { + constructor(code: string, options?: ScriptOptions | string); + /** + * Runs the compiled code contained by the `vm.Script` object within the given`contextifiedObject` and returns the result. Running code does not have access + * to local scope. + * + * The following example compiles code that increments a global variable, sets + * the value of another global variable, then execute the code multiple times. + * The globals are contained in the `context` object. + * + * ```js + * const vm = require('node:vm'); + * + * const context = { + * animal: 'cat', + * count: 2, + * }; + * + * const script = new vm.Script('count += 1; name = "kitty";'); + * + * vm.createContext(context); + * for (let i = 0; i < 10; ++i) { + * script.runInContext(context); + * } + * + * console.log(context); + * // Prints: { animal: 'cat', count: 12, name: 'kitty' } + * ``` + * + * Using the `timeout` or `breakOnSigint` options will result in new event loops + * and corresponding threads being started, which have a non-zero performance + * overhead. + * @since v0.3.1 + * @param contextifiedObject A `contextified` object as returned by the `vm.createContext()` method. + * @return the result of the very last statement executed in the script. + */ + runInContext(contextifiedObject: Context, options?: RunningScriptOptions): any; + /** + * First contextifies the given `contextObject`, runs the compiled code contained + * by the `vm.Script` object within the created context, and returns the result. + * Running code does not have access to local scope. + * + * The following example compiles code that sets a global variable, then executes + * the code multiple times in different contexts. The globals are set on and + * contained within each individual `context`. + * + * ```js + * const vm = require('node:vm'); + * + * const script = new vm.Script('globalVar = "set"'); + * + * const contexts = [{}, {}, {}]; + * contexts.forEach((context) => { + * script.runInNewContext(context); + * }); + * + * console.log(contexts); + * // Prints: [{ globalVar: 'set' }, { globalVar: 'set' }, { globalVar: 'set' }] + * ``` + * @since v0.3.1 + * @param contextObject An object that will be `contextified`. If `undefined`, a new object will be created. + * @return the result of the very last statement executed in the script. + */ + runInNewContext(contextObject?: Context, options?: RunningScriptInNewContextOptions): any; + /** + * Runs the compiled code contained by the `vm.Script` within the context of the + * current `global` object. Running code does not have access to local scope, but _does_ have access to the current `global` object. 
+ * + * The following example compiles code that increments a `global` variable then + * executes that code multiple times: + * + * ```js + * const vm = require('node:vm'); + * + * global.globalVar = 0; + * + * const script = new vm.Script('globalVar += 1', { filename: 'myfile.vm' }); + * + * for (let i = 0; i < 1000; ++i) { + * script.runInThisContext(); + * } + * + * console.log(globalVar); + * + * // 1000 + * ``` + * @since v0.3.1 + * @return the result of the very last statement executed in the script. + */ + runInThisContext(options?: RunningScriptOptions): any; + /** + * Creates a code cache that can be used with the `Script` constructor's`cachedData` option. Returns a `Buffer`. This method may be called at any + * time and any number of times. + * + * The code cache of the `Script` doesn't contain any JavaScript observable + * states. The code cache is safe to be saved along side the script source and + * used to construct new `Script` instances multiple times. + * + * Functions in the `Script` source can be marked as lazily compiled and they are + * not compiled at construction of the `Script`. These functions are going to be + * compiled when they are invoked the first time. The code cache serializes the + * metadata that V8 currently knows about the `Script` that it can use to speed up + * future compilations. + * + * ```js + * const script = new vm.Script(` + * function add(a, b) { + * return a + b; + * } + * + * const x = add(1, 2); + * `); + * + * const cacheWithoutAdd = script.createCachedData(); + * // In `cacheWithoutAdd` the function `add()` is marked for full compilation + * // upon invocation. + * + * script.runInThisContext(); + * + * const cacheWithAdd = script.createCachedData(); + * // `cacheWithAdd` contains fully compiled function `add()`. + * ``` + * @since v10.6.0 + */ + createCachedData(): Buffer; + /** @deprecated in favor of `script.createCachedData()` */ + cachedDataProduced?: boolean | undefined; + /** + * When `cachedData` is supplied to create the `vm.Script`, this value will be set + * to either `true` or `false` depending on acceptance of the data by V8\. + * Otherwise the value is `undefined`. + * @since v5.7.0 + */ + cachedDataRejected?: boolean | undefined; + cachedData?: Buffer | undefined; + /** + * When the script is compiled from a source that contains a source map magic + * comment, this property will be set to the URL of the source map. + * + * ```js + * import vm from 'node:vm'; + * + * const script = new vm.Script(` + * function myFunc() {} + * //# sourceMappingURL=sourcemap.json + * `); + * + * console.log(script.sourceMapURL); + * // Prints: sourcemap.json + * ``` + * @since v19.1.0, v18.13.0 + */ + sourceMapURL?: string | undefined; + } + /** + * If given a `contextObject`, the `vm.createContext()` method will `prepare + * that object` so that it can be used in calls to {@link runInContext} or `script.runInContext()`. Inside such scripts, + * the `contextObject` will be the global object, retaining all of its existing + * properties but also having the built-in objects and functions any standard [global object](https://es5.github.io/#x15.1) has. Outside of scripts run by the vm module, global variables + * will remain unchanged. 
+ * + * ```js + * const vm = require('node:vm'); + * + * global.globalVar = 3; + * + * const context = { globalVar: 1 }; + * vm.createContext(context); + * + * vm.runInContext('globalVar *= 2;', context); + * + * console.log(context); + * // Prints: { globalVar: 2 } + * + * console.log(global.globalVar); + * // Prints: 3 + * ``` + * + * If `contextObject` is omitted (or passed explicitly as `undefined`), a new, + * empty `contextified` object will be returned. + * + * The `vm.createContext()` method is primarily useful for creating a single + * context that can be used to run multiple scripts. For instance, if emulating a + * web browser, the method can be used to create a single context representing a + * window's global object, then run all ` +``` + + + +Node + + +Install with `npm install deprecation` + +```js +const { Deprecation } = require("deprecation"); +// or: import { Deprecation } from "deprecation"; +``` + + + + + +```js +function foo() { + bar(); +} + +function bar() { + baz(); +} + +function baz() { + console.warn(new Deprecation("[my-lib] foo() is deprecated, use bar()")); +} + +foo(); +// { Deprecation: [my-lib] foo() is deprecated, use bar() +// at baz (/path/to/file.js:12:15) +// at bar (/path/to/file.js:8:3) +// at foo (/path/to/file.js:4:3) +``` + +To log a deprecation message only once, you can use the [once](https://www.npmjs.com/package/once) module. + +```js +const Deprecation = require("deprecation"); +const once = require("once"); + +const deprecateFoo = once(console.warn); + +function foo() { + deprecateFoo(new Deprecation("[my-lib] foo() is deprecated, use bar()")); +} + +foo(); +foo(); // logs nothing +``` + +## License + +[ISC](LICENSE) diff --git a/dist/node_modules/deprecation/dist-node/index.js b/dist/node_modules/deprecation/dist-node/index.js new file mode 100644 index 0000000..9da1775 --- /dev/null +++ b/dist/node_modules/deprecation/dist-node/index.js @@ -0,0 +1,20 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +exports.Deprecation = Deprecation; diff --git a/dist/node_modules/deprecation/dist-src/index.js b/dist/node_modules/deprecation/dist-src/index.js new file mode 100644 index 0000000..7950fdc --- /dev/null +++ b/dist/node_modules/deprecation/dist-src/index.js @@ -0,0 +1,14 @@ +export class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} \ No newline at end of file diff --git a/dist/node_modules/deprecation/dist-types/index.d.ts b/dist/node_modules/deprecation/dist-types/index.d.ts new file mode 100644 index 0000000..e3ae7ad --- /dev/null +++ b/dist/node_modules/deprecation/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export class Deprecation extends Error { + name: "Deprecation"; +} diff --git a/dist/node_modules/deprecation/dist-web/index.js b/dist/node_modules/deprecation/dist-web/index.js new file mode 100644 index 0000000..c6bbda7 --- /dev/null +++ b/dist/node_modules/deprecation/dist-web/index.js @@ -0,0 +1,16 @@ +class Deprecation extends Error { + constructor(message) { + 
super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +export { Deprecation }; diff --git a/dist/node_modules/deprecation/package.json b/dist/node_modules/deprecation/package.json new file mode 100644 index 0000000..a45fd51 --- /dev/null +++ b/dist/node_modules/deprecation/package.json @@ -0,0 +1,34 @@ +{ + "name": "deprecation", + "description": "Log a deprecation message with stack", + "version": "2.3.1", + "license": "ISC", + "files": [ + "dist-*/", + "bin/" + ], + "esnext": "dist-src/index.js", + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "pika": true, + "sideEffects": false, + "keywords": [ + "deprecate", + "deprecated", + "deprecation" + ], + "repository": { + "type": "git", + "url": "https://github.com/gr2m/deprecation.git" + }, + "dependencies": {}, + "devDependencies": { + "@pika/pack": "^0.3.7", + "@pika/plugin-build-node": "^0.4.0", + "@pika/plugin-build-types": "^0.4.0", + "@pika/plugin-build-web": "^0.4.0", + "@pika/plugin-standard-pkg": "^0.4.0", + "semantic-release": "^15.13.3" + } +} diff --git a/dist/node_modules/ecdsa-sig-formatter/CODEOWNERS b/dist/node_modules/ecdsa-sig-formatter/CODEOWNERS new file mode 100644 index 0000000..4451d3d --- /dev/null +++ b/dist/node_modules/ecdsa-sig-formatter/CODEOWNERS @@ -0,0 +1 @@ +* @omsmith diff --git a/dist/node_modules/ecdsa-sig-formatter/LICENSE b/dist/node_modules/ecdsa-sig-formatter/LICENSE new file mode 100644 index 0000000..8754ed6 --- /dev/null +++ b/dist/node_modules/ecdsa-sig-formatter/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2015 D2L Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/dist/node_modules/ecdsa-sig-formatter/README.md b/dist/node_modules/ecdsa-sig-formatter/README.md new file mode 100644 index 0000000..daa95d6 --- /dev/null +++ b/dist/node_modules/ecdsa-sig-formatter/README.md @@ -0,0 +1,65 @@ +# ecdsa-sig-formatter + +[![Build Status](https://travis-ci.org/Brightspace/node-ecdsa-sig-formatter.svg?branch=master)](https://travis-ci.org/Brightspace/node-ecdsa-sig-formatter) [![Coverage Status](https://coveralls.io/repos/Brightspace/node-ecdsa-sig-formatter/badge.svg)](https://coveralls.io/r/Brightspace/node-ecdsa-sig-formatter) + +Translate between JOSE and ASN.1/DER encodings for ECDSA signatures + +## Install +```sh +npm install ecdsa-sig-formatter --save +``` + +## Usage +```js +var format = require('ecdsa-sig-formatter'); + +var derSignature = '..'; // asn.1/DER encoded ecdsa signature + +var joseSignature = format.derToJose(derSignature); + +``` + +### API + +--- + +#### `.derToJose(Buffer|String signature, String alg)` -> `String` + +Convert the ASN.1/DER encoded signature to a JOSE-style concatenated signature. +Returns a _base64 url_ encoded `String`. + +* If _signature_ is a `String`, it should be _base64_ encoded +* _alg_ must be one of _ES256_, _ES384_ or _ES512_ + +--- + +#### `.joseToDer(Buffer|String signature, String alg)` -> `Buffer` + +Convert the JOSE-style concatenated signature to an ASN.1/DER encoded +signature. Returns a `Buffer` + +* If _signature_ is a `String`, it should be _base64 url_ encoded +* _alg_ must be one of _ES256_, _ES384_ or _ES512_ + +## Contributing + +1. **Fork** the repository. Committing directly against this repository is + highly discouraged. + +2. Make your modifications in a branch, updating and writing new unit tests + as necessary in the `spec` directory. + +3. Ensure that all tests pass with `npm test` + +4. `rebase` your changes against master. *Do not merge*. + +5. Submit a pull request to this repository. Wait for tests to run and someone + to chime in. + +### Code Style + +This repository is configured with [EditorConfig][EditorConfig] and +[ESLint][ESLint] rules. 
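+
+As a companion to the `derToJose` snippet in the Usage section above, here is a sketch of
+the reverse conversion; `joseSignature` is a hypothetical base64 url encoded ES256
+signature (the raw `r || s` concatenation):
+
+```js
+var format = require('ecdsa-sig-formatter');
+
+var joseSignature = '..'; // base64 url encoded ecdsa signature
+
+// DER-encode it, e.g. before handing it to an API that expects ASN.1/DER.
+var derSignature = format.joseToDer(joseSignature, 'ES256');
+```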
+ +[EditorConfig]: http://editorconfig.org/ +[ESLint]: http://eslint.org diff --git a/dist/node_modules/ecdsa-sig-formatter/package.json b/dist/node_modules/ecdsa-sig-formatter/package.json new file mode 100644 index 0000000..6fb5ebf --- /dev/null +++ b/dist/node_modules/ecdsa-sig-formatter/package.json @@ -0,0 +1,46 @@ +{ + "name": "ecdsa-sig-formatter", + "version": "1.0.11", + "description": "Translate ECDSA signatures between ASN.1/DER and JOSE-style concatenation", + "main": "src/ecdsa-sig-formatter.js", + "scripts": { + "check-style": "eslint .", + "pretest": "npm run check-style", + "test": "istanbul cover --root src _mocha -- spec", + "report-cov": "cat ./coverage/lcov.info | coveralls" + }, + "typings": "./src/ecdsa-sig-formatter.d.ts", + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/Brightspace/node-ecdsa-sig-formatter.git" + }, + "keywords": [ + "ecdsa", + "der", + "asn.1", + "jwt", + "jwa", + "jsonwebtoken", + "jose" + ], + "author": "D2L Corporation", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/Brightspace/node-ecdsa-sig-formatter/issues" + }, + "homepage": "https://github.com/Brightspace/node-ecdsa-sig-formatter#readme", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "devDependencies": { + "bench": "^0.3.6", + "chai": "^3.5.0", + "coveralls": "^2.11.9", + "eslint": "^2.12.0", + "eslint-config-brightspace": "^0.2.1", + "istanbul": "^0.4.3", + "jwk-to-pem": "^1.2.5", + "mocha": "^2.5.3", + "native-crypto": "^1.7.0" + } +} diff --git a/dist/node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.d.ts b/dist/node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.d.ts new file mode 100644 index 0000000..9693aa0 --- /dev/null +++ b/dist/node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.d.ts @@ -0,0 +1,17 @@ +/// + +declare module "ecdsa-sig-formatter" { + /** + * Convert the ASN.1/DER encoded signature to a JOSE-style concatenated signature. Returns a base64 url encoded String. + * If signature is a String, it should be base64 encoded + * alg must be one of ES256, ES384 or ES512 + */ + export function derToJose(signature: Buffer | string, alg: string): string; + + /** + * Convert the JOSE-style concatenated signature to an ASN.1/DER encoded signature. 
Returns a Buffer + * If signature is a String, it should be base64 url encoded + * alg must be one of ES256, ES384 or ES512 + */ + export function joseToDer(signature: Buffer | string, alg: string): Buffer +} diff --git a/dist/node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.js b/dist/node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.js new file mode 100644 index 0000000..38eeb9b --- /dev/null +++ b/dist/node_modules/ecdsa-sig-formatter/src/ecdsa-sig-formatter.js @@ -0,0 +1,187 @@ +'use strict'; + +var Buffer = require('safe-buffer').Buffer; + +var getParamBytesForAlg = require('./param-bytes-for-alg'); + +var MAX_OCTET = 0x80, + CLASS_UNIVERSAL = 0, + PRIMITIVE_BIT = 0x20, + TAG_SEQ = 0x10, + TAG_INT = 0x02, + ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), + ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); + +function base64Url(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function signatureAsBuffer(signature) { + if (Buffer.isBuffer(signature)) { + return signature; + } else if ('string' === typeof signature) { + return Buffer.from(signature, 'base64'); + } + + throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); +} + +function derToJose(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + // the DER encoded param should at most be the param size, plus a padding + // zero, since due to being a signed integer + var maxEncodedParamLength = paramBytes + 1; + + var inputLength = signature.length; + + var offset = 0; + if (signature[offset++] !== ENCODED_TAG_SEQ) { + throw new Error('Could not find expected "seq"'); + } + + var seqLength = signature[offset++]; + if (seqLength === (MAX_OCTET | 1)) { + seqLength = signature[offset++]; + } + + if (inputLength - offset < seqLength) { + throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); + } + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "r"'); + } + + var rLength = signature[offset++]; + + if (inputLength - offset - 2 < rLength) { + throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); + } + + if (maxEncodedParamLength < rLength) { + throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var rOffset = offset; + offset += rLength; + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "s"'); + } + + var sLength = signature[offset++]; + + if (inputLength - offset !== sLength) { + throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); + } + + if (maxEncodedParamLength < sLength) { + throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var sOffset = offset; + offset += sLength; + + if (offset !== inputLength) { + throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); + } + + var rPadding = paramBytes - rLength, + sPadding = paramBytes - sLength; + + var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); + + for (offset = 0; offset < rPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); + + offset = paramBytes; + + for (var o = 
offset; offset < o + sPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); + + dst = dst.toString('base64'); + dst = base64Url(dst); + + return dst; +} + +function countPadding(buf, start, stop) { + var padding = 0; + while (start + padding < stop && buf[start + padding] === 0) { + ++padding; + } + + var needsSign = buf[start + padding] >= MAX_OCTET; + if (needsSign) { + --padding; + } + + return padding; +} + +function joseToDer(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + var signatureBytes = signature.length; + if (signatureBytes !== paramBytes * 2) { + throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); + } + + var rPadding = countPadding(signature, 0, paramBytes); + var sPadding = countPadding(signature, paramBytes, signature.length); + var rLength = paramBytes - rPadding; + var sLength = paramBytes - sPadding; + + var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; + + var shortLength = rsBytes < MAX_OCTET; + + var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); + + var offset = 0; + dst[offset++] = ENCODED_TAG_SEQ; + if (shortLength) { + // Bit 8 has value "0" + // bits 7-1 give the length. + dst[offset++] = rsBytes; + } else { + // Bit 8 of first octet has value "1" + // bits 7-1 give the number of additional length octets. + dst[offset++] = MAX_OCTET | 1; + // length, base 256 + dst[offset++] = rsBytes & 0xff; + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = rLength; + if (rPadding < 0) { + dst[offset++] = 0; + offset += signature.copy(dst, offset, 0, paramBytes); + } else { + offset += signature.copy(dst, offset, rPadding, paramBytes); + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = sLength; + if (sPadding < 0) { + dst[offset++] = 0; + signature.copy(dst, offset, paramBytes); + } else { + signature.copy(dst, offset, paramBytes + sPadding); + } + + return dst; +} + +module.exports = { + derToJose: derToJose, + joseToDer: joseToDer +}; diff --git a/dist/node_modules/ecdsa-sig-formatter/src/param-bytes-for-alg.js b/dist/node_modules/ecdsa-sig-formatter/src/param-bytes-for-alg.js new file mode 100644 index 0000000..9fe67ac --- /dev/null +++ b/dist/node_modules/ecdsa-sig-formatter/src/param-bytes-for-alg.js @@ -0,0 +1,23 @@ +'use strict'; + +function getParamSize(keySize) { + var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 
0 : 1);
+	return result;
+}
+
+var paramBytesForAlg = {
+	ES256: getParamSize(256),
+	ES384: getParamSize(384),
+	ES512: getParamSize(521)
+};
+
+function getParamBytesForAlg(alg) {
+	var paramBytes = paramBytesForAlg[alg];
+	if (paramBytes) {
+		return paramBytes;
+	}
+
+	throw new Error('Unknown algorithm "' + alg + '"');
+}
+
+module.exports = getParamBytesForAlg;
diff --git a/dist/node_modules/fromentries/LICENSE b/dist/node_modules/fromentries/LICENSE
new file mode 100644
index 0000000..c7e6852
--- /dev/null
+++ b/dist/node_modules/fromentries/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) Feross Aboukhadijeh
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/dist/node_modules/fromentries/README.md b/dist/node_modules/fromentries/README.md
new file mode 100644
index 0000000..2e8d931
--- /dev/null
+++ b/dist/node_modules/fromentries/README.md
@@ -0,0 +1,70 @@
+# fromentries [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
+
+[travis-image]: https://img.shields.io/travis/feross/fromentries/master.svg
+[travis-url]: https://travis-ci.org/feross/fromentries
+[npm-image]: https://img.shields.io/npm/v/fromentries.svg
+[npm-url]: https://npmjs.org/package/fromentries
+[downloads-image]: https://img.shields.io/npm/dm/fromentries.svg
+[downloads-url]: https://npmjs.org/package/fromentries
+[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg
+[standard-url]: https://standardjs.com
+
+### Object.fromEntries() ponyfill (in 6 lines)
+
+## Install
+
+```
+npm install fromentries
+```
+
+## Why this package?
+
+Existing polyfill packages (like
+[`object.fromentries`](https://github.com/es-shims/Object.fromEntries))
+pull in a bunch of dependencies and **add over 8
+KB** to the browser bundle size. This allows them to work in ES3 environments
+like IE6, but it's also overkill; almost no one supports IE6 anymore.
+
+I'd rather not ship tons of extra code to website visitors. A polyfill for this
+feature can be implemented in a few short lines of code using modern language
+features. That's what `fromentries` (this package) does.
+
+This means that `fromentries` only works in evergreen browsers like:
+
+- Chrome
+- Firefox
+- Edge
+- Safari
+- Opera
+
+It does not work in browsers like IE11 and older (unless you transpile it first).
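+
+The whole trick is spread syntax plus `Array.prototype.reduce`. Here is a rough sketch of that idea (illustrative only, under an assumed name; the actual implementation is in `index.js` below):
+
+```js
+// collapse any iterable of [key, value] pairs into a plain object
+function fromEntriesSketch (iterable) {
+  return [...iterable].reduce((obj, [key, val]) => {
+    obj[key] = val
+    return obj
+  }, {})
+}
+
+fromEntriesSketch([['a', 1], ['b', 2]])
+//=> { a: 1, b: 2 }
+```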
+
+## Usage
+
+```js
+const fromEntries = require('fromentries')
+
+const map = new Map([ [ 'a', 1 ], [ 'b', 2 ], [ 'c', 3 ] ])
+const obj = fromEntries(map)
+console.log(obj) // { a: 1, b: 2, c: 3 }
+
+const searchParams = new URLSearchParams('foo=bar&baz=qux')
+const obj2 = fromEntries(searchParams)
+console.log(obj2) // { foo: 'bar', baz: 'qux' }
+```
+
+## What is a ponyfill?
+
+> A *ponyfill* is almost the same as a polyfill, but not quite. Instead of
+> patching functionality for older browsers, a ponyfill provides that
+> functionality as a standalone module you can use.
+
+Read more at [PonyFoo](https://ponyfoo.com/articles/polyfills-or-ponyfills).
+
+## See also
+
+- [TC39 proposal for Object.fromEntries](https://github.com/tc39/proposal-object-from-entries)
+
+## License
+
+MIT. Copyright (c) [Feross Aboukhadijeh](http://feross.org).
diff --git a/dist/node_modules/fromentries/index.d.ts b/dist/node_modules/fromentries/index.d.ts
new file mode 100644
index 0000000..c1c2b61
--- /dev/null
+++ b/dist/node_modules/fromentries/index.d.ts
@@ -0,0 +1,3 @@
+declare function fromEntries<T = any>(entries: Iterable<readonly [PropertyKey, T]>): { [k: string]: T };
+
+export = fromEntries;
diff --git a/dist/node_modules/fromentries/index.js b/dist/node_modules/fromentries/index.js
new file mode 100644
index 0000000..bd03bf3
--- /dev/null
+++ b/dist/node_modules/fromentries/index.js
@@ -0,0 +1,7 @@
+/*! fromentries. MIT License. Feross Aboukhadijeh */
+module.exports = function fromEntries (iterable) {
+  return [...iterable].reduce((obj, [key, val]) => {
+    obj[key] = val
+    return obj
+  }, {})
+}
diff --git a/dist/node_modules/fromentries/package.json b/dist/node_modules/fromentries/package.json
new file mode 100644
index 0000000..8cca723
--- /dev/null
+++ b/dist/node_modules/fromentries/package.json
@@ -0,0 +1,58 @@
+{
+  "name": "fromentries",
+  "description": "Object.fromEntries() ponyfill (in 6 lines)",
+  "version": "1.3.2",
+  "author": {
+    "name": "Feross Aboukhadijeh",
+    "email": "feross@feross.org",
+    "url": "https://feross.org"
+  },
+  "bugs": {
+    "url": "https://github.com/feross/fromentries/issues"
+  },
+  "devDependencies": {
+    "standard": "*",
+    "tape": "^5.0.1"
+  },
+  "homepage": "https://github.com/feross/fromentries",
+  "keywords": [
+    "Object.fromEntries",
+    "Object.entries",
+    "Object.values",
+    "Object.keys",
+    "entries",
+    "values",
+    "fromEntries",
+    "ES7",
+    "ES8",
+    "shim",
+    "object",
+    "keys",
+    "polyfill",
+    "ponyfill"
+  ],
+  "license": "MIT",
+  "main": "index.js",
+  "types": "index.d.ts",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/feross/fromentries.git"
+  },
+  "scripts": {
+    "test": "standard && tape test/**/*.js"
+  },
+  "funding": [
+    {
+      "type": "github",
+      "url": "https://github.com/sponsors/feross"
+    },
+    {
+      "type": "patreon",
+      "url": "https://www.patreon.com/feross"
+    },
+    {
+      "type": "consulting",
+      "url": "https://feross.org/support"
+    }
+  ]
+}
diff --git a/dist/node_modules/indent-string/index.d.ts b/dist/node_modules/indent-string/index.d.ts
new file mode 100644
index 0000000..1185231
--- /dev/null
+++ b/dist/node_modules/indent-string/index.d.ts
@@ -0,0 +1,42 @@
+declare namespace indentString {
+	interface Options {
+		/**
+		The string to use for the indent.
+
+		@default ' '
+		*/
+		readonly indent?: string;
+
+		/**
+		Also indent empty lines.
+
+		@default false
+		*/
+		readonly includeEmptyLines?: boolean;
+	}
+}
+
+/**
+Indent each line in a string.
+
+@param string - The string to indent.
+@param count - How many times you want `options.indent` repeated. Default: `1`.
+ +@example +``` +import indentString = require('indent-string'); + +indentString('Unicorns\nRainbows', 4); +//=> ' Unicorns\n Rainbows' + +indentString('Unicorns\nRainbows', 4, {indent: '♥'}); +//=> '♥♥♥♥Unicorns\n♥♥♥♥Rainbows' +``` +*/ +declare function indentString( + string: string, + count?: number, + options?: indentString.Options +): string; + +export = indentString; diff --git a/dist/node_modules/indent-string/index.js b/dist/node_modules/indent-string/index.js new file mode 100644 index 0000000..e1ab804 --- /dev/null +++ b/dist/node_modules/indent-string/index.js @@ -0,0 +1,35 @@ +'use strict'; + +module.exports = (string, count = 1, options) => { + options = { + indent: ' ', + includeEmptyLines: false, + ...options + }; + + if (typeof string !== 'string') { + throw new TypeError( + `Expected \`input\` to be a \`string\`, got \`${typeof string}\`` + ); + } + + if (typeof count !== 'number') { + throw new TypeError( + `Expected \`count\` to be a \`number\`, got \`${typeof count}\`` + ); + } + + if (typeof options.indent !== 'string') { + throw new TypeError( + `Expected \`options.indent\` to be a \`string\`, got \`${typeof options.indent}\`` + ); + } + + if (count === 0) { + return string; + } + + const regex = options.includeEmptyLines ? /^/gm : /^(?!\s*$)/gm; + + return string.replace(regex, options.indent.repeat(count)); +}; diff --git a/dist/node_modules/indent-string/license b/dist/node_modules/indent-string/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/dist/node_modules/indent-string/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/dist/node_modules/indent-string/package.json b/dist/node_modules/indent-string/package.json new file mode 100644 index 0000000..497bb83 --- /dev/null +++ b/dist/node_modules/indent-string/package.json @@ -0,0 +1,37 @@ +{ + "name": "indent-string", + "version": "4.0.0", + "description": "Indent each line in a string", + "license": "MIT", + "repository": "sindresorhus/indent-string", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "indent", + "string", + "pad", + "align", + "line", + "text", + "each", + "every" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/dist/node_modules/indent-string/readme.md b/dist/node_modules/indent-string/readme.md new file mode 100644 index 0000000..49967de --- /dev/null +++ b/dist/node_modules/indent-string/readme.md @@ -0,0 +1,70 @@ +# indent-string [![Build Status](https://travis-ci.org/sindresorhus/indent-string.svg?branch=master)](https://travis-ci.org/sindresorhus/indent-string) + +> Indent each line in a string + + +## Install + +``` +$ npm install indent-string +``` + + +## Usage + +```js +const indentString = require('indent-string'); + +indentString('Unicorns\nRainbows', 4); +//=> ' Unicorns\n Rainbows' + +indentString('Unicorns\nRainbows', 4, {indent: '♥'}); +//=> '♥♥♥♥Unicorns\n♥♥♥♥Rainbows' +``` + + +## API + +### indentString(string, [count], [options]) + +#### string + +Type: `string` + +The string to indent. + +#### count + +Type: `number`
+Default: `1` + +How many times you want `options.indent` repeated. + +#### options + +Type: `object` + +##### indent + +Type: `string`
+Default: `' '` + +The string to use for the indent. + +##### includeEmptyLines + +Type: `boolean`
+Default: `false`
+
+Also indent empty lines.
+
+
+## Related
+
+- [indent-string-cli](https://github.com/sindresorhus/indent-string-cli) - CLI for this module
+- [strip-indent](https://github.com/sindresorhus/strip-indent) - Strip leading whitespace from every line in a string
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/dist/node_modules/is-plain-object/LICENSE b/dist/node_modules/is-plain-object/LICENSE
new file mode 100644
index 0000000..3f2eca1
--- /dev/null
+++ b/dist/node_modules/is-plain-object/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014-2017, Jon Schlinkert.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/dist/node_modules/is-plain-object/README.md b/dist/node_modules/is-plain-object/README.md
new file mode 100644
index 0000000..5c074ab
--- /dev/null
+++ b/dist/node_modules/is-plain-object/README.md
@@ -0,0 +1,125 @@
+# is-plain-object [![NPM version](https://img.shields.io/npm/v/is-plain-object.svg?style=flat)](https://www.npmjs.com/package/is-plain-object) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-plain-object.svg?style=flat)](https://npmjs.org/package/is-plain-object) [![NPM total downloads](https://img.shields.io/npm/dt/is-plain-object.svg?style=flat)](https://npmjs.org/package/is-plain-object) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-plain-object.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-plain-object)
+
+> Returns true if an object was created by the `Object` constructor, or Object.create(null).
+
+Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.
+
+## Install
+
+Install with [npm](https://www.npmjs.com/):
+
+```sh
+$ npm install --save is-plain-object
+```
+
+Use [isobject](https://github.com/jonschlinkert/isobject) if you only want to check if the value is an object and not an array or null.
+
+## Usage
+
+with es modules
+```js
+import { isPlainObject } from 'is-plain-object';
+```
+
+or with commonjs
+```js
+const { isPlainObject } = require('is-plain-object');
+```
+
+**true** when created by the `Object` constructor, or Object.create(null).
+
+```js
+isPlainObject(Object.create({}));
+//=> true
+isPlainObject(Object.create(Object.prototype));
+//=> true
+isPlainObject({foo: 'bar'});
+//=> true
+isPlainObject({});
+//=> true
+isPlainObject(Object.create(null));
+//=> true
+```
+
+**false** when not created by the `Object` constructor.
+
+```js
+isPlainObject(1);
+//=> false
+isPlainObject(['foo', 'bar']);
+//=> false
+isPlainObject([]);
+//=> false
+isPlainObject(new Foo);
+//=> false
+isPlainObject(null);
+//=> false
+```
+
+## About
+
+
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+ +
+
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+ +
+
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+ +### Related projects + +You might also be interested in these projects: + +* [is-number](https://www.npmjs.com/package/is-number): Returns true if a number or string value is a finite number. Useful for regex… [more](https://github.com/jonschlinkert/is-number) | [homepage](https://github.com/jonschlinkert/is-number "Returns true if a number or string value is a finite number. Useful for regex matches, parsing, user input, etc.") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 19 | [jonschlinkert](https://github.com/jonschlinkert) | +| 6 | [TrySound](https://github.com/TrySound) | +| 6 | [stevenvachon](https://github.com/stevenvachon) | +| 3 | [onokumus](https://github.com/onokumus) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 28, 2019._ diff --git a/dist/node_modules/is-plain-object/dist/is-plain-object.js b/dist/node_modules/is-plain-object/dist/is-plain-object.js new file mode 100644 index 0000000..d134e4f --- /dev/null +++ b/dist/node_modules/is-plain-object/dist/is-plain-object.js @@ -0,0 +1,38 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +/*! + * is-plain-object + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +function isObject(o) { + return Object.prototype.toString.call(o) === '[object Object]'; +} + +function isPlainObject(o) { + var ctor,prot; + + if (isObject(o) === false) return false; + + // If has modified constructor + ctor = o.constructor; + if (ctor === undefined) return true; + + // If has modified prototype + prot = ctor.prototype; + if (isObject(prot) === false) return false; + + // If constructor does not have an Object-specific method + if (prot.hasOwnProperty('isPrototypeOf') === false) { + return false; + } + + // Most likely a plain Object + return true; +} + +exports.isPlainObject = isPlainObject; diff --git a/dist/node_modules/is-plain-object/dist/is-plain-object.mjs b/dist/node_modules/is-plain-object/dist/is-plain-object.mjs new file mode 100644 index 0000000..c2d9f35 --- /dev/null +++ b/dist/node_modules/is-plain-object/dist/is-plain-object.mjs @@ -0,0 +1,34 @@ +/*! + * is-plain-object + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +function isObject(o) { + return Object.prototype.toString.call(o) === '[object Object]'; +} + +function isPlainObject(o) { + var ctor,prot; + + if (isObject(o) === false) return false; + + // If has modified constructor + ctor = o.constructor; + if (ctor === undefined) return true; + + // If has modified prototype + prot = ctor.prototype; + if (isObject(prot) === false) return false; + + // If constructor does not have an Object-specific method + if (prot.hasOwnProperty('isPrototypeOf') === false) { + return false; + } + + // Most likely a plain Object + return true; +} + +export { isPlainObject }; diff --git a/dist/node_modules/is-plain-object/is-plain-object.d.ts b/dist/node_modules/is-plain-object/is-plain-object.d.ts new file mode 100644 index 0000000..a359940 --- /dev/null +++ b/dist/node_modules/is-plain-object/is-plain-object.d.ts @@ -0,0 +1 @@ +export function isPlainObject(o: any): boolean; diff --git a/dist/node_modules/is-plain-object/package.json b/dist/node_modules/is-plain-object/package.json new file mode 100644 index 0000000..3ea169a --- /dev/null +++ b/dist/node_modules/is-plain-object/package.json @@ -0,0 +1,85 @@ +{ + "name": "is-plain-object", + "description": "Returns true if an object was created by the `Object` constructor, or Object.create(null).", + "version": "5.0.0", + "homepage": "https://github.com/jonschlinkert/is-plain-object", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Osman Nuri Okumuş (http://onokumus.com)", + "Steven Vachon (https://svachon.com)", + "(https://github.com/wtgtybhertgeghgtwtg)", + "Bogdan Chadkin (https://github.com/TrySound)" + ], + "repository": "jonschlinkert/is-plain-object", + "bugs": { + "url": "https://github.com/jonschlinkert/is-plain-object/issues" + }, + "license": "MIT", + "main": "dist/is-plain-object.js", + "module": "dist/is-plain-object.mjs", + "types": "is-plain-object.d.ts", + "files": [ + "is-plain-object.d.ts", + "dist" + ], + "exports": { + ".": { + "import": "./dist/is-plain-object.mjs", + "require": "./dist/is-plain-object.js" + }, + "./package.json": "./package.json" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "build": "rollup -c", + "test_browser": "mocha-headless-chrome --args=disable-web-security -f test/browser.html", + "test_node": "mocha -r esm", + "test": "npm run test_node && npm run build && npm run test_browser", + "prepare": "rollup -c" + }, + "devDependencies": { + "chai": "^4.2.0", + "esm": "^3.2.22", + "gulp-format-md": "^1.0.0", + "mocha": "^6.1.4", + "mocha-headless-chrome": "^3.1.0", + "rollup": "^2.22.1" + }, + "keywords": [ + "check", + "is", + "is-object", + "isobject", + "javascript", + "kind", + "kind-of", + "object", + "plain", + "type", + "typeof", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "is-number", + "isobject", + "kind-of" + ] + }, + "lint": { + "reflinks": true + } + } +} diff --git a/dist/node_modules/jsonwebtoken/LICENSE b/dist/node_modules/jsonwebtoken/LICENSE new file mode 100644 index 0000000..bcd1854 --- /dev/null +++ b/dist/node_modules/jsonwebtoken/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Auth0, Inc. 
(http://auth0.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/dist/node_modules/jsonwebtoken/README.md b/dist/node_modules/jsonwebtoken/README.md new file mode 100644 index 0000000..4e20dd9 --- /dev/null +++ b/dist/node_modules/jsonwebtoken/README.md @@ -0,0 +1,396 @@ +# jsonwebtoken + +| **Build** | **Dependency** | +|-----------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------| +| [![Build Status](https://secure.travis-ci.org/auth0/node-jsonwebtoken.svg?branch=master)](http://travis-ci.org/auth0/node-jsonwebtoken) | [![Dependency Status](https://david-dm.org/auth0/node-jsonwebtoken.svg)](https://david-dm.org/auth0/node-jsonwebtoken) | + + +An implementation of [JSON Web Tokens](https://tools.ietf.org/html/rfc7519). + +This was developed against `draft-ietf-oauth-json-web-token-08`. It makes use of [node-jws](https://github.com/brianloveswords/node-jws) + +# Install + +```bash +$ npm install jsonwebtoken +``` + +# Migration notes + +* [From v8 to v9](https://github.com/auth0/node-jsonwebtoken/wiki/Migration-Notes:-v8-to-v9) +* [From v7 to v8](https://github.com/auth0/node-jsonwebtoken/wiki/Migration-Notes:-v7-to-v8) + +# Usage + +### jwt.sign(payload, secretOrPrivateKey, [options, callback]) + +(Asynchronous) If a callback is supplied, the callback is called with the `err` or the JWT. + +(Synchronous) Returns the JsonWebToken as string + +`payload` could be an object literal, buffer or string representing valid JSON. +> **Please _note_ that** `exp` or any other claim is only set if the payload is an object literal. Buffer or string payloads are not checked for JSON validity. + +> If `payload` is not a buffer or a string, it will be coerced into a string using `JSON.stringify`. + +`secretOrPrivateKey` is a string (utf-8 encoded), buffer, object, or KeyObject containing either the secret for HMAC algorithms or the PEM +encoded private key for RSA and ECDSA. In case of a private key with passphrase an object `{ key, passphrase }` can be used (based on [crypto documentation](https://nodejs.org/api/crypto.html#crypto_sign_sign_private_key_output_format)), in this case be sure you pass the `algorithm` option. +When signing with RSA algorithms the minimum modulus length is 2048 except when the allowInsecureKeySizes option is set to true. Private keys below this size will be rejected with an error. 
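+
+For example, a minimal sketch of the `{ key, passphrase }` form described above (the key file name, passphrase and RSA algorithm here are placeholders, not defaults):
+
+```js
+var fs = require('fs');
+var jwt = require('jsonwebtoken');
+
+// encrypted PEM private key; pass the algorithm explicitly when using { key, passphrase }
+var token = jwt.sign(
+  { foo: 'bar' },
+  { key: fs.readFileSync('encrypted-private.pem'), passphrase: 'key-passphrase' },
+  { algorithm: 'RS256' }
+);
+```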
+ +`options`: + +* `algorithm` (default: `HS256`) +* `expiresIn`: expressed in seconds or a string describing a time span [vercel/ms](https://github.com/vercel/ms). + > Eg: `60`, `"2 days"`, `"10h"`, `"7d"`. A numeric value is interpreted as a seconds count. If you use a string be sure you provide the time units (days, hours, etc), otherwise milliseconds unit is used by default (`"120"` is equal to `"120ms"`). +* `notBefore`: expressed in seconds or a string describing a time span [vercel/ms](https://github.com/vercel/ms). + > Eg: `60`, `"2 days"`, `"10h"`, `"7d"`. A numeric value is interpreted as a seconds count. If you use a string be sure you provide the time units (days, hours, etc), otherwise milliseconds unit is used by default (`"120"` is equal to `"120ms"`). +* `audience` +* `issuer` +* `jwtid` +* `subject` +* `noTimestamp` +* `header` +* `keyid` +* `mutatePayload`: if true, the sign function will modify the payload object directly. This is useful if you need a raw reference to the payload after claims have been applied to it but before it has been encoded into a token. +* `allowInsecureKeySizes`: if true allows private keys with a modulus below 2048 to be used for RSA +* `allowInvalidAsymmetricKeyTypes`: if true, allows asymmetric keys which do not match the specified algorithm. This option is intended only for backwards compatability and should be avoided. + + + +> There are no default values for `expiresIn`, `notBefore`, `audience`, `subject`, `issuer`. These claims can also be provided in the payload directly with `exp`, `nbf`, `aud`, `sub` and `iss` respectively, but you **_can't_** include in both places. + +Remember that `exp`, `nbf` and `iat` are **NumericDate**, see related [Token Expiration (exp claim)](#token-expiration-exp-claim) + + +The header can be customized via the `options.header` object. + +Generated jwts will include an `iat` (issued at) claim by default unless `noTimestamp` is specified. If `iat` is inserted in the payload, it will be used instead of the real timestamp for calculating other things like `exp` given a timespan in `options.expiresIn`. + +Synchronous Sign with default (HMAC SHA256) + +```js +var jwt = require('jsonwebtoken'); +var token = jwt.sign({ foo: 'bar' }, 'shhhhh'); +``` + +Synchronous Sign with RSA SHA256 +```js +// sign with RSA SHA256 +var privateKey = fs.readFileSync('private.key'); +var token = jwt.sign({ foo: 'bar' }, privateKey, { algorithm: 'RS256' }); +``` + +Sign asynchronously +```js +jwt.sign({ foo: 'bar' }, privateKey, { algorithm: 'RS256' }, function(err, token) { + console.log(token); +}); +``` + +Backdate a jwt 30 seconds +```js +var older_token = jwt.sign({ foo: 'bar', iat: Math.floor(Date.now() / 1000) - 30 }, 'shhhhh'); +``` + +#### Token Expiration (exp claim) + +The standard for JWT defines an `exp` claim for expiration. The expiration is represented as a **NumericDate**: + +> A JSON numeric value representing the number of seconds from 1970-01-01T00:00:00Z UTC until the specified UTC date/time, ignoring leap seconds. This is equivalent to the IEEE Std 1003.1, 2013 Edition [POSIX.1] definition "Seconds Since the Epoch", in which each day is accounted for by exactly 86400 seconds, other than that non-integer values can be represented. See RFC 3339 [RFC3339] for details regarding date/times in general and UTC in particular. + +This means that the `exp` field should contain the number of seconds since the epoch. 
+ +Signing a token with 1 hour of expiration: + +```javascript +jwt.sign({ + exp: Math.floor(Date.now() / 1000) + (60 * 60), + data: 'foobar' +}, 'secret'); +``` + +Another way to generate a token like this with this library is: + +```javascript +jwt.sign({ + data: 'foobar' +}, 'secret', { expiresIn: 60 * 60 }); + +//or even better: + +jwt.sign({ + data: 'foobar' +}, 'secret', { expiresIn: '1h' }); +``` + +### jwt.verify(token, secretOrPublicKey, [options, callback]) + +(Asynchronous) If a callback is supplied, function acts asynchronously. The callback is called with the decoded payload if the signature is valid and optional expiration, audience, or issuer are valid. If not, it will be called with the error. + +(Synchronous) If a callback is not supplied, function acts synchronously. Returns the payload decoded if the signature is valid and optional expiration, audience, or issuer are valid. If not, it will throw the error. + +> __Warning:__ When the token comes from an untrusted source (e.g. user input or external requests), the returned decoded payload should be treated like any other user input; please make sure to sanitize and only work with properties that are expected + +`token` is the JsonWebToken string + +`secretOrPublicKey` is a string (utf-8 encoded), buffer, or KeyObject containing either the secret for HMAC algorithms, or the PEM +encoded public key for RSA and ECDSA. +If `jwt.verify` is called asynchronous, `secretOrPublicKey` can be a function that should fetch the secret or public key. See below for a detailed example + +As mentioned in [this comment](https://github.com/auth0/node-jsonwebtoken/issues/208#issuecomment-231861138), there are other libraries that expect base64 encoded secrets (random bytes encoded using base64), if that is your case you can pass `Buffer.from(secret, 'base64')`, by doing this the secret will be decoded using base64 and the token verification will use the original random bytes. + +`options` + +* `algorithms`: List of strings with the names of the allowed algorithms. For instance, `["HS256", "HS384"]`. + > If not specified a defaults will be used based on the type of key provided + > * secret - ['HS256', 'HS384', 'HS512'] + > * rsa - ['RS256', 'RS384', 'RS512'] + > * ec - ['ES256', 'ES384', 'ES512'] + > * default - ['RS256', 'RS384', 'RS512'] +* `audience`: if you want to check audience (`aud`), provide a value here. The audience can be checked against a string, a regular expression or a list of strings and/or regular expressions. + > Eg: `"urn:foo"`, `/urn:f[o]{2}/`, `[/urn:f[o]{2}/, "urn:bar"]` +* `complete`: return an object with the decoded `{ payload, header, signature }` instead of only the usual content of the payload. +* `issuer` (optional): string or array of strings of valid values for the `iss` field. +* `jwtid` (optional): if you want to check JWT ID (`jti`), provide a string value here. +* `ignoreExpiration`: if `true` do not validate the expiration of the token. +* `ignoreNotBefore`... +* `subject`: if you want to check subject (`sub`), provide a value here +* `clockTolerance`: number of seconds to tolerate when checking the `nbf` and `exp` claims, to deal with small clock differences among different servers +* `maxAge`: the maximum allowed age for tokens to still be valid. It is expressed in seconds or a string describing a time span [vercel/ms](https://github.com/vercel/ms). + > Eg: `1000`, `"2 days"`, `"10h"`, `"7d"`. A numeric value is interpreted as a seconds count. 
If you use a string be sure you provide the time units (days, hours, etc), otherwise milliseconds unit is used by default (`"120"` is equal to `"120ms"`). +* `clockTimestamp`: the time in seconds that should be used as the current time for all necessary comparisons. +* `nonce`: if you want to check `nonce` claim, provide a string value here. It is used on Open ID for the ID Tokens. ([Open ID implementation notes](https://openid.net/specs/openid-connect-core-1_0.html#NonceNotes)) +* `allowInvalidAsymmetricKeyTypes`: if true, allows asymmetric keys which do not match the specified algorithm. This option is intended only for backwards compatability and should be avoided. + +```js +// verify a token symmetric - synchronous +var decoded = jwt.verify(token, 'shhhhh'); +console.log(decoded.foo) // bar + +// verify a token symmetric +jwt.verify(token, 'shhhhh', function(err, decoded) { + console.log(decoded.foo) // bar +}); + +// invalid token - synchronous +try { + var decoded = jwt.verify(token, 'wrong-secret'); +} catch(err) { + // err +} + +// invalid token +jwt.verify(token, 'wrong-secret', function(err, decoded) { + // err + // decoded undefined +}); + +// verify a token asymmetric +var cert = fs.readFileSync('public.pem'); // get public key +jwt.verify(token, cert, function(err, decoded) { + console.log(decoded.foo) // bar +}); + +// verify audience +var cert = fs.readFileSync('public.pem'); // get public key +jwt.verify(token, cert, { audience: 'urn:foo' }, function(err, decoded) { + // if audience mismatch, err == invalid audience +}); + +// verify issuer +var cert = fs.readFileSync('public.pem'); // get public key +jwt.verify(token, cert, { audience: 'urn:foo', issuer: 'urn:issuer' }, function(err, decoded) { + // if issuer mismatch, err == invalid issuer +}); + +// verify jwt id +var cert = fs.readFileSync('public.pem'); // get public key +jwt.verify(token, cert, { audience: 'urn:foo', issuer: 'urn:issuer', jwtid: 'jwtid' }, function(err, decoded) { + // if jwt id mismatch, err == invalid jwt id +}); + +// verify subject +var cert = fs.readFileSync('public.pem'); // get public key +jwt.verify(token, cert, { audience: 'urn:foo', issuer: 'urn:issuer', jwtid: 'jwtid', subject: 'subject' }, function(err, decoded) { + // if subject mismatch, err == invalid subject +}); + +// alg mismatch +var cert = fs.readFileSync('public.pem'); // get public key +jwt.verify(token, cert, { algorithms: ['RS256'] }, function (err, payload) { + // if token alg != RS256, err == invalid signature +}); + +// Verify using getKey callback +// Example uses https://github.com/auth0/node-jwks-rsa as a way to fetch the keys. +var jwksClient = require('jwks-rsa'); +var client = jwksClient({ + jwksUri: 'https://sandrino.auth0.com/.well-known/jwks.json' +}); +function getKey(header, callback){ + client.getSigningKey(header.kid, function(err, key) { + var signingKey = key.publicKey || key.rsaPublicKey; + callback(null, signingKey); + }); +} + +jwt.verify(token, getKey, options, function(err, decoded) { + console.log(decoded.foo) // bar +}); + +``` + +
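+
+For completeness, a sketch combining a few of the options listed above (the key file, algorithm and limits are only illustrative values):
+
+```js
+// tolerate 5 seconds of clock skew, reject tokens older than 1 hour,
+// and return { header, payload, signature } instead of just the payload
+var cert = fs.readFileSync('public.pem'); // get public key
+jwt.verify(token, cert, { algorithms: ['RS256'], clockTolerance: 5, maxAge: '1h', complete: true }, function (err, decoded) {
+  if (!err) {
+    console.log(decoded.payload.foo) // bar
+  }
+});
+```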
+
+**Need to peek into a JWT without verifying it?**
+
+### jwt.decode(token [, options])
+
+(Synchronous) Returns the decoded payload without verifying if the signature is valid.
+
+> __Warning:__ This will __not__ verify whether the signature is valid. You should __not__ use this for untrusted messages. You most likely want to use `jwt.verify` instead.
+
+> __Warning:__ When the token comes from an untrusted source (e.g. user input or external request), the returned decoded payload should be treated like any other user input; please make sure to sanitize and only work with properties that are expected
+
+
+`token` is the JsonWebToken string
+
+`options`:
+
+* `json`: force JSON.parse on the payload even if the header doesn't contain `"typ":"JWT"`.
+* `complete`: return an object with the decoded payload and header.
+
+Example
+
+```js
+// get the decoded payload ignoring signature, no secretOrPrivateKey needed
+var decoded = jwt.decode(token);
+
+// get the decoded payload and header
+var decoded = jwt.decode(token, {complete: true});
+console.log(decoded.header);
+console.log(decoded.payload)
+```
+
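+A short sketch of the `json` option described above (the token value is whatever string you already have):
+
+```js
+// force JSON.parse on the payload even if the header lacks `"typ": "JWT"`
+var decoded = jwt.decode(token, { json: true });
+```
+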
+ +## Errors & Codes +Possible thrown errors during verification. +Error is the first argument of the verification callback. + +### TokenExpiredError + +Thrown error if the token is expired. + +Error object: + +* name: 'TokenExpiredError' +* message: 'jwt expired' +* expiredAt: [ExpDate] + +```js +jwt.verify(token, 'shhhhh', function(err, decoded) { + if (err) { + /* + err = { + name: 'TokenExpiredError', + message: 'jwt expired', + expiredAt: 1408621000 + } + */ + } +}); +``` + +### JsonWebTokenError +Error object: + +* name: 'JsonWebTokenError' +* message: + * 'invalid token' - the header or payload could not be parsed + * 'jwt malformed' - the token does not have three components (delimited by a `.`) + * 'jwt signature is required' + * 'invalid signature' + * 'jwt audience invalid. expected: [OPTIONS AUDIENCE]' + * 'jwt issuer invalid. expected: [OPTIONS ISSUER]' + * 'jwt id invalid. expected: [OPTIONS JWT ID]' + * 'jwt subject invalid. expected: [OPTIONS SUBJECT]' + +```js +jwt.verify(token, 'shhhhh', function(err, decoded) { + if (err) { + /* + err = { + name: 'JsonWebTokenError', + message: 'jwt malformed' + } + */ + } +}); +``` + +### NotBeforeError +Thrown if current time is before the nbf claim. + +Error object: + +* name: 'NotBeforeError' +* message: 'jwt not active' +* date: 2018-10-04T16:10:44.000Z + +```js +jwt.verify(token, 'shhhhh', function(err, decoded) { + if (err) { + /* + err = { + name: 'NotBeforeError', + message: 'jwt not active', + date: 2018-10-04T16:10:44.000Z + } + */ + } +}); +``` + + +## Algorithms supported + +Array of supported algorithms. The following algorithms are currently supported. + +| alg Parameter Value | Digital Signature or MAC Algorithm | +|---------------------|------------------------------------------------------------------------| +| HS256 | HMAC using SHA-256 hash algorithm | +| HS384 | HMAC using SHA-384 hash algorithm | +| HS512 | HMAC using SHA-512 hash algorithm | +| RS256 | RSASSA-PKCS1-v1_5 using SHA-256 hash algorithm | +| RS384 | RSASSA-PKCS1-v1_5 using SHA-384 hash algorithm | +| RS512 | RSASSA-PKCS1-v1_5 using SHA-512 hash algorithm | +| PS256 | RSASSA-PSS using SHA-256 hash algorithm (only node ^6.12.0 OR >=8.0.0) | +| PS384 | RSASSA-PSS using SHA-384 hash algorithm (only node ^6.12.0 OR >=8.0.0) | +| PS512 | RSASSA-PSS using SHA-512 hash algorithm (only node ^6.12.0 OR >=8.0.0) | +| ES256 | ECDSA using P-256 curve and SHA-256 hash algorithm | +| ES384 | ECDSA using P-384 curve and SHA-384 hash algorithm | +| ES512 | ECDSA using P-521 curve and SHA-512 hash algorithm | +| none | No digital signature or MAC value included | + +## Refreshing JWTs + +First of all, we recommend you to think carefully if auto-refreshing a JWT will not introduce any vulnerability in your system. + +We are not comfortable including this as part of the library, however, you can take a look at [this example](https://gist.github.com/ziluvatar/a3feb505c4c0ec37059054537b38fc48) to show how this could be accomplished. +Apart from that example there are [an issue](https://github.com/auth0/node-jsonwebtoken/issues/122) and [a pull request](https://github.com/auth0/node-jsonwebtoken/pull/172) to get more knowledge about this topic. + +# TODO + +* X.509 certificate chain is not checked + +## Issue Reporting + +If you have found a bug or if you have a feature request, please report them at this repository issues section. Please do not report security vulnerabilities on the public GitHub issue tracker. 
The [Responsible Disclosure Program](https://auth0.com/whitehat) details the procedure for disclosing security issues. + +## Author + +[Auth0](https://auth0.com) + +## License + +This project is licensed under the MIT license. See the [LICENSE](LICENSE) file for more info. diff --git a/dist/node_modules/jsonwebtoken/decode.js b/dist/node_modules/jsonwebtoken/decode.js new file mode 100644 index 0000000..8fe1adc --- /dev/null +++ b/dist/node_modules/jsonwebtoken/decode.js @@ -0,0 +1,30 @@ +var jws = require('jws'); + +module.exports = function (jwt, options) { + options = options || {}; + var decoded = jws.decode(jwt, options); + if (!decoded) { return null; } + var payload = decoded.payload; + + //try parse the payload + if(typeof payload === 'string') { + try { + var obj = JSON.parse(payload); + if(obj !== null && typeof obj === 'object') { + payload = obj; + } + } catch (e) { } + } + + //return header if `complete` option is enabled. header includes claims + //such as `kid` and `alg` used to select the key within a JWKS needed to + //verify the signature + if (options.complete === true) { + return { + header: decoded.header, + payload: payload, + signature: decoded.signature + }; + } + return payload; +}; diff --git a/dist/node_modules/jsonwebtoken/index.js b/dist/node_modules/jsonwebtoken/index.js new file mode 100644 index 0000000..161eb2d --- /dev/null +++ b/dist/node_modules/jsonwebtoken/index.js @@ -0,0 +1,8 @@ +module.exports = { + decode: require('./decode'), + verify: require('./verify'), + sign: require('./sign'), + JsonWebTokenError: require('./lib/JsonWebTokenError'), + NotBeforeError: require('./lib/NotBeforeError'), + TokenExpiredError: require('./lib/TokenExpiredError'), +}; diff --git a/dist/node_modules/jsonwebtoken/lib/JsonWebTokenError.js b/dist/node_modules/jsonwebtoken/lib/JsonWebTokenError.js new file mode 100644 index 0000000..e068222 --- /dev/null +++ b/dist/node_modules/jsonwebtoken/lib/JsonWebTokenError.js @@ -0,0 +1,14 @@ +var JsonWebTokenError = function (message, error) { + Error.call(this, message); + if(Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = 'JsonWebTokenError'; + this.message = message; + if (error) this.inner = error; +}; + +JsonWebTokenError.prototype = Object.create(Error.prototype); +JsonWebTokenError.prototype.constructor = JsonWebTokenError; + +module.exports = JsonWebTokenError; diff --git a/dist/node_modules/jsonwebtoken/lib/NotBeforeError.js b/dist/node_modules/jsonwebtoken/lib/NotBeforeError.js new file mode 100644 index 0000000..7b30084 --- /dev/null +++ b/dist/node_modules/jsonwebtoken/lib/NotBeforeError.js @@ -0,0 +1,13 @@ +var JsonWebTokenError = require('./JsonWebTokenError'); + +var NotBeforeError = function (message, date) { + JsonWebTokenError.call(this, message); + this.name = 'NotBeforeError'; + this.date = date; +}; + +NotBeforeError.prototype = Object.create(JsonWebTokenError.prototype); + +NotBeforeError.prototype.constructor = NotBeforeError; + +module.exports = NotBeforeError; \ No newline at end of file diff --git a/dist/node_modules/jsonwebtoken/lib/TokenExpiredError.js b/dist/node_modules/jsonwebtoken/lib/TokenExpiredError.js new file mode 100644 index 0000000..abb704f --- /dev/null +++ b/dist/node_modules/jsonwebtoken/lib/TokenExpiredError.js @@ -0,0 +1,13 @@ +var JsonWebTokenError = require('./JsonWebTokenError'); + +var TokenExpiredError = function (message, expiredAt) { + JsonWebTokenError.call(this, message); + this.name = 'TokenExpiredError'; + this.expiredAt = 
expiredAt; +}; + +TokenExpiredError.prototype = Object.create(JsonWebTokenError.prototype); + +TokenExpiredError.prototype.constructor = TokenExpiredError; + +module.exports = TokenExpiredError; \ No newline at end of file diff --git a/dist/node_modules/jsonwebtoken/lib/asymmetricKeyDetailsSupported.js b/dist/node_modules/jsonwebtoken/lib/asymmetricKeyDetailsSupported.js new file mode 100644 index 0000000..a6ede56 --- /dev/null +++ b/dist/node_modules/jsonwebtoken/lib/asymmetricKeyDetailsSupported.js @@ -0,0 +1,3 @@ +const semver = require('semver'); + +module.exports = semver.satisfies(process.version, '>=15.7.0'); diff --git a/dist/node_modules/jsonwebtoken/lib/psSupported.js b/dist/node_modules/jsonwebtoken/lib/psSupported.js new file mode 100644 index 0000000..8c04144 --- /dev/null +++ b/dist/node_modules/jsonwebtoken/lib/psSupported.js @@ -0,0 +1,3 @@ +var semver = require('semver'); + +module.exports = semver.satisfies(process.version, '^6.12.0 || >=8.0.0'); diff --git a/dist/node_modules/jsonwebtoken/lib/rsaPssKeyDetailsSupported.js b/dist/node_modules/jsonwebtoken/lib/rsaPssKeyDetailsSupported.js new file mode 100644 index 0000000..7fcf368 --- /dev/null +++ b/dist/node_modules/jsonwebtoken/lib/rsaPssKeyDetailsSupported.js @@ -0,0 +1,3 @@ +const semver = require('semver'); + +module.exports = semver.satisfies(process.version, '>=16.9.0'); diff --git a/dist/node_modules/jsonwebtoken/lib/timespan.js b/dist/node_modules/jsonwebtoken/lib/timespan.js new file mode 100644 index 0000000..e509869 --- /dev/null +++ b/dist/node_modules/jsonwebtoken/lib/timespan.js @@ -0,0 +1,18 @@ +var ms = require('ms'); + +module.exports = function (time, iat) { + var timestamp = iat || Math.floor(Date.now() / 1000); + + if (typeof time === 'string') { + var milliseconds = ms(time); + if (typeof milliseconds === 'undefined') { + return; + } + return Math.floor(timestamp + milliseconds / 1000); + } else if (typeof time === 'number') { + return timestamp + time; + } else { + return; + } + +}; \ No newline at end of file diff --git a/dist/node_modules/jsonwebtoken/lib/validateAsymmetricKey.js b/dist/node_modules/jsonwebtoken/lib/validateAsymmetricKey.js new file mode 100644 index 0000000..c10340b --- /dev/null +++ b/dist/node_modules/jsonwebtoken/lib/validateAsymmetricKey.js @@ -0,0 +1,66 @@ +const ASYMMETRIC_KEY_DETAILS_SUPPORTED = require('./asymmetricKeyDetailsSupported'); +const RSA_PSS_KEY_DETAILS_SUPPORTED = require('./rsaPssKeyDetailsSupported'); + +const allowedAlgorithmsForKeys = { + 'ec': ['ES256', 'ES384', 'ES512'], + 'rsa': ['RS256', 'PS256', 'RS384', 'PS384', 'RS512', 'PS512'], + 'rsa-pss': ['PS256', 'PS384', 'PS512'] +}; + +const allowedCurves = { + ES256: 'prime256v1', + ES384: 'secp384r1', + ES512: 'secp521r1', +}; + +module.exports = function(algorithm, key) { + if (!algorithm || !key) return; + + const keyType = key.asymmetricKeyType; + if (!keyType) return; + + const allowedAlgorithms = allowedAlgorithmsForKeys[keyType]; + + if (!allowedAlgorithms) { + throw new Error(`Unknown key type "${keyType}".`); + } + + if (!allowedAlgorithms.includes(algorithm)) { + throw new Error(`"alg" parameter for "${keyType}" key type must be one of: ${allowedAlgorithms.join(', ')}.`) + } + + /* + * Ignore the next block from test coverage because it gets executed + * conditionally depending on the Node version. Not ignoring it would + * prevent us from reaching the target % of coverage for versions of + * Node under 15.7.0. 
+ */ + /* istanbul ignore next */ + if (ASYMMETRIC_KEY_DETAILS_SUPPORTED) { + switch (keyType) { + case 'ec': + const keyCurve = key.asymmetricKeyDetails.namedCurve; + const allowedCurve = allowedCurves[algorithm]; + + if (keyCurve !== allowedCurve) { + throw new Error(`"alg" parameter "${algorithm}" requires curve "${allowedCurve}".`); + } + break; + + case 'rsa-pss': + if (RSA_PSS_KEY_DETAILS_SUPPORTED) { + const length = parseInt(algorithm.slice(-3), 10); + const { hashAlgorithm, mgf1HashAlgorithm, saltLength } = key.asymmetricKeyDetails; + + if (hashAlgorithm !== `sha${length}` || mgf1HashAlgorithm !== hashAlgorithm) { + throw new Error(`Invalid key for this operation, its RSA-PSS parameters do not meet the requirements of "alg" ${algorithm}.`); + } + + if (saltLength !== undefined && saltLength > length >> 3) { + throw new Error(`Invalid key for this operation, its RSA-PSS parameter saltLength does not meet the requirements of "alg" ${algorithm}.`) + } + } + break; + } + } +} diff --git a/dist/node_modules/jsonwebtoken/package.json b/dist/node_modules/jsonwebtoken/package.json new file mode 100644 index 0000000..81f78da --- /dev/null +++ b/dist/node_modules/jsonwebtoken/package.json @@ -0,0 +1,71 @@ +{ + "name": "jsonwebtoken", + "version": "9.0.2", + "description": "JSON Web Token implementation (symmetric and asymmetric)", + "main": "index.js", + "nyc": { + "check-coverage": true, + "lines": 95, + "statements": 95, + "functions": 100, + "branches": 95, + "exclude": [ + "./test/**" + ], + "reporter": [ + "json", + "lcov", + "text-summary" + ] + }, + "scripts": { + "lint": "eslint .", + "coverage": "nyc mocha --use_strict", + "test": "npm run lint && npm run coverage && cost-of-modules" + }, + "repository": { + "type": "git", + "url": "https://github.com/auth0/node-jsonwebtoken" + }, + "keywords": [ + "jwt" + ], + "author": "auth0", + "license": "MIT", + "bugs": { + "url": "https://github.com/auth0/node-jsonwebtoken/issues" + }, + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "devDependencies": { + "atob": "^2.1.2", + "chai": "^4.1.2", + "conventional-changelog": "~1.1.0", + "cost-of-modules": "^1.0.1", + "eslint": "^4.19.1", + "mocha": "^5.2.0", + "nsp": "^2.6.2", + "nyc": "^11.9.0", + "sinon": "^6.0.0" + }, + "engines": { + "npm": ">=6", + "node": ">=12" + }, + "files": [ + "lib", + "decode.js", + "sign.js", + "verify.js" + ] +} diff --git a/dist/node_modules/jsonwebtoken/sign.js b/dist/node_modules/jsonwebtoken/sign.js new file mode 100644 index 0000000..82bf526 --- /dev/null +++ b/dist/node_modules/jsonwebtoken/sign.js @@ -0,0 +1,253 @@ +const timespan = require('./lib/timespan'); +const PS_SUPPORTED = require('./lib/psSupported'); +const validateAsymmetricKey = require('./lib/validateAsymmetricKey'); +const jws = require('jws'); +const includes = require('lodash.includes'); +const isBoolean = require('lodash.isboolean'); +const isInteger = require('lodash.isinteger'); +const isNumber = require('lodash.isnumber'); +const isPlainObject = require('lodash.isplainobject'); +const isString = require('lodash.isstring'); +const once = require('lodash.once'); +const { KeyObject, createSecretKey, createPrivateKey } = require('crypto') + +const SUPPORTED_ALGS = ['RS256', 'RS384', 'RS512', 'ES256', 'ES384', 'ES512', 'HS256', 'HS384', 
'HS512', 'none']; +if (PS_SUPPORTED) { + SUPPORTED_ALGS.splice(3, 0, 'PS256', 'PS384', 'PS512'); +} + +const sign_options_schema = { + expiresIn: { isValid: function(value) { return isInteger(value) || (isString(value) && value); }, message: '"expiresIn" should be a number of seconds or string representing a timespan' }, + notBefore: { isValid: function(value) { return isInteger(value) || (isString(value) && value); }, message: '"notBefore" should be a number of seconds or string representing a timespan' }, + audience: { isValid: function(value) { return isString(value) || Array.isArray(value); }, message: '"audience" must be a string or array' }, + algorithm: { isValid: includes.bind(null, SUPPORTED_ALGS), message: '"algorithm" must be a valid string enum value' }, + header: { isValid: isPlainObject, message: '"header" must be an object' }, + encoding: { isValid: isString, message: '"encoding" must be a string' }, + issuer: { isValid: isString, message: '"issuer" must be a string' }, + subject: { isValid: isString, message: '"subject" must be a string' }, + jwtid: { isValid: isString, message: '"jwtid" must be a string' }, + noTimestamp: { isValid: isBoolean, message: '"noTimestamp" must be a boolean' }, + keyid: { isValid: isString, message: '"keyid" must be a string' }, + mutatePayload: { isValid: isBoolean, message: '"mutatePayload" must be a boolean' }, + allowInsecureKeySizes: { isValid: isBoolean, message: '"allowInsecureKeySizes" must be a boolean'}, + allowInvalidAsymmetricKeyTypes: { isValid: isBoolean, message: '"allowInvalidAsymmetricKeyTypes" must be a boolean'} +}; + +const registered_claims_schema = { + iat: { isValid: isNumber, message: '"iat" should be a number of seconds' }, + exp: { isValid: isNumber, message: '"exp" should be a number of seconds' }, + nbf: { isValid: isNumber, message: '"nbf" should be a number of seconds' } +}; + +function validate(schema, allowUnknown, object, parameterName) { + if (!isPlainObject(object)) { + throw new Error('Expected "' + parameterName + '" to be a plain object.'); + } + Object.keys(object) + .forEach(function(key) { + const validator = schema[key]; + if (!validator) { + if (!allowUnknown) { + throw new Error('"' + key + '" is not allowed in "' + parameterName + '"'); + } + return; + } + if (!validator.isValid(object[key])) { + throw new Error(validator.message); + } + }); +} + +function validateOptions(options) { + return validate(sign_options_schema, false, options, 'options'); +} + +function validatePayload(payload) { + return validate(registered_claims_schema, true, payload, 'payload'); +} + +const options_to_payload = { + 'audience': 'aud', + 'issuer': 'iss', + 'subject': 'sub', + 'jwtid': 'jti' +}; + +const options_for_objects = [ + 'expiresIn', + 'notBefore', + 'noTimestamp', + 'audience', + 'issuer', + 'subject', + 'jwtid', +]; + +module.exports = function (payload, secretOrPrivateKey, options, callback) { + if (typeof options === 'function') { + callback = options; + options = {}; + } else { + options = options || {}; + } + + const isObjectPayload = typeof payload === 'object' && + !Buffer.isBuffer(payload); + + const header = Object.assign({ + alg: options.algorithm || 'HS256', + typ: isObjectPayload ? 
'JWT' : undefined, + kid: options.keyid + }, options.header); + + function failure(err) { + if (callback) { + return callback(err); + } + throw err; + } + + if (!secretOrPrivateKey && options.algorithm !== 'none') { + return failure(new Error('secretOrPrivateKey must have a value')); + } + + if (secretOrPrivateKey != null && !(secretOrPrivateKey instanceof KeyObject)) { + try { + secretOrPrivateKey = createPrivateKey(secretOrPrivateKey) + } catch (_) { + try { + secretOrPrivateKey = createSecretKey(typeof secretOrPrivateKey === 'string' ? Buffer.from(secretOrPrivateKey) : secretOrPrivateKey) + } catch (_) { + return failure(new Error('secretOrPrivateKey is not valid key material')); + } + } + } + + if (header.alg.startsWith('HS') && secretOrPrivateKey.type !== 'secret') { + return failure(new Error((`secretOrPrivateKey must be a symmetric key when using ${header.alg}`))) + } else if (/^(?:RS|PS|ES)/.test(header.alg)) { + if (secretOrPrivateKey.type !== 'private') { + return failure(new Error((`secretOrPrivateKey must be an asymmetric key when using ${header.alg}`))) + } + if (!options.allowInsecureKeySizes && + !header.alg.startsWith('ES') && + secretOrPrivateKey.asymmetricKeyDetails !== undefined && //KeyObject.asymmetricKeyDetails is supported in Node 15+ + secretOrPrivateKey.asymmetricKeyDetails.modulusLength < 2048) { + return failure(new Error(`secretOrPrivateKey has a minimum key size of 2048 bits for ${header.alg}`)); + } + } + + if (typeof payload === 'undefined') { + return failure(new Error('payload is required')); + } else if (isObjectPayload) { + try { + validatePayload(payload); + } + catch (error) { + return failure(error); + } + if (!options.mutatePayload) { + payload = Object.assign({},payload); + } + } else { + const invalid_options = options_for_objects.filter(function (opt) { + return typeof options[opt] !== 'undefined'; + }); + + if (invalid_options.length > 0) { + return failure(new Error('invalid ' + invalid_options.join(',') + ' option for ' + (typeof payload ) + ' payload')); + } + } + + if (typeof payload.exp !== 'undefined' && typeof options.expiresIn !== 'undefined') { + return failure(new Error('Bad "options.expiresIn" option the payload already has an "exp" property.')); + } + + if (typeof payload.nbf !== 'undefined' && typeof options.notBefore !== 'undefined') { + return failure(new Error('Bad "options.notBefore" option the payload already has an "nbf" property.')); + } + + try { + validateOptions(options); + } + catch (error) { + return failure(error); + } + + if (!options.allowInvalidAsymmetricKeyTypes) { + try { + validateAsymmetricKey(header.alg, secretOrPrivateKey); + } catch (error) { + return failure(error); + } + } + + const timestamp = payload.iat || Math.floor(Date.now() / 1000); + + if (options.noTimestamp) { + delete payload.iat; + } else if (isObjectPayload) { + payload.iat = timestamp; + } + + if (typeof options.notBefore !== 'undefined') { + try { + payload.nbf = timespan(options.notBefore, timestamp); + } + catch (err) { + return failure(err); + } + if (typeof payload.nbf === 'undefined') { + return failure(new Error('"notBefore" should be a number of seconds or string representing a timespan eg: "1d", "20h", 60')); + } + } + + if (typeof options.expiresIn !== 'undefined' && typeof payload === 'object') { + try { + payload.exp = timespan(options.expiresIn, timestamp); + } + catch (err) { + return failure(err); + } + if (typeof payload.exp === 'undefined') { + return failure(new Error('"expiresIn" should be a number of seconds or string 
representing a timespan eg: "1d", "20h", 60')); + } + } + + Object.keys(options_to_payload).forEach(function (key) { + const claim = options_to_payload[key]; + if (typeof options[key] !== 'undefined') { + if (typeof payload[claim] !== 'undefined') { + return failure(new Error('Bad "options.' + key + '" option. The payload already has an "' + claim + '" property.')); + } + payload[claim] = options[key]; + } + }); + + const encoding = options.encoding || 'utf8'; + + if (typeof callback === 'function') { + callback = callback && once(callback); + + jws.createSign({ + header: header, + privateKey: secretOrPrivateKey, + payload: payload, + encoding: encoding + }).once('error', callback) + .once('done', function (signature) { + // TODO: Remove in favor of the modulus length check before signing once node 15+ is the minimum supported version + if(!options.allowInsecureKeySizes && /^(?:RS|PS)/.test(header.alg) && signature.length < 256) { + return callback(new Error(`secretOrPrivateKey has a minimum key size of 2048 bits for ${header.alg}`)) + } + callback(null, signature); + }); + } else { + let signature = jws.sign({header: header, payload: payload, secret: secretOrPrivateKey, encoding: encoding}); + // TODO: Remove in favor of the modulus length check before signing once node 15+ is the minimum supported version + if(!options.allowInsecureKeySizes && /^(?:RS|PS)/.test(header.alg) && signature.length < 256) { + throw new Error(`secretOrPrivateKey has a minimum key size of 2048 bits for ${header.alg}`) + } + return signature + } +}; diff --git a/dist/node_modules/jsonwebtoken/verify.js b/dist/node_modules/jsonwebtoken/verify.js new file mode 100644 index 0000000..cdbfdc4 --- /dev/null +++ b/dist/node_modules/jsonwebtoken/verify.js @@ -0,0 +1,263 @@ +const JsonWebTokenError = require('./lib/JsonWebTokenError'); +const NotBeforeError = require('./lib/NotBeforeError'); +const TokenExpiredError = require('./lib/TokenExpiredError'); +const decode = require('./decode'); +const timespan = require('./lib/timespan'); +const validateAsymmetricKey = require('./lib/validateAsymmetricKey'); +const PS_SUPPORTED = require('./lib/psSupported'); +const jws = require('jws'); +const {KeyObject, createSecretKey, createPublicKey} = require("crypto"); + +const PUB_KEY_ALGS = ['RS256', 'RS384', 'RS512']; +const EC_KEY_ALGS = ['ES256', 'ES384', 'ES512']; +const RSA_KEY_ALGS = ['RS256', 'RS384', 'RS512']; +const HS_ALGS = ['HS256', 'HS384', 'HS512']; + +if (PS_SUPPORTED) { + PUB_KEY_ALGS.splice(PUB_KEY_ALGS.length, 0, 'PS256', 'PS384', 'PS512'); + RSA_KEY_ALGS.splice(RSA_KEY_ALGS.length, 0, 'PS256', 'PS384', 'PS512'); +} + +module.exports = function (jwtString, secretOrPublicKey, options, callback) { + if ((typeof options === 'function') && !callback) { + callback = options; + options = {}; + } + + if (!options) { + options = {}; + } + + //clone this object since we are going to mutate it. 
+ options = Object.assign({}, options); + + let done; + + if (callback) { + done = callback; + } else { + done = function(err, data) { + if (err) throw err; + return data; + }; + } + + if (options.clockTimestamp && typeof options.clockTimestamp !== 'number') { + return done(new JsonWebTokenError('clockTimestamp must be a number')); + } + + if (options.nonce !== undefined && (typeof options.nonce !== 'string' || options.nonce.trim() === '')) { + return done(new JsonWebTokenError('nonce must be a non-empty string')); + } + + if (options.allowInvalidAsymmetricKeyTypes !== undefined && typeof options.allowInvalidAsymmetricKeyTypes !== 'boolean') { + return done(new JsonWebTokenError('allowInvalidAsymmetricKeyTypes must be a boolean')); + } + + const clockTimestamp = options.clockTimestamp || Math.floor(Date.now() / 1000); + + if (!jwtString){ + return done(new JsonWebTokenError('jwt must be provided')); + } + + if (typeof jwtString !== 'string') { + return done(new JsonWebTokenError('jwt must be a string')); + } + + const parts = jwtString.split('.'); + + if (parts.length !== 3){ + return done(new JsonWebTokenError('jwt malformed')); + } + + let decodedToken; + + try { + decodedToken = decode(jwtString, { complete: true }); + } catch(err) { + return done(err); + } + + if (!decodedToken) { + return done(new JsonWebTokenError('invalid token')); + } + + const header = decodedToken.header; + let getSecret; + + if(typeof secretOrPublicKey === 'function') { + if(!callback) { + return done(new JsonWebTokenError('verify must be called asynchronous if secret or public key is provided as a callback')); + } + + getSecret = secretOrPublicKey; + } + else { + getSecret = function(header, secretCallback) { + return secretCallback(null, secretOrPublicKey); + }; + } + + return getSecret(header, function(err, secretOrPublicKey) { + if(err) { + return done(new JsonWebTokenError('error in secret or public key callback: ' + err.message)); + } + + const hasSignature = parts[2].trim() !== ''; + + if (!hasSignature && secretOrPublicKey){ + return done(new JsonWebTokenError('jwt signature is required')); + } + + if (hasSignature && !secretOrPublicKey) { + return done(new JsonWebTokenError('secret or public key must be provided')); + } + + if (!hasSignature && !options.algorithms) { + return done(new JsonWebTokenError('please specify "none" in "algorithms" to verify unsigned tokens')); + } + + if (secretOrPublicKey != null && !(secretOrPublicKey instanceof KeyObject)) { + try { + secretOrPublicKey = createPublicKey(secretOrPublicKey); + } catch (_) { + try { + secretOrPublicKey = createSecretKey(typeof secretOrPublicKey === 'string' ? 
Buffer.from(secretOrPublicKey) : secretOrPublicKey); + } catch (_) { + return done(new JsonWebTokenError('secretOrPublicKey is not valid key material')) + } + } + } + + if (!options.algorithms) { + if (secretOrPublicKey.type === 'secret') { + options.algorithms = HS_ALGS; + } else if (['rsa', 'rsa-pss'].includes(secretOrPublicKey.asymmetricKeyType)) { + options.algorithms = RSA_KEY_ALGS + } else if (secretOrPublicKey.asymmetricKeyType === 'ec') { + options.algorithms = EC_KEY_ALGS + } else { + options.algorithms = PUB_KEY_ALGS + } + } + + if (options.algorithms.indexOf(decodedToken.header.alg) === -1) { + return done(new JsonWebTokenError('invalid algorithm')); + } + + if (header.alg.startsWith('HS') && secretOrPublicKey.type !== 'secret') { + return done(new JsonWebTokenError((`secretOrPublicKey must be a symmetric key when using ${header.alg}`))) + } else if (/^(?:RS|PS|ES)/.test(header.alg) && secretOrPublicKey.type !== 'public') { + return done(new JsonWebTokenError((`secretOrPublicKey must be an asymmetric key when using ${header.alg}`))) + } + + if (!options.allowInvalidAsymmetricKeyTypes) { + try { + validateAsymmetricKey(header.alg, secretOrPublicKey); + } catch (e) { + return done(e); + } + } + + let valid; + + try { + valid = jws.verify(jwtString, decodedToken.header.alg, secretOrPublicKey); + } catch (e) { + return done(e); + } + + if (!valid) { + return done(new JsonWebTokenError('invalid signature')); + } + + const payload = decodedToken.payload; + + if (typeof payload.nbf !== 'undefined' && !options.ignoreNotBefore) { + if (typeof payload.nbf !== 'number') { + return done(new JsonWebTokenError('invalid nbf value')); + } + if (payload.nbf > clockTimestamp + (options.clockTolerance || 0)) { + return done(new NotBeforeError('jwt not active', new Date(payload.nbf * 1000))); + } + } + + if (typeof payload.exp !== 'undefined' && !options.ignoreExpiration) { + if (typeof payload.exp !== 'number') { + return done(new JsonWebTokenError('invalid exp value')); + } + if (clockTimestamp >= payload.exp + (options.clockTolerance || 0)) { + return done(new TokenExpiredError('jwt expired', new Date(payload.exp * 1000))); + } + } + + if (options.audience) { + const audiences = Array.isArray(options.audience) ? options.audience : [options.audience]; + const target = Array.isArray(payload.aud) ? payload.aud : [payload.aud]; + + const match = target.some(function (targetAudience) { + return audiences.some(function (audience) { + return audience instanceof RegExp ? audience.test(targetAudience) : audience === targetAudience; + }); + }); + + if (!match) { + return done(new JsonWebTokenError('jwt audience invalid. expected: ' + audiences.join(' or '))); + } + } + + if (options.issuer) { + const invalid_issuer = + (typeof options.issuer === 'string' && payload.iss !== options.issuer) || + (Array.isArray(options.issuer) && options.issuer.indexOf(payload.iss) === -1); + + if (invalid_issuer) { + return done(new JsonWebTokenError('jwt issuer invalid. expected: ' + options.issuer)); + } + } + + if (options.subject) { + if (payload.sub !== options.subject) { + return done(new JsonWebTokenError('jwt subject invalid. expected: ' + options.subject)); + } + } + + if (options.jwtid) { + if (payload.jti !== options.jwtid) { + return done(new JsonWebTokenError('jwt jwtid invalid. expected: ' + options.jwtid)); + } + } + + if (options.nonce) { + if (payload.nonce !== options.nonce) { + return done(new JsonWebTokenError('jwt nonce invalid. 
expected: ' + options.nonce)); + } + } + + if (options.maxAge) { + if (typeof payload.iat !== 'number') { + return done(new JsonWebTokenError('iat required when maxAge is specified')); + } + + const maxAgeTimestamp = timespan(options.maxAge, payload.iat); + if (typeof maxAgeTimestamp === 'undefined') { + return done(new JsonWebTokenError('"maxAge" should be a number of seconds or string representing a timespan eg: "1d", "20h", 60')); + } + if (clockTimestamp >= maxAgeTimestamp + (options.clockTolerance || 0)) { + return done(new TokenExpiredError('maxAge exceeded', new Date(maxAgeTimestamp * 1000))); + } + } + + if (options.complete === true) { + const signature = decodedToken.signature; + + return done(null, { + header: header, + payload: payload, + signature: signature + }); + } + + return done(null, payload); + }); +}; diff --git a/dist/node_modules/jwa/LICENSE b/dist/node_modules/jwa/LICENSE new file mode 100644 index 0000000..caeb849 --- /dev/null +++ b/dist/node_modules/jwa/LICENSE @@ -0,0 +1,17 @@ +Copyright (c) 2013 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/dist/node_modules/jwa/README.md b/dist/node_modules/jwa/README.md new file mode 100644 index 0000000..fb433e2 --- /dev/null +++ b/dist/node_modules/jwa/README.md @@ -0,0 +1,150 @@ +# node-jwa [![Build Status](https://travis-ci.org/brianloveswords/node-jwa.svg?branch=master)](https://travis-ci.org/brianloveswords/node-jwa) + +A +[JSON Web Algorithms](http://tools.ietf.org/id/draft-ietf-jose-json-web-algorithms-08.html) +implementation focusing (exclusively, at this point) on the algorithms necessary for +[JSON Web Signatures](http://self-issued.info/docs/draft-ietf-jose-json-web-signature.html). 
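The jsonwebtoken `sign.js` and `verify.js` sources above are consumed through the package's `sign`/`verify` exports; a minimal orientation sketch of that sign-then-verify flow (the secret, claims, and option values below are placeholders, not part of the bundled sources):

```js
const jwt = require('jsonwebtoken');

// Placeholder secret and claims, for illustration only.
const secret = 'placeholder-secret';

const token = jwt.sign({ sub: 'user-123' }, secret, {
  algorithm: 'HS256',      // checked against SUPPORTED_ALGS in sign.js
  expiresIn: '1h',         // becomes the "exp" claim via timespan()
  issuer: 'example-issuer',
});

// verify.js pins the accepted algorithms and enforces the exp/iss checks shown above.
const payload = jwt.verify(token, secret, {
  algorithms: ['HS256'],
  issuer: 'example-issuer',
});
console.log(payload.sub); // 'user-123'
```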
+ +This library supports all of the required, recommended and optional cryptographic algorithms for JWS: + +alg Parameter Value | Digital Signature or MAC Algorithm +----------------|---------------------------- +HS256 | HMAC using SHA-256 hash algorithm +HS384 | HMAC using SHA-384 hash algorithm +HS512 | HMAC using SHA-512 hash algorithm +RS256 | RSASSA using SHA-256 hash algorithm +RS384 | RSASSA using SHA-384 hash algorithm +RS512 | RSASSA using SHA-512 hash algorithm +PS256 | RSASSA-PSS using SHA-256 hash algorithm +PS384 | RSASSA-PSS using SHA-384 hash algorithm +PS512 | RSASSA-PSS using SHA-512 hash algorithm +ES256 | ECDSA using P-256 curve and SHA-256 hash algorithm +ES384 | ECDSA using P-384 curve and SHA-384 hash algorithm +ES512 | ECDSA using P-521 curve and SHA-512 hash algorithm +none | No digital signature or MAC value included + +Please note that PS* only works on Node 6.12+ (excluding 7.x). + +# Requirements + +In order to run the tests, a recent version of OpenSSL is +required. **The version that comes with OS X (OpenSSL 0.9.8r 8 Feb +2011) is not recent enough**, as it does not fully support ECDSA +keys. You'll need to use a version > 1.0.0; I tested with OpenSSL 1.0.1c 10 May 2012. + +# Testing + +To run the tests, do + +```bash +$ npm test +``` + +This will generate a bunch of keypairs to use in testing. If you want to +generate new keypairs, do `make clean` before running `npm test` again. + +## Methodology + +I spawn `openssl dgst -sign` to test OpenSSL sign → JS verify and +`openssl dgst -verify` to test JS sign → OpenSSL verify for each of the +RSA and ECDSA algorithms. + +# Usage + +## jwa(algorithm) + +Creates a new `jwa` object with `sign` and `verify` methods for the +algorithm. Valid values for algorithm can be found in the table above +(`'HS256'`, `'HS384'`, etc) and are case-insensitive. Passing an invalid +algorithm value will throw a `TypeError`. + + +## jwa#sign(input, secretOrPrivateKey) + +Sign some input with either a secret for HMAC algorithms, or a private +key for RSA and ECDSA algorithms. + +If input is not already a string or buffer, `JSON.stringify` will be +called on it to attempt to coerce it. + +For the HMAC algorithm, `secretOrPrivateKey` should be a string or a +buffer. For ECDSA and RSA, the value should be a string representing a +PEM encoded **private** key. + +Output is [base64url](http://en.wikipedia.org/wiki/Base64#URL_applications) +formatted. This is for convenience as JWS expects the signature in this +format. If your application needs the output in a different format, +[please open an issue](https://github.com/brianloveswords/node-jwa/issues). In +the meantime, you can use +[brianloveswords/base64url](https://github.com/brianloveswords/base64url) +to decode the signature. + +As of nodejs *v0.11.8*, SPKAC support was introduced. If your Node.js +version supports it, you can pass an object `{ key: '..', passphrase: '...' }` + + +## jwa#verify(input, signature, secretOrPublicKey) + +Verify a signature. Returns `true` or `false`. + +`signature` should be a base64url encoded string. + +For the HMAC algorithm, `secretOrPublicKey` should be a string or a +buffer. For ECDSA and RSA, the value should be a string representing a +PEM encoded **public** key. 
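The `{ key: '..', passphrase: '...' }` form mentioned under `jwa#sign` is not covered by the examples below; a minimal sketch, assuming a passphrase-protected RSA private key and its public key at hypothetical PEM paths:

```js
const fs = require('fs');
const jwa = require('jwa');

const rsa = jwa('RS256');
const input = 'super important stuff';

// The key object is passed straight through to Node's crypto sign API,
// which accepts { key, passphrase } for encrypted private keys.
// Hypothetical paths; any matching PEM key pair would do.
const signature = rsa.sign(input, {
  key: fs.readFileSync(__dirname + '/rsa-private-encrypted.pem'),
  passphrase: 'hypothetical passphrase'
});

rsa.verify(input, signature, fs.readFileSync(__dirname + '/rsa-public.pem')); // === true
```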
+ + +# Example + +HMAC +```js +const jwa = require('jwa'); + +const hmac = jwa('HS256'); +const input = 'super important stuff'; +const secret = 'shhhhhh'; + +const signature = hmac.sign(input, secret); +hmac.verify(input, signature, secret) // === true +hmac.verify(input, signature, 'trickery!') // === false +``` + +With keys +```js +const fs = require('fs'); +const jwa = require('jwa'); +const privateKey = fs.readFileSync(__dirname + '/ecdsa-p521-private.pem'); +const publicKey = fs.readFileSync(__dirname + '/ecdsa-p521-public.pem'); + +const ecdsa = jwa('ES512'); +const input = 'very important stuff'; + +const signature = ecdsa.sign(input, privateKey); +ecdsa.verify(input, signature, publicKey) // === true +``` +## License + +MIT + +``` +Copyright (c) 2013 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +``` diff --git a/dist/node_modules/jwa/index.js b/dist/node_modules/jwa/index.js new file mode 100644 index 0000000..e71e6d1 --- /dev/null +++ b/dist/node_modules/jwa/index.js @@ -0,0 +1,252 @@ +var bufferEqual = require('buffer-equal-constant-time'); +var Buffer = require('safe-buffer').Buffer; +var crypto = require('crypto'); +var formatEcdsa = require('ecdsa-sig-formatter'); +var util = require('util'); + +var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' 
+var MSG_INVALID_SECRET = 'secret must be a string or buffer'; +var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; +var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; + +var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; +if (supportsKeyObjects) { + MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; + MSG_INVALID_SECRET += 'or a KeyObject'; +} + +function checkIsPublicKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.type !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.asymmetricKeyType !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } +}; + +function checkIsPrivateKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (typeof key === 'object') { + return; + } + + throw typeError(MSG_INVALID_SIGNER_KEY); +}; + +function checkIsSecretKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return key; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_SECRET); + } + + if (key.type !== 'secret') { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_SECRET); + } +} + +function fromBase64(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function toBase64(base64url) { + base64url = base64url.toString(); + + var padding = 4 - base64url.length % 4; + if (padding !== 4) { + for (var i = 0; i < padding; ++i) { + base64url += '='; + } + } + + return base64url + .replace(/\-/g, '+') + .replace(/_/g, '/'); +} + +function typeError(template) { + var args = [].slice.call(arguments, 1); + var errMsg = util.format.bind(util, template).apply(null, args); + return new TypeError(errMsg); +} + +function bufferOrString(obj) { + return Buffer.isBuffer(obj) || typeof obj === 'string'; +} + +function normalizeInput(thing) { + if (!bufferOrString(thing)) + thing = JSON.stringify(thing); + return thing; +} + +function createHmacSigner(bits) { + return function sign(thing, secret) { + checkIsSecretKey(secret); + thing = normalizeInput(thing); + var hmac = crypto.createHmac('sha' + bits, secret); + var sig = (hmac.update(thing), hmac.digest('base64')) + return fromBase64(sig); + } +} + +function createHmacVerifier(bits) { + return function verify(thing, signature, secret) { + var computedSig = createHmacSigner(bits)(thing, secret); + return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + } +} + +function createKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + // Even though we are specifying "RSA" here, this works with ECDSA + // keys as well. 
+ var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); + return fromBase64(sig); + } +} + +function createKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify(publicKey, signature, 'base64'); + } +} + +function createPSSKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign({ + key: privateKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, 'base64')); + return fromBase64(sig); + } +} + +function createPSSKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify({ + key: publicKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, signature, 'base64'); + } +} + +function createECDSASigner(bits) { + var inner = createKeySigner(bits); + return function sign() { + var signature = inner.apply(null, arguments); + signature = formatEcdsa.derToJose(signature, 'ES' + bits); + return signature; + }; +} + +function createECDSAVerifer(bits) { + var inner = createKeyVerifier(bits); + return function verify(thing, signature, publicKey) { + signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); + var result = inner(thing, signature, publicKey); + return result; + }; +} + +function createNoneSigner() { + return function sign() { + return ''; + } +} + +function createNoneVerifier() { + return function verify(thing, signature) { + return signature === ''; + } +} + +module.exports = function jwa(algorithm) { + var signerFactories = { + hs: createHmacSigner, + rs: createKeySigner, + ps: createPSSKeySigner, + es: createECDSASigner, + none: createNoneSigner, + } + var verifierFactories = { + hs: createHmacVerifier, + rs: createKeyVerifier, + ps: createPSSKeyVerifier, + es: createECDSAVerifer, + none: createNoneVerifier, + } + var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/i); + if (!match) + throw typeError(MSG_INVALID_ALGORITHM, algorithm); + var algo = (match[1] || match[3]).toLowerCase(); + var bits = match[2]; + + return { + sign: signerFactories[algo](bits), + verify: verifierFactories[algo](bits), + } +}; diff --git a/dist/node_modules/jwa/package.json b/dist/node_modules/jwa/package.json new file mode 100644 index 0000000..0777d53 --- /dev/null +++ b/dist/node_modules/jwa/package.json @@ -0,0 +1,37 @@ +{ + "name": "jwa", + "version": "1.4.1", + "description": "JWA implementation (supports all JWS algorithms)", + "main": "index.js", + "directories": { + "test": "test" + }, + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + }, + "devDependencies": { + "base64url": "^2.0.0", + "jwk-to-pem": "^2.0.1", + "semver": "4.3.6", + "tap": "6.2.0" + }, + "scripts": { + "test": "make test" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianloveswords/node-jwa.git" + }, + "keywords": 
[ + "jwa", + "jws", + "jwt", + "rsa", + "ecdsa", + "hmac" + ], + "author": "Brian J. Brennan ", + "license": "MIT" +} diff --git a/dist/node_modules/jws/CHANGELOG.md b/dist/node_modules/jws/CHANGELOG.md new file mode 100644 index 0000000..af8fc28 --- /dev/null +++ b/dist/node_modules/jws/CHANGELOG.md @@ -0,0 +1,34 @@ +# Change Log +All notable changes to this project will be documented in this file. + +## [3.0.0] +### Changed +- **BREAKING**: `jwt.verify` now requires an `algorithm` parameter, and + `jws.createVerify` requires an `algorithm` option. The `"alg"` field + signature headers is ignored. This mitigates a critical security flaw + in the library which would allow an attacker to generate signatures with + arbitrary contents that would be accepted by `jwt.verify`. See + https://auth0.com/blog/2015/03/31/critical-vulnerabilities-in-json-web-token-libraries/ + for details. + +## [2.0.0] - 2015-01-30 +### Changed +- **BREAKING**: Default payload encoding changed from `binary` to + `utf8`. `utf8` is a is a more sensible default than `binary` because + many payloads, as far as I can tell, will contain user-facing + strings that could be in any language. ([6b6de48]) + +- Code reorganization, thanks [@fearphage]! ([7880050]) + +### Added +- Option in all relevant methods for `encoding`. For those few users + that might be depending on a `binary` encoding of the messages, this + is for them. ([6b6de48]) + +[unreleased]: https://github.com/brianloveswords/node-jws/compare/v2.0.0...HEAD +[2.0.0]: https://github.com/brianloveswords/node-jws/compare/v1.0.1...v2.0.0 + +[7880050]: https://github.com/brianloveswords/node-jws/commit/7880050 +[6b6de48]: https://github.com/brianloveswords/node-jws/commit/6b6de48 + +[@fearphage]: https://github.com/fearphage diff --git a/dist/node_modules/jws/LICENSE b/dist/node_modules/jws/LICENSE new file mode 100644 index 0000000..caeb849 --- /dev/null +++ b/dist/node_modules/jws/LICENSE @@ -0,0 +1,17 @@ +Copyright (c) 2013 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/dist/node_modules/jws/index.js b/dist/node_modules/jws/index.js new file mode 100644 index 0000000..8c8da93 --- /dev/null +++ b/dist/node_modules/jws/index.js @@ -0,0 +1,22 @@ +/*global exports*/ +var SignStream = require('./lib/sign-stream'); +var VerifyStream = require('./lib/verify-stream'); + +var ALGORITHMS = [ + 'HS256', 'HS384', 'HS512', + 'RS256', 'RS384', 'RS512', + 'PS256', 'PS384', 'PS512', + 'ES256', 'ES384', 'ES512' +]; + +exports.ALGORITHMS = ALGORITHMS; +exports.sign = SignStream.sign; +exports.verify = VerifyStream.verify; +exports.decode = VerifyStream.decode; +exports.isValid = VerifyStream.isValid; +exports.createSign = function createSign(opts) { + return new SignStream(opts); +}; +exports.createVerify = function createVerify(opts) { + return new VerifyStream(opts); +}; diff --git a/dist/node_modules/jws/lib/data-stream.js b/dist/node_modules/jws/lib/data-stream.js new file mode 100644 index 0000000..3535d31 --- /dev/null +++ b/dist/node_modules/jws/lib/data-stream.js @@ -0,0 +1,55 @@ +/*global module, process*/ +var Buffer = require('safe-buffer').Buffer; +var Stream = require('stream'); +var util = require('util'); + +function DataStream(data) { + this.buffer = null; + this.writable = true; + this.readable = true; + + // No input + if (!data) { + this.buffer = Buffer.alloc(0); + return this; + } + + // Stream + if (typeof data.pipe === 'function') { + this.buffer = Buffer.alloc(0); + data.pipe(this); + return this; + } + + // Buffer or String + // or Object (assumedly a passworded key) + if (data.length || typeof data === 'object') { + this.buffer = data; + this.writable = false; + process.nextTick(function () { + this.emit('end', data); + this.readable = false; + this.emit('close'); + }.bind(this)); + return this; + } + + throw new TypeError('Unexpected data type ('+ typeof data + ')'); +} +util.inherits(DataStream, Stream); + +DataStream.prototype.write = function write(data) { + this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); + this.emit('data', data); +}; + +DataStream.prototype.end = function end(data) { + if (data) + this.write(data); + this.emit('end', data); + this.emit('close'); + this.writable = false; + this.readable = false; +}; + +module.exports = DataStream; diff --git a/dist/node_modules/jws/lib/sign-stream.js b/dist/node_modules/jws/lib/sign-stream.js new file mode 100644 index 0000000..6a7ee42 --- /dev/null +++ b/dist/node_modules/jws/lib/sign-stream.js @@ -0,0 +1,78 @@ +/*global module*/ +var Buffer = require('safe-buffer').Buffer; +var DataStream = require('./data-stream'); +var jwa = require('jwa'); +var Stream = require('stream'); +var toString = require('./tostring'); +var util = require('util'); + +function base64url(string, encoding) { + return Buffer + .from(string, encoding) + .toString('base64') + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function jwsSecuredInput(header, payload, encoding) { + encoding = encoding || 'utf8'; + var encodedHeader = base64url(toString(header), 'binary'); + var encodedPayload = base64url(toString(payload), encoding); + return util.format('%s.%s', encodedHeader, encodedPayload); +} + +function jwsSign(opts) { + var header = opts.header; + var payload = opts.payload; + var secretOrKey = opts.secret || opts.privateKey; + var encoding = opts.encoding; + var algo = jwa(header.alg); + var securedInput = jwsSecuredInput(header, payload, encoding); + var signature = algo.sign(securedInput, secretOrKey); + return util.format('%s.%s', securedInput, signature); +} + 
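// Illustrative note (not part of the original file): jwsSign returns the compact
// JWS serialization base64url(header) + '.' + base64url(payload) + '.' + signature.
// For example, assuming an HS256 header and a utf8 string payload,
//   jwsSign({ header: { alg: 'HS256' }, payload: 'hi', secret: 'shh', encoding: 'utf8' })
// yields a dot-separated string beginning 'eyJhbGciOiJIUzI1NiJ9.aGk.' followed by the HMAC signature.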
+function SignStream(opts) { + var secret = opts.secret||opts.privateKey||opts.key; + var secretStream = new DataStream(secret); + this.readable = true; + this.header = opts.header; + this.encoding = opts.encoding; + this.secret = this.privateKey = this.key = secretStream; + this.payload = new DataStream(opts.payload); + this.secret.once('close', function () { + if (!this.payload.writable && this.readable) + this.sign(); + }.bind(this)); + + this.payload.once('close', function () { + if (!this.secret.writable && this.readable) + this.sign(); + }.bind(this)); +} +util.inherits(SignStream, Stream); + +SignStream.prototype.sign = function sign() { + try { + var signature = jwsSign({ + header: this.header, + payload: this.payload.buffer, + secret: this.secret.buffer, + encoding: this.encoding + }); + this.emit('done', signature); + this.emit('data', signature); + this.emit('end'); + this.readable = false; + return signature; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +SignStream.sign = jwsSign; + +module.exports = SignStream; diff --git a/dist/node_modules/jws/lib/tostring.js b/dist/node_modules/jws/lib/tostring.js new file mode 100644 index 0000000..f5a49a3 --- /dev/null +++ b/dist/node_modules/jws/lib/tostring.js @@ -0,0 +1,10 @@ +/*global module*/ +var Buffer = require('buffer').Buffer; + +module.exports = function toString(obj) { + if (typeof obj === 'string') + return obj; + if (typeof obj === 'number' || Buffer.isBuffer(obj)) + return obj.toString(); + return JSON.stringify(obj); +}; diff --git a/dist/node_modules/jws/lib/verify-stream.js b/dist/node_modules/jws/lib/verify-stream.js new file mode 100644 index 0000000..39f7c73 --- /dev/null +++ b/dist/node_modules/jws/lib/verify-stream.js @@ -0,0 +1,120 @@ +/*global module*/ +var Buffer = require('safe-buffer').Buffer; +var DataStream = require('./data-stream'); +var jwa = require('jwa'); +var Stream = require('stream'); +var toString = require('./tostring'); +var util = require('util'); +var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; + +function isObject(thing) { + return Object.prototype.toString.call(thing) === '[object Object]'; +} + +function safeJsonParse(thing) { + if (isObject(thing)) + return thing; + try { return JSON.parse(thing); } + catch (e) { return undefined; } +} + +function headerFromJWS(jwsSig) { + var encodedHeader = jwsSig.split('.', 1)[0]; + return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +} + +function securedInputFromJWS(jwsSig) { + return jwsSig.split('.', 2).join('.'); +} + +function signatureFromJWS(jwsSig) { + return jwsSig.split('.')[2]; +} + +function payloadFromJWS(jwsSig, encoding) { + encoding = encoding || 'utf8'; + var payload = jwsSig.split('.')[1]; + return Buffer.from(payload, 'base64').toString(encoding); +} + +function isValidJws(string) { + return JWS_REGEX.test(string) && !!headerFromJWS(string); +} + +function jwsVerify(jwsSig, algorithm, secretOrKey) { + if (!algorithm) { + var err = new Error("Missing algorithm parameter for jws.verify"); + err.code = "MISSING_ALGORITHM"; + throw err; + } + jwsSig = toString(jwsSig); + var signature = signatureFromJWS(jwsSig); + var securedInput = securedInputFromJWS(jwsSig); + var algo = jwa(algorithm); + return algo.verify(securedInput, signature, secretOrKey); +} + +function jwsDecode(jwsSig, opts) { + opts = opts || {}; + jwsSig = toString(jwsSig); + + if (!isValidJws(jwsSig)) + return null; + + var header = headerFromJWS(jwsSig); + + if 
(!header) + return null; + + var payload = payloadFromJWS(jwsSig); + if (header.typ === 'JWT' || opts.json) + payload = JSON.parse(payload, opts.encoding); + + return { + header: header, + payload: payload, + signature: signatureFromJWS(jwsSig) + }; +} + +function VerifyStream(opts) { + opts = opts || {}; + var secretOrKey = opts.secret||opts.publicKey||opts.key; + var secretStream = new DataStream(secretOrKey); + this.readable = true; + this.algorithm = opts.algorithm; + this.encoding = opts.encoding; + this.secret = this.publicKey = this.key = secretStream; + this.signature = new DataStream(opts.signature); + this.secret.once('close', function () { + if (!this.signature.writable && this.readable) + this.verify(); + }.bind(this)); + + this.signature.once('close', function () { + if (!this.secret.writable && this.readable) + this.verify(); + }.bind(this)); +} +util.inherits(VerifyStream, Stream); +VerifyStream.prototype.verify = function verify() { + try { + var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); + var obj = jwsDecode(this.signature.buffer, this.encoding); + this.emit('done', valid, obj); + this.emit('data', valid); + this.emit('end'); + this.readable = false; + return valid; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +VerifyStream.decode = jwsDecode; +VerifyStream.isValid = isValidJws; +VerifyStream.verify = jwsVerify; + +module.exports = VerifyStream; diff --git a/dist/node_modules/jws/package.json b/dist/node_modules/jws/package.json new file mode 100644 index 0000000..3fb2837 --- /dev/null +++ b/dist/node_modules/jws/package.json @@ -0,0 +1,34 @@ +{ + "name": "jws", + "version": "3.2.2", + "description": "Implementation of JSON Web Signatures", + "main": "index.js", + "directories": { + "test": "test" + }, + "scripts": { + "test": "make test" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianloveswords/node-jws.git" + }, + "keywords": [ + "jws", + "json", + "web", + "signatures" + ], + "author": "Brian J Brennan", + "license": "MIT", + "readmeFilename": "readme.md", + "gitHead": "c0f6b27bcea5a2ad2e304d91c2e842e4076a6b03", + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + }, + "devDependencies": { + "semver": "^5.1.0", + "tape": "~2.14.0" + } +} diff --git a/dist/node_modules/jws/readme.md b/dist/node_modules/jws/readme.md new file mode 100644 index 0000000..1910c9a --- /dev/null +++ b/dist/node_modules/jws/readme.md @@ -0,0 +1,255 @@ +# node-jws [![Build Status](https://secure.travis-ci.org/brianloveswords/node-jws.png)](http://travis-ci.org/brianloveswords/node-jws) + +An implementation of [JSON Web Signatures](http://self-issued.info/docs/draft-ietf-jose-json-web-signature.html). + +This was developed against `draft-ietf-jose-json-web-signature-08` and +implements the entire spec **except** X.509 Certificate Chain +signing/verifying (patches welcome). + +There are both synchronous (`jws.sign`, `jws.verify`) and streaming +(`jws.createSign`, `jws.createVerify`) APIs. + +# Install + +```bash +$ npm install jws +``` + +# Usage + +## jws.ALGORITHMS + +Array of supported algorithms. The following algorithms are currently supported. 
+ +alg Parameter Value | Digital Signature or MAC Algorithm +----------------|---------------------------- +HS256 | HMAC using SHA-256 hash algorithm +HS384 | HMAC using SHA-384 hash algorithm +HS512 | HMAC using SHA-512 hash algorithm +RS256 | RSASSA using SHA-256 hash algorithm +RS384 | RSASSA using SHA-384 hash algorithm +RS512 | RSASSA using SHA-512 hash algorithm +PS256 | RSASSA-PSS using SHA-256 hash algorithm +PS384 | RSASSA-PSS using SHA-384 hash algorithm +PS512 | RSASSA-PSS using SHA-512 hash algorithm +ES256 | ECDSA using P-256 curve and SHA-256 hash algorithm +ES384 | ECDSA using P-384 curve and SHA-384 hash algorithm +ES512 | ECDSA using P-521 curve and SHA-512 hash algorithm +none | No digital signature or MAC value included + +## jws.sign(options) + +(Synchronous) Return a JSON Web Signature for a header and a payload. + +Options: + +* `header` +* `payload` +* `secret` or `privateKey` +* `encoding` (Optional, defaults to 'utf8') + +`header` must be an object with an `alg` property. `header.alg` must be +one a value found in `jws.ALGORITHMS`. See above for a table of +supported algorithms. + +If `payload` is not a buffer or a string, it will be coerced into a string +using `JSON.stringify`. + +Example + +```js +const signature = jws.sign({ + header: { alg: 'HS256' }, + payload: 'h. jon benjamin', + secret: 'has a van', +}); +``` + +## jws.verify(signature, algorithm, secretOrKey) + +(Synchronous) Returns `true` or `false` for whether a signature matches a +secret or key. + +`signature` is a JWS Signature. `header.alg` must be a value found in `jws.ALGORITHMS`. +See above for a table of supported algorithms. `secretOrKey` is a string or +buffer containing either the secret for HMAC algorithms, or the PEM +encoded public key for RSA and ECDSA. + +Note that the `"alg"` value from the signature header is ignored. + + +## jws.decode(signature) + +(Synchronous) Returns the decoded header, decoded payload, and signature +parts of the JWS Signature. + +Returns an object with three properties, e.g. +```js +{ header: { alg: 'HS256' }, + payload: 'h. jon benjamin', + signature: 'YOWPewyGHKu4Y_0M_vtlEnNlqmFOclqp4Hy6hVHfFT4' +} +``` + +## jws.createSign(options) + +Returns a new SignStream object. + +Options: + +* `header` (required) +* `payload` +* `key` || `privateKey` || `secret` +* `encoding` (Optional, defaults to 'utf8') + +Other than `header`, all options expect a string or a buffer when the +value is known ahead of time, or a stream for convenience. +`key`/`privateKey`/`secret` may also be an object when using an encrypted +private key, see the [crypto documentation][encrypted-key-docs]. + +Example: + +```js + +// This... +jws.createSign({ + header: { alg: 'RS256' }, + privateKey: privateKeyStream, + payload: payloadStream, +}).on('done', function(signature) { + // ... +}); + +// is equivalent to this: +const signer = jws.createSign({ + header: { alg: 'RS256' }, +}); +privateKeyStream.pipe(signer.privateKey); +payloadStream.pipe(signer.payload); +signer.on('done', function(signature) { + // ... +}); +``` + +## jws.createVerify(options) + +Returns a new VerifyStream object. + +Options: + +* `signature` +* `algorithm` +* `key` || `publicKey` || `secret` +* `encoding` (Optional, defaults to 'utf8') + +All options expect a string or a buffer when the value is known ahead of +time, or a stream for convenience. + +Example: + +```js + +// This... +jws.createVerify({ + publicKey: pubKeyStream, + signature: sigStream, +}).on('done', function(verified, obj) { + // ... 
+}); + +// is equivalent to this: +const verifier = jws.createVerify(); +pubKeyStream.pipe(verifier.publicKey); +sigStream.pipe(verifier.signature); +verifier.on('done', function(verified, obj) { + // ... +}); +``` + +## Class: SignStream + +A `Readable Stream` that emits a single data event (the calculated +signature) when done. + +### Event: 'done' +`function (signature) { }` + +### signer.payload + +A `Writable Stream` that expects the JWS payload. Do *not* use if you +passed a `payload` option to the constructor. + +Example: + +```js +payloadStream.pipe(signer.payload); +``` + +### signer.secret
signer.key
signer.privateKey + +A `Writable Stream`. Expects the JWS secret for HMAC, or the privateKey +for ECDSA and RSA. Do *not* use if you passed a `secret` or `key` option +to the constructor. + +Example: + +```js +privateKeyStream.pipe(signer.privateKey); +``` + +## Class: VerifyStream + +This is a `Readable Stream` that emits a single data event, the result +of whether or not that signature was valid. + +### Event: 'done' +`function (valid, obj) { }` + +`valid` is a boolean for whether or not the signature is valid. + +### verifier.signature + +A `Writable Stream` that expects a JWS Signature. Do *not* use if you +passed a `signature` option to the constructor. + +### verifier.secret
verifier.key
verifier.publicKey + +A `Writable Stream` that expects a public key or secret. Do *not* use if you +passed a `key` or `secret` option to the constructor. + +# TODO + +* It feels like there should be some convenience options/APIs for + defining the algorithm rather than having to define a header object + with `{ alg: 'ES512' }` or whatever every time. + +* X.509 support, ugh + +# License + +MIT + +``` +Copyright (c) 2013-2015 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +``` + +[encrypted-key-docs]: https://nodejs.org/api/crypto.html#crypto_sign_sign_private_key_output_format diff --git a/dist/node_modules/lodash.includes/LICENSE b/dist/node_modules/lodash.includes/LICENSE new file mode 100644 index 0000000..e0c69d5 --- /dev/null +++ b/dist/node_modules/lodash.includes/LICENSE @@ -0,0 +1,47 @@ +Copyright jQuery Foundation and other contributors + +Based on Underscore.js, copyright Jeremy Ashkenas, +DocumentCloud and Investigative Reporters & Editors + +This software consists of voluntary contributions made by many +individuals. For exact contribution history, see the revision history +available at https://github.com/lodash/lodash + +The following license applies to all parts of this software except as +documented below: + +==== + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +==== + +Copyright and related rights for sample code are waived via CC0. Sample +code is defined as all source code displayed within the prose of the +documentation. 
+ +CC0: http://creativecommons.org/publicdomain/zero/1.0/ + +==== + +Files located in the node_modules and vendor directories are externally +maintained libraries used by this software which have their own +licenses; we recommend you read them, as their terms may differ from the +terms above. diff --git a/dist/node_modules/lodash.includes/README.md b/dist/node_modules/lodash.includes/README.md new file mode 100644 index 0000000..26e9377 --- /dev/null +++ b/dist/node_modules/lodash.includes/README.md @@ -0,0 +1,18 @@ +# lodash.includes v4.3.0 + +The [lodash](https://lodash.com/) method `_.includes` exported as a [Node.js](https://nodejs.org/) module. + +## Installation + +Using npm: +```bash +$ {sudo -H} npm i -g npm +$ npm i --save lodash.includes +``` + +In Node.js: +```js +var includes = require('lodash.includes'); +``` + +See the [documentation](https://lodash.com/docs#includes) or [package source](https://github.com/lodash/lodash/blob/4.3.0-npm-packages/lodash.includes) for more details. diff --git a/dist/node_modules/lodash.includes/index.js b/dist/node_modules/lodash.includes/index.js new file mode 100644 index 0000000..e88d533 --- /dev/null +++ b/dist/node_modules/lodash.includes/index.js @@ -0,0 +1,745 @@ +/** + * lodash (Custom Build) + * Build: `lodash modularize exports="npm" -o ./` + * Copyright jQuery Foundation and other contributors + * Released under MIT license + * Based on Underscore.js 1.8.3 + * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + */ + +/** Used as references for various `Number` constants. */ +var INFINITY = 1 / 0, + MAX_SAFE_INTEGER = 9007199254740991, + MAX_INTEGER = 1.7976931348623157e+308, + NAN = 0 / 0; + +/** `Object#toString` result references. */ +var argsTag = '[object Arguments]', + funcTag = '[object Function]', + genTag = '[object GeneratorFunction]', + stringTag = '[object String]', + symbolTag = '[object Symbol]'; + +/** Used to match leading and trailing whitespace. */ +var reTrim = /^\s+|\s+$/g; + +/** Used to detect bad signed hexadecimal string values. */ +var reIsBadHex = /^[-+]0x[0-9a-f]+$/i; + +/** Used to detect binary string values. */ +var reIsBinary = /^0b[01]+$/i; + +/** Used to detect octal string values. */ +var reIsOctal = /^0o[0-7]+$/i; + +/** Used to detect unsigned integer values. */ +var reIsUint = /^(?:0|[1-9]\d*)$/; + +/** Built-in method references without a dependency on `root`. */ +var freeParseInt = parseInt; + +/** + * A specialized version of `_.map` for arrays without support for iteratee + * shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + */ +function arrayMap(array, iteratee) { + var index = -1, + length = array ? array.length : 0, + result = Array(length); + + while (++index < length) { + result[index] = iteratee(array[index], index, array); + } + return result; +} + +/** + * The base implementation of `_.findIndex` and `_.findLastIndex` without + * support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} predicate The function invoked per iteration. + * @param {number} fromIndex The index to search from. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {number} Returns the index of the matched value, else `-1`. 
+ */ +function baseFindIndex(array, predicate, fromIndex, fromRight) { + var length = array.length, + index = fromIndex + (fromRight ? 1 : -1); + + while ((fromRight ? index-- : ++index < length)) { + if (predicate(array[index], index, array)) { + return index; + } + } + return -1; +} + +/** + * The base implementation of `_.indexOf` without `fromIndex` bounds checks. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ +function baseIndexOf(array, value, fromIndex) { + if (value !== value) { + return baseFindIndex(array, baseIsNaN, fromIndex); + } + var index = fromIndex - 1, + length = array.length; + + while (++index < length) { + if (array[index] === value) { + return index; + } + } + return -1; +} + +/** + * The base implementation of `_.isNaN` without support for number objects. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. + */ +function baseIsNaN(value) { + return value !== value; +} + +/** + * The base implementation of `_.times` without support for iteratee shorthands + * or max array length checks. + * + * @private + * @param {number} n The number of times to invoke `iteratee`. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the array of results. + */ +function baseTimes(n, iteratee) { + var index = -1, + result = Array(n); + + while (++index < n) { + result[index] = iteratee(index); + } + return result; +} + +/** + * The base implementation of `_.values` and `_.valuesIn` which creates an + * array of `object` property values corresponding to the property names + * of `props`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} props The property names to get values for. + * @returns {Object} Returns the array of property values. + */ +function baseValues(object, props) { + return arrayMap(props, function(key) { + return object[key]; + }); +} + +/** + * Creates a unary function that invokes `func` with its argument transformed. + * + * @private + * @param {Function} func The function to wrap. + * @param {Function} transform The argument transform. + * @returns {Function} Returns the new function. + */ +function overArg(func, transform) { + return function(arg) { + return func(transform(arg)); + }; +} + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ +var objectToString = objectProto.toString; + +/** Built-in value references. */ +var propertyIsEnumerable = objectProto.propertyIsEnumerable; + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeKeys = overArg(Object.keys, Object), + nativeMax = Math.max; + +/** + * Creates an array of the enumerable property names of the array-like `value`. + * + * @private + * @param {*} value The value to query. + * @param {boolean} inherited Specify returning inherited property names. + * @returns {Array} Returns the array of property names. + */ +function arrayLikeKeys(value, inherited) { + // Safari 8.1 makes `arguments.callee` enumerable in strict mode. 
+ // Safari 9 makes `arguments.length` enumerable in strict mode. + var result = (isArray(value) || isArguments(value)) + ? baseTimes(value.length, String) + : []; + + var length = result.length, + skipIndexes = !!length; + + for (var key in value) { + if ((inherited || hasOwnProperty.call(value, key)) && + !(skipIndexes && (key == 'length' || isIndex(key, length)))) { + result.push(key); + } + } + return result; +} + +/** + * The base implementation of `_.keys` which doesn't treat sparse arrays as dense. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ +function baseKeys(object) { + if (!isPrototype(object)) { + return nativeKeys(object); + } + var result = []; + for (var key in Object(object)) { + if (hasOwnProperty.call(object, key) && key != 'constructor') { + result.push(key); + } + } + return result; +} + +/** + * Checks if `value` is a valid array-like index. + * + * @private + * @param {*} value The value to check. + * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. + * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. + */ +function isIndex(value, length) { + length = length == null ? MAX_SAFE_INTEGER : length; + return !!length && + (typeof value == 'number' || reIsUint.test(value)) && + (value > -1 && value % 1 == 0 && value < length); +} + +/** + * Checks if `value` is likely a prototype object. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a prototype, else `false`. + */ +function isPrototype(value) { + var Ctor = value && value.constructor, + proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto; + + return value === proto; +} + +/** + * Checks if `value` is in `collection`. If `collection` is a string, it's + * checked for a substring of `value`, otherwise + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * is used for equality comparisons. If `fromIndex` is negative, it's used as + * the offset from the end of `collection`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object|string} collection The collection to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=0] The index to search from. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.reduce`. + * @returns {boolean} Returns `true` if `value` is found, else `false`. + * @example + * + * _.includes([1, 2, 3], 1); + * // => true + * + * _.includes([1, 2, 3], 1, 2); + * // => false + * + * _.includes({ 'a': 1, 'b': 2 }, 1); + * // => true + * + * _.includes('abcd', 'bc'); + * // => true + */ +function includes(collection, value, fromIndex, guard) { + collection = isArrayLike(collection) ? collection : values(collection); + fromIndex = (fromIndex && !guard) ? toInteger(fromIndex) : 0; + + var length = collection.length; + if (fromIndex < 0) { + fromIndex = nativeMax(length + fromIndex, 0); + } + return isString(collection) + ? (fromIndex <= length && collection.indexOf(value, fromIndex) > -1) + : (!!length && baseIndexOf(collection, value, fromIndex) > -1); +} + +/** + * Checks if `value` is likely an `arguments` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an `arguments` object, + * else `false`. 
+ * @example + * + * _.isArguments(function() { return arguments; }()); + * // => true + * + * _.isArguments([1, 2, 3]); + * // => false + */ +function isArguments(value) { + // Safari 8.1 makes `arguments.callee` enumerable in strict mode. + return isArrayLikeObject(value) && hasOwnProperty.call(value, 'callee') && + (!propertyIsEnumerable.call(value, 'callee') || objectToString.call(value) == argsTag); +} + +/** + * Checks if `value` is classified as an `Array` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array, else `false`. + * @example + * + * _.isArray([1, 2, 3]); + * // => true + * + * _.isArray(document.body.children); + * // => false + * + * _.isArray('abc'); + * // => false + * + * _.isArray(_.noop); + * // => false + */ +var isArray = Array.isArray; + +/** + * Checks if `value` is array-like. A value is considered array-like if it's + * not a function and has a `value.length` that's an integer greater than or + * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is array-like, else `false`. + * @example + * + * _.isArrayLike([1, 2, 3]); + * // => true + * + * _.isArrayLike(document.body.children); + * // => true + * + * _.isArrayLike('abc'); + * // => true + * + * _.isArrayLike(_.noop); + * // => false + */ +function isArrayLike(value) { + return value != null && isLength(value.length) && !isFunction(value); +} + +/** + * This method is like `_.isArrayLike` except that it also checks if `value` + * is an object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array-like object, + * else `false`. + * @example + * + * _.isArrayLikeObject([1, 2, 3]); + * // => true + * + * _.isArrayLikeObject(document.body.children); + * // => true + * + * _.isArrayLikeObject('abc'); + * // => false + * + * _.isArrayLikeObject(_.noop); + * // => false + */ +function isArrayLikeObject(value) { + return isObjectLike(value) && isArrayLike(value); +} + +/** + * Checks if `value` is classified as a `Function` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a function, else `false`. + * @example + * + * _.isFunction(_); + * // => true + * + * _.isFunction(/abc/); + * // => false + */ +function isFunction(value) { + // The use of `Object#toString` avoids issues with the `typeof` operator + // in Safari 8-9 which returns 'object' for typed array and other constructors. + var tag = isObject(value) ? objectToString.call(value) : ''; + return tag == funcTag || tag == genTag; +} + +/** + * Checks if `value` is a valid array-like length. + * + * **Note:** This method is loosely based on + * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a valid length, else `false`. 
+ * @example + * + * _.isLength(3); + * // => true + * + * _.isLength(Number.MIN_VALUE); + * // => false + * + * _.isLength(Infinity); + * // => false + * + * _.isLength('3'); + * // => false + */ +function isLength(value) { + return typeof value == 'number' && + value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER; +} + +/** + * Checks if `value` is the + * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) + * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(_.noop); + * // => true + * + * _.isObject(null); + * // => false + */ +function isObject(value) { + var type = typeof value; + return !!value && (type == 'object' || type == 'function'); +} + +/** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ +function isObjectLike(value) { + return !!value && typeof value == 'object'; +} + +/** + * Checks if `value` is classified as a `String` primitive or object. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a string, else `false`. + * @example + * + * _.isString('abc'); + * // => true + * + * _.isString(1); + * // => false + */ +function isString(value) { + return typeof value == 'string' || + (!isArray(value) && isObjectLike(value) && objectToString.call(value) == stringTag); +} + +/** + * Checks if `value` is classified as a `Symbol` primitive or object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. + * @example + * + * _.isSymbol(Symbol.iterator); + * // => true + * + * _.isSymbol('abc'); + * // => false + */ +function isSymbol(value) { + return typeof value == 'symbol' || + (isObjectLike(value) && objectToString.call(value) == symbolTag); +} + +/** + * Converts `value` to a finite number. + * + * @static + * @memberOf _ + * @since 4.12.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted number. + * @example + * + * _.toFinite(3.2); + * // => 3.2 + * + * _.toFinite(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toFinite(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toFinite('3.2'); + * // => 3.2 + */ +function toFinite(value) { + if (!value) { + return value === 0 ? value : 0; + } + value = toNumber(value); + if (value === INFINITY || value === -INFINITY) { + var sign = (value < 0 ? -1 : 1); + return sign * MAX_INTEGER; + } + return value === value ? value : 0; +} + +/** + * Converts `value` to an integer. 
+ * + * **Note:** This method is loosely based on + * [`ToInteger`](http://www.ecma-international.org/ecma-262/7.0/#sec-tointeger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. + * @example + * + * _.toInteger(3.2); + * // => 3 + * + * _.toInteger(Number.MIN_VALUE); + * // => 0 + * + * _.toInteger(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toInteger('3.2'); + * // => 3 + */ +function toInteger(value) { + var result = toFinite(value), + remainder = result % 1; + + return result === result ? (remainder ? result - remainder : result) : 0; +} + +/** + * Converts `value` to a number. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to process. + * @returns {number} Returns the number. + * @example + * + * _.toNumber(3.2); + * // => 3.2 + * + * _.toNumber(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toNumber(Infinity); + * // => Infinity + * + * _.toNumber('3.2'); + * // => 3.2 + */ +function toNumber(value) { + if (typeof value == 'number') { + return value; + } + if (isSymbol(value)) { + return NAN; + } + if (isObject(value)) { + var other = typeof value.valueOf == 'function' ? value.valueOf() : value; + value = isObject(other) ? (other + '') : other; + } + if (typeof value != 'string') { + return value === 0 ? value : +value; + } + value = value.replace(reTrim, ''); + var isBinary = reIsBinary.test(value); + return (isBinary || reIsOctal.test(value)) + ? freeParseInt(value.slice(2), isBinary ? 2 : 8) + : (reIsBadHex.test(value) ? NAN : +value); +} + +/** + * Creates an array of the own enumerable property names of `object`. + * + * **Note:** Non-object values are coerced to objects. See the + * [ES spec](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * for more details. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keys(new Foo); + * // => ['a', 'b'] (iteration order is not guaranteed) + * + * _.keys('hi'); + * // => ['0', '1'] + */ +function keys(object) { + return isArrayLike(object) ? arrayLikeKeys(object) : baseKeys(object); +} + +/** + * Creates an array of the own enumerable string keyed property values of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property values. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.values(new Foo); + * // => [1, 2] (iteration order is not guaranteed) + * + * _.values('hi'); + * // => ['h', 'i'] + */ +function values(object) { + return object ? 
baseValues(object, keys(object)) : []; +} + +module.exports = includes; diff --git a/dist/node_modules/lodash.includes/package.json b/dist/node_modules/lodash.includes/package.json new file mode 100644 index 0000000..a02e645 --- /dev/null +++ b/dist/node_modules/lodash.includes/package.json @@ -0,0 +1,17 @@ +{ + "name": "lodash.includes", + "version": "4.3.0", + "description": "The lodash method `_.includes` exported as a module.", + "homepage": "https://lodash.com/", + "icon": "https://lodash.com/icon.svg", + "license": "MIT", + "keywords": "lodash-modularized, includes", + "author": "John-David Dalton (http://allyoucanleet.com/)", + "contributors": [ + "John-David Dalton (http://allyoucanleet.com/)", + "Blaine Bublitz (https://github.com/phated)", + "Mathias Bynens (https://mathiasbynens.be/)" + ], + "repository": "lodash/lodash", + "scripts": { "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\"" } +} diff --git a/dist/node_modules/lodash.isboolean/LICENSE b/dist/node_modules/lodash.isboolean/LICENSE new file mode 100644 index 0000000..b054ca5 --- /dev/null +++ b/dist/node_modules/lodash.isboolean/LICENSE @@ -0,0 +1,22 @@ +Copyright 2012-2016 The Dojo Foundation +Based on Underscore.js, copyright 2009-2016 Jeremy Ashkenas, +DocumentCloud and Investigative Reporters & Editors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/dist/node_modules/lodash.isboolean/README.md b/dist/node_modules/lodash.isboolean/README.md new file mode 100644 index 0000000..b3c476b --- /dev/null +++ b/dist/node_modules/lodash.isboolean/README.md @@ -0,0 +1,18 @@ +# lodash.isboolean v3.0.3 + +The [lodash](https://lodash.com/) method `_.isBoolean` exported as a [Node.js](https://nodejs.org/) module. + +## Installation + +Using npm: +```bash +$ {sudo -H} npm i -g npm +$ npm i --save lodash.isboolean +``` + +In Node.js: +```js +var isBoolean = require('lodash.isboolean'); +``` + +See the [documentation](https://lodash.com/docs#isBoolean) or [package source](https://github.com/lodash/lodash/blob/3.0.3-npm-packages/lodash.isboolean) for more details. 
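The README above only shows how to load `lodash.isboolean`. As a quick, hedged illustration of the behavior documented in the implementation that follows (only `true`, `false`, and boxed `Boolean` objects are classified as booleans), a minimal usage sketch — not part of the packaged files:

```js
var isBoolean = require('lodash.isboolean');

isBoolean(false);             // => true
isBoolean(new Boolean(true)); // => true  (boxed Boolean object)
isBoolean(0);                 // => false (number, not a boolean)
isBoolean('true');            // => false (string, not a boolean)
isBoolean(null);              // => false
```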
diff --git a/dist/node_modules/lodash.isboolean/index.js b/dist/node_modules/lodash.isboolean/index.js new file mode 100644 index 0000000..23bbabd --- /dev/null +++ b/dist/node_modules/lodash.isboolean/index.js @@ -0,0 +1,70 @@ +/** + * lodash 3.0.3 (Custom Build) + * Build: `lodash modularize exports="npm" -o ./` + * Copyright 2012-2016 The Dojo Foundation + * Based on Underscore.js 1.8.3 + * Copyright 2009-2016 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** `Object#toString` result references. */ +var boolTag = '[object Boolean]'; + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** + * Used to resolve the [`toStringTag`](http://ecma-international.org/ecma-262/6.0/#sec-object.prototype.tostring) + * of values. + */ +var objectToString = objectProto.toString; + +/** + * Checks if `value` is classified as a boolean primitive or object. + * + * @static + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is correctly classified, else `false`. + * @example + * + * _.isBoolean(false); + * // => true + * + * _.isBoolean(null); + * // => false + */ +function isBoolean(value) { + return value === true || value === false || + (isObjectLike(value) && objectToString.call(value) == boolTag); +} + +/** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ +function isObjectLike(value) { + return !!value && typeof value == 'object'; +} + +module.exports = isBoolean; diff --git a/dist/node_modules/lodash.isboolean/package.json b/dist/node_modules/lodash.isboolean/package.json new file mode 100644 index 0000000..01d6e8b --- /dev/null +++ b/dist/node_modules/lodash.isboolean/package.json @@ -0,0 +1,17 @@ +{ + "name": "lodash.isboolean", + "version": "3.0.3", + "description": "The lodash method `_.isBoolean` exported as a module.", + "homepage": "https://lodash.com/", + "icon": "https://lodash.com/icon.svg", + "license": "MIT", + "keywords": "lodash-modularized, isboolean", + "author": "John-David Dalton (http://allyoucanleet.com/)", + "contributors": [ + "John-David Dalton (http://allyoucanleet.com/)", + "Blaine Bublitz (https://github.com/phated)", + "Mathias Bynens (https://mathiasbynens.be/)" + ], + "repository": "lodash/lodash", + "scripts": { "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\"" } +} diff --git a/dist/node_modules/lodash.isinteger/LICENSE b/dist/node_modules/lodash.isinteger/LICENSE new file mode 100644 index 0000000..e0c69d5 --- /dev/null +++ b/dist/node_modules/lodash.isinteger/LICENSE @@ -0,0 +1,47 @@ +Copyright jQuery Foundation and other contributors + +Based on Underscore.js, copyright Jeremy Ashkenas, +DocumentCloud and Investigative Reporters & Editors + +This software consists of voluntary contributions made by many +individuals. 
For exact contribution history, see the revision history +available at https://github.com/lodash/lodash + +The following license applies to all parts of this software except as +documented below: + +==== + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +==== + +Copyright and related rights for sample code are waived via CC0. Sample +code is defined as all source code displayed within the prose of the +documentation. + +CC0: http://creativecommons.org/publicdomain/zero/1.0/ + +==== + +Files located in the node_modules and vendor directories are externally +maintained libraries used by this software which have their own +licenses; we recommend you read them, as their terms may differ from the +terms above. diff --git a/dist/node_modules/lodash.isinteger/README.md b/dist/node_modules/lodash.isinteger/README.md new file mode 100644 index 0000000..3a78567 --- /dev/null +++ b/dist/node_modules/lodash.isinteger/README.md @@ -0,0 +1,18 @@ +# lodash.isinteger v4.0.4 + +The [lodash](https://lodash.com/) method `_.isInteger` exported as a [Node.js](https://nodejs.org/) module. + +## Installation + +Using npm: +```bash +$ {sudo -H} npm i -g npm +$ npm i --save lodash.isinteger +``` + +In Node.js: +```js +var isInteger = require('lodash.isinteger'); +``` + +See the [documentation](https://lodash.com/docs#isInteger) or [package source](https://github.com/lodash/lodash/blob/4.0.4-npm-packages/lodash.isinteger) for more details. diff --git a/dist/node_modules/lodash.isinteger/index.js b/dist/node_modules/lodash.isinteger/index.js new file mode 100644 index 0000000..3bf06f0 --- /dev/null +++ b/dist/node_modules/lodash.isinteger/index.js @@ -0,0 +1,265 @@ +/** + * lodash (Custom Build) + * Build: `lodash modularize exports="npm" -o ./` + * Copyright jQuery Foundation and other contributors + * Released under MIT license + * Based on Underscore.js 1.8.3 + * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + */ + +/** Used as references for various `Number` constants. */ +var INFINITY = 1 / 0, + MAX_INTEGER = 1.7976931348623157e+308, + NAN = 0 / 0; + +/** `Object#toString` result references. */ +var symbolTag = '[object Symbol]'; + +/** Used to match leading and trailing whitespace. */ +var reTrim = /^\s+|\s+$/g; + +/** Used to detect bad signed hexadecimal string values. */ +var reIsBadHex = /^[-+]0x[0-9a-f]+$/i; + +/** Used to detect binary string values. */ +var reIsBinary = /^0b[01]+$/i; + +/** Used to detect octal string values. 
*/ +var reIsOctal = /^0o[0-7]+$/i; + +/** Built-in method references without a dependency on `root`. */ +var freeParseInt = parseInt; + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ +var objectToString = objectProto.toString; + +/** + * Checks if `value` is an integer. + * + * **Note:** This method is based on + * [`Number.isInteger`](https://mdn.io/Number/isInteger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an integer, else `false`. + * @example + * + * _.isInteger(3); + * // => true + * + * _.isInteger(Number.MIN_VALUE); + * // => false + * + * _.isInteger(Infinity); + * // => false + * + * _.isInteger('3'); + * // => false + */ +function isInteger(value) { + return typeof value == 'number' && value == toInteger(value); +} + +/** + * Checks if `value` is the + * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) + * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(_.noop); + * // => true + * + * _.isObject(null); + * // => false + */ +function isObject(value) { + var type = typeof value; + return !!value && (type == 'object' || type == 'function'); +} + +/** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ +function isObjectLike(value) { + return !!value && typeof value == 'object'; +} + +/** + * Checks if `value` is classified as a `Symbol` primitive or object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. + * @example + * + * _.isSymbol(Symbol.iterator); + * // => true + * + * _.isSymbol('abc'); + * // => false + */ +function isSymbol(value) { + return typeof value == 'symbol' || + (isObjectLike(value) && objectToString.call(value) == symbolTag); +} + +/** + * Converts `value` to a finite number. + * + * @static + * @memberOf _ + * @since 4.12.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted number. + * @example + * + * _.toFinite(3.2); + * // => 3.2 + * + * _.toFinite(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toFinite(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toFinite('3.2'); + * // => 3.2 + */ +function toFinite(value) { + if (!value) { + return value === 0 ? value : 0; + } + value = toNumber(value); + if (value === INFINITY || value === -INFINITY) { + var sign = (value < 0 ? 
-1 : 1); + return sign * MAX_INTEGER; + } + return value === value ? value : 0; +} + +/** + * Converts `value` to an integer. + * + * **Note:** This method is loosely based on + * [`ToInteger`](http://www.ecma-international.org/ecma-262/7.0/#sec-tointeger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. + * @example + * + * _.toInteger(3.2); + * // => 3 + * + * _.toInteger(Number.MIN_VALUE); + * // => 0 + * + * _.toInteger(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toInteger('3.2'); + * // => 3 + */ +function toInteger(value) { + var result = toFinite(value), + remainder = result % 1; + + return result === result ? (remainder ? result - remainder : result) : 0; +} + +/** + * Converts `value` to a number. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to process. + * @returns {number} Returns the number. + * @example + * + * _.toNumber(3.2); + * // => 3.2 + * + * _.toNumber(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toNumber(Infinity); + * // => Infinity + * + * _.toNumber('3.2'); + * // => 3.2 + */ +function toNumber(value) { + if (typeof value == 'number') { + return value; + } + if (isSymbol(value)) { + return NAN; + } + if (isObject(value)) { + var other = typeof value.valueOf == 'function' ? value.valueOf() : value; + value = isObject(other) ? (other + '') : other; + } + if (typeof value != 'string') { + return value === 0 ? value : +value; + } + value = value.replace(reTrim, ''); + var isBinary = reIsBinary.test(value); + return (isBinary || reIsOctal.test(value)) + ? freeParseInt(value.slice(2), isBinary ? 2 : 8) + : (reIsBadHex.test(value) ? NAN : +value); +} + +module.exports = isInteger; diff --git a/dist/node_modules/lodash.isinteger/package.json b/dist/node_modules/lodash.isinteger/package.json new file mode 100644 index 0000000..92db256 --- /dev/null +++ b/dist/node_modules/lodash.isinteger/package.json @@ -0,0 +1,17 @@ +{ + "name": "lodash.isinteger", + "version": "4.0.4", + "description": "The lodash method `_.isInteger` exported as a module.", + "homepage": "https://lodash.com/", + "icon": "https://lodash.com/icon.svg", + "license": "MIT", + "keywords": "lodash-modularized, isinteger", + "author": "John-David Dalton (http://allyoucanleet.com/)", + "contributors": [ + "John-David Dalton (http://allyoucanleet.com/)", + "Blaine Bublitz (https://github.com/phated)", + "Mathias Bynens (https://mathiasbynens.be/)" + ], + "repository": "lodash/lodash", + "scripts": { "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\"" } +} diff --git a/dist/node_modules/lodash.isnumber/LICENSE b/dist/node_modules/lodash.isnumber/LICENSE new file mode 100644 index 0000000..b054ca5 --- /dev/null +++ b/dist/node_modules/lodash.isnumber/LICENSE @@ -0,0 +1,22 @@ +Copyright 2012-2016 The Dojo Foundation +Based on Underscore.js, copyright 2009-2016 Jeremy Ashkenas, +DocumentCloud and Investigative Reporters & Editors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this 
permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/dist/node_modules/lodash.isnumber/README.md b/dist/node_modules/lodash.isnumber/README.md new file mode 100644 index 0000000..a1d434d --- /dev/null +++ b/dist/node_modules/lodash.isnumber/README.md @@ -0,0 +1,18 @@ +# lodash.isnumber v3.0.3 + +The [lodash](https://lodash.com/) method `_.isNumber` exported as a [Node.js](https://nodejs.org/) module. + +## Installation + +Using npm: +```bash +$ {sudo -H} npm i -g npm +$ npm i --save lodash.isnumber +``` + +In Node.js: +```js +var isNumber = require('lodash.isnumber'); +``` + +See the [documentation](https://lodash.com/docs#isNumber) or [package source](https://github.com/lodash/lodash/blob/3.0.3-npm-packages/lodash.isnumber) for more details. diff --git a/dist/node_modules/lodash.isnumber/index.js b/dist/node_modules/lodash.isnumber/index.js new file mode 100644 index 0000000..35a8573 --- /dev/null +++ b/dist/node_modules/lodash.isnumber/index.js @@ -0,0 +1,79 @@ +/** + * lodash 3.0.3 (Custom Build) + * Build: `lodash modularize exports="npm" -o ./` + * Copyright 2012-2016 The Dojo Foundation + * Based on Underscore.js 1.8.3 + * Copyright 2009-2016 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** `Object#toString` result references. */ +var numberTag = '[object Number]'; + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** + * Used to resolve the [`toStringTag`](http://ecma-international.org/ecma-262/6.0/#sec-object.prototype.tostring) + * of values. + */ +var objectToString = objectProto.toString; + +/** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ +function isObjectLike(value) { + return !!value && typeof value == 'object'; +} + +/** + * Checks if `value` is classified as a `Number` primitive or object. + * + * **Note:** To exclude `Infinity`, `-Infinity`, and `NaN`, which are classified + * as numbers, use the `_.isFinite` method. + * + * @static + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is correctly classified, else `false`. 
+ * @example + * + * _.isNumber(3); + * // => true + * + * _.isNumber(Number.MIN_VALUE); + * // => true + * + * _.isNumber(Infinity); + * // => true + * + * _.isNumber('3'); + * // => false + */ +function isNumber(value) { + return typeof value == 'number' || + (isObjectLike(value) && objectToString.call(value) == numberTag); +} + +module.exports = isNumber; diff --git a/dist/node_modules/lodash.isnumber/package.json b/dist/node_modules/lodash.isnumber/package.json new file mode 100644 index 0000000..4c33c2a --- /dev/null +++ b/dist/node_modules/lodash.isnumber/package.json @@ -0,0 +1,17 @@ +{ + "name": "lodash.isnumber", + "version": "3.0.3", + "description": "The lodash method `_.isNumber` exported as a module.", + "homepage": "https://lodash.com/", + "icon": "https://lodash.com/icon.svg", + "license": "MIT", + "keywords": "lodash-modularized, isnumber", + "author": "John-David Dalton (http://allyoucanleet.com/)", + "contributors": [ + "John-David Dalton (http://allyoucanleet.com/)", + "Blaine Bublitz (https://github.com/phated)", + "Mathias Bynens (https://mathiasbynens.be/)" + ], + "repository": "lodash/lodash", + "scripts": { "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\"" } +} diff --git a/dist/node_modules/lodash.isplainobject/LICENSE b/dist/node_modules/lodash.isplainobject/LICENSE new file mode 100644 index 0000000..e0c69d5 --- /dev/null +++ b/dist/node_modules/lodash.isplainobject/LICENSE @@ -0,0 +1,47 @@ +Copyright jQuery Foundation and other contributors + +Based on Underscore.js, copyright Jeremy Ashkenas, +DocumentCloud and Investigative Reporters & Editors + +This software consists of voluntary contributions made by many +individuals. For exact contribution history, see the revision history +available at https://github.com/lodash/lodash + +The following license applies to all parts of this software except as +documented below: + +==== + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +==== + +Copyright and related rights for sample code are waived via CC0. Sample +code is defined as all source code displayed within the prose of the +documentation. + +CC0: http://creativecommons.org/publicdomain/zero/1.0/ + +==== + +Files located in the node_modules and vendor directories are externally +maintained libraries used by this software which have their own +licenses; we recommend you read them, as their terms may differ from the +terms above. 
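As a side note on the `lodash.isnumber` module included above: its docs point out that `Infinity` and `NaN` are still classified as numbers. A small illustrative sketch of that distinction (not part of the packaged files):

```js
var isNumber = require('lodash.isnumber');

isNumber(3);        // => true
isNumber(NaN);      // => true  (NaN is typeof 'number')
isNumber(Infinity); // => true
isNumber('3');      // => false

// To reject Infinity, -Infinity, and NaN as well, pair it with a
// finiteness check such as Number.isFinite (or lodash's _.isFinite).
Number.isFinite(NaN);      // => false
Number.isFinite(Infinity); // => false
```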
diff --git a/dist/node_modules/lodash.isplainobject/README.md b/dist/node_modules/lodash.isplainobject/README.md new file mode 100644 index 0000000..aeefd74 --- /dev/null +++ b/dist/node_modules/lodash.isplainobject/README.md @@ -0,0 +1,18 @@ +# lodash.isplainobject v4.0.6 + +The [lodash](https://lodash.com/) method `_.isPlainObject` exported as a [Node.js](https://nodejs.org/) module. + +## Installation + +Using npm: +```bash +$ {sudo -H} npm i -g npm +$ npm i --save lodash.isplainobject +``` + +In Node.js: +```js +var isPlainObject = require('lodash.isplainobject'); +``` + +See the [documentation](https://lodash.com/docs#isPlainObject) or [package source](https://github.com/lodash/lodash/blob/4.0.6-npm-packages/lodash.isplainobject) for more details. diff --git a/dist/node_modules/lodash.isplainobject/index.js b/dist/node_modules/lodash.isplainobject/index.js new file mode 100644 index 0000000..0f820ee --- /dev/null +++ b/dist/node_modules/lodash.isplainobject/index.js @@ -0,0 +1,139 @@ +/** + * lodash (Custom Build) + * Build: `lodash modularize exports="npm" -o ./` + * Copyright jQuery Foundation and other contributors + * Released under MIT license + * Based on Underscore.js 1.8.3 + * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + */ + +/** `Object#toString` result references. */ +var objectTag = '[object Object]'; + +/** + * Checks if `value` is a host object in IE < 9. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a host object, else `false`. + */ +function isHostObject(value) { + // Many host objects are `Object` objects that can coerce to strings + // despite having improperly defined `toString` methods. + var result = false; + if (value != null && typeof value.toString != 'function') { + try { + result = !!(value + ''); + } catch (e) {} + } + return result; +} + +/** + * Creates a unary function that invokes `func` with its argument transformed. + * + * @private + * @param {Function} func The function to wrap. + * @param {Function} transform The argument transform. + * @returns {Function} Returns the new function. + */ +function overArg(func, transform) { + return function(arg) { + return func(transform(arg)); + }; +} + +/** Used for built-in method references. */ +var funcProto = Function.prototype, + objectProto = Object.prototype; + +/** Used to resolve the decompiled source of functions. */ +var funcToString = funcProto.toString; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** Used to infer the `Object` constructor. */ +var objectCtorString = funcToString.call(Object); + +/** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ +var objectToString = objectProto.toString; + +/** Built-in value references. */ +var getPrototype = overArg(Object.getPrototypeOf, Object); + +/** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. 
+ * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ +function isObjectLike(value) { + return !!value && typeof value == 'object'; +} + +/** + * Checks if `value` is a plain object, that is, an object created by the + * `Object` constructor or one with a `[[Prototype]]` of `null`. + * + * @static + * @memberOf _ + * @since 0.8.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a plain object, else `false`. + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * _.isPlainObject(new Foo); + * // => false + * + * _.isPlainObject([1, 2, 3]); + * // => false + * + * _.isPlainObject({ 'x': 0, 'y': 0 }); + * // => true + * + * _.isPlainObject(Object.create(null)); + * // => true + */ +function isPlainObject(value) { + if (!isObjectLike(value) || + objectToString.call(value) != objectTag || isHostObject(value)) { + return false; + } + var proto = getPrototype(value); + if (proto === null) { + return true; + } + var Ctor = hasOwnProperty.call(proto, 'constructor') && proto.constructor; + return (typeof Ctor == 'function' && + Ctor instanceof Ctor && funcToString.call(Ctor) == objectCtorString); +} + +module.exports = isPlainObject; diff --git a/dist/node_modules/lodash.isplainobject/package.json b/dist/node_modules/lodash.isplainobject/package.json new file mode 100644 index 0000000..86f6a07 --- /dev/null +++ b/dist/node_modules/lodash.isplainobject/package.json @@ -0,0 +1,17 @@ +{ + "name": "lodash.isplainobject", + "version": "4.0.6", + "description": "The lodash method `_.isPlainObject` exported as a module.", + "homepage": "https://lodash.com/", + "icon": "https://lodash.com/icon.svg", + "license": "MIT", + "keywords": "lodash-modularized, isplainobject", + "author": "John-David Dalton (http://allyoucanleet.com/)", + "contributors": [ + "John-David Dalton (http://allyoucanleet.com/)", + "Blaine Bublitz (https://github.com/phated)", + "Mathias Bynens (https://mathiasbynens.be/)" + ], + "repository": "lodash/lodash", + "scripts": { "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\"" } +} diff --git a/dist/node_modules/lodash.isstring/LICENSE b/dist/node_modules/lodash.isstring/LICENSE new file mode 100644 index 0000000..b054ca5 --- /dev/null +++ b/dist/node_modules/lodash.isstring/LICENSE @@ -0,0 +1,22 @@ +Copyright 2012-2016 The Dojo Foundation +Based on Underscore.js, copyright 2009-2016 Jeremy Ashkenas, +DocumentCloud and Investigative Reporters & Editors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/dist/node_modules/lodash.isstring/README.md b/dist/node_modules/lodash.isstring/README.md new file mode 100644 index 0000000..f184029 --- /dev/null +++ b/dist/node_modules/lodash.isstring/README.md @@ -0,0 +1,18 @@ +# lodash.isstring v4.0.1 + +The [lodash](https://lodash.com/) method `_.isString` exported as a [Node.js](https://nodejs.org/) module. + +## Installation + +Using npm: +```bash +$ {sudo -H} npm i -g npm +$ npm i --save lodash.isstring +``` + +In Node.js: +```js +var isString = require('lodash.isstring'); +``` + +See the [documentation](https://lodash.com/docs#isString) or [package source](https://github.com/lodash/lodash/blob/4.0.1-npm-packages/lodash.isstring) for more details. diff --git a/dist/node_modules/lodash.isstring/index.js b/dist/node_modules/lodash.isstring/index.js new file mode 100644 index 0000000..408225c --- /dev/null +++ b/dist/node_modules/lodash.isstring/index.js @@ -0,0 +1,95 @@ +/** + * lodash 4.0.1 (Custom Build) + * Build: `lodash modularize exports="npm" -o ./` + * Copyright 2012-2016 The Dojo Foundation + * Based on Underscore.js 1.8.3 + * Copyright 2009-2016 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** `Object#toString` result references. */ +var stringTag = '[object String]'; + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** + * Used to resolve the [`toStringTag`](http://ecma-international.org/ecma-262/6.0/#sec-object.prototype.tostring) + * of values. + */ +var objectToString = objectProto.toString; + +/** + * Checks if `value` is classified as an `Array` object. + * + * @static + * @memberOf _ + * @type Function + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is correctly classified, else `false`. + * @example + * + * _.isArray([1, 2, 3]); + * // => true + * + * _.isArray(document.body.children); + * // => false + * + * _.isArray('abc'); + * // => false + * + * _.isArray(_.noop); + * // => false + */ +var isArray = Array.isArray; + +/** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ +function isObjectLike(value) { + return !!value && typeof value == 'object'; +} + +/** + * Checks if `value` is classified as a `String` primitive or object. + * + * @static + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is correctly classified, else `false`. 
+ * @example + * + * _.isString('abc'); + * // => true + * + * _.isString(1); + * // => false + */ +function isString(value) { + return typeof value == 'string' || + (!isArray(value) && isObjectLike(value) && objectToString.call(value) == stringTag); +} + +module.exports = isString; diff --git a/dist/node_modules/lodash.isstring/package.json b/dist/node_modules/lodash.isstring/package.json new file mode 100644 index 0000000..1331535 --- /dev/null +++ b/dist/node_modules/lodash.isstring/package.json @@ -0,0 +1,17 @@ +{ + "name": "lodash.isstring", + "version": "4.0.1", + "description": "The lodash method `_.isString` exported as a module.", + "homepage": "https://lodash.com/", + "icon": "https://lodash.com/icon.svg", + "license": "MIT", + "keywords": "lodash-modularized, isstring", + "author": "John-David Dalton (http://allyoucanleet.com/)", + "contributors": [ + "John-David Dalton (http://allyoucanleet.com/)", + "Blaine Bublitz (https://github.com/phated)", + "Mathias Bynens (https://mathiasbynens.be/)" + ], + "repository": "lodash/lodash", + "scripts": { "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\"" } +} diff --git a/dist/node_modules/lodash.once/LICENSE b/dist/node_modules/lodash.once/LICENSE new file mode 100644 index 0000000..e0c69d5 --- /dev/null +++ b/dist/node_modules/lodash.once/LICENSE @@ -0,0 +1,47 @@ +Copyright jQuery Foundation and other contributors + +Based on Underscore.js, copyright Jeremy Ashkenas, +DocumentCloud and Investigative Reporters & Editors + +This software consists of voluntary contributions made by many +individuals. For exact contribution history, see the revision history +available at https://github.com/lodash/lodash + +The following license applies to all parts of this software except as +documented below: + +==== + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +==== + +Copyright and related rights for sample code are waived via CC0. Sample +code is defined as all source code displayed within the prose of the +documentation. + +CC0: http://creativecommons.org/publicdomain/zero/1.0/ + +==== + +Files located in the node_modules and vendor directories are externally +maintained libraries used by this software which have their own +licenses; we recommend you read them, as their terms may differ from the +terms above. 
diff --git a/dist/node_modules/lodash.once/README.md b/dist/node_modules/lodash.once/README.md new file mode 100644 index 0000000..c4a2f16 --- /dev/null +++ b/dist/node_modules/lodash.once/README.md @@ -0,0 +1,18 @@ +# lodash.once v4.1.1 + +The [lodash](https://lodash.com/) method `_.once` exported as a [Node.js](https://nodejs.org/) module. + +## Installation + +Using npm: +```bash +$ {sudo -H} npm i -g npm +$ npm i --save lodash.once +``` + +In Node.js: +```js +var once = require('lodash.once'); +``` + +See the [documentation](https://lodash.com/docs#once) or [package source](https://github.com/lodash/lodash/blob/4.1.1-npm-packages/lodash.once) for more details. diff --git a/dist/node_modules/lodash.once/index.js b/dist/node_modules/lodash.once/index.js new file mode 100644 index 0000000..414ceb3 --- /dev/null +++ b/dist/node_modules/lodash.once/index.js @@ -0,0 +1,294 @@ +/** + * lodash (Custom Build) + * Build: `lodash modularize exports="npm" -o ./` + * Copyright jQuery Foundation and other contributors + * Released under MIT license + * Based on Underscore.js 1.8.3 + * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + */ + +/** Used as the `TypeError` message for "Functions" methods. */ +var FUNC_ERROR_TEXT = 'Expected a function'; + +/** Used as references for various `Number` constants. */ +var INFINITY = 1 / 0, + MAX_INTEGER = 1.7976931348623157e+308, + NAN = 0 / 0; + +/** `Object#toString` result references. */ +var symbolTag = '[object Symbol]'; + +/** Used to match leading and trailing whitespace. */ +var reTrim = /^\s+|\s+$/g; + +/** Used to detect bad signed hexadecimal string values. */ +var reIsBadHex = /^[-+]0x[0-9a-f]+$/i; + +/** Used to detect binary string values. */ +var reIsBinary = /^0b[01]+$/i; + +/** Used to detect octal string values. */ +var reIsOctal = /^0o[0-7]+$/i; + +/** Built-in method references without a dependency on `root`. */ +var freeParseInt = parseInt; + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ +var objectToString = objectProto.toString; + +/** + * Creates a function that invokes `func`, with the `this` binding and arguments + * of the created function, while it's called less than `n` times. Subsequent + * calls to the created function return the result of the last `func` invocation. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {number} n The number of calls at which `func` is no longer invoked. + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * jQuery(element).on('click', _.before(5, addContactToList)); + * // => Allows adding up to 4 contacts to the list. + */ +function before(n, func) { + var result; + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + n = toInteger(n); + return function() { + if (--n > 0) { + result = func.apply(this, arguments); + } + if (n <= 1) { + func = undefined; + } + return result; + }; +} + +/** + * Creates a function that is restricted to invoking `func` once. Repeat calls + * to the function return the value of the first invocation. The `func` is + * invoked with the `this` binding and arguments of the created function. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to restrict. 
+ * @returns {Function} Returns the new restricted function. + * @example + * + * var initialize = _.once(createApplication); + * initialize(); + * initialize(); + * // => `createApplication` is invoked once + */ +function once(func) { + return before(2, func); +} + +/** + * Checks if `value` is the + * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) + * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(_.noop); + * // => true + * + * _.isObject(null); + * // => false + */ +function isObject(value) { + var type = typeof value; + return !!value && (type == 'object' || type == 'function'); +} + +/** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ +function isObjectLike(value) { + return !!value && typeof value == 'object'; +} + +/** + * Checks if `value` is classified as a `Symbol` primitive or object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. + * @example + * + * _.isSymbol(Symbol.iterator); + * // => true + * + * _.isSymbol('abc'); + * // => false + */ +function isSymbol(value) { + return typeof value == 'symbol' || + (isObjectLike(value) && objectToString.call(value) == symbolTag); +} + +/** + * Converts `value` to a finite number. + * + * @static + * @memberOf _ + * @since 4.12.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted number. + * @example + * + * _.toFinite(3.2); + * // => 3.2 + * + * _.toFinite(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toFinite(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toFinite('3.2'); + * // => 3.2 + */ +function toFinite(value) { + if (!value) { + return value === 0 ? value : 0; + } + value = toNumber(value); + if (value === INFINITY || value === -INFINITY) { + var sign = (value < 0 ? -1 : 1); + return sign * MAX_INTEGER; + } + return value === value ? value : 0; +} + +/** + * Converts `value` to an integer. + * + * **Note:** This method is loosely based on + * [`ToInteger`](http://www.ecma-international.org/ecma-262/7.0/#sec-tointeger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. + * @example + * + * _.toInteger(3.2); + * // => 3 + * + * _.toInteger(Number.MIN_VALUE); + * // => 0 + * + * _.toInteger(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toInteger('3.2'); + * // => 3 + */ +function toInteger(value) { + var result = toFinite(value), + remainder = result % 1; + + return result === result ? (remainder ? 
result - remainder : result) : 0; +} + +/** + * Converts `value` to a number. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to process. + * @returns {number} Returns the number. + * @example + * + * _.toNumber(3.2); + * // => 3.2 + * + * _.toNumber(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toNumber(Infinity); + * // => Infinity + * + * _.toNumber('3.2'); + * // => 3.2 + */ +function toNumber(value) { + if (typeof value == 'number') { + return value; + } + if (isSymbol(value)) { + return NAN; + } + if (isObject(value)) { + var other = typeof value.valueOf == 'function' ? value.valueOf() : value; + value = isObject(other) ? (other + '') : other; + } + if (typeof value != 'string') { + return value === 0 ? value : +value; + } + value = value.replace(reTrim, ''); + var isBinary = reIsBinary.test(value); + return (isBinary || reIsOctal.test(value)) + ? freeParseInt(value.slice(2), isBinary ? 2 : 8) + : (reIsBadHex.test(value) ? NAN : +value); +} + +module.exports = once; diff --git a/dist/node_modules/lodash.once/package.json b/dist/node_modules/lodash.once/package.json new file mode 100644 index 0000000..fae782c --- /dev/null +++ b/dist/node_modules/lodash.once/package.json @@ -0,0 +1,17 @@ +{ + "name": "lodash.once", + "version": "4.1.1", + "description": "The lodash method `_.once` exported as a module.", + "homepage": "https://lodash.com/", + "icon": "https://lodash.com/icon.svg", + "license": "MIT", + "keywords": "lodash-modularized, once", + "author": "John-David Dalton (http://allyoucanleet.com/)", + "contributors": [ + "John-David Dalton (http://allyoucanleet.com/)", + "Blaine Bublitz (https://github.com/phated)", + "Mathias Bynens (https://mathiasbynens.be/)" + ], + "repository": "lodash/lodash", + "scripts": { "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\"" } +} diff --git a/dist/node_modules/lru-cache/LICENSE b/dist/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000..f785757 --- /dev/null +++ b/dist/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/dist/node_modules/lru-cache/README.md b/dist/node_modules/lru-cache/README.md new file mode 100644 index 0000000..c3d0f20 --- /dev/null +++ b/dist/node_modules/lru-cache/README.md @@ -0,0 +1,1189 @@ +# lru-cache + +A cache object that deletes the least-recently-used items. + +Specify a max number of the most recently used items that you +want to keep, and this cache will keep that many of the most +recently accessed items. + +This is not primarily a TTL cache, and does not make strong TTL +guarantees. There is no preemptive pruning of expired items by +default, but you _may_ set a TTL on the cache or on a single +`set`. 
If you do so, it will treat expired items as missing, and +delete them when fetched. If you are more interested in TTL +caching than LRU caching, check out +[@isaacs/ttlcache](http://npm.im/@isaacs/ttlcache). + +As of version 7, this is one of the most performant LRU +implementations available in JavaScript, and supports a wide +diversity of use cases. However, note that using some of the +features will necessarily impact performance, by causing the +cache to have to do more work. See the "Performance" section +below. + +## Installation + +```bash +npm install lru-cache --save +``` + +## Usage + +```js +// hybrid module, either works +import { LRUCache } from 'lru-cache' +// or: +const { LRUCache } = require('lru-cache') +// or in minified form for web browsers: +import { LRUCache } from 'http://unpkg.com/lru-cache@9/dist/mjs/index.min.mjs' + +// At least one of 'max', 'ttl', or 'maxSize' is required, to prevent +// unsafe unbounded storage. +// +// In most cases, it's best to specify a max for performance, so all +// the required memory allocation is done up-front. +// +// All the other options are optional, see the sections below for +// documentation on what each one does. Most of them can be +// overridden for specific items in get()/set() +const options = { + max: 500, + + // for use with tracking overall storage size + maxSize: 5000, + sizeCalculation: (value, key) => { + return 1 + }, + + // for use when you need to clean up something when objects + // are evicted from the cache + dispose: (value, key) => { + freeFromMemoryOrWhatever(value) + }, + + // how long to live in ms + ttl: 1000 * 60 * 5, + + // return stale items before removing from cache? + allowStale: false, + + updateAgeOnGet: false, + updateAgeOnHas: false, + + // async method to use for cache.fetch(), for + // stale-while-revalidate type of behavior + fetchMethod: async ( + key, + staleValue, + { options, signal, context } + ) => {}, +} + +const cache = new LRUCache(options) + +cache.set('key', 'value') +cache.get('key') // "value" + +// non-string keys ARE fully supported +// but note that it must be THE SAME object, not +// just a JSON-equivalent object. +var someObject = { a: 1 } +cache.set(someObject, 'a value') +// Object keys are not toString()-ed +cache.set('[object Object]', 'a different value') +assert.equal(cache.get(someObject), 'a value') +// A similar object with same keys/values won't work, +// because it's a different object identity +assert.equal(cache.get({ a: 1 }), undefined) + +cache.clear() // empty the cache +``` + +If you put more stuff in the cache, then less recently used items +will fall out. That's what an LRU cache is. + +## `class LRUCache(options)` + +Create a new `LRUCache` object. + +When using TypeScript, set the `K` and `V` types to the `key` and +`value` types, respectively. + +The `FC` ("fetch context") generic type defaults to `unknown`. +If set to a value other than `void` or `undefined`, then any +calls to `cache.fetch()` _must_ provide a `context` option +matching the `FC` type. If `FC` is set to `void` or `undefined`, +then `cache.fetch()` _must not_ provide a `context` option. See +the documentation on `async fetch()` below. + +## Options + +All options are available on the LRUCache instance, making it +safe to pass an LRUCache instance as the options argument to make +another empty cache of the same type. + +Some options are marked read-only because changing them after +instantiation is not safe. 
Changing any of the other options +will of course only have an effect on subsequent method calls. + +### `max` (read only) + +The maximum number of items that remain in the cache (assuming no +TTL pruning or explicit deletions). Note that fewer items may be +stored if size calculation is used, and `maxSize` is exceeded. +This must be a positive finite integer. + +At least one of `max`, `maxSize`, or `TTL` is required. This +must be a positive integer if set. + +**It is strongly recommended to set a `max` to prevent unbounded +growth of the cache.** See "Storage Bounds Safety" below. + +### `maxSize` (read only) + +Set to a positive integer to track the sizes of items added to +the cache, and automatically evict items in order to stay below +this size. Note that this may result in fewer than `max` items +being stored. + +Attempting to add an item to the cache whose calculated size is +greater than this amount will be a no-op. The item will not be +cached, and no other items will be evicted. + +Optional, must be a positive integer if provided. + +Sets `maxEntrySize` to the same value, unless a different value +is provided for `maxEntrySize`. + +At least one of `max`, `maxSize`, or `TTL` is required. This +must be a positive integer if set. + +Even if size tracking is enabled, **it is strongly recommended to +set a `max` to prevent unbounded growth of the cache.** See +"Storage Bounds Safety" below. + +### `maxEntrySize` + +Set to a positive integer to track the sizes of items added to +the cache, and prevent caching any item over a given size. +Attempting to add an item whose calculated size is greater than +this amount will be a no-op. The item will not be cached, and no +other items will be evicted. + +Optional, must be a positive integer if provided. Defaults to +the value of `maxSize` if provided. + +### `sizeCalculation` + +Function used to calculate the size of stored items. If you're +storing strings or buffers, then you probably want to do +something like `n => n.length`. The item is passed as the first +argument, and the key is passed as the second argument. + +This may be overridden by passing an options object to +`cache.set()`. + +Requires `maxSize` to be set. + +If the `size` (or return value of `sizeCalculation`) for a given +entry is greater than `maxEntrySize`, then the item will not be +added to the cache. + +### `fetchMethod` (read only) + +Function that is used to make background asynchronous fetches. +Called with `fetchMethod(key, staleValue, { signal, options, +context })`. May return a Promise. + +If `fetchMethod` is not provided, then `cache.fetch(key)` is +equivalent to `Promise.resolve(cache.get(key))`. + +If at any time, `signal.aborted` is set to `true`, or if the +`signal.onabort` method is called, or if it emits an `'abort'` +event which you can listen to with `addEventListener`, then that +means that the fetch should be abandoned. This may be passed +along to async functions aware of AbortController/AbortSignal +behavior. + +The `fetchMethod` should **only** return `undefined` or a Promise +resolving to `undefined` if the AbortController signaled an +`abort` event. In all other cases, it should return or resolve +to a value suitable for adding to the cache. + +The `options` object is a union of the options that may be +provided to `set()` and `get()`. 
If they are modified, then that +will result in modifying the settings to `cache.set()` when the +value is resolved, and in the case of `noDeleteOnFetchRejection` +and `allowStaleOnFetchRejection`, the handling of `fetchMethod` +failures. + +For example, a DNS cache may update the TTL based on the value +returned from a remote DNS server by changing `options.ttl` in +the `fetchMethod`. + +### `noDeleteOnFetchRejection` + +If a `fetchMethod` throws an error or returns a rejected promise, +then by default, any existing stale value will be removed from +the cache. + +If `noDeleteOnFetchRejection` is set to `true`, then this +behavior is suppressed, and the stale value remains in the cache +in the case of a rejected `fetchMethod`. + +This is important in cases where a `fetchMethod` is _only_ called +as a background update while the stale value is returned, when +`allowStale` is used. + +This is implicitly in effect when `allowStaleOnFetchRejection` is +set. + +This may be set in calls to `fetch()`, or defaulted on the +constructor, or overridden by modifying the options object in the +`fetchMethod`. + +### `allowStaleOnFetchRejection` + +Set to true to return a stale value from the cache when a +`fetchMethod` throws an error or returns a rejected Promise. + +If a `fetchMethod` fails, and there is no stale value available, +the `fetch()` will resolve to `undefined`. Ie, all `fetchMethod` +errors are suppressed. + +Implies `noDeleteOnFetchRejection`. + +This may be set in calls to `fetch()`, or defaulted on the +constructor, or overridden by modifying the options object in the +`fetchMethod`. + +### `allowStaleOnFetchAbort` + +Set to true to return a stale value from the cache when the +`AbortSignal` passed to the `fetchMethod` dispatches an `'abort'` +event, whether user-triggered, or due to internal cache behavior. + +Unless `ignoreFetchAbort` is also set, the underlying +`fetchMethod` will still be considered canceled, and any value +it returns will be ignored and not cached. + +Caveat: since fetches are aborted when a new value is explicitly +set in the cache, this can lead to fetch returning a stale value, +since that was the fallback value _at the moment the `fetch()` was +initiated_, even though the new updated value is now present in +the cache. + +For example: + +```ts +const cache = new LRUCache({ + ttl: 100, + fetchMethod: async (url, oldValue, { signal }) => { + const res = await fetch(url, { signal }) + return await res.json() + } +}) +cache.set('https://example.com/', { some: 'data' }) +// 100ms go by... +const result = cache.fetch('https://example.com/') +cache.set('https://example.com/', { other: 'thing' }) +console.log(await result) // { some: 'data' } +console.log(cache.get('https://example.com/')) // { other: 'thing' } +``` + +### `ignoreFetchAbort` + +Set to true to ignore the `abort` event emitted by the +`AbortSignal` object passed to `fetchMethod`, and still cache the +resulting resolution value, as long as it is not `undefined`. + +When used on its own, this means aborted `fetch()` calls are not +immediately resolved or rejected when they are aborted, and +instead take the full time to await. 
+ +When used with `allowStaleOnFetchAbort`, aborted `fetch()` calls +will resolve immediately to their stale cached value or +`undefined`, and will continue to process and eventually update +the cache when they resolve, as long as the resulting value is +not `undefined`, thus supporting a "return stale on timeout while +refreshing" mechanism by passing `AbortSignal.timeout(n)` as the +signal. + +For example: + +```js +const c = new LRUCache({ + ttl: 100, + ignoreFetchAbort: true, + allowStaleOnFetchAbort: true, + fetchMethod: async (key, oldValue, { signal }) => { + // note: do NOT pass the signal to fetch()! + // let's say this fetch can take a long time. + const res = await fetch(`https://slow-backend-server/${key}`) + return await res.json() + }, +}) + +// this will return the stale value after 100ms, while still +// updating in the background for next time. +const val = await c.fetch('key', { signal: AbortSignal.timeout(100) }) +``` + +**Note**: regardless of this setting, an `abort` event _is still +emitted on the `AbortSignal` object_, so may result in invalid +results when passed to other underlying APIs that use +AbortSignals. + +This may be overridden on the `fetch()` call or in the +`fetchMethod` itself. + +### `dispose` (read only) + +Function that is called on items when they are dropped from the +cache, as `this.dispose(value, key, reason)`. + +This can be handy if you want to close file descriptors or do +other cleanup tasks when items are no longer stored in the cache. + +**NOTE**: It is called _before_ the item has been fully removed +from the cache, so if you want to put it right back in, you need +to wait until the next tick. If you try to add it back in during +the `dispose()` function call, it will break things in subtle and +weird ways. + +Unlike several other options, this may _not_ be overridden by +passing an option to `set()`, for performance reasons. + +The `reason` will be one of the following strings, corresponding +to the reason for the item's deletion: + +- `evict` Item was evicted to make space for a new addition +- `set` Item was overwritten by a new value +- `delete` Item was removed by explicit `cache.delete(key)` or by + calling `cache.clear()`, which deletes everything. + +The `dispose()` method is _not_ called for canceled calls to +`fetchMethod()`. If you wish to handle evictions, overwrites, +and deletes of in-flight asynchronous fetches, you must use the +`AbortSignal` provided. + +Optional, must be a function. + +### `disposeAfter` (read only) + +The same as `dispose`, but called _after_ the entry is completely +removed and the cache is once again in a clean state. + +It is safe to add an item right back into the cache at this +point. However, note that it is _very_ easy to inadvertently +create infinite recursion in this way. + +The `disposeAfter()` method is _not_ called for canceled calls to +`fetchMethod()`. If you wish to handle evictions, overwrites, +and deletes of in-flight asynchronous fetches, you must use the +`AbortSignal` provided. + +### `noDisposeOnSet` + +Set to `true` to suppress calling the `dispose()` function if the +entry key is still accessible within the cache. + +This may be overridden by passing an options object to +`cache.set()`. + +Boolean, default `false`. Only relevant if `dispose` or +`disposeAfter` options are set. + +### `ttl` + +Max time to live for items before they are considered stale. 
+Note that stale items are NOT preemptively removed by default, +and MAY live in the cache, contributing to its LRU max, long +after they have expired. + +Also, as this cache is optimized for LRU/MRU operations, some of +the staleness/TTL checks will reduce performance. + +This is not primarily a TTL cache, and does not make strong TTL +guarantees. There is no pre-emptive pruning of expired items, +but you _may_ set a TTL on the cache, and it will treat expired +items as missing when they are fetched, and delete them. + +Optional, but must be a positive integer in ms if specified. + +This may be overridden by passing an options object to +`cache.set()`. + +At least one of `max`, `maxSize`, or `TTL` is required. This +must be a positive integer if set. + +Even if ttl tracking is enabled, **it is strongly recommended to +set a `max` to prevent unbounded growth of the cache.** See +"Storage Bounds Safety" below. + +If ttl tracking is enabled, and `max` and `maxSize` are not set, +and `ttlAutopurge` is not set, then a warning will be emitted +cautioning about the potential for unbounded memory consumption. +(The TypeScript definitions will also discourage this.) + +### `noUpdateTTL` + +Boolean flag to tell the cache to not update the TTL when setting +a new value for an existing key (ie, when updating a value rather +than inserting a new value). Note that the TTL value is _always_ +set (if provided) when adding a new entry into the cache. + +This may be passed as an option to `cache.set()`. + +Boolean, default false. + +### `ttlResolution` + +Minimum amount of time in ms in which to check for staleness. +Defaults to `1`, which means that the current time is checked at +most once per millisecond. + +Set to `0` to check the current time every time staleness is +tested. + +Note that setting this to a higher value _will_ improve +performance somewhat while using ttl tracking, albeit at the +expense of keeping stale items around a bit longer than intended. + +### `ttlAutopurge` + +Preemptively remove stale items from the cache. + +Note that this may _significantly_ degrade performance, +especially if the cache is storing a large number of items. It +is almost always best to just leave the stale items in the cache, +and let them fall out as new items are added. + +Note that this means that `allowStale` is a bit pointless, as +stale items will be deleted almost as soon as they expire. + +Use with caution! + +Boolean, default `false` + +### `allowStale` + +By default, if you set `ttl`, it'll only delete stale items from +the cache when you `get(key)`. That is, it's not preemptively +pruning items. + +If you set `allowStale:true`, it'll return the stale value as +well as deleting it. If you don't set this, then it'll return +`undefined` when you try to get a stale entry. + +Note that when a stale entry is fetched, _even if it is returned +due to `allowStale` being set_, it is removed from the cache +immediately. You can immediately put it back in the cache if you +wish, thus resetting the TTL. + +This may be overridden by passing an options object to +`cache.get()`. The `cache.has()` method will always return +`false` for stale items. + +Boolean, default false, only relevant if `ttl` is set. + +### `noDeleteOnStaleGet` + +When using time-expiring entries with `ttl`, by default stale +items will be removed from the cache when the key is accessed +with `cache.get()`. 
+ +Setting `noDeleteOnStaleGet` to `true` will cause stale items to +remain in the cache, until they are explicitly deleted with +`cache.delete(key)`, or retrieved with `noDeleteOnStaleGet` set +to `false`. + +This may be overridden by passing an options object to +`cache.get()`. + +Boolean, default false, only relevant if `ttl` is set. + +### `updateAgeOnGet` + +When using time-expiring entries with `ttl`, setting this to +`true` will make each item's age reset to 0 whenever it is +retrieved from cache with `get()`, causing it to not expire. (It +can still fall out of cache based on recency of use, of course.) + +This may be overridden by passing an options object to +`cache.get()`. + +Boolean, default false, only relevant if `ttl` is set. + +### `updateAgeOnHas` + +When using time-expiring entries with `ttl`, setting this to +`true` will make each item's age reset to 0 whenever its presence +in the cache is checked with `has()`, causing it to not expire. +(It can still fall out of cache based on recency of use, of +course.) + +This may be overridden by passing an options object to +`cache.has()`. + +Boolean, default false, only relevant if `ttl` is set. + +## API + +### `new LRUCache(options)` + +Create a new LRUCache. All options are documented above, and are +on the cache as public members. + +The `K` and `V` types define the key and value types, +respectively. The optional `FC` type defines the type of the +`context` object passed to `cache.fetch()`. + +Keys and values **must not** be `null` or `undefined`. + +### `cache.max`, `cache.maxSize`, `cache.allowStale`, `cache.noDisposeOnSet`, `cache.sizeCalculation`, `cache.dispose`, `cache.ttl`, `cache.updateAgeOnGet`, `cache.updateAgeOnHas` + +All option names are exposed as public members on the cache +object. + +These are intended for read access only. Changing them during +program operation can cause undefined behavior. + +### `cache.size` + +The total number of items held in the cache at the current +moment. + +### `cache.calculatedSize` + +The total size of items in cache when using size tracking. + +### `set(key, value, [{ size, sizeCalculation, ttl, noDisposeOnSet, start, status }])` + +Add a value to the cache. + +Optional options object may contain `ttl` and `sizeCalculation` +as described above, which default to the settings on the cache +object. + +If `start` is provided, then that will set the effective start +time for the TTL calculation. Note that this must be a previous +value of `performance.now()` if supported, or a previous value of +`Date.now()` if not. + +Options object may also include `size`, which will prevent +calling the `sizeCalculation` function and just use the specified +number if it is a positive integer, and `noDisposeOnSet` which +will prevent calling a `dispose` function in the case of +overwrites. + +If the `size` (or return value of `sizeCalculation`) for a given +entry is greater than `maxEntrySize`, then the item will not be +added to the cache. + +Will update the recency of the entry. + +Returns the cache object. + +For the usage of the `status` option, see **Status Tracking** +below. + +If the value is `undefined`, then this is an alias for +`cache.delete(key)`. `undefined` is never stored in the cache. +See **Storing Undefined Values** below. + +### `get(key, { updateAgeOnGet, allowStale, status } = {}) => value` + +Return a value from the cache. + +Will update the recency of the cache entry found. + +If the key is not found, `get()` will return `undefined`. 
+ +For the usage of the `status` option, see **Status Tracking** +below. + +### `async fetch(key, options = {}) => Promise` + +The following options are supported: + +- `updateAgeOnGet` +- `allowStale` +- `size` +- `sizeCalculation` +- `ttl` +- `noDisposeOnSet` +- `forceRefresh` +- `status` - See **Status Tracking** below. +- `signal` - AbortSignal can be used to cancel the `fetch()`. + Note that the `signal` option provided to the `fetchMethod` is + a different object, because it must also respond to internal + cache state changes, but aborting this signal will abort the + one passed to `fetchMethod` as well. +- `context` - sets the `context` option passed to the underlying + `fetchMethod`. + +If the value is in the cache and not stale, then the returned +Promise resolves to the value. + +If not in the cache, or beyond its TTL staleness, then +`fetchMethod(key, staleValue, { options, signal, context })` is +called, and the value returned will be added to the cache once +resolved. + +If called with `allowStale`, and an asynchronous fetch is +currently in progress to reload a stale value, then the former +stale value will be returned. + +If called with `forceRefresh`, then the cached item will be +re-fetched, even if it is not stale. However, if `allowStale` is +set, then the old value will still be returned. This is useful +in cases where you want to force a reload of a cached value. If +a background fetch is already in progress, then `forceRefresh` +has no effect. + +Multiple fetches for the same `key` will only call `fetchMethod` +a single time, and all will be resolved when the value is +resolved, even if different options are used. + +If `fetchMethod` is not specified, then this is effectively an +alias for `Promise.resolve(cache.get(key))`. + +When the fetch method resolves to a value, if the fetch has not +been aborted due to deletion, eviction, or being overwritten, +then it is added to the cache using the options provided. + +If the key is evicted or deleted before the `fetchMethod` +resolves, then the AbortSignal passed to the `fetchMethod` will +receive an `abort` event, and the promise returned by `fetch()` +will reject with the reason for the abort. + +If a `signal` is passed to the `fetch()` call, then aborting the +signal will abort the fetch and cause the `fetch()` promise to +reject with the reason provided. + +#### Setting `context` + +If an `FC` type is set to a type other than `unknown`, `void`, or +`undefined` in the LRUCache constructor, then all +calls to `cache.fetch()` _must_ provide a `context` option. If +set to `undefined` or `void`, then calls to fetch _must not_ +provide a `context` option. + +The `context` param allows you to provide arbitrary data that +might be relevant in the course of fetching the data. It is only +relevant for the course of a single `fetch()` operation, and +discarded afterwards. + +#### Note: `fetch()` calls are inflight-unique + +If you call `fetch()` multiple times with the same key value, +then every call after the first will resolve on the same +promise¹, +_even if they have different settings that would otherwise change +the behavior of the fetch_, such as `noDeleteOnFetchRejection` +or `ignoreFetchAbort`. + +In most cases, this is not a problem (in fact, only fetching +something once is what you probably want, if you're caching in +the first place). 
If you are changing the fetch() options +dramatically between runs, there's a good chance that you might +be trying to fit divergent semantics into a single object, and +would be better off with multiple cache instances. + +**1**: Ie, they're not the "same Promise", but they resolve at +the same time, because they're both waiting on the same +underlying fetchMethod response. + +### `peek(key, { allowStale } = {}) => value` + +Like `get()` but doesn't update recency or delete stale items. + +Returns `undefined` if the item is stale, unless `allowStale` is +set either on the cache or in the options object. + +### `has(key, { updateAgeOnHas, status } = {}) => Boolean` + +Check if a key is in the cache, without updating the recency of +use. Age is updated if `updateAgeOnHas` is set to `true` in +either the options or the constructor. + +Will return `false` if the item is stale, even though it is +technically in the cache. The difference can be determined (if +it matters) by using a `status` argument, and inspecting the +`has` field. + +For the usage of the `status` option, see **Status Tracking** +below. + +### `delete(key)` + +Deletes a key out of the cache. + +Returns `true` if the key was deleted, `false` otherwise. + +### `clear()` + +Clear the cache entirely, throwing away all values. + +### `keys()` + +Return a generator yielding the keys in the cache, in order from +most recently used to least recently used. + +### `rkeys()` + +Return a generator yielding the keys in the cache, in order from +least recently used to most recently used. + +### `values()` + +Return a generator yielding the values in the cache, in order +from most recently used to least recently used. + +### `rvalues()` + +Return a generator yielding the values in the cache, in order +from least recently used to most recently used. + +### `entries()` + +Return a generator yielding `[key, value]` pairs, in order from +most recently used to least recently used. + +### `rentries()` + +Return a generator yielding `[key, value]` pairs, in order from +least recently used to most recently used. + +### `find(fn, [getOptions])` + +Find a value for which the supplied `fn` method returns a truthy +value, similar to `Array.find()`. + +`fn` is called as `fn(value, key, cache)`. + +The optional `getOptions` are applied to the resulting `get()` of +the item found. + +### `dump()` + +Return an array of `[key, entry]` objects which can be passed to +`cache.load()` + +The `start` fields are calculated relative to a portable +`Date.now()` timestamp, even if `performance.now()` is available. + +Stale entries are always included in the `dump`, even if +`allowStale` is false. + +Note: this returns an actual array, not a generator, so it can be +more easily passed around. + +### `load(entries)` + +Reset the cache and load in the items in `entries` in the order +listed. Note that the shape of the resulting cache may be +different if the same options are not used in both caches. + +The `start` fields are assumed to be calculated relative to a +portable `Date.now()` timestamp, even if `performance.now()` is +available. + +### `purgeStale()` + +Delete any stale entries. Returns `true` if anything was +removed, `false` otherwise. + +### `getRemainingTTL(key)` + +Return the number of ms left in the item's TTL. If item is not +in cache, returns `0`. Returns `Infinity` if item is in cache +without a defined TTL. 
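+
+To see how `getRemainingTTL()`, `dump()`, and `load()` fit together,
+here is a minimal sketch; the key name and the `max`/`ttl` values are
+arbitrary examples, not defaults of this package:
+
+```js
+import { LRUCache } from 'lru-cache'
+
+const cache = new LRUCache({ max: 100, ttl: 1000 * 60 })
+cache.set('session:1', { user: 'alice' })
+
+// ms remaining before 'session:1' goes stale
+// (0 if the key is absent, Infinity if it has no TTL)
+console.log(cache.getRemainingTTL('session:1'))
+
+// dump() returns a plain array of [key, entry] objects...
+const snapshot = cache.dump()
+
+// ...which load() replays into another cache created with the same options
+const restored = new LRUCache({ max: 100, ttl: 1000 * 60 })
+restored.load(snapshot)
+console.log(restored.get('session:1')) // { user: 'alice' }
+```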
+ +### `forEach(fn, [thisp])` + +Call the `fn` function with each set of `fn(value, key, cache)` +in the LRU cache, from most recent to least recently used. + +Does not affect recency of use. + +If `thisp` is provided, function will be called in the +`this`-context of the provided object. + +### `rforEach(fn, [thisp])` + +Same as `cache.forEach(fn, thisp)`, but in order from least +recently used to most recently used. + +### `pop()` + +Evict the least recently used item, returning its value. + +Returns `undefined` if cache is empty. + +## Status Tracking + +Occasionally, it may be useful to track the internal behavior of +the cache, particularly for logging, debugging, or for behavior +within the `fetchMethod`. To do this, you can pass a `status` +object to the `get()`, `set()`, `has()`, and `fetch()` methods. + +The `status` option should be a plain JavaScript object. + +The following fields will be set appropriately: + +```ts +interface Status { + /** + * The status of a set() operation. + * + * - add: the item was not found in the cache, and was added + * - update: the item was in the cache, with the same value provided + * - replace: the item was in the cache, and replaced + * - miss: the item was not added to the cache for some reason + */ + set?: 'add' | 'update' | 'replace' | 'miss' + + /** + * the ttl stored for the item, or undefined if ttls are not used. + */ + ttl?: LRUMilliseconds + + /** + * the start time for the item, or undefined if ttls are not used. + */ + start?: LRUMilliseconds + + /** + * The timestamp used for TTL calculation + */ + now?: LRUMilliseconds + + /** + * the remaining ttl for the item, or undefined if ttls are not used. + */ + remainingTTL?: LRUMilliseconds + + /** + * The calculated size for the item, if sizes are used. + */ + size?: LRUSize + + /** + * A flag indicating that the item was not stored, due to exceeding the + * {@link maxEntrySize} + */ + maxEntrySizeExceeded?: true + + /** + * The old value, specified in the case of `set:'update'` or + * `set:'replace'` + */ + oldValue?: V + + /** + * The results of a {@link has} operation + * + * - hit: the item was found in the cache + * - stale: the item was found in the cache, but is stale + * - miss: the item was not found in the cache + */ + has?: 'hit' | 'stale' | 'miss' + + /** + * The status of a {@link fetch} operation. + * Note that this can change as the underlying fetch() moves through + * various states. + * + * - inflight: there is another fetch() for this key which is in process + * - get: there is no fetchMethod, so {@link get} was called. + * - miss: the item is not in cache, and will be fetched. + * - hit: the item is in the cache, and was resolved immediately. + * - stale: the item is in the cache, but stale. + * - refresh: the item is in the cache, and not stale, but + * {@link forceRefresh} was specified. + */ + fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh' + + /** + * The {@link fetchMethod} was called + */ + fetchDispatched?: true + + /** + * The cached value was updated after a successful call to fetchMethod + */ + fetchUpdated?: true + + /** + * The reason for a fetch() rejection. Either the error raised by the + * {@link fetchMethod}, or the reason for an AbortSignal. + */ + fetchError?: Error + + /** + * The fetch received an abort signal + */ + fetchAborted?: true + + /** + * The abort signal received was ignored, and the fetch was allowed to + * continue. 
+ */ + fetchAbortIgnored?: true + + /** + * The fetchMethod promise resolved successfully + */ + fetchResolved?: true + + /** + * The fetchMethod promise was rejected + */ + fetchRejected?: true + + /** + * The status of a {@link get} operation. + * + * - fetching: The item is currently being fetched. If a previous value is + * present and allowed, that will be returned. + * - stale: The item is in the cache, and is stale. + * - hit: the item is in the cache + * - miss: the item is not in the cache + */ + get?: 'stale' | 'hit' | 'miss' + + /** + * A fetch or get operation returned a stale value. + */ + returnedStale?: true +} +``` + +## Storage Bounds Safety + +This implementation aims to be as flexible as possible, within +the limits of safe memory consumption and optimal performance. + +At initial object creation, storage is allocated for `max` items. +If `max` is set to zero, then some performance is lost, and item +count is unbounded. Either `maxSize` or `ttl` _must_ be set if +`max` is not specified. + +If `maxSize` is set, then this creates a safe limit on the +maximum storage consumed, but without the performance benefits of +pre-allocation. When `maxSize` is set, every item _must_ provide +a size, either via the `sizeCalculation` method provided to the +constructor, or via a `size` or `sizeCalculation` option provided +to `cache.set()`. The size of every item _must_ be a positive +integer. + +If neither `max` nor `maxSize` are set, then `ttl` tracking must +be enabled. Note that, even when tracking item `ttl`, items are +_not_ preemptively deleted when they become stale, unless +`ttlAutopurge` is enabled. Instead, they are only purged the +next time the key is requested. Thus, if `ttlAutopurge`, `max`, +and `maxSize` are all not set, then the cache will potentially +grow unbounded. + +In this case, a warning is printed to standard error. Future +versions may require the use of `ttlAutopurge` if `max` and +`maxSize` are not specified. + +If you truly wish to use a cache that is bound _only_ by TTL +expiration, consider using a `Map` object, and calling +`setTimeout` to delete entries when they expire. It will perform +much better than an LRU cache. + +Here is an implementation you may use, under the same +[license](./LICENSE) as this package: + +```js +// a storage-unbounded ttl cache that is not an lru-cache +const cache = { + data: new Map(), + timers: new Map(), + set: (k, v, ttl) => { + if (cache.timers.has(k)) { + clearTimeout(cache.timers.get(k)) + } + cache.timers.set( + k, + setTimeout(() => cache.delete(k), ttl) + ) + cache.data.set(k, v) + }, + get: k => cache.data.get(k), + has: k => cache.data.has(k), + delete: k => { + if (cache.timers.has(k)) { + clearTimeout(cache.timers.get(k)) + } + cache.timers.delete(k) + return cache.data.delete(k) + }, + clear: () => { + cache.data.clear() + for (const v of cache.timers.values()) { + clearTimeout(v) + } + cache.timers.clear() + }, +} +``` + +If that isn't to your liking, check out +[@isaacs/ttlcache](http://npm.im/@isaacs/ttlcache). + +## Storing Undefined Values + +This cache never stores undefined values, as `undefined` is used +internally in a few places to indicate that a key is not in the +cache. + +You may call `cache.set(key, undefined)`, but this is just an +alias for `cache.delete(key)`. Note that this has the effect +that `cache.has(key)` will return _false_ after setting it to +undefined. 
+ +```js +cache.set(myKey, undefined) +cache.has(myKey) // false! +``` + +If you need to track `undefined` values, and still note that the +key is in the cache, an easy workaround is to use a sigil object +of your own. + +```js +import { LRUCache } from 'lru-cache' +const undefinedValue = Symbol('undefined') +const cache = new LRUCache(...) +const mySet = (key, value) => + cache.set(key, value === undefined ? undefinedValue : value) +const myGet = (key, value) => { + const v = cache.get(key) + return v === undefinedValue ? undefined : v +} +``` + +## Performance + +As of January 2022, version 7 of this library is one of the most +performant LRU cache implementations in JavaScript. + +Benchmarks can be extremely difficult to get right. In +particular, the performance of set/get/delete operations on +objects will vary _wildly_ depending on the type of key used. V8 +is highly optimized for objects with keys that are short strings, +especially integer numeric strings. Thus any benchmark which +tests _solely_ using numbers as keys will tend to find that an +object-based approach performs the best. + +Note that coercing _anything_ to strings to use as object keys is +unsafe, unless you can be 100% certain that no other type of +value will be used. For example: + +```js +const myCache = {} +const set = (k, v) => (myCache[k] = v) +const get = k => myCache[k] + +set({}, 'please hang onto this for me') +set('[object Object]', 'oopsie') +``` + +Also beware of "Just So" stories regarding performance. Garbage +collection of large (especially: deep) object graphs can be +incredibly costly, with several "tipping points" where it +increases exponentially. As a result, putting that off until +later can make it much worse, and less predictable. If a library +performs well, but only in a scenario where the object graph is +kept shallow, then that won't help you if you are using large +objects as keys. + +In general, when attempting to use a library to improve +performance (such as a cache like this one), it's best to choose +an option that will perform well in the sorts of scenarios where +you'll actually use it. + +This library is optimized for repeated gets and minimizing +eviction time, since that is the expected need of a LRU. Set +operations are somewhat slower on average than a few other +options, in part because of that optimization. It is assumed +that you'll be caching some costly operation, ideally as rarely +as possible, so optimizing set over get would be unwise. + +If performance matters to you: + +1. If it's at all possible to use small integer values as keys, + and you can guarantee that no other types of values will be + used as keys, then do that, and use a cache such as + [lru-fast](https://npmjs.com/package/lru-fast), or + [mnemonist's + LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache) + which uses an Object as its data store. + +2. Failing that, if at all possible, use short non-numeric + strings (ie, less than 256 characters) as your keys, and use + [mnemonist's + LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache). + +3. If the types of your keys will be anything else, especially + long strings, strings that look like floats, objects, or some + mix of types, or if you aren't sure, then this library will + work well for you. 
+ + If you do not need the features that this library provides + (like asynchronous fetching, a variety of TTL staleness + options, and so on), then [mnemonist's + LRUMap](https://yomguithereal.github.io/mnemonist/lru-map) is + a very good option, and just slightly faster than this module + (since it does considerably less). + +4. Do not use a `dispose` function, size tracking, or especially + ttl behavior, unless absolutely needed. These features are + convenient, and necessary in some use cases, and every attempt + has been made to make the performance impact minimal, but it + isn't nothing. + +## Breaking Changes in Version 7 + +This library changed to a different algorithm and internal data +structure in version 7, yielding significantly better +performance, albeit with some subtle changes as a result. + +If you were relying on the internals of LRUCache in version 6 or +before, it probably will not work in version 7 and above. + +## Breaking Changes in Version 8 + +- The `fetchContext` option was renamed to `context`, and may no + longer be set on the cache instance itself. +- Rewritten in TypeScript, so pretty much all the types moved + around a lot. +- The AbortController/AbortSignal polyfill was removed. For this + reason, **Node version 16.14.0 or higher is now required**. +- Internal properties were moved to actual private class + properties. +- Keys and values must not be `null` or `undefined`. +- Minified export available at `'lru-cache/min'`, for both CJS + and MJS builds. + +## Changes in Version 9 + +- Named export only, no default export. +- AbortController polyfill returned, albeit with a warning when + used. + +For more info, see the [change log](CHANGELOG.md). diff --git a/dist/node_modules/lru-cache/dist/cjs/index.d.ts b/dist/node_modules/lru-cache/dist/cjs/index.d.ts new file mode 100644 index 0000000..9a333a8 --- /dev/null +++ b/dist/node_modules/lru-cache/dist/cjs/index.d.ts @@ -0,0 +1,834 @@ +/** + * @module LRUCache + */ +declare const TYPE: unique symbol; +type Index = number & { + [TYPE]: 'LRUCache Index'; +}; +type UintArray = Uint8Array | Uint16Array | Uint32Array; +type NumberArray = UintArray | number[]; +declare class ZeroArray extends Array { + constructor(size: number); +} +type StackLike = Stack | Index[]; +declare class Stack { + #private; + heap: NumberArray; + length: number; + static create(max: number): StackLike; + constructor(max: number, HeapCls: { + new (n: number): NumberArray; + }); + push(n: Index): void; + pop(): Index; +} +/** + * Promise representing an in-progress {@link LRUCache#fetch} call + */ +export type BackgroundFetch = Promise & { + __returned: BackgroundFetch | undefined; + __abortController: AbortController; + __staleWhileFetching: V | undefined; +}; +export declare namespace LRUCache { + /** + * An integer greater than 0, reflecting the calculated size of items + */ + type Size = number; + /** + * Integer greater than 0, representing some number of milliseconds, or the + * time at which a TTL started counting from. + */ + type Milliseconds = number; + /** + * An integer greater than 0, reflecting a number of items + */ + type Count = number; + /** + * The reason why an item was removed from the cache, passed + * to the {@link Disposer} methods. + */ + type DisposeReason = 'evict' | 'set' | 'delete'; + /** + * A method called upon item removal, passed as the + * {@link OptionsBase.dispose} and/or + * {@link OptionsBase.disposeAfter} options. 
+ */ + type Disposer = (value: V, key: K, reason: DisposeReason) => void; + /** + * A function that returns the effective calculated size + * of an entry in the cache. + */ + type SizeCalculator = (value: V, key: K) => Size; + /** + * Options provided to the + * {@link OptionsBase.fetchMethod} function. + */ + interface FetcherOptions { + signal: AbortSignal; + options: FetcherFetchOptions; + /** + * Object provided in the {@link FetchOptions.context} option to + * {@link LRUCache#fetch} + */ + context: FC; + } + /** + * Status object that may be passed to {@link LRUCache#fetch}, + * {@link LRUCache#get}, {@link LRUCache#set}, and {@link LRUCache#has}. + */ + interface Status { + /** + * The status of a set() operation. + * + * - add: the item was not found in the cache, and was added + * - update: the item was in the cache, with the same value provided + * - replace: the item was in the cache, and replaced + * - miss: the item was not added to the cache for some reason + */ + set?: 'add' | 'update' | 'replace' | 'miss'; + /** + * the ttl stored for the item, or undefined if ttls are not used. + */ + ttl?: Milliseconds; + /** + * the start time for the item, or undefined if ttls are not used. + */ + start?: Milliseconds; + /** + * The timestamp used for TTL calculation + */ + now?: Milliseconds; + /** + * the remaining ttl for the item, or undefined if ttls are not used. + */ + remainingTTL?: Milliseconds; + /** + * The calculated size for the item, if sizes are used. + */ + entrySize?: Size; + /** + * The total calculated size of the cache, if sizes are used. + */ + totalCalculatedSize?: Size; + /** + * A flag indicating that the item was not stored, due to exceeding the + * {@link OptionsBase.maxEntrySize} + */ + maxEntrySizeExceeded?: true; + /** + * The old value, specified in the case of `set:'update'` or + * `set:'replace'` + */ + oldValue?: V; + /** + * The results of a {@link LRUCache#has} operation + * + * - hit: the item was found in the cache + * - stale: the item was found in the cache, but is stale + * - miss: the item was not found in the cache + */ + has?: 'hit' | 'stale' | 'miss'; + /** + * The status of a {@link LRUCache#fetch} operation. + * Note that this can change as the underlying fetch() moves through + * various states. + * + * - inflight: there is another fetch() for this key which is in process + * - get: there is no fetchMethod, so {@link LRUCache#get} was called. + * - miss: the item is not in cache, and will be fetched. + * - hit: the item is in the cache, and was resolved immediately. + * - stale: the item is in the cache, but stale. + * - refresh: the item is in the cache, and not stale, but + * {@link FetchOptions.forceRefresh} was specified. + */ + fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh'; + /** + * The {@link OptionsBase.fetchMethod} was called + */ + fetchDispatched?: true; + /** + * The cached value was updated after a successful call to + * {@link OptionsBase.fetchMethod} + */ + fetchUpdated?: true; + /** + * The reason for a fetch() rejection. Either the error raised by the + * {@link OptionsBase.fetchMethod}, or the reason for an + * AbortSignal. + */ + fetchError?: Error; + /** + * The fetch received an abort signal + */ + fetchAborted?: true; + /** + * The abort signal received was ignored, and the fetch was allowed to + * continue. 
+ */ + fetchAbortIgnored?: true; + /** + * The fetchMethod promise resolved successfully + */ + fetchResolved?: true; + /** + * The fetchMethod promise was rejected + */ + fetchRejected?: true; + /** + * The status of a {@link LRUCache#get} operation. + * + * - fetching: The item is currently being fetched. If a previous value + * is present and allowed, that will be returned. + * - stale: The item is in the cache, and is stale. + * - hit: the item is in the cache + * - miss: the item is not in the cache + */ + get?: 'stale' | 'hit' | 'miss'; + /** + * A fetch or get operation returned a stale value. + */ + returnedStale?: true; + } + /** + * options which override the options set in the LRUCache constructor + * when calling {@link LRUCache#fetch}. + * + * This is the union of {@link GetOptions} and {@link SetOptions}, plus + * {@link OptionsBase.noDeleteOnFetchRejection}, + * {@link OptionsBase.allowStaleOnFetchRejection}, + * {@link FetchOptions.forceRefresh}, and + * {@link OptionsBase.context} + * + * Any of these may be modified in the {@link OptionsBase.fetchMethod} + * function, but the {@link GetOptions} fields will of course have no + * effect, as the {@link LRUCache#get} call already happened by the time + * the fetchMethod is called. + */ + interface FetcherFetchOptions extends Pick, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet' | 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL' | 'noDeleteOnFetchRejection' | 'allowStaleOnFetchRejection' | 'ignoreFetchAbort' | 'allowStaleOnFetchAbort'> { + status?: Status; + size?: Size; + } + /** + * Options that may be passed to the {@link LRUCache#fetch} method. + */ + interface FetchOptions extends FetcherFetchOptions { + /** + * Set to true to force a re-load of the existing data, even if it + * is not yet stale. + */ + forceRefresh?: boolean; + /** + * Context provided to the {@link OptionsBase.fetchMethod} as + * the {@link FetcherOptions.context} param. + * + * If the FC type is specified as unknown (the default), + * undefined or void, then this is optional. Otherwise, it will + * be required. + */ + context?: FC; + signal?: AbortSignal; + status?: Status; + } + /** + * Options provided to {@link LRUCache#fetch} when the FC type is something + * other than `unknown`, `undefined`, or `void` + */ + interface FetchOptionsWithContext extends FetchOptions { + context: FC; + } + /** + * Options provided to {@link LRUCache#fetch} when the FC type is + * `undefined` or `void` + */ + interface FetchOptionsNoContext extends FetchOptions { + context?: undefined; + } + /** + * Options that may be passed to the {@link LRUCache#has} method. + */ + interface HasOptions extends Pick, 'updateAgeOnHas'> { + status?: Status; + } + /** + * Options that may be passed to the {@link LRUCache#get} method. + */ + interface GetOptions extends Pick, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet'> { + status?: Status; + } + /** + * Options that may be passed to the {@link LRUCache#peek} method. + */ + interface PeekOptions extends Pick, 'allowStale'> { + } + /** + * Options that may be passed to the {@link LRUCache#set} method. + */ + interface SetOptions extends Pick, 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'> { + /** + * If size tracking is enabled, then setting an explicit size + * in the {@link LRUCache#set} call will prevent calling the + * {@link OptionsBase.sizeCalculation} function. 
+ */ + size?: Size; + /** + * If TTL tracking is enabled, then setting an explicit start + * time in the {@link LRUCache#set} call will override the + * default time from `performance.now()` or `Date.now()`. + * + * Note that it must be a valid value for whichever time-tracking + * method is in use. + */ + start?: Milliseconds; + status?: Status; + } + /** + * The type signature for the {@link OptionsBase.fetchMethod} option. + */ + type Fetcher = (key: K, staleValue: V | undefined, options: FetcherOptions) => Promise | V | void | undefined; + /** + * Options which may be passed to the {@link LRUCache} constructor. + * + * Most of these may be overridden in the various options that use + * them. + * + * Despite all being technically optional, the constructor requires that + * a cache is at minimum limited by one or more of {@link OptionsBase.max}, + * {@link OptionsBase.ttl}, or {@link OptionsBase.maxSize}. + * + * If {@link OptionsBase.ttl} is used alone, then it is strongly advised + * (and in fact required by the type definitions here) that the cache + * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially + * unbounded storage. + */ + interface OptionsBase { + /** + * The maximum number of items to store in the cache before evicting + * old entries. This is read-only on the {@link LRUCache} instance, + * and may not be overridden. + * + * If set, then storage space will be pre-allocated at construction + * time, and the cache will perform significantly faster. + * + * Note that significantly fewer items may be stored, if + * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also + * set. + */ + max?: Count; + /** + * Max time in milliseconds for items to live in cache before they are + * considered stale. Note that stale items are NOT preemptively removed + * by default, and MAY live in the cache long after they have expired. + * + * Also, as this cache is optimized for LRU/MRU operations, some of + * the staleness/TTL checks will reduce performance, as they will incur + * overhead by deleting items. + * + * Must be an integer number of ms. If set to 0, this indicates "no TTL" + * + * @default 0 + */ + ttl?: Milliseconds; + /** + * Minimum amount of time in ms in which to check for staleness. + * Defaults to 1, which means that the current time is checked + * at most once per millisecond. + * + * Set to 0 to check the current time every time staleness is tested. + * (This reduces performance, and is theoretically unnecessary.) + * + * Setting this to a higher value will improve performance somewhat + * while using ttl tracking, albeit at the expense of keeping stale + * items around a bit longer than their TTLs would indicate. + * + * @default 1 + */ + ttlResolution?: Milliseconds; + /** + * Preemptively remove stale items from the cache. + * Note that this may significantly degrade performance, + * especially if the cache is storing a large number of items. + * It is almost always best to just leave the stale items in + * the cache, and let them fall out as new items are added. + * + * Note that this means that {@link OptionsBase.allowStale} is a bit + * pointless, as stale items will be deleted almost as soon as they + * expire. + * + * @default false + */ + ttlAutopurge?: boolean; + /** + * Update the age of items on {@link LRUCache#get}, renewing their TTL + * + * Has no effect if {@link OptionsBase.ttl} is not set. 
+ * + * @default false + */ + updateAgeOnGet?: boolean; + /** + * Update the age of items on {@link LRUCache#has}, renewing their TTL + * + * Has no effect if {@link OptionsBase.ttl} is not set. + * + * @default false + */ + updateAgeOnHas?: boolean; + /** + * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return + * stale data, if available. + */ + allowStale?: boolean; + /** + * Function that is called on items when they are dropped from the cache. + * This can be handy if you want to close file descriptors or do other + * cleanup tasks when items are no longer accessible. Called with `key, + * value`. It's called before actually removing the item from the + * internal cache, so it is *NOT* safe to re-add them. + * + * Use {@link OptionsBase.disposeAfter} if you wish to dispose items after + * they have been full removed, when it is safe to add them back to the + * cache. + */ + dispose?: Disposer; + /** + * The same as {@link OptionsBase.dispose}, but called *after* the entry + * is completely removed and the cache is once again in a clean state. + * It is safe to add an item right back into the cache at this point. + * However, note that it is *very* easy to inadvertently create infinite + * recursion this way. + */ + disposeAfter?: Disposer; + /** + * Set to true to suppress calling the + * {@link OptionsBase.dispose} function if the entry key is + * still accessible within the cache. + * This may be overridden by passing an options object to + * {@link LRUCache#set}. + */ + noDisposeOnSet?: boolean; + /** + * Boolean flag to tell the cache to not update the TTL when + * setting a new value for an existing key (ie, when updating a value + * rather than inserting a new value). Note that the TTL value is + * _always_ set (if provided) when adding a new entry into the cache. + * + * Has no effect if a {@link OptionsBase.ttl} is not set. + */ + noUpdateTTL?: boolean; + /** + * If you wish to track item size, you must provide a maxSize + * note that we still will only keep up to max *actual items*, + * if max is set, so size tracking may cause fewer than max items + * to be stored. At the extreme, a single item of maxSize size + * will cause everything else in the cache to be dropped when it + * is added. Use with caution! + * + * Note also that size tracking can negatively impact performance, + * though for most cases, only minimally. + */ + maxSize?: Size; + /** + * The maximum allowed size for any single item in the cache. + * + * If a larger item is passed to {@link LRUCache#set} or returned by a + * {@link OptionsBase.fetchMethod}, then it will not be stored in the + * cache. + */ + maxEntrySize?: Size; + /** + * A function that returns a number indicating the item's size. + * + * If not provided, and {@link OptionsBase.maxSize} or + * {@link OptionsBase.maxEntrySize} are set, then all + * {@link LRUCache#set} calls **must** provide an explicit + * {@link SetOptions.size} or sizeCalculation param. + */ + sizeCalculation?: SizeCalculator; + /** + * Method that provides the implementation for {@link LRUCache#fetch} + */ + fetchMethod?: Fetcher; + /** + * Set to true to suppress the deletion of stale data when a + * {@link OptionsBase.fetchMethod} returns a rejected promise. + */ + noDeleteOnFetchRejection?: boolean; + /** + * Do not delete stale items when they are retrieved with + * {@link LRUCache#get}. + * + * Note that the `get` return value will still be `undefined` + * unless {@link OptionsBase.allowStale} is true. 
+ */ + noDeleteOnStaleGet?: boolean; + /** + * Set to true to allow returning stale data when a + * {@link OptionsBase.fetchMethod} throws an error or returns a rejected + * promise. + * + * This differs from using {@link OptionsBase.allowStale} in that stale + * data will ONLY be returned in the case that the + * {@link LRUCache#fetch} fails, not any other times. + */ + allowStaleOnFetchRejection?: boolean; + /** + * Set to true to return a stale value from the cache when the + * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches an `'abort'` + * event, whether user-triggered, or due to internal cache behavior. + * + * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying + * {@link OptionsBase.fetchMethod} will still be considered canceled, and + * any value it returns will be ignored and not cached. + * + * Caveat: since fetches are aborted when a new value is explicitly + * set in the cache, this can lead to fetch returning a stale value, + * since that was the fallback value _at the moment the `fetch()` was + * initiated_, even though the new updated value is now present in + * the cache. + * + * For example: + * + * ```ts + * const cache = new LRUCache({ + * ttl: 100, + * fetchMethod: async (url, oldValue, { signal }) => { + * const res = await fetch(url, { signal }) + * return await res.json() + * } + * }) + * cache.set('https://example.com/', { some: 'data' }) + * // 100ms go by... + * const result = cache.fetch('https://example.com/') + * cache.set('https://example.com/', { other: 'thing' }) + * console.log(await result) // { some: 'data' } + * console.log(cache.get('https://example.com/')) // { other: 'thing' } + * ``` + */ + allowStaleOnFetchAbort?: boolean; + /** + * Set to true to ignore the `abort` event emitted by the `AbortSignal` + * object passed to {@link OptionsBase.fetchMethod}, and still cache the + * resulting resolution value, as long as it is not `undefined`. + * + * When used on its own, this means aborted {@link LRUCache#fetch} calls are not + * immediately resolved or rejected when they are aborted, and instead + * take the full time to await. + * + * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted + * {@link LRUCache#fetch} calls will resolve immediately to their stale + * cached value or `undefined`, and will continue to process and eventually + * update the cache when they resolve, as long as the resulting value is + * not `undefined`, thus supporting a "return stale on timeout while + * refreshing" mechanism by passing `AbortSignal.timeout(n)` as the signal. + * + * **Note**: regardless of this setting, an `abort` event _is still + * emitted on the `AbortSignal` object_, so may result in invalid results + * when passed to other underlying APIs that use AbortSignals. + * + * This may be overridden in the {@link OptionsBase.fetchMethod} or the + * call to {@link LRUCache#fetch}. 
+ */ + ignoreFetchAbort?: boolean; + } + interface OptionsMaxLimit extends OptionsBase { + max: Count; + } + interface OptionsTTLLimit extends OptionsBase { + ttl: Milliseconds; + ttlAutopurge: boolean; + } + interface OptionsSizeLimit extends OptionsBase { + maxSize: Size; + } + /** + * The valid safe options for the {@link LRUCache} constructor + */ + type Options = OptionsMaxLimit | OptionsSizeLimit | OptionsTTLLimit; + /** + * Entry objects used by {@link LRUCache#load} and {@link LRUCache#dump} + */ + interface Entry { + value: V; + ttl?: Milliseconds; + size?: Size; + start?: Milliseconds; + } +} +/** + * Default export, the thing you're using this module to get. + * + * All properties from the options object (with the exception of + * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as + * normal public members. (`max` and `maxBase` are read-only getters.) + * Changing any of these will alter the defaults for subsequent method calls, + * but is otherwise safe. + */ +export declare class LRUCache { + #private; + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl: LRUCache.Milliseconds; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution: LRUCache.Milliseconds; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge: boolean; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet: boolean; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas: boolean; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale: boolean; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet: boolean; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL: boolean; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize: LRUCache.Size; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation?: LRUCache.SizeCalculator; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection: boolean; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet: boolean; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort: boolean; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection: boolean; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort: boolean; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! 
+ * + * @internal + */ + static unsafeExposeInternals(c: LRUCache): { + starts: ZeroArray | undefined; + ttls: ZeroArray | undefined; + sizes: ZeroArray | undefined; + keyMap: Map; + keyList: (K | undefined)[]; + valList: (V | BackgroundFetch | undefined)[]; + next: NumberArray; + prev: NumberArray; + readonly head: Index; + readonly tail: Index; + free: StackLike; + isBackgroundFetch: (p: any) => boolean; + backgroundFetch: (k: K, index: number | undefined, options: LRUCache.FetchOptions, context: any) => BackgroundFetch; + moveToTail: (index: number) => void; + indexes: (options?: { + allowStale: boolean; + }) => Generator; + rindexes: (options?: { + allowStale: boolean; + }) => Generator; + isStale: (index: number | undefined) => boolean; + }; + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max(): LRUCache.Count; + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize(): LRUCache.Count; + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize(): LRUCache.Size; + /** + * The number of items stored in the cache (read-only) + */ + get size(): LRUCache.Count; + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod(): LRUCache.Fetcher | undefined; + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose(): LRUCache.Disposer | undefined; + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter(): LRUCache.Disposer | undefined; + constructor(options: LRUCache.Options | LRUCache); + /** + * Return the remaining TTL time for a given entry key + */ + getRemainingTTL(key: K): number; + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + entries(): Generator<(K | V | BackgroundFetch | undefined)[], void, unknown>; + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + rentries(): Generator<(K | V | BackgroundFetch | undefined)[], void, unknown>; + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + keys(): Generator; + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + rkeys(): Generator; + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + values(): Generator | undefined, void, unknown>; + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. + */ + rvalues(): Generator | undefined, void, unknown>; + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator](): Generator<(K | V | BackgroundFetch | undefined)[], void, unknown>; + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to Array.find(). fn is called as fn(value, key, cache). + */ + find(fn: (v: V, k: K, self: LRUCache) => boolean, getOptions?: LRUCache.GetOptions): V | undefined; + /** + * Call the supplied function on each item in the cache, in order from + * most recently used to least recently used. fn is called as + * fn(value, key, cache). 
Does not update age or recenty of use. + * Does not iterate over stale values. + */ + forEach(fn: (v: V, k: K, self: LRUCache) => any, thisp?: any): void; + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn: (v: V, k: K, self: LRUCache) => any, thisp?: any): void; + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale(): boolean; + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to cache.load() + */ + dump(): [K, LRUCache.Entry][]; + /** + * Reset the cache and load in the items in entries in the order listed. + * Note that the shape of the resulting cache may be different if the + * same options are not used in both caches. + */ + load(arr: [K, LRUCache.Entry][]): void; + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + */ + set(k: K, v: V | BackgroundFetch | undefined, setOptions?: LRUCache.SetOptions): this; + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. + */ + pop(): V | undefined; + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k: K, hasOptions?: LRUCache.HasOptions): boolean; + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k: K, peekOptions?: LRUCache.PeekOptions): V | undefined; + /** + * Make an asynchronous cached fetch using the + * {@link LRUCache.OptionsBase.fetchMethod} function. + * + * If multiple fetches for the same key are issued, then they will all be + * coalesced into a single call to fetchMethod. + * + * Note that this means that handling options such as + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}, + * {@link LRUCache.FetchOptions.signal}, + * and {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} will be + * determined by the FIRST fetch() call for a given key. + * + * This is a known (fixable) shortcoming which will be addresed on when + * someone complains about it, as the fix would involve added complexity and + * may not be worth the costs for this edge case. + */ + fetch(k: K, fetchOptions: unknown extends FC ? LRUCache.FetchOptions : FC extends undefined | void ? LRUCache.FetchOptionsNoContext : LRUCache.FetchOptionsWithContext): Promise; + fetch(k: unknown extends FC ? K : FC extends undefined | void ? K : never, fetchOptions?: unknown extends FC ? LRUCache.FetchOptions : FC extends undefined | void ? LRUCache.FetchOptionsNoContext : never): Promise; + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. + * + * If the key is not found, get() will return `undefined`. + */ + get(k: K, getOptions?: LRUCache.GetOptions): V | undefined; + /** + * Deletes a key out of the cache. + * Returns true if the key was deleted, false otherwise. + */ + delete(k: K): boolean; + /** + * Clear the cache entirely, throwing away all values. 
+ */ + clear(): void; +} +export {}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/dist/node_modules/lru-cache/dist/cjs/index.d.ts.map b/dist/node_modules/lru-cache/dist/cjs/index.d.ts.map new file mode 100644 index 0000000..568b5e5 --- /dev/null +++ b/dist/node_modules/lru-cache/dist/cjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA;;GAEG;AA0FH,QAAA,MAAM,IAAI,eAAiB,CAAA;AAE3B,KAAK,KAAK,GAAG,MAAM,GAAG;IAAE,CAAC,IAAI,CAAC,EAAE,gBAAgB,CAAA;CAAE,CAAA;AAKlD,KAAK,SAAS,GAAG,UAAU,GAAG,WAAW,GAAG,WAAW,CAAA;AACvD,KAAK,WAAW,GAAG,SAAS,GAAG,MAAM,EAAE,CAAA;AAyBvC,cAAM,SAAU,SAAQ,KAAK,CAAC,MAAM,CAAC;gBACvB,IAAI,EAAE,MAAM;CAIzB;AAED,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK,EAAE,CAAA;AAChC,cAAM,KAAK;;IACT,IAAI,EAAE,WAAW,CAAA;IACjB,MAAM,EAAE,MAAM,CAAA;IAGd,MAAM,CAAC,MAAM,CAAC,GAAG,EAAE,MAAM,GAAG,SAAS;gBASnC,GAAG,EAAE,MAAM,EACX,OAAO,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,GAAG,WAAW,CAAA;KAAE;IAU3C,IAAI,CAAC,CAAC,EAAE,KAAK;IAGb,GAAG,IAAI,KAAK;CAGb;AAED;;GAEG;AACH,MAAM,MAAM,eAAe,CAAC,CAAC,IAAI,OAAO,CAAC,CAAC,GAAG,SAAS,GAAG,IAAI,CAAC,GAAG;IAC/D,UAAU,EAAE,eAAe,CAAC,CAAC,CAAC,GAAG,SAAS,CAAA;IAC1C,iBAAiB,EAAE,eAAe,CAAA;IAClC,oBAAoB,EAAE,CAAC,GAAG,SAAS,CAAA;CACpC,CAAA;AAQD,yBAAiB,QAAQ,CAAC;IACxB;;OAEG;IACH,KAAY,IAAI,GAAG,MAAM,CAAA;IAEzB;;;OAGG;IACH,KAAY,YAAY,GAAG,MAAM,CAAA;IAEjC;;OAEG;IACH,KAAY,KAAK,GAAG,MAAM,CAAA;IAE1B;;;OAGG;IACH,KAAY,aAAa,GAAG,OAAO,GAAG,KAAK,GAAG,QAAQ,CAAA;IACtD;;;;OAIG;IACH,KAAY,QAAQ,CAAC,CAAC,EAAE,CAAC,IAAI,CAC3B,KAAK,EAAE,CAAC,EACR,GAAG,EAAE,CAAC,EACN,MAAM,EAAE,aAAa,KAClB,IAAI,CAAA;IAET;;;OAGG;IACH,KAAY,cAAc,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,KAAK,IAAI,CAAA;IAE7D;;;OAGG;IACH,UAAiB,cAAc,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,GAAG,OAAO;QAChD,MAAM,EAAE,WAAW,CAAA;QACnB,OAAO,EAAE,mBAAmB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAA;QACtC;;;WAGG;QACH,OAAO,EAAE,EAAE,CAAA;KACZ;IAED;;;OAGG;IACH,UAAiB,MAAM,CAAC,CAAC;QACvB;;;;;;;WAOG;QACH,GAAG,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,SAAS,GAAG,MAAM,CAAA;QAE3C;;WAEG;QACH,GAAG,CAAC,EAAE,YAAY,CAAA;QAElB;;WAEG;QACH,KAAK,CAAC,EAAE,YAAY,CAAA;QAEpB;;WAEG;QACH,GAAG,CAAC,EAAE,YAAY,CAAA;QAElB;;WAEG;QACH,YAAY,CAAC,EAAE,YAAY,CAAA;QAE3B;;WAEG;QACH,SAAS,CAAC,EAAE,IAAI,CAAA;QAEhB;;WAEG;QACH,mBAAmB,CAAC,EAAE,IAAI,CAAA;QAE1B;;;WAGG;QACH,oBAAoB,CAAC,EAAE,IAAI,CAAA;QAE3B;;;WAGG;QACH,QAAQ,CAAC,EAAE,CAAC,CAAA;QAEZ;;;;;;WAMG;QACH,GAAG,CAAC,EAAE,KAAK,GAAG,OAAO,GAAG,MAAM,CAAA;QAE9B;;;;;;;;;;;;WAYG;QACH,KAAK,CAAC,EAAE,KAAK,GAAG,UAAU,GAAG,MAAM,GAAG,KAAK,GAAG,OAAO,GAAG,SAAS,CAAA;QAEjE;;WAEG;QACH,eAAe,CAAC,EAAE,IAAI,CAAA;QAEtB;;;WAGG;QACH,YAAY,CAAC,EAAE,IAAI,CAAA;QAEnB;;;;WAIG;QACH,UAAU,CAAC,EAAE,KAAK,CAAA;QAElB;;WAEG;QACH,YAAY,CAAC,EAAE,IAAI,CAAA;QAEnB;;;WAGG;QACH,iBAAiB,CAAC,EAAE,IAAI,CAAA;QAExB;;WAEG;QACH,aAAa,CAAC,EAAE,IAAI,CAAA;QAEpB;;WAEG;QACH,aAAa,CAAC,EAAE,IAAI,CAAA;QAEpB;;;;;;;;WAQG;QACH,GAAG,CAAC,EAAE,OAAO,GAAG,KAAK,GAAG,MAAM,CAAA;QAE9B;;WAEG;QACH,aAAa,CAAC,EAAE,IAAI,CAAA;KACrB;IAED;;;;;;;;;;;;;;OAcG;IACH,UAAiB,mBAAmB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,GAAG,OAAO,CACrD,SAAQ,IAAI,CACV,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,EACnB,YAAY,GACZ,gBAAgB,GAChB,oBAAoB,GACpB,iBAAiB,GACjB,KAAK,GACL,gBAAgB,GAChB,aAAa,GACb,0BAA0B,GAC1B,4BAA4B,GAC5B,kBAAkB,GAClB,wBAAwB,CAC3B;QACD,MAAM,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAA;QAClB,IAAI,CAAC,EAAE,IAAI,CAAA;KACZ;IAED;;OAEG;IACH,UAAiB,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACpC,SAAQ,mBAAmB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QACrC;;;WAGG;QACH,YAAY,CAAC,EAAE,OAAO,CAAA;QACtB;;;;;;;WAOG;QACH,OAAO,CAAC,EAAE,EAAE,CAAA;QACZ,MAAM,CAAC,EAAE,WAAW,CAAA;QACpB,
MAAM,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAA;KACnB;IACD;;;OAGG;IACH,UAAiB,uBAAuB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAC/C,SAAQ,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC9B,OAAO,EAAE,EAAE,CAAA;KACZ;IACD;;;OAGG;IACH,UAAiB,qBAAqB,CAAC,CAAC,EAAE,CAAC,CACzC,SAAQ,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC;QACrC,OAAO,CAAC,EAAE,SAAS,CAAA;KACpB;IAED;;OAEG;IACH,UAAiB,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAClC,SAAQ,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE,gBAAgB,CAAC;QACrD,MAAM,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAA;KACnB;IAED;;OAEG;IACH,UAAiB,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAClC,SAAQ,IAAI,CACV,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,EACrB,YAAY,GAAG,gBAAgB,GAAG,oBAAoB,CACvD;QACD,MAAM,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAA;KACnB;IAED;;OAEG;IACH,UAAiB,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACnC,SAAQ,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE,YAAY,CAAC;KAAG;IAEtD;;OAEG;IACH,UAAiB,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAClC,SAAQ,IAAI,CACV,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,EACrB,iBAAiB,GAAG,KAAK,GAAG,gBAAgB,GAAG,aAAa,CAC7D;QACD;;;;WAIG;QACH,IAAI,CAAC,EAAE,IAAI,CAAA;QACX;;;;;;;WAOG;QACH,KAAK,CAAC,EAAE,YAAY,CAAA;QACpB,MAAM,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAA;KACnB;IAED;;OAEG;IACH,KAAY,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,GAAG,OAAO,IAAI,CACxC,GAAG,EAAE,CAAC,EACN,UAAU,EAAE,CAAC,GAAG,SAAS,EACzB,OAAO,EAAE,cAAc,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,KAC9B,OAAO,CAAC,CAAC,GAAG,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,GAAG,IAAI,GAAG,SAAS,CAAA;IAEzD;;;;;;;;;;;;;;OAcG;IACH,UAAiB,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;QACnC;;;;;;;;;;;WAWG;QACH,GAAG,CAAC,EAAE,KAAK,CAAA;QAEX;;;;;;;;;;;;WAYG;QACH,GAAG,CAAC,EAAE,YAAY,CAAA;QAElB;;;;;;;;;;;;;WAaG;QACH,aAAa,CAAC,EAAE,YAAY,CAAA;QAE5B;;;;;;;;;;;;WAYG;QACH,YAAY,CAAC,EAAE,OAAO,CAAA;QAEtB;;;;;;WAMG;QACH,cAAc,CAAC,EAAE,OAAO,CAAA;QAExB;;;;;;WAMG;QACH,cAAc,CAAC,EAAE,OAAO,CAAA;QAExB;;;WAGG;QACH,UAAU,CAAC,EAAE,OAAO,CAAA;QAEpB;;;;;;;;;;WAUG;QACH,OAAO,CAAC,EAAE,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;QAExB;;;;;;WAMG;QACH,YAAY,CAAC,EAAE,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;QAE7B;;;;;;WAMG;QACH,cAAc,CAAC,EAAE,OAAO,CAAA;QAExB;;;;;;;WAOG;QACH,WAAW,CAAC,EAAE,OAAO,CAAA;QAErB;;;;;;;;;;WAUG;QACH,OAAO,CAAC,EAAE,IAAI,CAAA;QAEd;;;;;;WAMG;QACH,YAAY,CAAC,EAAE,IAAI,CAAA;QAEnB;;;;;;;WAOG;QACH,eAAe,CAAC,EAAE,cAAc,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;QAEtC;;WAEG;QACH,WAAW,CAAC,EAAE,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAA;QAE/B;;;WAGG;QACH,wBAAwB,CAAC,EAAE,OAAO,CAAA;QAElC;;;;;;WAMG;QACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;QAE5B;;;;;;;;WAQG;QACH,0BAA0B,CAAC,EAAE,OAAO,CAAA;QAEpC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;WAgCG;QACH,sBAAsB,CAAC,EAAE,OAAO,CAAA;QAEhC;;;;;;;;;;;;;;;;;;;;;;WAsBG;QACH,gBAAgB,CAAC,EAAE,OAAO,CAAA;KAC3B;IAED,UAAiB,eAAe,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACvC,SAAQ,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC7B,GAAG,EAAE,KAAK,CAAA;KACX;IACD,UAAiB,eAAe,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACvC,SAAQ,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC7B,GAAG,EAAE,YAAY,CAAA;QACjB,YAAY,EAAE,OAAO,CAAA;KACtB;IACD,UAAiB,gBAAgB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACxC,SAAQ,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC7B,OAAO,EAAE,IAAI,CAAA;KACd;IAED;;OAEG;IACH,KAAY,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,IACxB,eAAe,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GACzB,gBAAgB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAC1B,eAAe,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAA;IAE7B;;OAEG;IACH,UAAiB,KAAK,CAAC,CAAC;QACtB,KAAK,EAAE,CAAC,CAAA;QACR,GAAG,CAAC,EAAE,YAAY,CAAA;QAClB,IAAI,CAAC,EAAE,IAAI,CAAA;QACX,KAAK,CAAC,EAAE,YAAY,CAAA;KACrB;CACF;AAED;;;;;;;;GAQG;AACH,qBAAa,QAAQ,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,SAAS,EAAE,EAAE,EAAE,GAAG,OAAO;;IAU5D;;OAEG;IACH,GAAG,EAAE,QAAQ,CAAC,YAAY,CAAA;IAE1B;;OAEG;IACH,aA
Aa,EAAE,QAAQ,CAAC,YAAY,CAAA;IACpC;;OAEG;IACH,YAAY,EAAE,OAAO,CAAA;IACrB;;OAEG;IACH,cAAc,EAAE,OAAO,CAAA;IACvB;;OAEG;IACH,cAAc,EAAE,OAAO,CAAA;IACvB;;OAEG;IACH,UAAU,EAAE,OAAO,CAAA;IAEnB;;OAEG;IACH,cAAc,EAAE,OAAO,CAAA;IACvB;;OAEG;IACH,WAAW,EAAE,OAAO,CAAA;IACpB;;OAEG;IACH,YAAY,EAAE,QAAQ,CAAC,IAAI,CAAA;IAC3B;;OAEG;IACH,eAAe,CAAC,EAAE,QAAQ,CAAC,cAAc,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;IAC/C;;OAEG;IACH,wBAAwB,EAAE,OAAO,CAAA;IACjC;;OAEG;IACH,kBAAkB,EAAE,OAAO,CAAA;IAC3B;;OAEG;IACH,sBAAsB,EAAE,OAAO,CAAA;IAC/B;;OAEG;IACH,0BAA0B,EAAE,OAAO,CAAA;IACnC;;OAEG;IACH,gBAAgB,EAAE,OAAO,CAAA;IAsBzB;;;;;;;;OAQG;IACH,MAAM,CAAC,qBAAqB,CAC1B,CAAC,SAAS,EAAE,EACZ,CAAC,SAAS,EAAE,EACZ,EAAE,SAAS,OAAO,GAAG,OAAO,EAC5B,CAAC,EAAE,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;;;;;;;;;;;;+BAmBI,GAAG;6BAErB,CAAC,SACG,MAAM,GAAG,SAAS,WAChB,SAAS,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,WAC/B,GAAG,KACX,gBAAgB,CAAC,CAAC;4BAOD,MAAM,KAAG,IAAI;4BAEb;YAAE,UAAU,EAAE,OAAO,CAAA;SAAE;6BAEtB;YAAE,UAAU,EAAE,OAAO,CAAA;SAAE;yBAE3B,MAAM,GAAG,SAAS;;IAOvC;;OAEG;IACH,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAExB;IACD;;OAEG;IACH,IAAI,OAAO,IAAI,QAAQ,CAAC,KAAK,CAE5B;IACD;;OAEG;IACH,IAAI,cAAc,IAAI,QAAQ,CAAC,IAAI,CAElC;IACD;;OAEG;IACH,IAAI,IAAI,IAAI,QAAQ,CAAC,KAAK,CAEzB;IACD;;OAEG;IACH,IAAI,WAAW,IAAI,QAAQ,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,SAAS,CAExD;IACD;;OAEG;IACH,IAAI,OAAO,wCAEV;IACD;;OAEG;IACH,IAAI,YAAY,wCAEf;gBAGC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;IAiJ1D;;OAEG;IACH,eAAe,CAAC,GAAG,EAAE,CAAC;IAkOtB;;;OAGG;IACF,OAAO;IAYR;;;;;OAKG;IACF,QAAQ;IAYT;;;OAGG;IACF,IAAI;IAYL;;;;;OAKG;IACF,KAAK;IAYN;;;OAGG;IACF,MAAM;IAYP;;;;;OAKG;IACF,OAAO;IAYR;;;OAGG;IACH,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,IAAI,CACF,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,OAAO,EACrD,UAAU,GAAE,QAAQ,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAM;IAchD;;;;;OAKG;IACH,OAAO,CACL,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,GAAG,EACjD,KAAK,GAAE,GAAU;IAYnB;;;OAGG;IACH,QAAQ,CACN,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,GAAG,EACjD,KAAK,GAAE,GAAU;IAYnB;;;OAGG;IACH,UAAU;IAWV;;;OAGG;IACH,IAAI;IAyBJ;;;;OAIG;IACH,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;IAiBlC;;;;;OAKG;IACH,GAAG,CACD,CAAC,EAAE,CAAC,EACJ,CAAC,EAAE,CAAC,GAAG,eAAe,CAAC,CAAC,CAAC,GAAG,SAAS,EACrC,UAAU,GAAE,QAAQ,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAM;IAuGhD;;;OAGG;IACH,GAAG,IAAI,CAAC,GAAG,SAAS;IAwDpB;;;;;;;OAOG;IACH,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,UAAU,GAAE,QAAQ,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAM;IA+BxD;;;;;;OAMG;IACH,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,GAAE,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAM;IAsK3D;;;;;;;;;;;;;;;;OAgBG;IACH,KAAK,CACH,CAAC,EAAE,CAAC,EACJ,YAAY,EAAE,OAAO,SAAS,EAAE,GAC5B,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAC/B,EAAE,SAAS,SAAS,GAAG,IAAI,GAC3B,QAAQ,CAAC,qBAAqB,CAAC,CAAC,EAAE,CAAC,CAAC,GACpC,QAAQ,CAAC,uBAAuB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAC7C,OAAO,CAAC,IAAI,GAAG,CAAC,CAAC;IAEpB,KAAK,CACH,CAAC,EAAE,OAAO,SAAS,EAAE,GACjB,CAAC,GACD,EAAE,SAAS,SAAS,GAAG,IAAI,GAC3B,CAAC,GACD,KAAK,EACT,YAAY,CAAC,EAAE,OAAO,SAAS,EAAE,GAC7B,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAC/B,EAAE,SAAS,SAAS,GAAG,IAAI,GAC3B,QAAQ,CAAC,qBAAqB,CAAC,CAAC,EAAE,CAAC,CAAC,GACpC,KAAK,GACR,OAAO,CAAC,IAAI,GAAG,CAAC,CAAC;IAkGpB;;;;;OAKG;IACH,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,UAAU,GAAE,QAAQ,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,
CAAM;IAgFxD;;;OAGG;IACH,MAAM,CAAC,CAAC,EAAE,CAAC;IA+CX;;OAEG;IACH,KAAK;CAuCN"} \ No newline at end of file diff --git a/dist/node_modules/lru-cache/dist/cjs/index.js b/dist/node_modules/lru-cache/dist/cjs/index.js new file mode 100644 index 0000000..51c9f23 --- /dev/null +++ b/dist/node_modules/lru-cache/dist/cjs/index.js @@ -0,0 +1,1395 @@ +"use strict"; +/** + * @module LRUCache + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.LRUCache = void 0; +const perf = typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date; +const warned = new Set(); +/* c8 ignore start */ +const PROCESS = (typeof process === 'object' && !!process ? process : {}); +/* c8 ignore start */ +const emitWarning = (msg, type, code, fn) => { + typeof PROCESS.emitWarning === 'function' + ? PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`); +}; +let AC = globalThis.AbortController; +let AS = globalThis.AbortSignal; +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort; + _onabort = []; + reason; + aborted = false; + addEventListener(_, fn) { + this._onabort.push(fn); + } + }; + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill(); + } + signal = new AS(); + abort(reason) { + if (this.signal.aborted) + return; + //@ts-ignore + this.signal.reason = reason; + //@ts-ignore + this.signal.aborted = true; + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason); + } + this.signal.onabort?.(reason); + } + }; + let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; + const warnACPolyfill = () => { + if (!printACPolyfillWarning) + return; + printACPolyfillWarning = false; + emitWarning('AbortController is not defined. If using lru-cache in ' + + 'node 14, load an AbortController polyfill from the ' + + '`node-abort-controller` package. A minimal polyfill is ' + + 'provided for use by LRUCache.fetch(), but it should not be ' + + 'relied upon in other contexts (eg, passing it to other APIs that ' + + 'use AbortController/AbortSignal might have undesirable effects). ' + + 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); + }; +} +/* c8 ignore stop */ +const shouldWarn = (code) => !warned.has(code); +const TYPE = Symbol('type'); +const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); +/* c8 ignore start */ +// This is a little bit ridiculous, tbh. +// The maximum array length is 2^32-1 or thereabouts on most JS impls. +// And well before that point, you're caching the entire world, I mean, +// that's ~32GB of just integers for the next/prev links, plus whatever +// else to hold that many keys and values. Just filling the memory with +// zeroes at init time is brutal when you get that big. +// But why not be complete? +// Maybe in the future, these limits will have expanded. +const getUintArray = (max) => !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? 
ZeroArray + : null; +/* c8 ignore stop */ +class ZeroArray extends Array { + constructor(size) { + super(size); + this.fill(0); + } +} +class Stack { + heap; + length; + // private constructor + static #constructing = false; + static create(max) { + const HeapCls = getUintArray(max); + if (!HeapCls) + return []; + Stack.#constructing = true; + const s = new Stack(max, HeapCls); + Stack.#constructing = false; + return s; + } + constructor(max, HeapCls) { + /* c8 ignore start */ + if (!Stack.#constructing) { + throw new TypeError('instantiate Stack using Stack.create(n)'); + } + /* c8 ignore stop */ + this.heap = new HeapCls(max); + this.length = 0; + } + push(n) { + this.heap[this.length++] = n; + } + pop() { + return this.heap[--this.length]; + } +} +/** + * Default export, the thing you're using this module to get. + * + * All properties from the options object (with the exception of + * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as + * normal public members. (`max` and `maxBase` are read-only getters.) + * Changing any of these will alter the defaults for subsequent method calls, + * but is otherwise safe. + */ +class LRUCache { + // properties coming in from the options of these, only max and maxSize + // really *need* to be protected. The rest can be modified, as they just + // set defaults for various methods. + #max; + #maxSize; + #dispose; + #disposeAfter; + #fetchMethod; + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort; + // computed properties + #size; + #calculatedSize; + #keyMap; + #keyList; + #valList; + #next; + #prev; + #head; + #tail; + #free; + #disposed; + #sizes; + #starts; + #ttls; + #hasDispose; + #hasFetchMethod; + #hasDisposeAfter; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! 
+ * + * @internal + */ + static unsafeExposeInternals(c) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head; + }, + get tail() { + return c.#tail; + }, + free: c.#free, + // methods + isBackgroundFetch: (p) => c.#isBackgroundFetch(p), + backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), + moveToTail: (index) => c.#moveToTail(index), + indexes: (options) => c.#indexes(options), + rindexes: (options) => c.#rindexes(options), + isStale: (index) => c.#isStale(index), + }; + } + // Protected read-only members + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max() { + return this.#max; + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize() { + return this.#maxSize; + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize() { + return this.#calculatedSize; + } + /** + * The number of items stored in the cache (read-only) + */ + get size() { + return this.#size; + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod() { + return this.#fetchMethod; + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose() { + return this.#dispose; + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter; + } + constructor(options) { + const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer'); + } + const UintArray = max ? 
getUintArray(max) : Array; + if (!UintArray) { + throw new Error('invalid max value: ' + max); + } + this.#max = max; + this.#maxSize = maxSize; + this.maxEntrySize = maxEntrySize || this.#maxSize; + this.sizeCalculation = sizeCalculation; + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function'); + } + } + if (fetchMethod !== undefined && + typeof fetchMethod !== 'function') { + throw new TypeError('fetchMethod must be a function if specified'); + } + this.#fetchMethod = fetchMethod; + this.#hasFetchMethod = !!fetchMethod; + this.#keyMap = new Map(); + this.#keyList = new Array(max).fill(undefined); + this.#valList = new Array(max).fill(undefined); + this.#next = new UintArray(max); + this.#prev = new UintArray(max); + this.#head = 0; + this.#tail = 0; + this.#free = Stack.create(max); + this.#size = 0; + this.#calculatedSize = 0; + if (typeof dispose === 'function') { + this.#dispose = dispose; + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter; + this.#disposed = []; + } + else { + this.#disposeAfter = undefined; + this.#disposed = undefined; + } + this.#hasDispose = !!this.#dispose; + this.#hasDisposeAfter = !!this.#disposeAfter; + this.noDisposeOnSet = !!noDisposeOnSet; + this.noUpdateTTL = !!noUpdateTTL; + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; + this.ignoreFetchAbort = !!ignoreFetchAbort; + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified'); + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError('maxEntrySize must be a positive integer if specified'); + } + this.#initializeSizeTracking(); + } + this.allowStale = !!allowStale; + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; + this.updateAgeOnGet = !!updateAgeOnGet; + this.updateAgeOnHas = !!updateAgeOnHas; + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1; + this.ttlAutopurge = !!ttlAutopurge; + this.ttl = ttl || 0; + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified'); + } + this.#initializeTTLTracking(); + } + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError('At least one of max, maxSize, or ttl is required'); + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED'; + if (shouldWarn(code)) { + warned.add(code); + const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.'; + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); + } + } + } + /** + * Return the remaining TTL time for a given entry key + */ + getRemainingTTL(key) { + return this.#keyMap.has(key) ? Infinity : 0; + } + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max); + const starts = new ZeroArray(this.#max); + this.#ttls = ttls; + this.#starts = starts; + this.#setItemTTL = (index, ttl, start = perf.now()) => { + starts[index] = ttl !== 0 ? 
start : 0; + ttls[index] = ttl; + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.delete(this.#keyList[index]); + } + }, ttl + 1); + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + }; + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? perf.now() : 0; + }; + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index]; + const start = starts[index]; + status.ttl = ttl; + status.start = start; + status.now = cachedNow || getNow(); + const age = status.now - start; + status.remainingTTL = ttl - age; + } + }; + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0; + const getNow = () => { + const n = perf.now(); + if (this.ttlResolution > 0) { + cachedNow = n; + const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + return n; + }; + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key); + if (index === undefined) { + return 0; + } + const ttl = ttls[index]; + const start = starts[index]; + if (ttl === 0 || start === 0) { + return Infinity; + } + const age = (cachedNow || getNow()) - start; + return ttl - age; + }; + this.#isStale = index => { + return (ttls[index] !== 0 && + starts[index] !== 0 && + (cachedNow || getNow()) - starts[index] > ttls[index]); + }; + } + // conditionally set private methods related to TTL + #updateItemAge = () => { }; + #statusTTL = () => { }; + #setItemTTL = () => { }; + /* c8 ignore stop */ + #isStale = () => false; + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max); + this.#calculatedSize = 0; + this.#sizes = sizes; + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index]; + sizes[index] = 0; + }; + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.#isBackgroundFetch(v)) { + return 0; + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function'); + } + size = sizeCalculation(v, k); + if (!isPosInt(size)) { + throw new TypeError('sizeCalculation return invalid (expect positive integer)'); + } + } + else { + throw new TypeError('invalid size value (must be positive integer). 
' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.'); + } + } + return size; + }; + this.#addItemSize = (index, size, status) => { + sizes[index] = size; + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index]; + while (this.#calculatedSize > maxSize) { + this.#evict(true); + } + } + this.#calculatedSize += sizes[index]; + if (status) { + status.entrySize = size; + status.totalCalculatedSize = this.#calculatedSize; + } + }; + } + #removeItemSize = _i => { }; + #addItemSize = (_i, _s, _st) => { }; + #requireSize = (_k, _v, size, sizeCalculation) => { + if (size || sizeCalculation) { + throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); + } + return 0; + }; + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#head) { + break; + } + else { + i = this.#prev[i]; + } + } + } + } + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#tail) { + break; + } + else { + i = this.#next[i]; + } + } + } + } + #isValidIndex(index) { + return (index !== undefined && + this.#keyMap.get(this.#keyList[index]) === index); + } + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. 
+ */ + *rvalues() { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator]() { + return this.entries(); + } + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to Array.find(). fn is called as fn(value, key, cache). + */ + find(fn, getOptions = {}) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + if (fn(value, this.#keyList[i], this)) { + return this.get(this.#keyList[i], getOptions); + } + } + } + /** + * Call the supplied function on each item in the cache, in order from + * most recently used to least recently used. fn is called as + * fn(value, key, cache). Does not update age or recenty of use. + * Does not iterate over stale values. + */ + forEach(fn, thisp = this) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn, thisp = this) { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale() { + let deleted = false; + for (const i of this.#rindexes({ allowStale: true })) { + if (this.#isStale(i)) { + this.delete(this.#keyList[i]); + deleted = true; + } + } + return deleted; + } + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to cache.load() + */ + dump() { + const arr = []; + for (const i of this.#indexes({ allowStale: true })) { + const key = this.#keyList[i]; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined || key === undefined) + continue; + const entry = { value }; + if (this.#ttls && this.#starts) { + entry.ttl = this.#ttls[i]; + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.#starts[i]; + entry.start = Math.floor(Date.now() - age); + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + arr.unshift([key, entry]); + } + return arr; + } + /** + * Reset the cache and load in the items in entries in the order listed. + * Note that the shape of the resulting cache may be different if the + * same options are not used in both caches. + */ + load(arr) { + this.clear(); + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset, so that + // we get the intended remaining TTL, no matter how long it's + // been on ice. + // + // it's ok for this to be a bit slow, it's a rare operation. 
+ const age = Date.now() - entry.start; + entry.start = perf.now() - age; + } + this.set(key, entry.value, entry); + } + } + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + */ + set(k, v, setOptions = {}) { + if (v === undefined) { + this.delete(k); + return this; + } + const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; + let { noUpdateTTL = this.noUpdateTTL } = setOptions; + const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss'; + status.maxEntrySizeExceeded = true; + } + // have to delete, in case something is there already. + this.delete(k); + return this; + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k); + if (index === undefined) { + // addition + index = (this.#size === 0 + ? this.#tail + : this.#free.length !== 0 + ? this.#free.pop() + : this.#size === this.#max + ? this.#evict(false) + : this.#size); + this.#keyList[index] = k; + this.#valList[index] = v; + this.#keyMap.set(k, index); + this.#next[this.#tail] = index; + this.#prev[index] = this.#tail; + this.#tail = index; + this.#size++; + this.#addItemSize(index, size, status); + if (status) + status.set = 'add'; + noUpdateTTL = false; + } + else { + // update + this.#moveToTail(index); + const oldVal = this.#valList[index]; + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')); + } + else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal, k, 'set']); + } + } + this.#removeItemSize(index); + this.#addItemSize(index, size, status); + this.#valList[index] = v; + if (status) { + status.set = 'replace'; + const oldValue = oldVal && this.#isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal; + if (oldValue !== undefined) + status.oldValue = oldValue; + } + } + else if (status) { + status.set = 'update'; + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking(); + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start); + } + if (status) + this.#statusTTL(status, index); + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return this; + } + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
+ */ + pop() { + try { + while (this.#size) { + const val = this.#valList[this.#head]; + this.#evict(true); + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching; + } + } + else if (val !== undefined) { + return val; + } + } + } + finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } + } + #evict(free) { + const head = this.#head; + const k = this.#keyList[head]; + const v = this.#valList[head]; + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']); + } + } + this.#removeItemSize(head); + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined; + this.#valList[head] = undefined; + this.#free.push(head); + } + if (this.#size === 1) { + this.#head = this.#tail = 0; + this.#free.length = 0; + } + else { + this.#head = this.#next[head]; + } + this.#keyMap.delete(k); + this.#size--; + return head; + } + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k, hasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined) { + return false; + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index); + } + if (status) { + status.has = 'hit'; + this.#statusTTL(status, index); + } + return true; + } + else if (status) { + status.has = 'stale'; + this.#statusTTL(status, index); + } + } + else if (status) { + status.has = 'miss'; + } + return false; + } + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k, peekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions; + const index = this.#keyMap.get(k); + if (index !== undefined && + (allowStale || !this.#isStale(index))) { + const v = this.#valList[index]; + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + } + } + #backgroundFetch(k, index, options, context) { + const v = index === undefined ? undefined : this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + return v; + } + const ac = new AC(); + const { signal } = options; + // when/if our AC signals, then stop listening to theirs. 
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }); + const fetchOpts = { + signal: ac.signal, + options, + context, + }; + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal; + const ignoreAbort = options.ignoreFetchAbort && v !== undefined; + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true; + options.status.fetchError = ac.signal.reason; + if (ignoreAbort) + options.status.fetchAbortIgnored = true; + } + else { + options.status.fetchResolved = true; + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason); + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p; + if (this.#valList[index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching) { + this.#valList[index] = bf.__staleWhileFetching; + } + else { + this.delete(k); + } + } + else { + if (options.status) + options.status.fetchUpdated = true; + this.set(k, v, fetchOpts.options); + } + } + return v; + }; + const eb = (er) => { + if (options.status) { + options.status.fetchRejected = true; + options.status.fetchError = er; + } + return fetchFail(er); + }; + const fetchFail = (er) => { + const { aborted } = ac.signal; + const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; + const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; + const noDelete = allowStale || options.noDeleteOnFetchRejection; + const bf = p; + if (this.#valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || bf.__staleWhileFetching === undefined; + if (del) { + this.delete(k); + } + else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index] = bf.__staleWhileFetching; + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true; + } + return bf.__staleWhileFetching; + } + else if (bf.__returned === bf) { + throw er; + } + }; + const pcall = (res, rej) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts); + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v), rej); + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if (!options.ignoreFetchAbort || + options.allowStaleOnFetchAbort) { + res(); + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true); + } + } + }); + }; + if (options.status) + options.status.fetchDispatched = true; + const p = new Promise(pcall).then(cb, eb); + const bf = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }); + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }); + index = this.#keyMap.get(k); + } + else { + this.#valList[index] = bf; + } + return bf; + } + #isBackgroundFetch(p) { + if (!this.#hasFetchMethod) + return false; + const b = p; + return (!!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC); + } + async fetch(k, fetchOptions = {}) { + const { + // get options + allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; + if (!this.#hasFetchMethod) { + if (status) + status.fetch = 'get'; + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }); + } + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + }; + let index = this.#keyMap.get(k); + if (index === undefined) { + if (status) + status.fetch = 'miss'; + const p = this.#backgroundFetch(k, index, options, context); + return (p.__returned = p); + } + else { + // in cache, maybe already fetching + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + const stale = allowStale && v.__staleWhileFetching !== undefined; + if (status) { + status.fetch = 'inflight'; + if (stale) + status.returnedStale = true; + } + return stale ? v.__staleWhileFetching : (v.__returned = v); + } + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.#isStale(index); + if (!forceRefresh && !isStale) { + if (status) + status.fetch = 'hit'; + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + if (status) + this.#statusTTL(status, index); + return v; + } + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context); + const hasStale = p.__staleWhileFetching !== undefined; + const staleVal = hasStale && allowStale; + if (status) { + status.fetch = isStale ? 'stale' : 'refresh'; + if (staleVal && isStale) + status.returnedStale = true; + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p); + } + } + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. + * + * If the key is not found, get() will return `undefined`. 
+ */ + get(k, getOptions = {}) { + const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const value = this.#valList[index]; + const fetching = this.#isBackgroundFetch(value); + if (status) + this.#statusTTL(status, index); + if (this.#isStale(index)) { + if (status) + status.get = 'stale'; + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k); + } + if (status && allowStale) + status.returnedStale = true; + return allowStale ? value : undefined; + } + else { + if (status && + allowStale && + value.__staleWhileFetching !== undefined) { + status.returnedStale = true; + } + return allowStale ? value.__staleWhileFetching : undefined; + } + } + else { + if (status) + status.get = 'hit'; + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching; + } + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + return value; + } + } + else if (status) { + status.get = 'miss'; + } + } + #connect(p, n) { + this.#prev[n] = p; + this.#next[p] = n; + } + #moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#connect(this.#prev[index], this.#next[index]); + } + this.#connect(this.#tail, index); + this.#tail = index; + } + } + /** + * Deletes a key out of the cache. + * Returns true if the key was deleted, false otherwise. + */ + delete(k) { + let deleted = false; + if (this.#size !== 0) { + const index = this.#keyMap.get(k); + if (index !== undefined) { + deleted = true; + if (this.#size === 1) { + this.clear(); + } + else { + this.#removeItemSize(index); + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + this.#keyMap.delete(k); + this.#keyList[index] = undefined; + this.#valList[index] = undefined; + if (index === this.#tail) { + this.#tail = this.#prev[index]; + } + else if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#next[this.#prev[index]] = this.#next[index]; + this.#prev[this.#next[index]] = this.#prev[index]; + } + this.#size--; + this.#free.push(index); + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return deleted; + } + /** + * Clear the cache entirely, throwing away all values. 
+ */ + clear() { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else { + const k = this.#keyList[index]; + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + } + this.#keyMap.clear(); + this.#valList.fill(undefined); + this.#keyList.fill(undefined); + if (this.#ttls && this.#starts) { + this.#ttls.fill(0); + this.#starts.fill(0); + } + if (this.#sizes) { + this.#sizes.fill(0); + } + this.#head = 0; + this.#tail = 0; + this.#free.length = 0; + this.#calculatedSize = 0; + this.#size = 0; + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } +} +exports.LRUCache = LRUCache; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/node_modules/lru-cache/dist/cjs/index.js.map b/dist/node_modules/lru-cache/dist/cjs/index.js.map new file mode 100644 index 0000000..a4e9a7b --- /dev/null +++ b/dist/node_modules/lru-cache/dist/cjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA;;GAEG;;;AAIH,MAAM,IAAI,GACR,OAAO,WAAW,KAAK,QAAQ;IAC/B,WAAW;IACX,OAAO,WAAW,CAAC,GAAG,KAAK,UAAU;IACnC,CAAC,CAAC,WAAW;IACb,CAAC,CAAC,IAAI,CAAA;AAEV,MAAM,MAAM,GAAG,IAAI,GAAG,EAAU,CAAA;AAKhC,qBAAqB;AACrB,MAAM,OAAO,GAAG,CACd,OAAO,OAAO,KAAK,QAAQ,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAChC,CAAA;AACzB,qBAAqB;AAErB,MAAM,WAAW,GAAG,CAClB,GAAW,EACX,IAAY,EACZ,IAAY,EACZ,EAAQ,EACR,EAAE;IACF,OAAO,OAAO,CAAC,WAAW,KAAK,UAAU;QACvC,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,EAAE,CAAC;QAC1C,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,IAAI,KAAK,GAAG,EAAE,CAAC,CAAA;AAChD,CAAC,CAAA;AAED,IAAI,EAAE,GAAG,UAAU,CAAC,eAAe,CAAA;AACnC,IAAI,EAAE,GAAG,UAAU,CAAC,WAAW,CAAA;AAE/B,qBAAqB;AACrB,IAAI,OAAO,EAAE,KAAK,WAAW,EAAE;IAC7B,YAAY;IACZ,EAAE,GAAG,MAAM,WAAW;QACpB,OAAO,CAAuB;QAC9B,QAAQ,GAA6B,EAAE,CAAA;QACvC,MAAM,CAAM;QACZ,OAAO,GAAY,KAAK,CAAA;QACxB,gBAAgB,CAAC,CAAS,EAAE,EAAwB;YAClD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;QACxB,CAAC;KACF,CAAA;IACD,YAAY;IACZ,EAAE,GAAG,MAAM,eAAe;QACxB;YACE,cAAc,EAAE,CAAA;QAClB,CAAC;QACD,MAAM,GAAG,IAAI,EAAE,EAAE,CAAA;QACjB,KAAK,CAAC,MAAW;YACf,IAAI,IAAI,CAAC,MAAM,CAAC,OAAO;gBAAE,OAAM;YAC/B,YAAY;YACZ,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,MAAM,CAAA;YAC3B,YAAY;YACZ,IAAI,CAAC,MAAM,CAAC,OAAO,GAAG,IAAI,CAAA;YAC1B,YAAY;YACZ,KAAK,MAAM,EAAE,IAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrC,EAAE,CAAC,MAAM,CAAC,CAAA;aACX;YACD,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,MAAM,CAAC,CAAA;QAC/B,CAAC;KACF,CAAA;IACD,IAAI,sBAAsB,GACxB,OAAO,CAAC,GAAG,EAAE,2BAA2B,KAAK,GAAG,CAAA;IAClD,MAAM,cAAc,GAAG,GAAG,EAAE;QAC1B,IAAI,CAAC,sBAAsB;YAAE,OAAM;QACnC,sBAAsB,GAAG,KAAK,CAAA;QAC9B,WAAW,CACT,wDAAwD;YACtD,qDAAqD;YACrD,yDAAyD;YACzD,6DAA6D;YAC7D,mEAAmE;YACnE,mEAAmE;YACnE,qEAAqE,EACvE,qBAAqB,EACrB,SAAS,EACT,cAAc,CACf,CAAA;IACH,CAAC,CAAA;CACF;AACD,oBAAoB;AAEpB,MAAM,UAAU,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;AAEtD,MAAM,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,CAAA;AAI3B,MAAM,QAAQ,GAAG,CAAC,CAAM,EAAe,EAAE,CACvC,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAA;AAKlD,qBAAqB;AACrB,wCAAwC;AACxC,sEAAsE;AACtE,uEAAuE;AACvE,uEAAuE;AACvE,wEAAwE;AACxE,uDAAuD;AACvD,2BAA2B;AAC3B,wDAAwD;AACxD,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE,CACnC,CAAC,QAA
Q,CAAC,GAAG,CAAC;IACZ,CAAC,CAAC,IAAI;IACN,CAAC,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;QACvB,CAAC,CAAC,UAAU;QACZ,CAAC,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC;YACxB,CAAC,CAAC,WAAW;YACb,CAAC,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC;gBACxB,CAAC,CAAC,WAAW;gBACb,CAAC,CAAC,GAAG,IAAI,MAAM,CAAC,gBAAgB;oBAChC,CAAC,CAAC,SAAS;oBACX,CAAC,CAAC,IAAI,CAAA;AACV,oBAAoB;AAEpB,MAAM,SAAU,SAAQ,KAAa;IACnC,YAAY,IAAY;QACtB,KAAK,CAAC,IAAI,CAAC,CAAA;QACX,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;IACd,CAAC;CACF;AAGD,MAAM,KAAK;IACT,IAAI,CAAa;IACjB,MAAM,CAAQ;IACd,sBAAsB;IACtB,MAAM,CAAC,aAAa,GAAY,KAAK,CAAA;IACrC,MAAM,CAAC,MAAM,CAAC,GAAW;QACvB,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,CAAA;QACjC,IAAI,CAAC,OAAO;YAAE,OAAO,EAAE,CAAA;QACvB,KAAK,CAAC,aAAa,GAAG,IAAI,CAAA;QAC1B,MAAM,CAAC,GAAG,IAAI,KAAK,CAAC,GAAG,EAAE,OAAO,CAAC,CAAA;QACjC,KAAK,CAAC,aAAa,GAAG,KAAK,CAAA;QAC3B,OAAO,CAAC,CAAA;IACV,CAAC;IACD,YACE,GAAW,EACX,OAAyC;QAEzC,qBAAqB;QACrB,IAAI,CAAC,KAAK,CAAC,aAAa,EAAE;YACxB,MAAM,IAAI,SAAS,CAAC,yCAAyC,CAAC,CAAA;SAC/D;QACD,oBAAoB;QACpB,IAAI,CAAC,IAAI,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,CAAA;QAC5B,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;IACjB,CAAC;IACD,IAAI,CAAC,CAAQ;QACX,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAA;IAC9B,CAAC;IACD,GAAG;QACD,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,MAAM,CAAU,CAAA;IAC1C,CAAC;;AA+nBH;;;;;;;;GAQG;AACH,MAAa,QAAQ;IACnB,uEAAuE;IACvE,wEAAwE;IACxE,oCAAoC;IAC3B,IAAI,CAAgB;IACpB,QAAQ,CAAe;IACvB,QAAQ,CAA0B;IAClC,aAAa,CAA0B;IACvC,YAAY,CAA6B;IAElD;;OAEG;IACH,GAAG,CAAuB;IAE1B;;OAEG;IACH,aAAa,CAAuB;IACpC;;OAEG;IACH,YAAY,CAAS;IACrB;;OAEG;IACH,cAAc,CAAS;IACvB;;OAEG;IACH,cAAc,CAAS;IACvB;;OAEG;IACH,UAAU,CAAS;IAEnB;;OAEG;IACH,cAAc,CAAS;IACvB;;OAEG;IACH,WAAW,CAAS;IACpB;;OAEG;IACH,YAAY,CAAe;IAC3B;;OAEG;IACH,eAAe,CAAgC;IAC/C;;OAEG;IACH,wBAAwB,CAAS;IACjC;;OAEG;IACH,kBAAkB,CAAS;IAC3B;;OAEG;IACH,sBAAsB,CAAS;IAC/B;;OAEG;IACH,0BAA0B,CAAS;IACnC;;OAEG;IACH,gBAAgB,CAAS;IAEzB,sBAAsB;IACtB,KAAK,CAAgB;IACrB,eAAe,CAAe;IAC9B,OAAO,CAAe;IACtB,QAAQ,CAAmB;IAC3B,QAAQ,CAAwC;IAChD,KAAK,CAAa;IAClB,KAAK,CAAa;IAClB,KAAK,CAAO;IACZ,KAAK,CAAO;IACZ,KAAK,CAAW;IAChB,SAAS,CAAsB;IAC/B,MAAM,CAAY;IAClB,OAAO,CAAY;IACnB,KAAK,CAAY;IAEjB,WAAW,CAAS;IACpB,eAAe,CAAS;IACxB,gBAAgB,CAAS;IAEzB;;;;;;;;OAQG;IACH,MAAM,CAAC,qBAAqB,CAI1B,CAAqB;QACrB,OAAO;YACL,aAAa;YACb,MAAM,EAAE,CAAC,CAAC,OAAO;YACjB,IAAI,EAAE,CAAC,CAAC,KAAK;YACb,KAAK,EAAE,CAAC,CAAC,MAAM;YACf,MAAM,EAAE,CAAC,CAAC,OAAyB;YACnC,OAAO,EAAE,CAAC,CAAC,QAAQ;YACnB,OAAO,EAAE,CAAC,CAAC,QAAQ;YACnB,IAAI,EAAE,CAAC,CAAC,KAAK;YACb,IAAI,EAAE,CAAC,CAAC,KAAK;YACb,IAAI,IAAI;gBACN,OAAO,CAAC,CAAC,KAAK,CAAA;YAChB,CAAC;YACD,IAAI,IAAI;gBACN,OAAO,CAAC,CAAC,KAAK,CAAA;YAChB,CAAC;YACD,IAAI,EAAE,CAAC,CAAC,KAAK;YACb,UAAU;YACV,iBAAiB,EAAE,CAAC,CAAM,EAAE,EAAE,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC;YACtD,eAAe,EAAE,CACf,CAAI,EACJ,KAAyB,EACzB,OAAwC,EACxC,OAAY,EACQ,EAAE,CACtB,CAAC,CAAC,gBAAgB,CAChB,CAAC,EACD,KAA0B,EAC1B,OAAO,EACP,OAAO,CACR;YACH,UAAU,EAAE,CAAC,KAAa,EAAQ,EAAE,CAClC,CAAC,CAAC,WAAW,CAAC,KAAc,CAAC;YAC/B,OAAO,EAAE,CAAC,OAAiC,EAAE,EAAE,CAC7C,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC;YACrB,QAAQ,EAAE,CAAC,OAAiC,EAAE,EAAE,CAC9C,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC;YACtB,OAAO,EAAE,CAAC,KAAyB,EAAE,EAAE,CACrC,CAAC,CAAC,QAAQ,CAAC,KAAc,CAAC;SAC7B,CAAA;IACH,CAAC;IAED,8BAA8B;IAE9B;;OAEG;IACH,IAAI,GAAG;QACL,OAAO,IAAI,CAAC,IAAI,CAAA;IAClB,CAAC;IACD;;OAEG;IACH,IAAI,OAAO;QACT,OAAO,IAAI,CAAC,QAAQ,CAAA;IACtB,CAAC;IACD;;OAEG;IACH,IAAI,cAAc;QAChB,OAAO,IAAI,CAAC,eAAe,CAAA;IAC7B,CAAC;IACD;;OAEG;IACH,IAAI,IAAI;QACN,OAAO,IAAI,CAAC,KAAK,CAAA;IACnB,CAAC;IACD;;OAEG;IACH,IAAI,WAAW;QACb,OAAO,IAAI,CAAC,YAAY,CAAA;IAC1B,CAAC;IACD;;OAEG;IACH,IAAI,OAA
O;QACT,OAAO,IAAI,CAAC,QAAQ,CAAA;IACtB,CAAC;IACD;;OAEG;IACH,IAAI,YAAY;QACd,OAAO,IAAI,CAAC,aAAa,CAAA;IAC3B,CAAC;IAED,YACE,OAAwD;QAExD,MAAM,EACJ,GAAG,GAAG,CAAC,EACP,GAAG,EACH,aAAa,GAAG,CAAC,EACjB,YAAY,EACZ,cAAc,EACd,cAAc,EACd,UAAU,EACV,OAAO,EACP,YAAY,EACZ,cAAc,EACd,WAAW,EACX,OAAO,GAAG,CAAC,EACX,YAAY,GAAG,CAAC,EAChB,eAAe,EACf,WAAW,EACX,wBAAwB,EACxB,kBAAkB,EAClB,0BAA0B,EAC1B,sBAAsB,EACtB,gBAAgB,GACjB,GAAG,OAAO,CAAA;QAEX,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YAC/B,MAAM,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAA;SAChE;QAED,MAAM,SAAS,GAAG,GAAG,CAAC,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAA;QACjD,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,qBAAqB,GAAG,GAAG,CAAC,CAAA;SAC7C;QAED,IAAI,CAAC,IAAI,GAAG,GAAG,CAAA;QACf,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAA;QACvB,IAAI,CAAC,YAAY,GAAG,YAAY,IAAI,IAAI,CAAC,QAAQ,CAAA;QACjD,IAAI,CAAC,eAAe,GAAG,eAAe,CAAA;QACtC,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,IAAI,CAAC,IAAI,CAAC,QAAQ,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE;gBACxC,MAAM,IAAI,SAAS,CACjB,oEAAoE,CACrE,CAAA;aACF;YACD,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,UAAU,EAAE;gBAC9C,MAAM,IAAI,SAAS,CAAC,qCAAqC,CAAC,CAAA;aAC3D;SACF;QAED,IACE,WAAW,KAAK,SAAS;YACzB,OAAO,WAAW,KAAK,UAAU,EACjC;YACA,MAAM,IAAI,SAAS,CACjB,6CAA6C,CAC9C,CAAA;SACF;QACD,IAAI,CAAC,YAAY,GAAG,WAAW,CAAA;QAC/B,IAAI,CAAC,eAAe,GAAG,CAAC,CAAC,WAAW,CAAA;QAEpC,IAAI,CAAC,OAAO,GAAG,IAAI,GAAG,EAAE,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAC9C,IAAI,CAAC,QAAQ,GAAG,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAC9C,IAAI,CAAC,KAAK,GAAG,IAAI,SAAS,CAAC,GAAG,CAAC,CAAA;QAC/B,IAAI,CAAC,KAAK,GAAG,IAAI,SAAS,CAAC,GAAG,CAAC,CAAA;QAC/B,IAAI,CAAC,KAAK,GAAG,CAAU,CAAA;QACvB,IAAI,CAAC,KAAK,GAAG,CAAU,CAAA;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QAC9B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAA;QACd,IAAI,CAAC,eAAe,GAAG,CAAC,CAAA;QAExB,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;YACjC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAA;SACxB;QACD,IAAI,OAAO,YAAY,KAAK,UAAU,EAAE;YACtC,IAAI,CAAC,aAAa,GAAG,YAAY,CAAA;YACjC,IAAI,CAAC,SAAS,GAAG,EAAE,CAAA;SACpB;aAAM;YACL,IAAI,CAAC,aAAa,GAAG,SAAS,CAAA;YAC9B,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;SAC3B;QACD,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAClC,IAAI,CAAC,gBAAgB,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QAE5C,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,cAAc,CAAA;QACtC,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,WAAW,CAAA;QAChC,IAAI,CAAC,wBAAwB,GAAG,CAAC,CAAC,wBAAwB,CAAA;QAC1D,IAAI,CAAC,0BAA0B,GAAG,CAAC,CAAC,0BAA0B,CAAA;QAC9D,IAAI,CAAC,sBAAsB,GAAG,CAAC,CAAC,sBAAsB,CAAA;QACtD,IAAI,CAAC,gBAAgB,GAAG,CAAC,CAAC,gBAAgB,CAAA;QAE1C,iDAAiD;QACjD,IAAI,IAAI,CAAC,YAAY,KAAK,CAAC,EAAE;YAC3B,IAAI,IAAI,CAAC,QAAQ,KAAK,CAAC,EAAE;gBACvB,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;oBAC5B,MAAM,IAAI,SAAS,CACjB,iDAAiD,CAClD,CAAA;iBACF;aACF;YACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;gBAChC,MAAM,IAAI,SAAS,CACjB,sDAAsD,CACvD,CAAA;aACF;YACD,IAAI,CAAC,uBAAuB,EAAE,CAAA;SAC/B;QAED,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,UAAU,CAAA;QAC9B,IAAI,CAAC,kBAAkB,GAAG,CAAC,CAAC,kBAAkB,CAAA;QAC9C,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,cAAc,CAAA;QACtC,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,cAAc,CAAA;QACtC,IAAI,CAAC,aAAa;YAChB,QAAQ,CAAC,aAAa,CAAC,IAAI,aAAa,KAAK,CAAC;gBAC5C,CAAC,CAAC,aAAa;gBACf,CAAC,CAAC,CAAC,CAAA;QACP,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,YAAY,CAAA;QAClC,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,CAAC,CAAA;QACnB,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;gBACvB,MAAM,IAAI,SAAS,CACjB,6CAA6C,CAC9C,CAAA;aACF;YACD,IAAI,CAAC,sBAAsB,EAAE,CAAA;SAC9B;QAED,2CAA2C;QAC3C,IAAI,IAAI,CAAC,IAAI,KAAK,CAAC,IAAI,IAAI,CAAC,GAAG,KAAK,CAAC,IAAI,IAAI,CAAC,QAAQ,KAAK,CAAC,EAAE;YAC5D,MAAM,IAAI,SAAS,CACjB,kDAAkD,CACnD,CAAA;SACF;
QACD,IAAI,CAAC,IAAI,CAAC,YAAY,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;YACtD,MAAM,IAAI,GAAG,qBAAqB,CAAA;YAClC,IAAI,UAAU,CAAC,IAAI,CAAC,EAAE;gBACpB,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;gBAChB,MAAM,GAAG,GACP,wDAAwD;oBACxD,yCAAyC,CAAA;gBAC3C,WAAW,CAAC,GAAG,EAAE,uBAAuB,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;aAC1D;SACF;IACH,CAAC;IAED;;OAEG;IACH,eAAe,CAAC,GAAM;QACpB,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;IAC7C,CAAC;IAED,sBAAsB;QACpB,MAAM,IAAI,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QACrC,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QACvC,IAAI,CAAC,KAAK,GAAG,IAAI,CAAA;QACjB,IAAI,CAAC,OAAO,GAAG,MAAM,CAAA;QAErB,IAAI,CAAC,WAAW,GAAG,CAAC,KAAK,EAAE,GAAG,EAAE,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,EAAE,EAAE;YACpD,MAAM,CAAC,KAAK,CAAC,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;YACrC,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,CAAA;YACjB,IAAI,GAAG,KAAK,CAAC,IAAI,IAAI,CAAC,YAAY,EAAE;gBAClC,MAAM,CAAC,GAAG,UAAU,CAAC,GAAG,EAAE;oBACxB,IAAI,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;wBACxB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAM,CAAC,CAAA;qBACvC;gBACH,CAAC,EAAE,GAAG,GAAG,CAAC,CAAC,CAAA;gBACX,yCAAyC;gBACzC,qBAAqB;gBACrB,IAAI,CAAC,CAAC,KAAK,EAAE;oBACX,CAAC,CAAC,KAAK,EAAE,CAAA;iBACV;gBACD,oBAAoB;aACrB;QACH,CAAC,CAAA;QAED,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC,EAAE;YAC5B,MAAM,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;QACpD,CAAC,CAAA;QAED,IAAI,CAAC,UAAU,GAAG,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE;YAClC,IAAI,IAAI,CAAC,KAAK,CAAC,EAAE;gBACf,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,CAAA;gBACvB,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,CAAA;gBAC3B,MAAM,CAAC,GAAG,GAAG,GAAG,CAAA;gBAChB,MAAM,CAAC,KAAK,GAAG,KAAK,CAAA;gBACpB,MAAM,CAAC,GAAG,GAAG,SAAS,IAAI,MAAM,EAAE,CAAA;gBAClC,MAAM,GAAG,GAAG,MAAM,CAAC,GAAG,GAAG,KAAK,CAAA;gBAC9B,MAAM,CAAC,YAAY,GAAG,GAAG,GAAG,GAAG,CAAA;aAChC;QACH,CAAC,CAAA;QAED,0DAA0D;QAC1D,+BAA+B;QAC/B,IAAI,SAAS,GAAG,CAAC,CAAA;QACjB,MAAM,MAAM,GAAG,GAAG,EAAE;YAClB,MAAM,CAAC,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;YACpB,IAAI,IAAI,CAAC,aAAa,GAAG,CAAC,EAAE;gBAC1B,SAAS,GAAG,CAAC,CAAA;gBACb,MAAM,CAAC,GAAG,UAAU,CAClB,GAAG,EAAE,CAAC,CAAC,SAAS,GAAG,CAAC,CAAC,EACrB,IAAI,CAAC,aAAa,CACnB,CAAA;gBACD,iCAAiC;gBACjC,qBAAqB;gBACrB,IAAI,CAAC,CAAC,KAAK,EAAE;oBACX,CAAC,CAAC,KAAK,EAAE,CAAA;iBACV;gBACD,oBAAoB;aACrB;YACD,OAAO,CAAC,CAAA;QACV,CAAC,CAAA;QAED,IAAI,CAAC,eAAe,GAAG,GAAG,CAAC,EAAE;YAC3B,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA;YACnC,IAAI,KAAK,KAAK,SAAS,EAAE;gBACvB,OAAO,CAAC,CAAA;aACT;YACD,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,CAAA;YACvB,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,CAAA;YAC3B,IAAI,GAAG,KAAK,CAAC,IAAI,KAAK,KAAK,CAAC,EAAE;gBAC5B,OAAO,QAAQ,CAAA;aAChB;YACD,MAAM,GAAG,GAAG,CAAC,SAAS,IAAI,MAAM,EAAE,CAAC,GAAG,KAAK,CAAA;YAC3C,OAAO,GAAG,GAAG,GAAG,CAAA;QAClB,CAAC,CAAA;QAED,IAAI,CAAC,QAAQ,GAAG,KAAK,CAAC,EAAE;YACtB,OAAO,CACL,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC;gBACjB,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC;gBACnB,CAAC,SAAS,IAAI,MAAM,EAAE,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,CACtD,CAAA;QACH,CAAC,CAAA;IACH,CAAC;IAED,mDAAmD;IACnD,cAAc,GAA2B,GAAG,EAAE,GAAE,CAAC,CAAA;IACjD,UAAU,GACR,GAAG,EAAE,GAAE,CAAC,CAAA;IACV,WAAW,GAMC,GAAG,EAAE,GAAE,CAAC,CAAA;IACpB,oBAAoB;IAEpB,QAAQ,GAA8B,GAAG,EAAE,CAAC,KAAK,CAAA;IAEjD,uBAAuB;QACrB,MAAM,KAAK,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QACtC,IAAI,CAAC,eAAe,GAAG,CAAC,CAAA;QACxB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,eAAe,GAAG,KAAK,CAAC,EAAE;YAC7B,IAAI,CAAC,eAAe,IAAI,KAAK,CAAC,KAAK,CAAC,CAAA;YACpC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QAClB,CAAC,CAAA;QACD,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,
EAAE,CAAC,EAAE,IAAI,EAAE,eAAe,EAAE,EAAE;YAClD,2CAA2C;YAC3C,sDAAsD;YACtD,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;gBAC9B,OAAO,CAAC,CAAA;aACT;YACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;gBACnB,IAAI,eAAe,EAAE;oBACnB,IAAI,OAAO,eAAe,KAAK,UAAU,EAAE;wBACzC,MAAM,IAAI,SAAS,CAAC,oCAAoC,CAAC,CAAA;qBAC1D;oBACD,IAAI,GAAG,eAAe,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;oBAC5B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;wBACnB,MAAM,IAAI,SAAS,CACjB,0DAA0D,CAC3D,CAAA;qBACF;iBACF;qBAAM;oBACL,MAAM,IAAI,SAAS,CACjB,iDAAiD;wBAC/C,wDAAwD;wBACxD,sBAAsB,CACzB,CAAA;iBACF;aACF;YACD,OAAO,IAAI,CAAA;QACb,CAAC,CAAA;QACD,IAAI,CAAC,YAAY,GAAG,CAClB,KAAY,EACZ,IAAmB,EACnB,MAA2B,EAC3B,EAAE;YACF,KAAK,CAAC,KAAK,CAAC,GAAG,IAAI,CAAA;YACnB,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACjB,MAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,GAAG,KAAK,CAAC,KAAK,CAAC,CAAA;gBAC5C,OAAO,IAAI,CAAC,eAAe,GAAG,OAAO,EAAE;oBACrC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;iBAClB;aACF;YACD,IAAI,CAAC,eAAe,IAAI,KAAK,CAAC,KAAK,CAAC,CAAA;YACpC,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,SAAS,GAAG,IAAI,CAAA;gBACvB,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,eAAe,CAAA;aAClD;QACH,CAAC,CAAA;IACH,CAAC;IAED,eAAe,GAA2B,EAAE,CAAC,EAAE,GAAE,CAAC,CAAA;IAClD,YAAY,GAIA,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,EAAE,EAAE,GAAE,CAAC,CAAA;IAC/B,YAAY,GAKS,CACnB,EAAK,EACL,EAA0B,EAC1B,IAAoB,EACpB,eAA+C,EAC/C,EAAE;QACF,IAAI,IAAI,IAAI,eAAe,EAAE;YAC3B,MAAM,IAAI,SAAS,CACjB,kEAAkE,CACnE,CAAA;SACF;QACD,OAAO,CAAC,CAAA;IACV,CAAC,CAAC;IAEF,CAAC,QAAQ,CAAC,EAAE,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE,GAAG,EAAE;QAC7C,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,KAAK,EAAE,IAAI,GAAI;gBAC/B,IAAI,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,EAAE;oBAC1B,MAAK;iBACN;gBACD,IAAI,UAAU,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;oBACnC,MAAM,CAAC,CAAA;iBACR;gBACD,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,EAAE;oBACpB,MAAK;iBACN;qBAAM;oBACL,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAU,CAAA;iBAC3B;aACF;SACF;IACH,CAAC;IAED,CAAC,SAAS,CAAC,EAAE,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE,GAAG,EAAE;QAC9C,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,KAAK,EAAE,IAAI,GAAI;gBAC/B,IAAI,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,EAAE;oBAC1B,MAAK;iBACN;gBACD,IAAI,UAAU,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;oBACnC,MAAM,CAAC,CAAA;iBACR;gBACD,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,EAAE;oBACpB,MAAK;iBACN;qBAAM;oBACL,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAU,CAAA;iBAC3B;aACF;SACF;IACH,CAAC;IAED,aAAa,CAAC,KAAY;QACxB,OAAO,CACL,KAAK,KAAK,SAAS;YACnB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAM,CAAC,KAAK,KAAK,CACtD,CAAA;IACH,CAAC;IAED;;;OAGG;IACH,CAAC,OAAO;QACN,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAE;YAC/B,IACE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,SAAS;gBAC9B,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,SAAS;gBAC9B,CAAC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAC1C;gBACA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;aAC3C;SACF;IACH,CAAC;IAED;;;;;OAKG;IACH,CAAC,QAAQ;QACP,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,SAAS,EAAE,EAAE;YAChC,IACE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,SAAS;gBAC9B,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,SAAS;gBAC9B,CAAC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAC1C;gBACA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;aAC3C;SACF;IACH,CAAC;IAED;;;OAGG;IACH,CAAC,IAAI;QACH,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAE;YAC/B,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,IACE,CAAC,KAAK,SAAS;gBACf,CAAC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAC1C;gBACA,MAAM,CAAC,CAAA;aACR;SACF;IACH,CAAC;IAED;;;;;OAKG;IACH,CAAC,KAAK;QACJ,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,SAAS,EAAE,EAAE;YAChC,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAA
A;YAC1B,IACE,CAAC,KAAK,SAAS;gBACf,CAAC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAC1C;gBACA,MAAM,CAAC,CAAA;aACR;SACF;IACH,CAAC;IAED;;;OAGG;IACH,CAAC,MAAM;QACL,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAE;YAC/B,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,IACE,CAAC,KAAK,SAAS;gBACf,CAAC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAC1C;gBACA,MAAM,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;aACvB;SACF;IACH,CAAC;IAED;;;;;OAKG;IACH,CAAC,OAAO;QACN,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,SAAS,EAAE,EAAE;YAChC,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,IACE,CAAC,KAAK,SAAS;gBACf,CAAC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAC1C;gBACA,MAAM,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;aACvB;SACF;IACH,CAAC;IAED;;;OAGG;IACH,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;IAED;;;OAGG;IACH,IAAI,CACF,EAAqD,EACrD,aAA4C,EAAE;QAE9C,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAE;YAC/B,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,MAAM,KAAK,GAAG,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC;gBACtC,CAAC,CAAC,CAAC,CAAC,oBAAoB;gBACxB,CAAC,CAAC,CAAC,CAAA;YACL,IAAI,KAAK,KAAK,SAAS;gBAAE,SAAQ;YACjC,IAAI,EAAE,CAAC,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAM,EAAE,IAAI,CAAC,EAAE;gBAC1C,OAAO,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAM,EAAE,UAAU,CAAC,CAAA;aACnD;SACF;IACH,CAAC;IAED;;;;;OAKG;IACH,OAAO,CACL,EAAiD,EACjD,QAAa,IAAI;QAEjB,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAE;YAC/B,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,MAAM,KAAK,GAAG,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC;gBACtC,CAAC,CAAC,CAAC,CAAC,oBAAoB;gBACxB,CAAC,CAAC,CAAC,CAAA;YACL,IAAI,KAAK,KAAK,SAAS;gBAAE,SAAQ;YACjC,EAAE,CAAC,IAAI,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAM,EAAE,IAAI,CAAC,CAAA;SACnD;IACH,CAAC;IAED;;;OAGG;IACH,QAAQ,CACN,EAAiD,EACjD,QAAa,IAAI;QAEjB,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,SAAS,EAAE,EAAE;YAChC,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,MAAM,KAAK,GAAG,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC;gBACtC,CAAC,CAAC,CAAC,CAAC,oBAAoB;gBACxB,CAAC,CAAC,CAAC,CAAA;YACL,IAAI,KAAK,KAAK,SAAS;gBAAE,SAAQ;YACjC,EAAE,CAAC,IAAI,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAM,EAAE,IAAI,CAAC,CAAA;SACnD;IACH,CAAC;IAED;;;OAGG;IACH,UAAU;QACR,IAAI,OAAO,GAAG,KAAK,CAAA;QACnB,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,SAAS,CAAC,EAAE,UAAU,EAAE,IAAI,EAAE,CAAC,EAAE;YACpD,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;gBACpB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAM,CAAC,CAAA;gBAClC,OAAO,GAAG,IAAI,CAAA;aACf;SACF;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAED;;;OAGG;IACH,IAAI;QACF,MAAM,GAAG,GAA6B,EAAE,CAAA;QACxC,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,EAAE,UAAU,EAAE,IAAI,EAAE,CAAC,EAAE;YACnD,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC5B,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,MAAM,KAAK,GAAkB,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC;gBACrD,CAAC,CAAC,CAAC,CAAC,oBAAoB;gBACxB,CAAC,CAAC,CAAC,CAAA;YACL,IAAI,KAAK,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS;gBAAE,SAAQ;YACtD,MAAM,KAAK,GAAsB,EAAE,KAAK,EAAE,CAAA;YAC1C,IAAI,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,KAAK,CAAC,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;gBACzB,yDAAyD;gBACzD,4DAA4D;gBAC5D,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;gBACxC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC,CAAA;aAC3C;YACD,IAAI,IAAI,CAAC,MAAM,EAAE;gBACf,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;aAC5B;YACD,GAAG,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAA;SAC1B;QACD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED;;;;OAIG;IACH,IAAI,CAAC,GAA6B;QAChC,IAAI,CAAC,KAAK,EAAE,CAAA;QACZ,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,G
AAG,EAAE;YAC9B,IAAI,KAAK,CAAC,KAAK,EAAE;gBACf,2DAA2D;gBAC3D,6DAA6D;gBAC7D,6DAA6D;gBAC7D,eAAe;gBACf,EAAE;gBACF,4DAA4D;gBAC5D,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC,KAAK,CAAA;gBACpC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,GAAG,CAAA;aAC/B;YACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;SAClC;IACH,CAAC;IAED;;;;;OAKG;IACH,GAAG,CACD,CAAI,EACJ,CAAqC,EACrC,aAA4C,EAAE;QAE9C,IAAI,CAAC,KAAK,SAAS,EAAE;YACnB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;YACd,OAAO,IAAI,CAAA;SACZ;QACD,MAAM,EACJ,GAAG,GAAG,IAAI,CAAC,GAAG,EACd,KAAK,EACL,cAAc,GAAG,IAAI,CAAC,cAAc,EACpC,eAAe,GAAG,IAAI,CAAC,eAAe,EACtC,MAAM,GACP,GAAG,UAAU,CAAA;QACd,IAAI,EAAE,WAAW,GAAG,IAAI,CAAC,WAAW,EAAE,GAAG,UAAU,CAAA;QAEnD,MAAM,IAAI,GAAG,IAAI,CAAC,YAAY,CAC5B,CAAC,EACD,CAAC,EACD,UAAU,CAAC,IAAI,IAAI,CAAC,EACpB,eAAe,CAChB,CAAA;QACD,6CAA6C;QAC7C,6CAA6C;QAC7C,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,GAAG,IAAI,CAAC,YAAY,EAAE;YACjD,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,GAAG,GAAG,MAAM,CAAA;gBACnB,MAAM,CAAC,oBAAoB,GAAG,IAAI,CAAA;aACnC;YACD,sDAAsD;YACtD,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;YACd,OAAO,IAAI,CAAA;SACZ;QACD,IAAI,KAAK,GAAG,IAAI,CAAC,KAAK,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QAC9D,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,WAAW;YACX,KAAK,GAAG,CACN,IAAI,CAAC,KAAK,KAAK,CAAC;gBACd,CAAC,CAAC,IAAI,CAAC,KAAK;gBACZ,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;oBACzB,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,EAAE;oBAClB,CAAC,CAAC,IAAI,CAAC,KAAK,KAAK,IAAI,CAAC,IAAI;wBAC1B,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;wBACpB,CAAC,CAAC,IAAI,CAAC,KAAK,CACN,CAAA;YACV,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;YACxB,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;YACxB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;YAC1B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,KAAK,CAAA;YAC9B,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,KAAK,CAAA;YAC9B,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;YAClB,IAAI,CAAC,KAAK,EAAE,CAAA;YACZ,IAAI,CAAC,YAAY,CAAC,KAAK,EAAE,IAAI,EAAE,MAAM,CAAC,CAAA;YACtC,IAAI,MAAM;gBAAE,MAAM,CAAC,GAAG,GAAG,KAAK,CAAA;YAC9B,WAAW,GAAG,KAAK,CAAA;SACpB;aAAM;YACL,SAAS;YACT,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAA;YACvB,MAAM,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAA2B,CAAA;YAC7D,IAAI,CAAC,KAAK,MAAM,EAAE;gBAChB,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,EAAE;oBAC3D,MAAM,CAAC,iBAAiB,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,UAAU,CAAC,CAAC,CAAA;iBACtD;qBAAM,IAAI,CAAC,cAAc,EAAE;oBAC1B,IAAI,IAAI,CAAC,WAAW,EAAE;wBACpB,IAAI,CAAC,QAAQ,EAAE,CAAC,MAAW,EAAE,CAAC,EAAE,KAAK,CAAC,CAAA;qBACvC;oBACD,IAAI,IAAI,CAAC,gBAAgB,EAAE;wBACzB,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC,MAAW,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC,CAAA;qBAC9C;iBACF;gBACD,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,CAAA;gBAC3B,IAAI,CAAC,YAAY,CAAC,KAAK,EAAE,IAAI,EAAE,MAAM,CAAC,CAAA;gBACtC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;gBACxB,IAAI,MAAM,EAAE;oBACV,MAAM,CAAC,GAAG,GAAG,SAAS,CAAA;oBACtB,MAAM,QAAQ,GACZ,MAAM,IAAI,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC;wBACvC,CAAC,CAAC,MAAM,CAAC,oBAAoB;wBAC7B,CAAC,CAAC,MAAM,CAAA;oBACZ,IAAI,QAAQ,KAAK,SAAS;wBAAE,MAAM,CAAC,QAAQ,GAAG,QAAQ,CAAA;iBACvD;aACF;iBAAM,IAAI,MAAM,EAAE;gBACjB,MAAM,CAAC,GAAG,GAAG,QAAQ,CAAA;aACtB;SACF;QACD,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YAC5B,IAAI,CAAC,sBAAsB,EAAE,CAAA;SAC9B;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,WAAW,EAAE;gBAChB,IAAI,CAAC,WAAW,CAAC,KAAK,EAAE,GAAG,EAAE,KAAK,CAAC,CAAA;aACpC;YACD,IAAI,MAAM;gBAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;SAC3C;QACD,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,SAAS,EAAE;YAC9D,MAAM,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;YACzB,IAAI,IAAmC,CAAA;YACvC,OAAO,CAAC,IAAI,GAAG,EAAE,EAAE,KAAK,EAAE,CAAC,
EAAE;gBAC3B,IAAI,CAAC,aAAa,EAAE,CAAC,GAAG,IAAI,CAAC,CAAA;aAC9B;SACF;QACD,OAAO,IAAI,CAAA;IACb,CAAC;IAED;;;OAGG;IACH,GAAG;QACD,IAAI;YACF,OAAO,IAAI,CAAC,KAAK,EAAE;gBACjB,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;gBACrC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;gBACjB,IAAI,IAAI,CAAC,kBAAkB,CAAC,GAAG,CAAC,EAAE;oBAChC,IAAI,GAAG,CAAC,oBAAoB,EAAE;wBAC5B,OAAO,GAAG,CAAC,oBAAoB,CAAA;qBAChC;iBACF;qBAAM,IAAI,GAAG,KAAK,SAAS,EAAE;oBAC5B,OAAO,GAAG,CAAA;iBACX;aACF;SACF;gBAAS;YACR,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,SAAS,EAAE;gBAC3C,MAAM,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;gBACzB,IAAI,IAAmC,CAAA;gBACvC,OAAO,CAAC,IAAI,GAAG,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE;oBAC3B,IAAI,CAAC,aAAa,EAAE,CAAC,GAAG,IAAI,CAAC,CAAA;iBAC9B;aACF;SACF;IACH,CAAC;IAED,MAAM,CAAC,IAAa;QAClB,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAA;QACvB,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAM,CAAA;QAClC,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAM,CAAA;QAClC,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;YACtD,CAAC,CAAC,iBAAiB,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,SAAS,CAAC,CAAC,CAAA;SAChD;aAAM,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,gBAAgB,EAAE;YACpD,IAAI,IAAI,CAAC,WAAW,EAAE;gBACpB,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;aAC/B;YACD,IAAI,IAAI,CAAC,gBAAgB,EAAE;gBACzB,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC,CAAA;aACtC;SACF;QACD,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,CAAA;QAC1B,2DAA2D;QAC3D,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,SAAS,CAAA;YAC/B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,SAAS,CAAA;YAC/B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;SACtB;QACD,IAAI,IAAI,CAAC,KAAK,KAAK,CAAC,EAAE;YACpB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,CAAU,CAAA;YACpC,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAA;SACtB;aAAM;YACL,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAU,CAAA;SACvC;QACD,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACtB,IAAI,CAAC,KAAK,EAAE,CAAA;QACZ,OAAO,IAAI,CAAA;IACb,CAAC;IAED;;;;;;;OAOG;IACH,GAAG,CAAC,CAAI,EAAE,aAA4C,EAAE;QACtD,MAAM,EAAE,cAAc,GAAG,IAAI,CAAC,cAAc,EAAE,MAAM,EAAE,GACpD,UAAU,CAAA;QACZ,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QACjC,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;YAC9B,IACE,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC;gBAC1B,CAAC,CAAC,oBAAoB,KAAK,SAAS,EACpC;gBACA,OAAO,KAAK,CAAA;aACb;YACD,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;gBACzB,IAAI,cAAc,EAAE;oBAClB,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,CAAA;iBAC3B;gBACD,IAAI,MAAM,EAAE;oBACV,MAAM,CAAC,GAAG,GAAG,KAAK,CAAA;oBAClB,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;iBAC/B;gBACD,OAAO,IAAI,CAAA;aACZ;iBAAM,IAAI,MAAM,EAAE;gBACjB,MAAM,CAAC,GAAG,GAAG,OAAO,CAAA;gBACpB,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;aAC/B;SACF;aAAM,IAAI,MAAM,EAAE;YACjB,MAAM,CAAC,GAAG,GAAG,MAAM,CAAA;SACpB;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAED;;;;;;OAMG;IACH,IAAI,CAAC,CAAI,EAAE,cAA8C,EAAE;QACzD,MAAM,EAAE,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE,GAAG,WAAW,CAAA;QACpD,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QACjC,IACE,KAAK,KAAK,SAAS;YACnB,CAAC,UAAU,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,EACrC;YACA,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;YAC9B,oEAAoE;YACpE,OAAO,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAA;SAC/D;IACH,CAAC;IAED,gBAAgB,CACd,CAAI,EACJ,KAAwB,EACxB,OAAwC,EACxC,OAAY;QAEZ,MAAM,CAAC,GAAG,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;QAChE,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;YAC9B,OAAO,CAAC,CAAA;SACT;QAED,MAAM,EAAE,GAAG,IAAI,EAAE,EAAE,CAAA;QACnB,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,CAAA;QAC1B,yDAAyD;QACzD,MAAM,EAAE,gBAA
gB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE;YAC/D,MAAM,EAAE,EAAE,CAAC,MAAM;SAClB,CAAC,CAAA;QAEF,MAAM,SAAS,GAAG;YAChB,MAAM,EAAE,EAAE,CAAC,MAAM;YACjB,OAAO;YACP,OAAO;SACR,CAAA;QAED,MAAM,EAAE,GAAG,CACT,CAAuB,EACvB,WAAW,GAAG,KAAK,EACG,EAAE;YACxB,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,CAAC,MAAM,CAAA;YAC7B,MAAM,WAAW,GAAG,OAAO,CAAC,gBAAgB,IAAI,CAAC,KAAK,SAAS,CAAA;YAC/D,IAAI,OAAO,CAAC,MAAM,EAAE;gBAClB,IAAI,OAAO,IAAI,CAAC,WAAW,EAAE;oBAC3B,OAAO,CAAC,MAAM,CAAC,YAAY,GAAG,IAAI,CAAA;oBAClC,OAAO,CAAC,MAAM,CAAC,UAAU,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,CAAA;oBAC5C,IAAI,WAAW;wBAAE,OAAO,CAAC,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAA;iBACzD;qBAAM;oBACL,OAAO,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAA;iBACpC;aACF;YACD,IAAI,OAAO,IAAI,CAAC,WAAW,IAAI,CAAC,WAAW,EAAE;gBAC3C,OAAO,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,CAAA;aACnC;YACD,qEAAqE;YACrE,MAAM,EAAE,GAAG,CAAuB,CAAA;YAClC,IAAI,IAAI,CAAC,QAAQ,CAAC,KAAc,CAAC,KAAK,CAAC,EAAE;gBACvC,IAAI,CAAC,KAAK,SAAS,EAAE;oBACnB,IAAI,EAAE,CAAC,oBAAoB,EAAE;wBAC3B,IAAI,CAAC,QAAQ,CAAC,KAAc,CAAC,GAAG,EAAE,CAAC,oBAAoB,CAAA;qBACxD;yBAAM;wBACL,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;qBACf;iBACF;qBAAM;oBACL,IAAI,OAAO,CAAC,MAAM;wBAAE,OAAO,CAAC,MAAM,CAAC,YAAY,GAAG,IAAI,CAAA;oBACtD,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,OAAO,CAAC,CAAA;iBAClC;aACF;YACD,OAAO,CAAC,CAAA;QACV,CAAC,CAAA;QAED,MAAM,EAAE,GAAG,CAAC,EAAO,EAAE,EAAE;YACrB,IAAI,OAAO,CAAC,MAAM,EAAE;gBAClB,OAAO,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAA;gBACnC,OAAO,CAAC,MAAM,CAAC,UAAU,GAAG,EAAE,CAAA;aAC/B;YACD,OAAO,SAAS,CAAC,EAAE,CAAC,CAAA;QACtB,CAAC,CAAA;QAED,MAAM,SAAS,GAAG,CAAC,EAAO,EAAiB,EAAE;YAC3C,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,CAAC,MAAM,CAAA;YAC7B,MAAM,iBAAiB,GACrB,OAAO,IAAI,OAAO,CAAC,sBAAsB,CAAA;YAC3C,MAAM,UAAU,GACd,iBAAiB,IAAI,OAAO,CAAC,0BAA0B,CAAA;YACzD,MAAM,QAAQ,GAAG,UAAU,IAAI,OAAO,CAAC,wBAAwB,CAAA;YAC/D,MAAM,EAAE,GAAG,CAAuB,CAAA;YAClC,IAAI,IAAI,CAAC,QAAQ,CAAC,KAAc,CAAC,KAAK,CAAC,EAAE;gBACvC,qEAAqE;gBACrE,sEAAsE;gBACtE,MAAM,GAAG,GAAG,CAAC,QAAQ,IAAI,EAAE,CAAC,oBAAoB,KAAK,SAAS,CAAA;gBAC9D,IAAI,GAAG,EAAE;oBACP,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;iBACf;qBAAM,IAAI,CAAC,iBAAiB,EAAE;oBAC7B,oDAAoD;oBACpD,oDAAoD;oBACpD,mDAAmD;oBACnD,qDAAqD;oBACrD,IAAI,CAAC,QAAQ,CAAC,KAAc,CAAC,GAAG,EAAE,CAAC,oBAAoB,CAAA;iBACxD;aACF;YACD,IAAI,UAAU,EAAE;gBACd,IAAI,OAAO,CAAC,MAAM,IAAI,EAAE,CAAC,oBAAoB,KAAK,SAAS,EAAE;oBAC3D,OAAO,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAA;iBACpC;gBACD,OAAO,EAAE,CAAC,oBAAoB,CAAA;aAC/B;iBAAM,IAAI,EAAE,CAAC,UAAU,KAAK,EAAE,EAAE;gBAC/B,MAAM,EAAE,CAAA;aACT;QACH,CAAC,CAAA;QAED,MAAM,KAAK,GAAG,CACZ,GAAsC,EACtC,GAAqB,EACrB,EAAE;YACF,MAAM,GAAG,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAA;YAChD,IAAI,GAAG,IAAI,GAAG,YAAY,OAAO,EAAE;gBACjC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;aAC3B;YACD,8CAA8C;YAC9C,8CAA8C;YAC9C,+BAA+B;YAC/B,EAAE,CAAC,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE;gBACvC,IACE,CAAC,OAAO,CAAC,gBAAgB;oBACzB,OAAO,CAAC,sBAAsB,EAC9B;oBACA,GAAG,EAAE,CAAA;oBACL,iDAAiD;oBACjD,IAAI,OAAO,CAAC,sBAAsB,EAAE;wBAClC,GAAG,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;qBACvB;iBACF;YACH,CAAC,CAAC,CAAA;QACJ,CAAC,CAAA;QAED,IAAI,OAAO,CAAC,MAAM;YAAE,OAAO,CAAC,MAAM,CAAC,eAAe,GAAG,IAAI,CAAA;QACzD,MAAM,CAAC,GAAG,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,EAAE,EAAE,EAAE,CAAC,CAAA;QACzC,MAAM,EAAE,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE;YAC1B,iBAAiB,EAAE,EAAE;YACrB,oBAAoB,EAAE,CAAC;YACvB,UAAU,EAAE,SAAS;SACtB,CAAC,CAAA;QAEF,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,iCAAiC;YACjC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,CAAC,CAAA;YAC5D,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,C
AAC,CAAC,CAAC,CAAA;SAC5B;aAAM;YACL,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,EAAE,CAAA;SAC1B;QACD,OAAO,EAAE,CAAA;IACX,CAAC;IAED,kBAAkB,CAAC,CAAM;QACvB,IAAI,CAAC,IAAI,CAAC,eAAe;YAAE,OAAO,KAAK,CAAA;QACvC,MAAM,CAAC,GAAG,CAAuB,CAAA;QACjC,OAAO,CACL,CAAC,CAAC,CAAC;YACH,CAAC,YAAY,OAAO;YACpB,CAAC,CAAC,cAAc,CAAC,sBAAsB,CAAC;YACxC,CAAC,CAAC,iBAAiB,YAAY,EAAE,CAClC,CAAA;IACH,CAAC;IAwCD,KAAK,CAAC,KAAK,CACT,CAAI,EACJ,eAAgD,EAAE;QAElD,MAAM;QACJ,cAAc;QACd,UAAU,GAAG,IAAI,CAAC,UAAU,EAC5B,cAAc,GAAG,IAAI,CAAC,cAAc,EACpC,kBAAkB,GAAG,IAAI,CAAC,kBAAkB;QAC5C,cAAc;QACd,GAAG,GAAG,IAAI,CAAC,GAAG,EACd,cAAc,GAAG,IAAI,CAAC,cAAc,EACpC,IAAI,GAAG,CAAC,EACR,eAAe,GAAG,IAAI,CAAC,eAAe,EACtC,WAAW,GAAG,IAAI,CAAC,WAAW;QAC9B,0BAA0B;QAC1B,wBAAwB,GAAG,IAAI,CAAC,wBAAwB,EACxD,0BAA0B,GAAG,IAAI,CAAC,0BAA0B,EAC5D,gBAAgB,GAAG,IAAI,CAAC,gBAAgB,EACxC,sBAAsB,GAAG,IAAI,CAAC,sBAAsB,EACpD,OAAO,EACP,YAAY,GAAG,KAAK,EACpB,MAAM,EACN,MAAM,GACP,GAAG,YAAY,CAAA;QAEhB,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE;YACzB,IAAI,MAAM;gBAAE,MAAM,CAAC,KAAK,GAAG,KAAK,CAAA;YAChC,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE;gBACjB,UAAU;gBACV,cAAc;gBACd,kBAAkB;gBAClB,MAAM;aACP,CAAC,CAAA;SACH;QAED,MAAM,OAAO,GAAG;YACd,UAAU;YACV,cAAc;YACd,kBAAkB;YAClB,GAAG;YACH,cAAc;YACd,IAAI;YACJ,eAAe;YACf,WAAW;YACX,wBAAwB;YACxB,0BAA0B;YAC1B,sBAAsB;YACtB,gBAAgB;YAChB,MAAM;YACN,MAAM;SACP,CAAA;QAED,IAAI,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QAC/B,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,IAAI,MAAM;gBAAE,MAAM,CAAC,KAAK,GAAG,MAAM,CAAA;YACjC,MAAM,CAAC,GAAG,IAAI,CAAC,gBAAgB,CAAC,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;YAC3D,OAAO,CAAC,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,CAAA;SAC1B;aAAM;YACL,mCAAmC;YACnC,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;YAC9B,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;gBAC9B,MAAM,KAAK,GACT,UAAU,IAAI,CAAC,CAAC,oBAAoB,KAAK,SAAS,CAAA;gBACpD,IAAI,MAAM,EAAE;oBACV,MAAM,CAAC,KAAK,GAAG,UAAU,CAAA;oBACzB,IAAI,KAAK;wBAAE,MAAM,CAAC,aAAa,GAAG,IAAI,CAAA;iBACvC;gBACD,OAAO,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,CAAA;aAC3D;YAED,mEAAmE;YACnE,gEAAgE;YAChE,MAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;YACpC,IAAI,CAAC,YAAY,IAAI,CAAC,OAAO,EAAE;gBAC7B,IAAI,MAAM;oBAAE,MAAM,CAAC,KAAK,GAAG,KAAK,CAAA;gBAChC,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAA;gBACvB,IAAI,cAAc,EAAE;oBAClB,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,CAAA;iBAC3B;gBACD,IAAI,MAAM;oBAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;gBAC1C,OAAO,CAAC,CAAA;aACT;YAED,iEAAiE;YACjE,qBAAqB;YACrB,MAAM,CAAC,GAAG,IAAI,CAAC,gBAAgB,CAAC,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;YAC3D,MAAM,QAAQ,GAAG,CAAC,CAAC,oBAAoB,KAAK,SAAS,CAAA;YACrD,MAAM,QAAQ,GAAG,QAAQ,IAAI,UAAU,CAAA;YACvC,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAA;gBAC5C,IAAI,QAAQ,IAAI,OAAO;oBAAE,MAAM,CAAC,aAAa,GAAG,IAAI,CAAA;aACrD;YACD,OAAO,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,CAAA;SAC9D;IACH,CAAC;IAED;;;;;OAKG;IACH,GAAG,CAAC,CAAI,EAAE,aAA4C,EAAE;QACtD,MAAM,EACJ,UAAU,GAAG,IAAI,CAAC,UAAU,EAC5B,cAAc,GAAG,IAAI,CAAC,cAAc,EACpC,kBAAkB,GAAG,IAAI,CAAC,kBAAkB,EAC5C,MAAM,GACP,GAAG,UAAU,CAAA;QACd,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QACjC,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;YAClC,MAAM,QAAQ,GAAG,IAAI,CAAC,kBAAkB,CAAC,KAAK,CAAC,CAAA;YAC/C,IAAI,MAAM;gBAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;YAC1C,IAAI,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;gBACxB,IAAI,MAAM;oBAAE,MAAM,CAAC,GAAG,GAAG,OAAO,CAAA;gBAChC,mDAAmD;gBACnD,IAAI,CAAC,QAAQ,EAAE;oBACb,IAAI,CAAC,kBAAkB,EAAE;wBACvB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;qBACf;oBACD,IA
AI,MAAM,IAAI,UAAU;wBAAE,MAAM,CAAC,aAAa,GAAG,IAAI,CAAA;oBACrD,OAAO,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAA;iBACtC;qBAAM;oBACL,IACE,MAAM;wBACN,UAAU;wBACV,KAAK,CAAC,oBAAoB,KAAK,SAAS,EACxC;wBACA,MAAM,CAAC,aAAa,GAAG,IAAI,CAAA;qBAC5B;oBACD,OAAO,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,oBAAoB,CAAC,CAAC,CAAC,SAAS,CAAA;iBAC3D;aACF;iBAAM;gBACL,IAAI,MAAM;oBAAE,MAAM,CAAC,GAAG,GAAG,KAAK,CAAA;gBAC9B,gEAAgE;gBAChE,iEAAiE;gBACjE,kEAAkE;gBAClE,oEAAoE;gBACpE,qCAAqC;gBACrC,IAAI,QAAQ,EAAE;oBACZ,OAAO,KAAK,CAAC,oBAAoB,CAAA;iBAClC;gBACD,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAA;gBACvB,IAAI,cAAc,EAAE;oBAClB,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,CAAA;iBAC3B;gBACD,OAAO,KAAK,CAAA;aACb;SACF;aAAM,IAAI,MAAM,EAAE;YACjB,MAAM,CAAC,GAAG,GAAG,MAAM,CAAA;SACpB;IACH,CAAC;IAED,QAAQ,CAAC,CAAQ,EAAE,CAAQ;QACzB,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAA;QACjB,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAA;IACnB,CAAC;IAED,WAAW,CAAC,KAAY;QACtB,iCAAiC;QACjC,oCAAoC;QACpC,OAAO;QACP,6DAA6D;QAC7D,0CAA0C;QAC1C,qBAAqB;QACrB,qBAAqB;QACrB,eAAe;QACf,IAAI,KAAK,KAAK,IAAI,CAAC,KAAK,EAAE;YACxB,IAAI,KAAK,KAAK,IAAI,CAAC,KAAK,EAAE;gBACxB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAU,CAAA;aACxC;iBAAM;gBACL,IAAI,CAAC,QAAQ,CACX,IAAI,CAAC,KAAK,CAAC,KAAK,CAAU,EAC1B,IAAI,CAAC,KAAK,CAAC,KAAK,CAAU,CAC3B,CAAA;aACF;YACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;YAChC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;SACnB;IACH,CAAC;IAED;;;OAGG;IACH,MAAM,CAAC,CAAI;QACT,IAAI,OAAO,GAAG,KAAK,CAAA;QACnB,IAAI,IAAI,CAAC,KAAK,KAAK,CAAC,EAAE;YACpB,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;YACjC,IAAI,KAAK,KAAK,SAAS,EAAE;gBACvB,OAAO,GAAG,IAAI,CAAA;gBACd,IAAI,IAAI,CAAC,KAAK,KAAK,CAAC,EAAE;oBACpB,IAAI,CAAC,KAAK,EAAE,CAAA;iBACb;qBAAM;oBACL,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,CAAA;oBAC3B,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;oBAC9B,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;wBAC9B,CAAC,CAAC,iBAAiB,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,SAAS,CAAC,CAAC,CAAA;qBAChD;yBAAM,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,gBAAgB,EAAE;wBACpD,IAAI,IAAI,CAAC,WAAW,EAAE;4BACpB,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAM,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAA;yBACrC;wBACD,IAAI,IAAI,CAAC,gBAAgB,EAAE;4BACzB,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC,CAAM,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAA;yBAC5C;qBACF;oBACD,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;oBACtB,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,SAAS,CAAA;oBAChC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,SAAS,CAAA;oBAChC,IAAI,KAAK,KAAK,IAAI,CAAC,KAAK,EAAE;wBACxB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAU,CAAA;qBACxC;yBAAM,IAAI,KAAK,KAAK,IAAI,CAAC,KAAK,EAAE;wBAC/B,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAU,CAAA;qBACxC;yBAAM;wBACL,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAA;wBACjD,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAA;qBAClD;oBACD,IAAI,CAAC,KAAK,EAAE,CAAA;oBACZ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;iBACvB;aACF;SACF;QACD,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,SAAS,EAAE,MAAM,EAAE;YACnD,MAAM,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;YACzB,IAAI,IAAmC,CAAA;YACvC,OAAO,CAAC,IAAI,GAAG,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE;gBAC3B,IAAI,CAAC,aAAa,EAAE,CAAC,GAAG,IAAI,CAAC,CAAA;aAC9B;SACF;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAED;;OAEG;IACH,KAAK;QACH,KAAK,MAAM,KAAK,IAAI,IAAI,CAAC,SAAS,CAAC,EAAE,UAAU,EAAE,IAAI,EAAE,CAAC,EAAE;YACxD,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;YAC9B,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;gBAC9B,CAAC,CAAC,iBAAiB,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,SAAS,CAAC,CAAC,CAAA;aAChD;iBAAM;gBACL,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;gBAC9B,IAAI,IAAI,CAAC,WAAW,EAAE;oBACpB,IAAI,CAAC,Q
AAQ,EAAE,CAAC,CAAM,EAAE,CAAM,EAAE,QAAQ,CAAC,CAAA;iBAC1C;gBACD,IAAI,IAAI,CAAC,gBAAgB,EAAE;oBACzB,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC,CAAM,EAAE,CAAM,EAAE,QAAQ,CAAC,CAAC,CAAA;iBACjD;aACF;SACF;QAED,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,CAAA;QACpB,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAC7B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,OAAO,EAAE;YAC9B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;YAClB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;SACrB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;SACpB;QACD,IAAI,CAAC,KAAK,GAAG,CAAU,CAAA;QACvB,IAAI,CAAC,KAAK,GAAG,CAAU,CAAA;QACvB,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAA;QACrB,IAAI,CAAC,eAAe,GAAG,CAAC,CAAA;QACxB,IAAI,CAAC,KAAK,GAAG,CAAC,CAAA;QACd,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,SAAS,EAAE;YAC3C,MAAM,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;YACzB,IAAI,IAAmC,CAAA;YACvC,OAAO,CAAC,IAAI,GAAG,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE;gBAC3B,IAAI,CAAC,aAAa,EAAE,CAAC,GAAG,IAAI,CAAC,CAAA;aAC9B;SACF;IACH,CAAC;CACF;AA79CD,4BA69CC","sourcesContent":["/**\n * @module LRUCache\n */\n\n// module-private names and types\ntype Perf = { now: () => number }\nconst perf: Perf =\n typeof performance === 'object' &&\n performance &&\n typeof performance.now === 'function'\n ? performance\n : Date\n\nconst warned = new Set()\n\n// either a function or a class\ntype ForC = ((...a: any[]) => any) | { new (...a: any[]): any }\n\n/* c8 ignore start */\nconst PROCESS = (\n typeof process === 'object' && !!process ? process : {}\n) as { [k: string]: any }\n/* c8 ignore start */\n\nconst emitWarning = (\n msg: string,\n type: string,\n code: string,\n fn: ForC\n) => {\n typeof PROCESS.emitWarning === 'function'\n ? PROCESS.emitWarning(msg, type, code, fn)\n : console.error(`[${code}] ${type}: ${msg}`)\n}\n\nlet AC = globalThis.AbortController\nlet AS = globalThis.AbortSignal\n\n/* c8 ignore start */\nif (typeof AC === 'undefined') {\n //@ts-ignore\n AS = class AbortSignal {\n onabort?: (...a: any[]) => any\n _onabort: ((...a: any[]) => any)[] = []\n reason?: any\n aborted: boolean = false\n addEventListener(_: string, fn: (...a: any[]) => any) {\n this._onabort.push(fn)\n }\n }\n //@ts-ignore\n AC = class AbortController {\n constructor() {\n warnACPolyfill()\n }\n signal = new AS()\n abort(reason: any) {\n if (this.signal.aborted) return\n //@ts-ignore\n this.signal.reason = reason\n //@ts-ignore\n this.signal.aborted = true\n //@ts-ignore\n for (const fn of this.signal._onabort) {\n fn(reason)\n }\n this.signal.onabort?.(reason)\n }\n }\n let printACPolyfillWarning =\n PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'\n const warnACPolyfill = () => {\n if (!printACPolyfillWarning) return\n printACPolyfillWarning = false\n emitWarning(\n 'AbortController is not defined. If using lru-cache in ' +\n 'node 14, load an AbortController polyfill from the ' +\n '`node-abort-controller` package. A minimal polyfill is ' +\n 'provided for use by LRUCache.fetch(), but it should not be ' +\n 'relied upon in other contexts (eg, passing it to other APIs that ' +\n 'use AbortController/AbortSignal might have undesirable effects). 
' +\n 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.',\n 'NO_ABORT_CONTROLLER',\n 'ENOTSUP',\n warnACPolyfill\n )\n }\n}\n/* c8 ignore stop */\n\nconst shouldWarn = (code: string) => !warned.has(code)\n\nconst TYPE = Symbol('type')\ntype PosInt = number & { [TYPE]: 'Positive Integer' }\ntype Index = number & { [TYPE]: 'LRUCache Index' }\n\nconst isPosInt = (n: any): n is PosInt =>\n n && n === Math.floor(n) && n > 0 && isFinite(n)\n\ntype UintArray = Uint8Array | Uint16Array | Uint32Array\ntype NumberArray = UintArray | number[]\n\n/* c8 ignore start */\n// This is a little bit ridiculous, tbh.\n// The maximum array length is 2^32-1 or thereabouts on most JS impls.\n// And well before that point, you're caching the entire world, I mean,\n// that's ~32GB of just integers for the next/prev links, plus whatever\n// else to hold that many keys and values. Just filling the memory with\n// zeroes at init time is brutal when you get that big.\n// But why not be complete?\n// Maybe in the future, these limits will have expanded.\nconst getUintArray = (max: number) =>\n !isPosInt(max)\n ? null\n : max <= Math.pow(2, 8)\n ? Uint8Array\n : max <= Math.pow(2, 16)\n ? Uint16Array\n : max <= Math.pow(2, 32)\n ? Uint32Array\n : max <= Number.MAX_SAFE_INTEGER\n ? ZeroArray\n : null\n/* c8 ignore stop */\n\nclass ZeroArray extends Array {\n constructor(size: number) {\n super(size)\n this.fill(0)\n }\n}\n\ntype StackLike = Stack | Index[]\nclass Stack {\n heap: NumberArray\n length: number\n // private constructor\n static #constructing: boolean = false\n static create(max: number): StackLike {\n const HeapCls = getUintArray(max)\n if (!HeapCls) return []\n Stack.#constructing = true\n const s = new Stack(max, HeapCls)\n Stack.#constructing = false\n return s\n }\n constructor(\n max: number,\n HeapCls: { new (n: number): NumberArray }\n ) {\n /* c8 ignore start */\n if (!Stack.#constructing) {\n throw new TypeError('instantiate Stack using Stack.create(n)')\n }\n /* c8 ignore stop */\n this.heap = new HeapCls(max)\n this.length = 0\n }\n push(n: Index) {\n this.heap[this.length++] = n\n }\n pop(): Index {\n return this.heap[--this.length] as Index\n }\n}\n\n/**\n * Promise representing an in-progress {@link LRUCache#fetch} call\n */\nexport type BackgroundFetch = Promise & {\n __returned: BackgroundFetch | undefined\n __abortController: AbortController\n __staleWhileFetching: V | undefined\n}\n\ntype DisposeTask = [\n value: V,\n key: K,\n reason: LRUCache.DisposeReason\n]\n\nexport namespace LRUCache {\n /**\n * An integer greater than 0, reflecting the calculated size of items\n */\n export type Size = number\n\n /**\n * Integer greater than 0, representing some number of milliseconds, or the\n * time at which a TTL started counting from.\n */\n export type Milliseconds = number\n\n /**\n * An integer greater than 0, reflecting a number of items\n */\n export type Count = number\n\n /**\n * The reason why an item was removed from the cache, passed\n * to the {@link Disposer} methods.\n */\n export type DisposeReason = 'evict' | 'set' | 'delete'\n /**\n * A method called upon item removal, passed as the\n * {@link OptionsBase.dispose} and/or\n * {@link OptionsBase.disposeAfter} options.\n */\n export type Disposer = (\n value: V,\n key: K,\n reason: DisposeReason\n ) => void\n\n /**\n * A function that returns the effective calculated size\n * of an entry in the cache.\n */\n export type SizeCalculator = (value: V, key: K) => Size\n\n /**\n * Options provided to the\n * {@link 
OptionsBase.fetchMethod} function.\n */\n export interface FetcherOptions {\n signal: AbortSignal\n options: FetcherFetchOptions\n /**\n * Object provided in the {@link FetchOptions.context} option to\n * {@link LRUCache#fetch}\n */\n context: FC\n }\n\n /**\n * Status object that may be passed to {@link LRUCache#fetch},\n * {@link LRUCache#get}, {@link LRUCache#set}, and {@link LRUCache#has}.\n */\n export interface Status {\n /**\n * The status of a set() operation.\n *\n * - add: the item was not found in the cache, and was added\n * - update: the item was in the cache, with the same value provided\n * - replace: the item was in the cache, and replaced\n * - miss: the item was not added to the cache for some reason\n */\n set?: 'add' | 'update' | 'replace' | 'miss'\n\n /**\n * the ttl stored for the item, or undefined if ttls are not used.\n */\n ttl?: Milliseconds\n\n /**\n * the start time for the item, or undefined if ttls are not used.\n */\n start?: Milliseconds\n\n /**\n * The timestamp used for TTL calculation\n */\n now?: Milliseconds\n\n /**\n * the remaining ttl for the item, or undefined if ttls are not used.\n */\n remainingTTL?: Milliseconds\n\n /**\n * The calculated size for the item, if sizes are used.\n */\n entrySize?: Size\n\n /**\n * The total calculated size of the cache, if sizes are used.\n */\n totalCalculatedSize?: Size\n\n /**\n * A flag indicating that the item was not stored, due to exceeding the\n * {@link OptionsBase.maxEntrySize}\n */\n maxEntrySizeExceeded?: true\n\n /**\n * The old value, specified in the case of `set:'update'` or\n * `set:'replace'`\n */\n oldValue?: V\n\n /**\n * The results of a {@link LRUCache#has} operation\n *\n * - hit: the item was found in the cache\n * - stale: the item was found in the cache, but is stale\n * - miss: the item was not found in the cache\n */\n has?: 'hit' | 'stale' | 'miss'\n\n /**\n * The status of a {@link LRUCache#fetch} operation.\n * Note that this can change as the underlying fetch() moves through\n * various states.\n *\n * - inflight: there is another fetch() for this key which is in process\n * - get: there is no fetchMethod, so {@link LRUCache#get} was called.\n * - miss: the item is not in cache, and will be fetched.\n * - hit: the item is in the cache, and was resolved immediately.\n * - stale: the item is in the cache, but stale.\n * - refresh: the item is in the cache, and not stale, but\n * {@link FetchOptions.forceRefresh} was specified.\n */\n fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh'\n\n /**\n * The {@link OptionsBase.fetchMethod} was called\n */\n fetchDispatched?: true\n\n /**\n * The cached value was updated after a successful call to\n * {@link OptionsBase.fetchMethod}\n */\n fetchUpdated?: true\n\n /**\n * The reason for a fetch() rejection. Either the error raised by the\n * {@link OptionsBase.fetchMethod}, or the reason for an\n * AbortSignal.\n */\n fetchError?: Error\n\n /**\n * The fetch received an abort signal\n */\n fetchAborted?: true\n\n /**\n * The abort signal received was ignored, and the fetch was allowed to\n * continue.\n */\n fetchAbortIgnored?: true\n\n /**\n * The fetchMethod promise resolved successfully\n */\n fetchResolved?: true\n\n /**\n * The fetchMethod promise was rejected\n */\n fetchRejected?: true\n\n /**\n * The status of a {@link LRUCache#get} operation.\n *\n * - fetching: The item is currently being fetched. 
If a previous value\n * is present and allowed, that will be returned.\n * - stale: The item is in the cache, and is stale.\n * - hit: the item is in the cache\n * - miss: the item is not in the cache\n */\n get?: 'stale' | 'hit' | 'miss'\n\n /**\n * A fetch or get operation returned a stale value.\n */\n returnedStale?: true\n }\n\n /**\n * options which override the options set in the LRUCache constructor\n * when calling {@link LRUCache#fetch}.\n *\n * This is the union of {@link GetOptions} and {@link SetOptions}, plus\n * {@link OptionsBase.noDeleteOnFetchRejection},\n * {@link OptionsBase.allowStaleOnFetchRejection},\n * {@link FetchOptions.forceRefresh}, and\n * {@link OptionsBase.context}\n *\n * Any of these may be modified in the {@link OptionsBase.fetchMethod}\n * function, but the {@link GetOptions} fields will of course have no\n * effect, as the {@link LRUCache#get} call already happened by the time\n * the fetchMethod is called.\n */\n export interface FetcherFetchOptions\n extends Pick<\n OptionsBase,\n | 'allowStale'\n | 'updateAgeOnGet'\n | 'noDeleteOnStaleGet'\n | 'sizeCalculation'\n | 'ttl'\n | 'noDisposeOnSet'\n | 'noUpdateTTL'\n | 'noDeleteOnFetchRejection'\n | 'allowStaleOnFetchRejection'\n | 'ignoreFetchAbort'\n | 'allowStaleOnFetchAbort'\n > {\n status?: Status\n size?: Size\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#fetch} method.\n */\n export interface FetchOptions\n extends FetcherFetchOptions {\n /**\n * Set to true to force a re-load of the existing data, even if it\n * is not yet stale.\n */\n forceRefresh?: boolean\n /**\n * Context provided to the {@link OptionsBase.fetchMethod} as\n * the {@link FetcherOptions.context} param.\n *\n * If the FC type is specified as unknown (the default),\n * undefined or void, then this is optional. 
Otherwise, it will\n * be required.\n */\n context?: FC\n signal?: AbortSignal\n status?: Status\n }\n /**\n * Options provided to {@link LRUCache#fetch} when the FC type is something\n * other than `unknown`, `undefined`, or `void`\n */\n export interface FetchOptionsWithContext\n extends FetchOptions {\n context: FC\n }\n /**\n * Options provided to {@link LRUCache#fetch} when the FC type is\n * `undefined` or `void`\n */\n export interface FetchOptionsNoContext\n extends FetchOptions {\n context?: undefined\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#has} method.\n */\n export interface HasOptions\n extends Pick, 'updateAgeOnHas'> {\n status?: Status\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#get} method.\n */\n export interface GetOptions\n extends Pick<\n OptionsBase,\n 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet'\n > {\n status?: Status\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#peek} method.\n */\n export interface PeekOptions\n extends Pick, 'allowStale'> {}\n\n /**\n * Options that may be passed to the {@link LRUCache#set} method.\n */\n export interface SetOptions\n extends Pick<\n OptionsBase,\n 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'\n > {\n /**\n * If size tracking is enabled, then setting an explicit size\n * in the {@link LRUCache#set} call will prevent calling the\n * {@link OptionsBase.sizeCalculation} function.\n */\n size?: Size\n /**\n * If TTL tracking is enabled, then setting an explicit start\n * time in the {@link LRUCache#set} call will override the\n * default time from `performance.now()` or `Date.now()`.\n *\n * Note that it must be a valid value for whichever time-tracking\n * method is in use.\n */\n start?: Milliseconds\n status?: Status\n }\n\n /**\n * The type signature for the {@link OptionsBase.fetchMethod} option.\n */\n export type Fetcher = (\n key: K,\n staleValue: V | undefined,\n options: FetcherOptions\n ) => Promise | V | void | undefined\n\n /**\n * Options which may be passed to the {@link LRUCache} constructor.\n *\n * Most of these may be overridden in the various options that use\n * them.\n *\n * Despite all being technically optional, the constructor requires that\n * a cache is at minimum limited by one or more of {@link OptionsBase.max},\n * {@link OptionsBase.ttl}, or {@link OptionsBase.maxSize}.\n *\n * If {@link OptionsBase.ttl} is used alone, then it is strongly advised\n * (and in fact required by the type definitions here) that the cache\n * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially\n * unbounded storage.\n */\n export interface OptionsBase {\n /**\n * The maximum number of items to store in the cache before evicting\n * old entries. This is read-only on the {@link LRUCache} instance,\n * and may not be overridden.\n *\n * If set, then storage space will be pre-allocated at construction\n * time, and the cache will perform significantly faster.\n *\n * Note that significantly fewer items may be stored, if\n * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also\n * set.\n */\n max?: Count\n\n /**\n * Max time in milliseconds for items to live in cache before they are\n * considered stale. 
Note that stale items are NOT preemptively removed\n * by default, and MAY live in the cache long after they have expired.\n *\n * Also, as this cache is optimized for LRU/MRU operations, some of\n * the staleness/TTL checks will reduce performance, as they will incur\n * overhead by deleting items.\n *\n * Must be an integer number of ms. If set to 0, this indicates \"no TTL\"\n *\n * @default 0\n */\n ttl?: Milliseconds\n\n /**\n * Minimum amount of time in ms in which to check for staleness.\n * Defaults to 1, which means that the current time is checked\n * at most once per millisecond.\n *\n * Set to 0 to check the current time every time staleness is tested.\n * (This reduces performance, and is theoretically unnecessary.)\n *\n * Setting this to a higher value will improve performance somewhat\n * while using ttl tracking, albeit at the expense of keeping stale\n * items around a bit longer than their TTLs would indicate.\n *\n * @default 1\n */\n ttlResolution?: Milliseconds\n\n /**\n * Preemptively remove stale items from the cache.\n * Note that this may significantly degrade performance,\n * especially if the cache is storing a large number of items.\n * It is almost always best to just leave the stale items in\n * the cache, and let them fall out as new items are added.\n *\n * Note that this means that {@link OptionsBase.allowStale} is a bit\n * pointless, as stale items will be deleted almost as soon as they\n * expire.\n *\n * @default false\n */\n ttlAutopurge?: boolean\n\n /**\n * Update the age of items on {@link LRUCache#get}, renewing their TTL\n *\n * Has no effect if {@link OptionsBase.ttl} is not set.\n *\n * @default false\n */\n updateAgeOnGet?: boolean\n\n /**\n * Update the age of items on {@link LRUCache#has}, renewing their TTL\n *\n * Has no effect if {@link OptionsBase.ttl} is not set.\n *\n * @default false\n */\n updateAgeOnHas?: boolean\n\n /**\n * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return\n * stale data, if available.\n */\n allowStale?: boolean\n\n /**\n * Function that is called on items when they are dropped from the cache.\n * This can be handy if you want to close file descriptors or do other\n * cleanup tasks when items are no longer accessible. Called with `key,\n * value`. It's called before actually removing the item from the\n * internal cache, so it is *NOT* safe to re-add them.\n *\n * Use {@link OptionsBase.disposeAfter} if you wish to dispose items after\n * they have been full removed, when it is safe to add them back to the\n * cache.\n */\n dispose?: Disposer\n\n /**\n * The same as {@link OptionsBase.dispose}, but called *after* the entry\n * is completely removed and the cache is once again in a clean state.\n * It is safe to add an item right back into the cache at this point.\n * However, note that it is *very* easy to inadvertently create infinite\n * recursion this way.\n */\n disposeAfter?: Disposer\n\n /**\n * Set to true to suppress calling the\n * {@link OptionsBase.dispose} function if the entry key is\n * still accessible within the cache.\n * This may be overridden by passing an options object to\n * {@link LRUCache#set}.\n */\n noDisposeOnSet?: boolean\n\n /**\n * Boolean flag to tell the cache to not update the TTL when\n * setting a new value for an existing key (ie, when updating a value\n * rather than inserting a new value). 
Note that the TTL value is\n * _always_ set (if provided) when adding a new entry into the cache.\n *\n * Has no effect if a {@link OptionsBase.ttl} is not set.\n */\n noUpdateTTL?: boolean\n\n /**\n * If you wish to track item size, you must provide a maxSize\n * note that we still will only keep up to max *actual items*,\n * if max is set, so size tracking may cause fewer than max items\n * to be stored. At the extreme, a single item of maxSize size\n * will cause everything else in the cache to be dropped when it\n * is added. Use with caution!\n *\n * Note also that size tracking can negatively impact performance,\n * though for most cases, only minimally.\n */\n maxSize?: Size\n\n /**\n * The maximum allowed size for any single item in the cache.\n *\n * If a larger item is passed to {@link LRUCache#set} or returned by a\n * {@link OptionsBase.fetchMethod}, then it will not be stored in the\n * cache.\n */\n maxEntrySize?: Size\n\n /**\n * A function that returns a number indicating the item's size.\n *\n * If not provided, and {@link OptionsBase.maxSize} or\n * {@link OptionsBase.maxEntrySize} are set, then all\n * {@link LRUCache#set} calls **must** provide an explicit\n * {@link SetOptions.size} or sizeCalculation param.\n */\n sizeCalculation?: SizeCalculator\n\n /**\n * Method that provides the implementation for {@link LRUCache#fetch}\n */\n fetchMethod?: Fetcher\n\n /**\n * Set to true to suppress the deletion of stale data when a\n * {@link OptionsBase.fetchMethod} returns a rejected promise.\n */\n noDeleteOnFetchRejection?: boolean\n\n /**\n * Do not delete stale items when they are retrieved with\n * {@link LRUCache#get}.\n *\n * Note that the `get` return value will still be `undefined`\n * unless {@link OptionsBase.allowStale} is true.\n */\n noDeleteOnStaleGet?: boolean\n\n /**\n * Set to true to allow returning stale data when a\n * {@link OptionsBase.fetchMethod} throws an error or returns a rejected\n * promise.\n *\n * This differs from using {@link OptionsBase.allowStale} in that stale\n * data will ONLY be returned in the case that the\n * {@link LRUCache#fetch} fails, not any other times.\n */\n allowStaleOnFetchRejection?: boolean\n\n /**\n * Set to true to return a stale value from the cache when the\n * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches an `'abort'`\n * event, whether user-triggered, or due to internal cache behavior.\n *\n * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying\n * {@link OptionsBase.fetchMethod} will still be considered canceled, and\n * any value it returns will be ignored and not cached.\n *\n * Caveat: since fetches are aborted when a new value is explicitly\n * set in the cache, this can lead to fetch returning a stale value,\n * since that was the fallback value _at the moment the `fetch()` was\n * initiated_, even though the new updated value is now present in\n * the cache.\n *\n * For example:\n *\n * ```ts\n * const cache = new LRUCache({\n * ttl: 100,\n * fetchMethod: async (url, oldValue, { signal }) => {\n * const res = await fetch(url, { signal })\n * return await res.json()\n * }\n * })\n * cache.set('https://example.com/', { some: 'data' })\n * // 100ms go by...\n * const result = cache.fetch('https://example.com/')\n * cache.set('https://example.com/', { other: 'thing' })\n * console.log(await result) // { some: 'data' }\n * console.log(cache.get('https://example.com/')) // { other: 'thing' }\n * ```\n */\n allowStaleOnFetchAbort?: boolean\n\n /**\n * Set to true to ignore 
the `abort` event emitted by the `AbortSignal`\n * object passed to {@link OptionsBase.fetchMethod}, and still cache the\n * resulting resolution value, as long as it is not `undefined`.\n *\n * When used on its own, this means aborted {@link LRUCache#fetch} calls are not\n * immediately resolved or rejected when they are aborted, and instead\n * take the full time to await.\n *\n * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted\n * {@link LRUCache#fetch} calls will resolve immediately to their stale\n * cached value or `undefined`, and will continue to process and eventually\n * update the cache when they resolve, as long as the resulting value is\n * not `undefined`, thus supporting a \"return stale on timeout while\n * refreshing\" mechanism by passing `AbortSignal.timeout(n)` as the signal.\n *\n * **Note**: regardless of this setting, an `abort` event _is still\n * emitted on the `AbortSignal` object_, so may result in invalid results\n * when passed to other underlying APIs that use AbortSignals.\n *\n * This may be overridden in the {@link OptionsBase.fetchMethod} or the\n * call to {@link LRUCache#fetch}.\n */\n ignoreFetchAbort?: boolean\n }\n\n export interface OptionsMaxLimit\n extends OptionsBase {\n max: Count\n }\n export interface OptionsTTLLimit\n extends OptionsBase {\n ttl: Milliseconds\n ttlAutopurge: boolean\n }\n export interface OptionsSizeLimit\n extends OptionsBase {\n maxSize: Size\n }\n\n /**\n * The valid safe options for the {@link LRUCache} constructor\n */\n export type Options =\n | OptionsMaxLimit\n | OptionsSizeLimit\n | OptionsTTLLimit\n\n /**\n * Entry objects used by {@link LRUCache#load} and {@link LRUCache#dump}\n */\n export interface Entry {\n value: V\n ttl?: Milliseconds\n size?: Size\n start?: Milliseconds\n }\n}\n\n/**\n * Default export, the thing you're using this module to get.\n *\n * All properties from the options object (with the exception of\n * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as\n * normal public members. (`max` and `maxBase` are read-only getters.)\n * Changing any of these will alter the defaults for subsequent method calls,\n * but is otherwise safe.\n */\nexport class LRUCache {\n // properties coming in from the options of these, only max and maxSize\n // really *need* to be protected. 
The rest can be modified, as they just\n // set defaults for various methods.\n readonly #max: LRUCache.Count\n readonly #maxSize: LRUCache.Size\n readonly #dispose?: LRUCache.Disposer\n readonly #disposeAfter?: LRUCache.Disposer\n readonly #fetchMethod?: LRUCache.Fetcher\n\n /**\n * {@link LRUCache.OptionsBase.ttl}\n */\n ttl: LRUCache.Milliseconds\n\n /**\n * {@link LRUCache.OptionsBase.ttlResolution}\n */\n ttlResolution: LRUCache.Milliseconds\n /**\n * {@link LRUCache.OptionsBase.ttlAutopurge}\n */\n ttlAutopurge: boolean\n /**\n * {@link LRUCache.OptionsBase.updateAgeOnGet}\n */\n updateAgeOnGet: boolean\n /**\n * {@link LRUCache.OptionsBase.updateAgeOnHas}\n */\n updateAgeOnHas: boolean\n /**\n * {@link LRUCache.OptionsBase.allowStale}\n */\n allowStale: boolean\n\n /**\n * {@link LRUCache.OptionsBase.noDisposeOnSet}\n */\n noDisposeOnSet: boolean\n /**\n * {@link LRUCache.OptionsBase.noUpdateTTL}\n */\n noUpdateTTL: boolean\n /**\n * {@link LRUCache.OptionsBase.maxEntrySize}\n */\n maxEntrySize: LRUCache.Size\n /**\n * {@link LRUCache.OptionsBase.sizeCalculation}\n */\n sizeCalculation?: LRUCache.SizeCalculator\n /**\n * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}\n */\n noDeleteOnFetchRejection: boolean\n /**\n * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}\n */\n noDeleteOnStaleGet: boolean\n /**\n * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}\n */\n allowStaleOnFetchAbort: boolean\n /**\n * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}\n */\n allowStaleOnFetchRejection: boolean\n /**\n * {@link LRUCache.OptionsBase.ignoreFetchAbort}\n */\n ignoreFetchAbort: boolean\n\n // computed properties\n #size: LRUCache.Count\n #calculatedSize: LRUCache.Size\n #keyMap: Map\n #keyList: (K | undefined)[]\n #valList: (V | BackgroundFetch | undefined)[]\n #next: NumberArray\n #prev: NumberArray\n #head: Index\n #tail: Index\n #free: StackLike\n #disposed?: DisposeTask[]\n #sizes?: ZeroArray\n #starts?: ZeroArray\n #ttls?: ZeroArray\n\n #hasDispose: boolean\n #hasFetchMethod: boolean\n #hasDisposeAfter: boolean\n\n /**\n * Do not call this method unless you need to inspect the\n * inner workings of the cache. 
If anything returned by this\n * object is modified in any way, strange breakage may occur.\n *\n * These fields are private for a reason!\n *\n * @internal\n */\n static unsafeExposeInternals<\n K extends {},\n V extends {},\n FC extends unknown = unknown\n >(c: LRUCache) {\n return {\n // properties\n starts: c.#starts,\n ttls: c.#ttls,\n sizes: c.#sizes,\n keyMap: c.#keyMap as Map,\n keyList: c.#keyList,\n valList: c.#valList,\n next: c.#next,\n prev: c.#prev,\n get head() {\n return c.#head\n },\n get tail() {\n return c.#tail\n },\n free: c.#free,\n // methods\n isBackgroundFetch: (p: any) => c.#isBackgroundFetch(p),\n backgroundFetch: (\n k: K,\n index: number | undefined,\n options: LRUCache.FetchOptions,\n context: any\n ): BackgroundFetch =>\n c.#backgroundFetch(\n k,\n index as Index | undefined,\n options,\n context\n ),\n moveToTail: (index: number): void =>\n c.#moveToTail(index as Index),\n indexes: (options?: { allowStale: boolean }) =>\n c.#indexes(options),\n rindexes: (options?: { allowStale: boolean }) =>\n c.#rindexes(options),\n isStale: (index: number | undefined) =>\n c.#isStale(index as Index),\n }\n }\n\n // Protected read-only members\n\n /**\n * {@link LRUCache.OptionsBase.max} (read-only)\n */\n get max(): LRUCache.Count {\n return this.#max\n }\n /**\n * {@link LRUCache.OptionsBase.maxSize} (read-only)\n */\n get maxSize(): LRUCache.Count {\n return this.#maxSize\n }\n /**\n * The total computed size of items in the cache (read-only)\n */\n get calculatedSize(): LRUCache.Size {\n return this.#calculatedSize\n }\n /**\n * The number of items stored in the cache (read-only)\n */\n get size(): LRUCache.Count {\n return this.#size\n }\n /**\n * {@link LRUCache.OptionsBase.fetchMethod} (read-only)\n */\n get fetchMethod(): LRUCache.Fetcher | undefined {\n return this.#fetchMethod\n }\n /**\n * {@link LRUCache.OptionsBase.dispose} (read-only)\n */\n get dispose() {\n return this.#dispose\n }\n /**\n * {@link LRUCache.OptionsBase.disposeAfter} (read-only)\n */\n get disposeAfter() {\n return this.#disposeAfter\n }\n\n constructor(\n options: LRUCache.Options | LRUCache\n ) {\n const {\n max = 0,\n ttl,\n ttlResolution = 1,\n ttlAutopurge,\n updateAgeOnGet,\n updateAgeOnHas,\n allowStale,\n dispose,\n disposeAfter,\n noDisposeOnSet,\n noUpdateTTL,\n maxSize = 0,\n maxEntrySize = 0,\n sizeCalculation,\n fetchMethod,\n noDeleteOnFetchRejection,\n noDeleteOnStaleGet,\n allowStaleOnFetchRejection,\n allowStaleOnFetchAbort,\n ignoreFetchAbort,\n } = options\n\n if (max !== 0 && !isPosInt(max)) {\n throw new TypeError('max option must be a nonnegative integer')\n }\n\n const UintArray = max ? 
getUintArray(max) : Array\n if (!UintArray) {\n throw new Error('invalid max value: ' + max)\n }\n\n this.#max = max\n this.#maxSize = maxSize\n this.maxEntrySize = maxEntrySize || this.#maxSize\n this.sizeCalculation = sizeCalculation\n if (this.sizeCalculation) {\n if (!this.#maxSize && !this.maxEntrySize) {\n throw new TypeError(\n 'cannot set sizeCalculation without setting maxSize or maxEntrySize'\n )\n }\n if (typeof this.sizeCalculation !== 'function') {\n throw new TypeError('sizeCalculation set to non-function')\n }\n }\n\n if (\n fetchMethod !== undefined &&\n typeof fetchMethod !== 'function'\n ) {\n throw new TypeError(\n 'fetchMethod must be a function if specified'\n )\n }\n this.#fetchMethod = fetchMethod\n this.#hasFetchMethod = !!fetchMethod\n\n this.#keyMap = new Map()\n this.#keyList = new Array(max).fill(undefined)\n this.#valList = new Array(max).fill(undefined)\n this.#next = new UintArray(max)\n this.#prev = new UintArray(max)\n this.#head = 0 as Index\n this.#tail = 0 as Index\n this.#free = Stack.create(max)\n this.#size = 0\n this.#calculatedSize = 0\n\n if (typeof dispose === 'function') {\n this.#dispose = dispose\n }\n if (typeof disposeAfter === 'function') {\n this.#disposeAfter = disposeAfter\n this.#disposed = []\n } else {\n this.#disposeAfter = undefined\n this.#disposed = undefined\n }\n this.#hasDispose = !!this.#dispose\n this.#hasDisposeAfter = !!this.#disposeAfter\n\n this.noDisposeOnSet = !!noDisposeOnSet\n this.noUpdateTTL = !!noUpdateTTL\n this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection\n this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection\n this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort\n this.ignoreFetchAbort = !!ignoreFetchAbort\n\n // NB: maxEntrySize is set to maxSize if it's set\n if (this.maxEntrySize !== 0) {\n if (this.#maxSize !== 0) {\n if (!isPosInt(this.#maxSize)) {\n throw new TypeError(\n 'maxSize must be a positive integer if specified'\n )\n }\n }\n if (!isPosInt(this.maxEntrySize)) {\n throw new TypeError(\n 'maxEntrySize must be a positive integer if specified'\n )\n }\n this.#initializeSizeTracking()\n }\n\n this.allowStale = !!allowStale\n this.noDeleteOnStaleGet = !!noDeleteOnStaleGet\n this.updateAgeOnGet = !!updateAgeOnGet\n this.updateAgeOnHas = !!updateAgeOnHas\n this.ttlResolution =\n isPosInt(ttlResolution) || ttlResolution === 0\n ? ttlResolution\n : 1\n this.ttlAutopurge = !!ttlAutopurge\n this.ttl = ttl || 0\n if (this.ttl) {\n if (!isPosInt(this.ttl)) {\n throw new TypeError(\n 'ttl must be a positive integer if specified'\n )\n }\n this.#initializeTTLTracking()\n }\n\n // do not allow completely unbounded caches\n if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {\n throw new TypeError(\n 'At least one of max, maxSize, or ttl is required'\n )\n }\n if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {\n const code = 'LRU_CACHE_UNBOUNDED'\n if (shouldWarn(code)) {\n warned.add(code)\n const msg =\n 'TTL caching without ttlAutopurge, max, or maxSize can ' +\n 'result in unbounded memory consumption.'\n emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)\n }\n }\n }\n\n /**\n * Return the remaining TTL time for a given entry key\n */\n getRemainingTTL(key: K) {\n return this.#keyMap.has(key) ? Infinity : 0\n }\n\n #initializeTTLTracking() {\n const ttls = new ZeroArray(this.#max)\n const starts = new ZeroArray(this.#max)\n this.#ttls = ttls\n this.#starts = starts\n\n this.#setItemTTL = (index, ttl, start = perf.now()) => {\n starts[index] = ttl !== 0 ? 
start : 0\n ttls[index] = ttl\n if (ttl !== 0 && this.ttlAutopurge) {\n const t = setTimeout(() => {\n if (this.#isStale(index)) {\n this.delete(this.#keyList[index] as K)\n }\n }, ttl + 1)\n // unref() not supported on all platforms\n /* c8 ignore start */\n if (t.unref) {\n t.unref()\n }\n /* c8 ignore stop */\n }\n }\n\n this.#updateItemAge = index => {\n starts[index] = ttls[index] !== 0 ? perf.now() : 0\n }\n\n this.#statusTTL = (status, index) => {\n if (ttls[index]) {\n const ttl = ttls[index]\n const start = starts[index]\n status.ttl = ttl\n status.start = start\n status.now = cachedNow || getNow()\n const age = status.now - start\n status.remainingTTL = ttl - age\n }\n }\n\n // debounce calls to perf.now() to 1s so we're not hitting\n // that costly call repeatedly.\n let cachedNow = 0\n const getNow = () => {\n const n = perf.now()\n if (this.ttlResolution > 0) {\n cachedNow = n\n const t = setTimeout(\n () => (cachedNow = 0),\n this.ttlResolution\n )\n // not available on all platforms\n /* c8 ignore start */\n if (t.unref) {\n t.unref()\n }\n /* c8 ignore stop */\n }\n return n\n }\n\n this.getRemainingTTL = key => {\n const index = this.#keyMap.get(key)\n if (index === undefined) {\n return 0\n }\n const ttl = ttls[index]\n const start = starts[index]\n if (ttl === 0 || start === 0) {\n return Infinity\n }\n const age = (cachedNow || getNow()) - start\n return ttl - age\n }\n\n this.#isStale = index => {\n return (\n ttls[index] !== 0 &&\n starts[index] !== 0 &&\n (cachedNow || getNow()) - starts[index] > ttls[index]\n )\n }\n }\n\n // conditionally set private methods related to TTL\n #updateItemAge: (index: Index) => void = () => {}\n #statusTTL: (status: LRUCache.Status, index: Index) => void =\n () => {}\n #setItemTTL: (\n index: Index,\n ttl: LRUCache.Milliseconds,\n start?: LRUCache.Milliseconds\n // ignore because we never call this if we're not already in TTL mode\n /* c8 ignore start */\n ) => void = () => {}\n /* c8 ignore stop */\n\n #isStale: (index: Index) => boolean = () => false\n\n #initializeSizeTracking() {\n const sizes = new ZeroArray(this.#max)\n this.#calculatedSize = 0\n this.#sizes = sizes\n this.#removeItemSize = index => {\n this.#calculatedSize -= sizes[index]\n sizes[index] = 0\n }\n this.#requireSize = (k, v, size, sizeCalculation) => {\n // provisionally accept background fetches.\n // actual value size will be checked when they return.\n if (this.#isBackgroundFetch(v)) {\n return 0\n }\n if (!isPosInt(size)) {\n if (sizeCalculation) {\n if (typeof sizeCalculation !== 'function') {\n throw new TypeError('sizeCalculation must be a function')\n }\n size = sizeCalculation(v, k)\n if (!isPosInt(size)) {\n throw new TypeError(\n 'sizeCalculation return invalid (expect positive integer)'\n )\n }\n } else {\n throw new TypeError(\n 'invalid size value (must be positive integer). 
' +\n 'When maxSize or maxEntrySize is used, sizeCalculation ' +\n 'or size must be set.'\n )\n }\n }\n return size\n }\n this.#addItemSize = (\n index: Index,\n size: LRUCache.Size,\n status?: LRUCache.Status\n ) => {\n sizes[index] = size\n if (this.#maxSize) {\n const maxSize = this.#maxSize - sizes[index]\n while (this.#calculatedSize > maxSize) {\n this.#evict(true)\n }\n }\n this.#calculatedSize += sizes[index]\n if (status) {\n status.entrySize = size\n status.totalCalculatedSize = this.#calculatedSize\n }\n }\n }\n\n #removeItemSize: (index: Index) => void = _i => {}\n #addItemSize: (\n index: Index,\n size: LRUCache.Size,\n status?: LRUCache.Status\n ) => void = (_i, _s, _st) => {}\n #requireSize: (\n k: K,\n v: V | BackgroundFetch,\n size?: LRUCache.Size,\n sizeCalculation?: LRUCache.SizeCalculator\n ) => LRUCache.Size = (\n _k: K,\n _v: V | BackgroundFetch,\n size?: LRUCache.Size,\n sizeCalculation?: LRUCache.SizeCalculator\n ) => {\n if (size || sizeCalculation) {\n throw new TypeError(\n 'cannot set size without setting maxSize or maxEntrySize on cache'\n )\n }\n return 0\n };\n\n *#indexes({ allowStale = this.allowStale } = {}) {\n if (this.#size) {\n for (let i = this.#tail; true; ) {\n if (!this.#isValidIndex(i)) {\n break\n }\n if (allowStale || !this.#isStale(i)) {\n yield i\n }\n if (i === this.#head) {\n break\n } else {\n i = this.#prev[i] as Index\n }\n }\n }\n }\n\n *#rindexes({ allowStale = this.allowStale } = {}) {\n if (this.#size) {\n for (let i = this.#head; true; ) {\n if (!this.#isValidIndex(i)) {\n break\n }\n if (allowStale || !this.#isStale(i)) {\n yield i\n }\n if (i === this.#tail) {\n break\n } else {\n i = this.#next[i] as Index\n }\n }\n }\n }\n\n #isValidIndex(index: Index) {\n return (\n index !== undefined &&\n this.#keyMap.get(this.#keyList[index] as K) === index\n )\n }\n\n /**\n * Return a generator yielding `[key, value]` pairs,\n * in order from most recently used to least recently used.\n */\n *entries() {\n for (const i of this.#indexes()) {\n if (\n this.#valList[i] !== undefined &&\n this.#keyList[i] !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield [this.#keyList[i], this.#valList[i]]\n }\n }\n }\n\n /**\n * Inverse order version of {@link LRUCache.entries}\n *\n * Return a generator yielding `[key, value]` pairs,\n * in order from least recently used to most recently used.\n */\n *rentries() {\n for (const i of this.#rindexes()) {\n if (\n this.#valList[i] !== undefined &&\n this.#keyList[i] !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield [this.#keyList[i], this.#valList[i]]\n }\n }\n }\n\n /**\n * Return a generator yielding the keys in the cache,\n * in order from most recently used to least recently used.\n */\n *keys() {\n for (const i of this.#indexes()) {\n const k = this.#keyList[i]\n if (\n k !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield k\n }\n }\n }\n\n /**\n * Inverse order version of {@link LRUCache.keys}\n *\n * Return a generator yielding the keys in the cache,\n * in order from least recently used to most recently used.\n */\n *rkeys() {\n for (const i of this.#rindexes()) {\n const k = this.#keyList[i]\n if (\n k !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield k\n }\n }\n }\n\n /**\n * Return a generator yielding the values in the cache,\n * in order from most recently used to least recently used.\n */\n *values() {\n for (const i of this.#indexes()) {\n const v = this.#valList[i]\n if (\n v !== undefined &&\n 
!this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield this.#valList[i]\n }\n }\n }\n\n /**\n * Inverse order version of {@link LRUCache.values}\n *\n * Return a generator yielding the values in the cache,\n * in order from least recently used to most recently used.\n */\n *rvalues() {\n for (const i of this.#rindexes()) {\n const v = this.#valList[i]\n if (\n v !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield this.#valList[i]\n }\n }\n }\n\n /**\n * Iterating over the cache itself yields the same results as\n * {@link LRUCache.entries}\n */\n [Symbol.iterator]() {\n return this.entries()\n }\n\n /**\n * Find a value for which the supplied fn method returns a truthy value,\n * similar to Array.find(). fn is called as fn(value, key, cache).\n */\n find(\n fn: (v: V, k: K, self: LRUCache) => boolean,\n getOptions: LRUCache.GetOptions = {}\n ) {\n for (const i of this.#indexes()) {\n const v = this.#valList[i]\n const value = this.#isBackgroundFetch(v)\n ? v.__staleWhileFetching\n : v\n if (value === undefined) continue\n if (fn(value, this.#keyList[i] as K, this)) {\n return this.get(this.#keyList[i] as K, getOptions)\n }\n }\n }\n\n /**\n * Call the supplied function on each item in the cache, in order from\n * most recently used to least recently used. fn is called as\n * fn(value, key, cache). Does not update age or recenty of use.\n * Does not iterate over stale values.\n */\n forEach(\n fn: (v: V, k: K, self: LRUCache) => any,\n thisp: any = this\n ) {\n for (const i of this.#indexes()) {\n const v = this.#valList[i]\n const value = this.#isBackgroundFetch(v)\n ? v.__staleWhileFetching\n : v\n if (value === undefined) continue\n fn.call(thisp, value, this.#keyList[i] as K, this)\n }\n }\n\n /**\n * The same as {@link LRUCache.forEach} but items are iterated over in\n * reverse order. (ie, less recently used items are iterated over first.)\n */\n rforEach(\n fn: (v: V, k: K, self: LRUCache) => any,\n thisp: any = this\n ) {\n for (const i of this.#rindexes()) {\n const v = this.#valList[i]\n const value = this.#isBackgroundFetch(v)\n ? v.__staleWhileFetching\n : v\n if (value === undefined) continue\n fn.call(thisp, value, this.#keyList[i] as K, this)\n }\n }\n\n /**\n * Delete any stale entries. Returns true if anything was removed,\n * false otherwise.\n */\n purgeStale() {\n let deleted = false\n for (const i of this.#rindexes({ allowStale: true })) {\n if (this.#isStale(i)) {\n this.delete(this.#keyList[i] as K)\n deleted = true\n }\n }\n return deleted\n }\n\n /**\n * Return an array of [key, {@link LRUCache.Entry}] tuples which can be\n * passed to cache.load()\n */\n dump() {\n const arr: [K, LRUCache.Entry][] = []\n for (const i of this.#indexes({ allowStale: true })) {\n const key = this.#keyList[i]\n const v = this.#valList[i]\n const value: V | undefined = this.#isBackgroundFetch(v)\n ? 
v.__staleWhileFetching\n : v\n if (value === undefined || key === undefined) continue\n const entry: LRUCache.Entry = { value }\n if (this.#ttls && this.#starts) {\n entry.ttl = this.#ttls[i]\n // always dump the start relative to a portable timestamp\n // it's ok for this to be a bit slow, it's a rare operation.\n const age = perf.now() - this.#starts[i]\n entry.start = Math.floor(Date.now() - age)\n }\n if (this.#sizes) {\n entry.size = this.#sizes[i]\n }\n arr.unshift([key, entry])\n }\n return arr\n }\n\n /**\n * Reset the cache and load in the items in entries in the order listed.\n * Note that the shape of the resulting cache may be different if the\n * same options are not used in both caches.\n */\n load(arr: [K, LRUCache.Entry][]) {\n this.clear()\n for (const [key, entry] of arr) {\n if (entry.start) {\n // entry.start is a portable timestamp, but we may be using\n // node's performance.now(), so calculate the offset, so that\n // we get the intended remaining TTL, no matter how long it's\n // been on ice.\n //\n // it's ok for this to be a bit slow, it's a rare operation.\n const age = Date.now() - entry.start\n entry.start = perf.now() - age\n }\n this.set(key, entry.value, entry)\n }\n }\n\n /**\n * Add a value to the cache.\n *\n * Note: if `undefined` is specified as a value, this is an alias for\n * {@link LRUCache#delete}\n */\n set(\n k: K,\n v: V | BackgroundFetch | undefined,\n setOptions: LRUCache.SetOptions = {}\n ) {\n if (v === undefined) {\n this.delete(k)\n return this\n }\n const {\n ttl = this.ttl,\n start,\n noDisposeOnSet = this.noDisposeOnSet,\n sizeCalculation = this.sizeCalculation,\n status,\n } = setOptions\n let { noUpdateTTL = this.noUpdateTTL } = setOptions\n\n const size = this.#requireSize(\n k,\n v,\n setOptions.size || 0,\n sizeCalculation\n )\n // if the item doesn't fit, don't do anything\n // NB: maxEntrySize set to maxSize by default\n if (this.maxEntrySize && size > this.maxEntrySize) {\n if (status) {\n status.set = 'miss'\n status.maxEntrySizeExceeded = true\n }\n // have to delete, in case something is there already.\n this.delete(k)\n return this\n }\n let index = this.#size === 0 ? undefined : this.#keyMap.get(k)\n if (index === undefined) {\n // addition\n index = (\n this.#size === 0\n ? this.#tail\n : this.#free.length !== 0\n ? this.#free.pop()\n : this.#size === this.#max\n ? this.#evict(false)\n : this.#size\n ) as Index\n this.#keyList[index] = k\n this.#valList[index] = v\n this.#keyMap.set(k, index)\n this.#next[this.#tail] = index\n this.#prev[index] = this.#tail\n this.#tail = index\n this.#size++\n this.#addItemSize(index, size, status)\n if (status) status.set = 'add'\n noUpdateTTL = false\n } else {\n // update\n this.#moveToTail(index)\n const oldVal = this.#valList[index] as V | BackgroundFetch\n if (v !== oldVal) {\n if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {\n oldVal.__abortController.abort(new Error('replaced'))\n } else if (!noDisposeOnSet) {\n if (this.#hasDispose) {\n this.#dispose?.(oldVal as V, k, 'set')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([oldVal as V, k, 'set'])\n }\n }\n this.#removeItemSize(index)\n this.#addItemSize(index, size, status)\n this.#valList[index] = v\n if (status) {\n status.set = 'replace'\n const oldValue =\n oldVal && this.#isBackgroundFetch(oldVal)\n ? 
oldVal.__staleWhileFetching\n : oldVal\n if (oldValue !== undefined) status.oldValue = oldValue\n }\n } else if (status) {\n status.set = 'update'\n }\n }\n if (ttl !== 0 && !this.#ttls) {\n this.#initializeTTLTracking()\n }\n if (this.#ttls) {\n if (!noUpdateTTL) {\n this.#setItemTTL(index, ttl, start)\n }\n if (status) this.#statusTTL(status, index)\n }\n if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n return this\n }\n\n /**\n * Evict the least recently used item, returning its value or\n * `undefined` if cache is empty.\n */\n pop(): V | undefined {\n try {\n while (this.#size) {\n const val = this.#valList[this.#head]\n this.#evict(true)\n if (this.#isBackgroundFetch(val)) {\n if (val.__staleWhileFetching) {\n return val.__staleWhileFetching\n }\n } else if (val !== undefined) {\n return val\n }\n }\n } finally {\n if (this.#hasDisposeAfter && this.#disposed) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n }\n }\n\n #evict(free: boolean) {\n const head = this.#head\n const k = this.#keyList[head] as K\n const v = this.#valList[head] as V\n if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {\n v.__abortController.abort(new Error('evicted'))\n } else if (this.#hasDispose || this.#hasDisposeAfter) {\n if (this.#hasDispose) {\n this.#dispose?.(v, k, 'evict')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([v, k, 'evict'])\n }\n }\n this.#removeItemSize(head)\n // if we aren't about to use the index, then null these out\n if (free) {\n this.#keyList[head] = undefined\n this.#valList[head] = undefined\n this.#free.push(head)\n }\n if (this.#size === 1) {\n this.#head = this.#tail = 0 as Index\n this.#free.length = 0\n } else {\n this.#head = this.#next[head] as Index\n }\n this.#keyMap.delete(k)\n this.#size--\n return head\n }\n\n /**\n * Check if a key is in the cache, without updating the recency of use.\n * Will return false if the item is stale, even though it is technically\n * in the cache.\n *\n * Will not update item age unless\n * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.\n */\n has(k: K, hasOptions: LRUCache.HasOptions = {}) {\n const { updateAgeOnHas = this.updateAgeOnHas, status } =\n hasOptions\n const index = this.#keyMap.get(k)\n if (index !== undefined) {\n const v = this.#valList[index]\n if (\n this.#isBackgroundFetch(v) &&\n v.__staleWhileFetching === undefined\n ) {\n return false\n }\n if (!this.#isStale(index)) {\n if (updateAgeOnHas) {\n this.#updateItemAge(index)\n }\n if (status) {\n status.has = 'hit'\n this.#statusTTL(status, index)\n }\n return true\n } else if (status) {\n status.has = 'stale'\n this.#statusTTL(status, index)\n }\n } else if (status) {\n status.has = 'miss'\n }\n return false\n }\n\n /**\n * Like {@link LRUCache#get} but doesn't update recency or delete stale\n * items.\n *\n * Returns `undefined` if the item is stale, unless\n * {@link LRUCache.OptionsBase.allowStale} is set.\n */\n peek(k: K, peekOptions: LRUCache.PeekOptions = {}) {\n const { allowStale = this.allowStale } = peekOptions\n const index = this.#keyMap.get(k)\n if (\n index !== undefined &&\n (allowStale || !this.#isStale(index))\n ) {\n const v = this.#valList[index]\n // either stale and allowed, or forcing a refresh of non-stale value\n return this.#isBackgroundFetch(v) ? 
v.__staleWhileFetching : v\n }\n }\n\n #backgroundFetch(\n k: K,\n index: Index | undefined,\n options: LRUCache.FetchOptions,\n context: any\n ): BackgroundFetch {\n const v = index === undefined ? undefined : this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n return v\n }\n\n const ac = new AC()\n const { signal } = options\n // when/if our AC signals, then stop listening to theirs.\n signal?.addEventListener('abort', () => ac.abort(signal.reason), {\n signal: ac.signal,\n })\n\n const fetchOpts = {\n signal: ac.signal,\n options,\n context,\n }\n\n const cb = (\n v: V | void | undefined,\n updateCache = false\n ): V | undefined | void => {\n const { aborted } = ac.signal\n const ignoreAbort = options.ignoreFetchAbort && v !== undefined\n if (options.status) {\n if (aborted && !updateCache) {\n options.status.fetchAborted = true\n options.status.fetchError = ac.signal.reason\n if (ignoreAbort) options.status.fetchAbortIgnored = true\n } else {\n options.status.fetchResolved = true\n }\n }\n if (aborted && !ignoreAbort && !updateCache) {\n return fetchFail(ac.signal.reason)\n }\n // either we didn't abort, and are still here, or we did, and ignored\n const bf = p as BackgroundFetch\n if (this.#valList[index as Index] === p) {\n if (v === undefined) {\n if (bf.__staleWhileFetching) {\n this.#valList[index as Index] = bf.__staleWhileFetching\n } else {\n this.delete(k)\n }\n } else {\n if (options.status) options.status.fetchUpdated = true\n this.set(k, v, fetchOpts.options)\n }\n }\n return v\n }\n\n const eb = (er: any) => {\n if (options.status) {\n options.status.fetchRejected = true\n options.status.fetchError = er\n }\n return fetchFail(er)\n }\n\n const fetchFail = (er: any): V | undefined => {\n const { aborted } = ac.signal\n const allowStaleAborted =\n aborted && options.allowStaleOnFetchAbort\n const allowStale =\n allowStaleAborted || options.allowStaleOnFetchRejection\n const noDelete = allowStale || options.noDeleteOnFetchRejection\n const bf = p as BackgroundFetch\n if (this.#valList[index as Index] === p) {\n // if we allow stale on fetch rejections, then we need to ensure that\n // the stale value is not removed from the cache when the fetch fails.\n const del = !noDelete || bf.__staleWhileFetching === undefined\n if (del) {\n this.delete(k)\n } else if (!allowStaleAborted) {\n // still replace the *promise* with the stale value,\n // since we are done with the promise at this point.\n // leave it untouched if we're still waiting for an\n // aborted background fetch that hasn't yet returned.\n this.#valList[index as Index] = bf.__staleWhileFetching\n }\n }\n if (allowStale) {\n if (options.status && bf.__staleWhileFetching !== undefined) {\n options.status.returnedStale = true\n }\n return bf.__staleWhileFetching\n } else if (bf.__returned === bf) {\n throw er\n }\n }\n\n const pcall = (\n res: (v: V | void | undefined) => void,\n rej: (e: any) => void\n ) => {\n const fmp = this.#fetchMethod?.(k, v, fetchOpts)\n if (fmp && fmp instanceof Promise) {\n fmp.then(v => res(v), rej)\n }\n // ignored, we go until we finish, regardless.\n // defer check until we are actually aborting,\n // so fetchMethod can override.\n ac.signal.addEventListener('abort', () => {\n if (\n !options.ignoreFetchAbort ||\n options.allowStaleOnFetchAbort\n ) {\n res()\n // when it eventually resolves, update the cache.\n if (options.allowStaleOnFetchAbort) {\n res = v => cb(v, true)\n }\n }\n })\n }\n\n if (options.status) options.status.fetchDispatched = true\n const p = new 
Promise(pcall).then(cb, eb)\n const bf = Object.assign(p, {\n __abortController: ac,\n __staleWhileFetching: v,\n __returned: undefined,\n })\n\n if (index === undefined) {\n // internal, don't expose status.\n this.set(k, bf, { ...fetchOpts.options, status: undefined })\n index = this.#keyMap.get(k)\n } else {\n this.#valList[index] = bf\n }\n return bf\n }\n\n #isBackgroundFetch(p: any): p is BackgroundFetch {\n if (!this.#hasFetchMethod) return false\n const b = p as BackgroundFetch\n return (\n !!b &&\n b instanceof Promise &&\n b.hasOwnProperty('__staleWhileFetching') &&\n b.__abortController instanceof AC\n )\n }\n\n /**\n * Make an asynchronous cached fetch using the\n * {@link LRUCache.OptionsBase.fetchMethod} function.\n *\n * If multiple fetches for the same key are issued, then they will all be\n * coalesced into a single call to fetchMethod.\n *\n * Note that this means that handling options such as\n * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort},\n * {@link LRUCache.FetchOptions.signal},\n * and {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} will be\n * determined by the FIRST fetch() call for a given key.\n *\n * This is a known (fixable) shortcoming which will be addresed on when\n * someone complains about it, as the fix would involve added complexity and\n * may not be worth the costs for this edge case.\n */\n fetch(\n k: K,\n fetchOptions: unknown extends FC\n ? LRUCache.FetchOptions\n : FC extends undefined | void\n ? LRUCache.FetchOptionsNoContext\n : LRUCache.FetchOptionsWithContext\n ): Promise\n // this overload not allowed if context is required\n fetch(\n k: unknown extends FC\n ? K\n : FC extends undefined | void\n ? K\n : never,\n fetchOptions?: unknown extends FC\n ? LRUCache.FetchOptions\n : FC extends undefined | void\n ? LRUCache.FetchOptionsNoContext\n : never\n ): Promise\n async fetch(\n k: K,\n fetchOptions: LRUCache.FetchOptions = {}\n ): Promise {\n const {\n // get options\n allowStale = this.allowStale,\n updateAgeOnGet = this.updateAgeOnGet,\n noDeleteOnStaleGet = this.noDeleteOnStaleGet,\n // set options\n ttl = this.ttl,\n noDisposeOnSet = this.noDisposeOnSet,\n size = 0,\n sizeCalculation = this.sizeCalculation,\n noUpdateTTL = this.noUpdateTTL,\n // fetch exclusive options\n noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,\n allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,\n ignoreFetchAbort = this.ignoreFetchAbort,\n allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,\n context,\n forceRefresh = false,\n status,\n signal,\n } = fetchOptions\n\n if (!this.#hasFetchMethod) {\n if (status) status.fetch = 'get'\n return this.get(k, {\n allowStale,\n updateAgeOnGet,\n noDeleteOnStaleGet,\n status,\n })\n }\n\n const options = {\n allowStale,\n updateAgeOnGet,\n noDeleteOnStaleGet,\n ttl,\n noDisposeOnSet,\n size,\n sizeCalculation,\n noUpdateTTL,\n noDeleteOnFetchRejection,\n allowStaleOnFetchRejection,\n allowStaleOnFetchAbort,\n ignoreFetchAbort,\n status,\n signal,\n }\n\n let index = this.#keyMap.get(k)\n if (index === undefined) {\n if (status) status.fetch = 'miss'\n const p = this.#backgroundFetch(k, index, options, context)\n return (p.__returned = p)\n } else {\n // in cache, maybe already fetching\n const v = this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n const stale =\n allowStale && v.__staleWhileFetching !== undefined\n if (status) {\n status.fetch = 'inflight'\n if (stale) status.returnedStale = true\n }\n return stale ? 
v.__staleWhileFetching : (v.__returned = v)\n }\n\n // if we force a refresh, that means do NOT serve the cached value,\n // unless we are already in the process of refreshing the cache.\n const isStale = this.#isStale(index)\n if (!forceRefresh && !isStale) {\n if (status) status.fetch = 'hit'\n this.#moveToTail(index)\n if (updateAgeOnGet) {\n this.#updateItemAge(index)\n }\n if (status) this.#statusTTL(status, index)\n return v\n }\n\n // ok, it is stale or a forced refresh, and not already fetching.\n // refresh the cache.\n const p = this.#backgroundFetch(k, index, options, context)\n const hasStale = p.__staleWhileFetching !== undefined\n const staleVal = hasStale && allowStale\n if (status) {\n status.fetch = isStale ? 'stale' : 'refresh'\n if (staleVal && isStale) status.returnedStale = true\n }\n return staleVal ? p.__staleWhileFetching : (p.__returned = p)\n }\n }\n\n /**\n * Return a value from the cache. Will update the recency of the cache\n * entry found.\n *\n * If the key is not found, get() will return `undefined`.\n */\n get(k: K, getOptions: LRUCache.GetOptions = {}) {\n const {\n allowStale = this.allowStale,\n updateAgeOnGet = this.updateAgeOnGet,\n noDeleteOnStaleGet = this.noDeleteOnStaleGet,\n status,\n } = getOptions\n const index = this.#keyMap.get(k)\n if (index !== undefined) {\n const value = this.#valList[index]\n const fetching = this.#isBackgroundFetch(value)\n if (status) this.#statusTTL(status, index)\n if (this.#isStale(index)) {\n if (status) status.get = 'stale'\n // delete only if not an in-flight background fetch\n if (!fetching) {\n if (!noDeleteOnStaleGet) {\n this.delete(k)\n }\n if (status && allowStale) status.returnedStale = true\n return allowStale ? value : undefined\n } else {\n if (\n status &&\n allowStale &&\n value.__staleWhileFetching !== undefined\n ) {\n status.returnedStale = true\n }\n return allowStale ? 
value.__staleWhileFetching : undefined\n }\n } else {\n if (status) status.get = 'hit'\n // if we're currently fetching it, we don't actually have it yet\n // it's not stale, which means this isn't a staleWhileRefetching.\n // If it's not stale, and fetching, AND has a __staleWhileFetching\n // value, then that means the user fetched with {forceRefresh:true},\n // so it's safe to return that value.\n if (fetching) {\n return value.__staleWhileFetching\n }\n this.#moveToTail(index)\n if (updateAgeOnGet) {\n this.#updateItemAge(index)\n }\n return value\n }\n } else if (status) {\n status.get = 'miss'\n }\n }\n\n #connect(p: Index, n: Index) {\n this.#prev[n] = p\n this.#next[p] = n\n }\n\n #moveToTail(index: Index): void {\n // if tail already, nothing to do\n // if head, move head to next[index]\n // else\n // move next[prev[index]] to next[index] (head has no prev)\n // move prev[next[index]] to prev[index]\n // prev[index] = tail\n // next[tail] = index\n // tail = index\n if (index !== this.#tail) {\n if (index === this.#head) {\n this.#head = this.#next[index] as Index\n } else {\n this.#connect(\n this.#prev[index] as Index,\n this.#next[index] as Index\n )\n }\n this.#connect(this.#tail, index)\n this.#tail = index\n }\n }\n\n /**\n * Deletes a key out of the cache.\n * Returns true if the key was deleted, false otherwise.\n */\n delete(k: K) {\n let deleted = false\n if (this.#size !== 0) {\n const index = this.#keyMap.get(k)\n if (index !== undefined) {\n deleted = true\n if (this.#size === 1) {\n this.clear()\n } else {\n this.#removeItemSize(index)\n const v = this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n v.__abortController.abort(new Error('deleted'))\n } else if (this.#hasDispose || this.#hasDisposeAfter) {\n if (this.#hasDispose) {\n this.#dispose?.(v as V, k, 'delete')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([v as V, k, 'delete'])\n }\n }\n this.#keyMap.delete(k)\n this.#keyList[index] = undefined\n this.#valList[index] = undefined\n if (index === this.#tail) {\n this.#tail = this.#prev[index] as Index\n } else if (index === this.#head) {\n this.#head = this.#next[index] as Index\n } else {\n this.#next[this.#prev[index]] = this.#next[index]\n this.#prev[this.#next[index]] = this.#prev[index]\n }\n this.#size--\n this.#free.push(index)\n }\n }\n }\n if (this.#hasDisposeAfter && this.#disposed?.length) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n return deleted\n }\n\n /**\n * Clear the cache entirely, throwing away all values.\n */\n clear() {\n for (const index of this.#rindexes({ allowStale: true })) {\n const v = this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n v.__abortController.abort(new Error('deleted'))\n } else {\n const k = this.#keyList[index]\n if (this.#hasDispose) {\n this.#dispose?.(v as V, k as K, 'delete')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([v as V, k as K, 'delete'])\n }\n }\n }\n\n this.#keyMap.clear()\n this.#valList.fill(undefined)\n this.#keyList.fill(undefined)\n if (this.#ttls && this.#starts) {\n this.#ttls.fill(0)\n this.#starts.fill(0)\n }\n if (this.#sizes) {\n this.#sizes.fill(0)\n }\n this.#head = 0 as Index\n this.#tail = 0 as Index\n this.#free.length = 0\n this.#calculatedSize = 0\n this.#size = 0\n if (this.#hasDisposeAfter && this.#disposed) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n 
}\n}\n"]} \ No newline at end of file diff --git a/dist/node_modules/lru-cache/dist/cjs/index.min.js b/dist/node_modules/lru-cache/dist/cjs/index.min.js new file mode 100644 index 0000000..d854bf5 --- /dev/null +++ b/dist/node_modules/lru-cache/dist/cjs/index.min.js @@ -0,0 +1,2 @@ +"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),k=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=k(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#_;#g;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#p;#n;#i;#t;#l;#c;#o;#h;#w;#r;#m;#F;#S;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#S,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#w,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#p}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#_}get disposeAfter(){return 
this.#g}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:u,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:d,maxSize:p=0,maxEntrySize:F=0,sizeCalculation:c,fetchMethod:w,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:S,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:g,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?k(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=p,this.maxEntrySize=F||this.#f,this.sizeCalculation=c,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=w,this.#T=!!w,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#w=R.create(e),this.#s=0,this.#p=0,typeof u=="function"&&(this.#_=u),typeof b=="function"?(this.#g=b,this.#r=[]):(this.#g=void 0,this.#r=void 0),this.#b=!!this.#_,this.#a=!!this.#g,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!d,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!g,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!S,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#S=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let u=n.now-r;n.remainingTTL=a-u}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let u=(i||s())-r;return a-u},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#p=0,this.#m=t,this.#E=e=>{this.#p-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#p>n;)this.#W(!0)}this.#p+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#p)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#S&&this.#F){h.ttl=this.#S[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:u=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#w.length!==0?this.#w.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),u=!1;else{this.#v(f);let d=this.#t[f];if(e!==d){if(this.#T&&this.#e(d)?d.__abortController.abort(new Error("replaced")):h||(this.#b&&this.#_?.(d,t,"set"),this.#a&&this.#r?.push([d,t,"set"])),this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let p=d&&this.#e(d)?d.__staleWhileFetching:d;p!==void 0&&(r.oldValue=p)}}else r&&(r.set="update")}if(s!==0&&!this.#S&&this.#L(),this.#S&&(u||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let d=this.#r,p;for(;p=d?.shift();)this.#g?.(...p)}return this}pop(){try{for(;this.#s;){let 
t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#w.push(e)),this.#s===1?(this.#o=this.#h=0,this.#w.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},u=(c,w=!1)=>{let{aborted:l}=h.signal,S=i.ignoreFetchAbort&&c!==void 0;if(i.status&&(l&&!w?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,S&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!S&&!w)return f(h.signal.reason);let y=p;return this.#t[e]===p&&(c===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,c,r.options))),c},b=c=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=c),f(c)),f=c=>{let{aborted:w}=h.signal,l=w&&i.allowStaleOnFetchAbort,S=l||i.allowStaleOnFetchRejection,y=S||i.noDeleteOnFetchRejection,g=p;if(this.#t[e]===p&&(!y||g.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=g.__staleWhileFetching)),S)return i.status&&g.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),g.__staleWhileFetching;if(g.__returned===g)throw c},d=(c,w)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(S=>c(S),w),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(c(),i.allowStaleOnFetchAbort&&(c=S=>u(S,!0)))})};i.status&&(i.status.fetchDispatched=!0);let p=new Promise(d).then(u,b),F=Object.assign(p,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:u=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:d=this.allowStaleOnFetchRejection,ignoreFetchAbort:p=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:c,forceRefresh:w=!1,status:l,signal:S}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let 
y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:u,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:d,allowStaleOnFetchAbort:F,ignoreFetchAbort:p,status:l,signal:S},g=this.#n.get(t);if(g===void 0){l&&(l.fetch="miss");let _=this.#D(t,g,y,c);return _.__returned=_}else{let _=this.#t[g];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(g);if(!w&&!O)return l&&(l.fetch="hit"),this.#v(g),s&&this.#z(g),l&&this.#O(l,g),_;let A=this.#D(t,g,y,c),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],u=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),u?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),u?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#w.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#g?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#_?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#S&&this.#F&&(this.#S.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#w.length=0,this.#p=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}};exports.LRUCache=C; +//# sourceMappingURL=index.min.js.map diff --git a/dist/node_modules/lru-cache/dist/cjs/index.min.js.map b/dist/node_modules/lru-cache/dist/cjs/index.min.js.map new file mode 100644 index 0000000..898bfd3 --- /dev/null +++ b/dist/node_modules/lru-cache/dist/cjs/index.min.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../../src/index.ts"], + "sourcesContent": ["/**\n * @module LRUCache\n */\n\n// module-private names and types\ntype Perf = { now: () => number }\nconst perf: Perf =\n typeof performance === 'object' &&\n performance &&\n typeof performance.now === 'function'\n ? performance\n : Date\n\nconst warned = new Set()\n\n// either a function or a class\ntype ForC = ((...a: any[]) => any) | { new (...a: any[]): any }\n\n/* c8 ignore start */\nconst PROCESS = (\n typeof process === 'object' && !!process ? process : {}\n) as { [k: string]: any }\n/* c8 ignore start */\n\nconst emitWarning = (\n msg: string,\n type: string,\n code: string,\n fn: ForC\n) => {\n typeof PROCESS.emitWarning === 'function'\n ? 
PROCESS.emitWarning(msg, type, code, fn)\n : console.error(`[${code}] ${type}: ${msg}`)\n}\n\nlet AC = globalThis.AbortController\nlet AS = globalThis.AbortSignal\n\n/* c8 ignore start */\nif (typeof AC === 'undefined') {\n //@ts-ignore\n AS = class AbortSignal {\n onabort?: (...a: any[]) => any\n _onabort: ((...a: any[]) => any)[] = []\n reason?: any\n aborted: boolean = false\n addEventListener(_: string, fn: (...a: any[]) => any) {\n this._onabort.push(fn)\n }\n }\n //@ts-ignore\n AC = class AbortController {\n constructor() {\n warnACPolyfill()\n }\n signal = new AS()\n abort(reason: any) {\n if (this.signal.aborted) return\n //@ts-ignore\n this.signal.reason = reason\n //@ts-ignore\n this.signal.aborted = true\n //@ts-ignore\n for (const fn of this.signal._onabort) {\n fn(reason)\n }\n this.signal.onabort?.(reason)\n }\n }\n let printACPolyfillWarning =\n PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'\n const warnACPolyfill = () => {\n if (!printACPolyfillWarning) return\n printACPolyfillWarning = false\n emitWarning(\n 'AbortController is not defined. If using lru-cache in ' +\n 'node 14, load an AbortController polyfill from the ' +\n '`node-abort-controller` package. A minimal polyfill is ' +\n 'provided for use by LRUCache.fetch(), but it should not be ' +\n 'relied upon in other contexts (eg, passing it to other APIs that ' +\n 'use AbortController/AbortSignal might have undesirable effects). ' +\n 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.',\n 'NO_ABORT_CONTROLLER',\n 'ENOTSUP',\n warnACPolyfill\n )\n }\n}\n/* c8 ignore stop */\n\nconst shouldWarn = (code: string) => !warned.has(code)\n\nconst TYPE = Symbol('type')\ntype PosInt = number & { [TYPE]: 'Positive Integer' }\ntype Index = number & { [TYPE]: 'LRUCache Index' }\n\nconst isPosInt = (n: any): n is PosInt =>\n n && n === Math.floor(n) && n > 0 && isFinite(n)\n\ntype UintArray = Uint8Array | Uint16Array | Uint32Array\ntype NumberArray = UintArray | number[]\n\n/* c8 ignore start */\n// This is a little bit ridiculous, tbh.\n// The maximum array length is 2^32-1 or thereabouts on most JS impls.\n// And well before that point, you're caching the entire world, I mean,\n// that's ~32GB of just integers for the next/prev links, plus whatever\n// else to hold that many keys and values. Just filling the memory with\n// zeroes at init time is brutal when you get that big.\n// But why not be complete?\n// Maybe in the future, these limits will have expanded.\nconst getUintArray = (max: number) =>\n !isPosInt(max)\n ? null\n : max <= Math.pow(2, 8)\n ? Uint8Array\n : max <= Math.pow(2, 16)\n ? Uint16Array\n : max <= Math.pow(2, 32)\n ? Uint32Array\n : max <= Number.MAX_SAFE_INTEGER\n ? 
ZeroArray\n : null\n/* c8 ignore stop */\n\nclass ZeroArray extends Array {\n constructor(size: number) {\n super(size)\n this.fill(0)\n }\n}\n\ntype StackLike = Stack | Index[]\nclass Stack {\n heap: NumberArray\n length: number\n // private constructor\n static #constructing: boolean = false\n static create(max: number): StackLike {\n const HeapCls = getUintArray(max)\n if (!HeapCls) return []\n Stack.#constructing = true\n const s = new Stack(max, HeapCls)\n Stack.#constructing = false\n return s\n }\n constructor(\n max: number,\n HeapCls: { new (n: number): NumberArray }\n ) {\n /* c8 ignore start */\n if (!Stack.#constructing) {\n throw new TypeError('instantiate Stack using Stack.create(n)')\n }\n /* c8 ignore stop */\n this.heap = new HeapCls(max)\n this.length = 0\n }\n push(n: Index) {\n this.heap[this.length++] = n\n }\n pop(): Index {\n return this.heap[--this.length] as Index\n }\n}\n\n/**\n * Promise representing an in-progress {@link LRUCache#fetch} call\n */\nexport type BackgroundFetch = Promise & {\n __returned: BackgroundFetch | undefined\n __abortController: AbortController\n __staleWhileFetching: V | undefined\n}\n\ntype DisposeTask = [\n value: V,\n key: K,\n reason: LRUCache.DisposeReason\n]\n\nexport namespace LRUCache {\n /**\n * An integer greater than 0, reflecting the calculated size of items\n */\n export type Size = number\n\n /**\n * Integer greater than 0, representing some number of milliseconds, or the\n * time at which a TTL started counting from.\n */\n export type Milliseconds = number\n\n /**\n * An integer greater than 0, reflecting a number of items\n */\n export type Count = number\n\n /**\n * The reason why an item was removed from the cache, passed\n * to the {@link Disposer} methods.\n */\n export type DisposeReason = 'evict' | 'set' | 'delete'\n /**\n * A method called upon item removal, passed as the\n * {@link OptionsBase.dispose} and/or\n * {@link OptionsBase.disposeAfter} options.\n */\n export type Disposer = (\n value: V,\n key: K,\n reason: DisposeReason\n ) => void\n\n /**\n * A function that returns the effective calculated size\n * of an entry in the cache.\n */\n export type SizeCalculator = (value: V, key: K) => Size\n\n /**\n * Options provided to the\n * {@link OptionsBase.fetchMethod} function.\n */\n export interface FetcherOptions {\n signal: AbortSignal\n options: FetcherFetchOptions\n /**\n * Object provided in the {@link FetchOptions.context} option to\n * {@link LRUCache#fetch}\n */\n context: FC\n }\n\n /**\n * Status object that may be passed to {@link LRUCache#fetch},\n * {@link LRUCache#get}, {@link LRUCache#set}, and {@link LRUCache#has}.\n */\n export interface Status {\n /**\n * The status of a set() operation.\n *\n * - add: the item was not found in the cache, and was added\n * - update: the item was in the cache, with the same value provided\n * - replace: the item was in the cache, and replaced\n * - miss: the item was not added to the cache for some reason\n */\n set?: 'add' | 'update' | 'replace' | 'miss'\n\n /**\n * the ttl stored for the item, or undefined if ttls are not used.\n */\n ttl?: Milliseconds\n\n /**\n * the start time for the item, or undefined if ttls are not used.\n */\n start?: Milliseconds\n\n /**\n * The timestamp used for TTL calculation\n */\n now?: Milliseconds\n\n /**\n * the remaining ttl for the item, or undefined if ttls are not used.\n */\n remainingTTL?: Milliseconds\n\n /**\n * The calculated size for the item, if sizes are used.\n */\n entrySize?: Size\n\n /**\n * The total 
calculated size of the cache, if sizes are used.\n */\n totalCalculatedSize?: Size\n\n /**\n * A flag indicating that the item was not stored, due to exceeding the\n * {@link OptionsBase.maxEntrySize}\n */\n maxEntrySizeExceeded?: true\n\n /**\n * The old value, specified in the case of `set:'update'` or\n * `set:'replace'`\n */\n oldValue?: V\n\n /**\n * The results of a {@link LRUCache#has} operation\n *\n * - hit: the item was found in the cache\n * - stale: the item was found in the cache, but is stale\n * - miss: the item was not found in the cache\n */\n has?: 'hit' | 'stale' | 'miss'\n\n /**\n * The status of a {@link LRUCache#fetch} operation.\n * Note that this can change as the underlying fetch() moves through\n * various states.\n *\n * - inflight: there is another fetch() for this key which is in process\n * - get: there is no fetchMethod, so {@link LRUCache#get} was called.\n * - miss: the item is not in cache, and will be fetched.\n * - hit: the item is in the cache, and was resolved immediately.\n * - stale: the item is in the cache, but stale.\n * - refresh: the item is in the cache, and not stale, but\n * {@link FetchOptions.forceRefresh} was specified.\n */\n fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh'\n\n /**\n * The {@link OptionsBase.fetchMethod} was called\n */\n fetchDispatched?: true\n\n /**\n * The cached value was updated after a successful call to\n * {@link OptionsBase.fetchMethod}\n */\n fetchUpdated?: true\n\n /**\n * The reason for a fetch() rejection. Either the error raised by the\n * {@link OptionsBase.fetchMethod}, or the reason for an\n * AbortSignal.\n */\n fetchError?: Error\n\n /**\n * The fetch received an abort signal\n */\n fetchAborted?: true\n\n /**\n * The abort signal received was ignored, and the fetch was allowed to\n * continue.\n */\n fetchAbortIgnored?: true\n\n /**\n * The fetchMethod promise resolved successfully\n */\n fetchResolved?: true\n\n /**\n * The fetchMethod promise was rejected\n */\n fetchRejected?: true\n\n /**\n * The status of a {@link LRUCache#get} operation.\n *\n * - fetching: The item is currently being fetched. 
If a previous value\n * is present and allowed, that will be returned.\n * - stale: The item is in the cache, and is stale.\n * - hit: the item is in the cache\n * - miss: the item is not in the cache\n */\n get?: 'stale' | 'hit' | 'miss'\n\n /**\n * A fetch or get operation returned a stale value.\n */\n returnedStale?: true\n }\n\n /**\n * options which override the options set in the LRUCache constructor\n * when calling {@link LRUCache#fetch}.\n *\n * This is the union of {@link GetOptions} and {@link SetOptions}, plus\n * {@link OptionsBase.noDeleteOnFetchRejection},\n * {@link OptionsBase.allowStaleOnFetchRejection},\n * {@link FetchOptions.forceRefresh}, and\n * {@link OptionsBase.context}\n *\n * Any of these may be modified in the {@link OptionsBase.fetchMethod}\n * function, but the {@link GetOptions} fields will of course have no\n * effect, as the {@link LRUCache#get} call already happened by the time\n * the fetchMethod is called.\n */\n export interface FetcherFetchOptions\n extends Pick<\n OptionsBase,\n | 'allowStale'\n | 'updateAgeOnGet'\n | 'noDeleteOnStaleGet'\n | 'sizeCalculation'\n | 'ttl'\n | 'noDisposeOnSet'\n | 'noUpdateTTL'\n | 'noDeleteOnFetchRejection'\n | 'allowStaleOnFetchRejection'\n | 'ignoreFetchAbort'\n | 'allowStaleOnFetchAbort'\n > {\n status?: Status\n size?: Size\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#fetch} method.\n */\n export interface FetchOptions\n extends FetcherFetchOptions {\n /**\n * Set to true to force a re-load of the existing data, even if it\n * is not yet stale.\n */\n forceRefresh?: boolean\n /**\n * Context provided to the {@link OptionsBase.fetchMethod} as\n * the {@link FetcherOptions.context} param.\n *\n * If the FC type is specified as unknown (the default),\n * undefined or void, then this is optional. 
Otherwise, it will\n * be required.\n */\n context?: FC\n signal?: AbortSignal\n status?: Status\n }\n /**\n * Options provided to {@link LRUCache#fetch} when the FC type is something\n * other than `unknown`, `undefined`, or `void`\n */\n export interface FetchOptionsWithContext\n extends FetchOptions {\n context: FC\n }\n /**\n * Options provided to {@link LRUCache#fetch} when the FC type is\n * `undefined` or `void`\n */\n export interface FetchOptionsNoContext\n extends FetchOptions {\n context?: undefined\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#has} method.\n */\n export interface HasOptions\n extends Pick, 'updateAgeOnHas'> {\n status?: Status\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#get} method.\n */\n export interface GetOptions\n extends Pick<\n OptionsBase,\n 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet'\n > {\n status?: Status\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#peek} method.\n */\n export interface PeekOptions\n extends Pick, 'allowStale'> {}\n\n /**\n * Options that may be passed to the {@link LRUCache#set} method.\n */\n export interface SetOptions\n extends Pick<\n OptionsBase,\n 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'\n > {\n /**\n * If size tracking is enabled, then setting an explicit size\n * in the {@link LRUCache#set} call will prevent calling the\n * {@link OptionsBase.sizeCalculation} function.\n */\n size?: Size\n /**\n * If TTL tracking is enabled, then setting an explicit start\n * time in the {@link LRUCache#set} call will override the\n * default time from `performance.now()` or `Date.now()`.\n *\n * Note that it must be a valid value for whichever time-tracking\n * method is in use.\n */\n start?: Milliseconds\n status?: Status\n }\n\n /**\n * The type signature for the {@link OptionsBase.fetchMethod} option.\n */\n export type Fetcher = (\n key: K,\n staleValue: V | undefined,\n options: FetcherOptions\n ) => Promise | V | void | undefined\n\n /**\n * Options which may be passed to the {@link LRUCache} constructor.\n *\n * Most of these may be overridden in the various options that use\n * them.\n *\n * Despite all being technically optional, the constructor requires that\n * a cache is at minimum limited by one or more of {@link OptionsBase.max},\n * {@link OptionsBase.ttl}, or {@link OptionsBase.maxSize}.\n *\n * If {@link OptionsBase.ttl} is used alone, then it is strongly advised\n * (and in fact required by the type definitions here) that the cache\n * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially\n * unbounded storage.\n */\n export interface OptionsBase {\n /**\n * The maximum number of items to store in the cache before evicting\n * old entries. This is read-only on the {@link LRUCache} instance,\n * and may not be overridden.\n *\n * If set, then storage space will be pre-allocated at construction\n * time, and the cache will perform significantly faster.\n *\n * Note that significantly fewer items may be stored, if\n * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also\n * set.\n */\n max?: Count\n\n /**\n * Max time in milliseconds for items to live in cache before they are\n * considered stale. 
Note that stale items are NOT preemptively removed\n * by default, and MAY live in the cache long after they have expired.\n *\n * Also, as this cache is optimized for LRU/MRU operations, some of\n * the staleness/TTL checks will reduce performance, as they will incur\n * overhead by deleting items.\n *\n * Must be an integer number of ms. If set to 0, this indicates \"no TTL\"\n *\n * @default 0\n */\n ttl?: Milliseconds\n\n /**\n * Minimum amount of time in ms in which to check for staleness.\n * Defaults to 1, which means that the current time is checked\n * at most once per millisecond.\n *\n * Set to 0 to check the current time every time staleness is tested.\n * (This reduces performance, and is theoretically unnecessary.)\n *\n * Setting this to a higher value will improve performance somewhat\n * while using ttl tracking, albeit at the expense of keeping stale\n * items around a bit longer than their TTLs would indicate.\n *\n * @default 1\n */\n ttlResolution?: Milliseconds\n\n /**\n * Preemptively remove stale items from the cache.\n * Note that this may significantly degrade performance,\n * especially if the cache is storing a large number of items.\n * It is almost always best to just leave the stale items in\n * the cache, and let them fall out as new items are added.\n *\n * Note that this means that {@link OptionsBase.allowStale} is a bit\n * pointless, as stale items will be deleted almost as soon as they\n * expire.\n *\n * @default false\n */\n ttlAutopurge?: boolean\n\n /**\n * Update the age of items on {@link LRUCache#get}, renewing their TTL\n *\n * Has no effect if {@link OptionsBase.ttl} is not set.\n *\n * @default false\n */\n updateAgeOnGet?: boolean\n\n /**\n * Update the age of items on {@link LRUCache#has}, renewing their TTL\n *\n * Has no effect if {@link OptionsBase.ttl} is not set.\n *\n * @default false\n */\n updateAgeOnHas?: boolean\n\n /**\n * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return\n * stale data, if available.\n */\n allowStale?: boolean\n\n /**\n * Function that is called on items when they are dropped from the cache.\n * This can be handy if you want to close file descriptors or do other\n * cleanup tasks when items are no longer accessible. Called with `key,\n * value`. It's called before actually removing the item from the\n * internal cache, so it is *NOT* safe to re-add them.\n *\n * Use {@link OptionsBase.disposeAfter} if you wish to dispose items after\n * they have been full removed, when it is safe to add them back to the\n * cache.\n */\n dispose?: Disposer\n\n /**\n * The same as {@link OptionsBase.dispose}, but called *after* the entry\n * is completely removed and the cache is once again in a clean state.\n * It is safe to add an item right back into the cache at this point.\n * However, note that it is *very* easy to inadvertently create infinite\n * recursion this way.\n */\n disposeAfter?: Disposer\n\n /**\n * Set to true to suppress calling the\n * {@link OptionsBase.dispose} function if the entry key is\n * still accessible within the cache.\n * This may be overridden by passing an options object to\n * {@link LRUCache#set}.\n */\n noDisposeOnSet?: boolean\n\n /**\n * Boolean flag to tell the cache to not update the TTL when\n * setting a new value for an existing key (ie, when updating a value\n * rather than inserting a new value). 
Note that the TTL value is\n * _always_ set (if provided) when adding a new entry into the cache.\n *\n * Has no effect if a {@link OptionsBase.ttl} is not set.\n */\n noUpdateTTL?: boolean\n\n /**\n * If you wish to track item size, you must provide a maxSize\n * note that we still will only keep up to max *actual items*,\n * if max is set, so size tracking may cause fewer than max items\n * to be stored. At the extreme, a single item of maxSize size\n * will cause everything else in the cache to be dropped when it\n * is added. Use with caution!\n *\n * Note also that size tracking can negatively impact performance,\n * though for most cases, only minimally.\n */\n maxSize?: Size\n\n /**\n * The maximum allowed size for any single item in the cache.\n *\n * If a larger item is passed to {@link LRUCache#set} or returned by a\n * {@link OptionsBase.fetchMethod}, then it will not be stored in the\n * cache.\n */\n maxEntrySize?: Size\n\n /**\n * A function that returns a number indicating the item's size.\n *\n * If not provided, and {@link OptionsBase.maxSize} or\n * {@link OptionsBase.maxEntrySize} are set, then all\n * {@link LRUCache#set} calls **must** provide an explicit\n * {@link SetOptions.size} or sizeCalculation param.\n */\n sizeCalculation?: SizeCalculator\n\n /**\n * Method that provides the implementation for {@link LRUCache#fetch}\n */\n fetchMethod?: Fetcher\n\n /**\n * Set to true to suppress the deletion of stale data when a\n * {@link OptionsBase.fetchMethod} returns a rejected promise.\n */\n noDeleteOnFetchRejection?: boolean\n\n /**\n * Do not delete stale items when they are retrieved with\n * {@link LRUCache#get}.\n *\n * Note that the `get` return value will still be `undefined`\n * unless {@link OptionsBase.allowStale} is true.\n */\n noDeleteOnStaleGet?: boolean\n\n /**\n * Set to true to allow returning stale data when a\n * {@link OptionsBase.fetchMethod} throws an error or returns a rejected\n * promise.\n *\n * This differs from using {@link OptionsBase.allowStale} in that stale\n * data will ONLY be returned in the case that the\n * {@link LRUCache#fetch} fails, not any other times.\n */\n allowStaleOnFetchRejection?: boolean\n\n /**\n * Set to true to return a stale value from the cache when the\n * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches an `'abort'`\n * event, whether user-triggered, or due to internal cache behavior.\n *\n * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying\n * {@link OptionsBase.fetchMethod} will still be considered canceled, and\n * any value it returns will be ignored and not cached.\n *\n * Caveat: since fetches are aborted when a new value is explicitly\n * set in the cache, this can lead to fetch returning a stale value,\n * since that was the fallback value _at the moment the `fetch()` was\n * initiated_, even though the new updated value is now present in\n * the cache.\n *\n * For example:\n *\n * ```ts\n * const cache = new LRUCache({\n * ttl: 100,\n * fetchMethod: async (url, oldValue, { signal }) => {\n * const res = await fetch(url, { signal })\n * return await res.json()\n * }\n * })\n * cache.set('https://example.com/', { some: 'data' })\n * // 100ms go by...\n * const result = cache.fetch('https://example.com/')\n * cache.set('https://example.com/', { other: 'thing' })\n * console.log(await result) // { some: 'data' }\n * console.log(cache.get('https://example.com/')) // { other: 'thing' }\n * ```\n */\n allowStaleOnFetchAbort?: boolean\n\n /**\n * Set to true to ignore 
the `abort` event emitted by the `AbortSignal`\n * object passed to {@link OptionsBase.fetchMethod}, and still cache the\n * resulting resolution value, as long as it is not `undefined`.\n *\n * When used on its own, this means aborted {@link LRUCache#fetch} calls are not\n * immediately resolved or rejected when they are aborted, and instead\n * take the full time to await.\n *\n * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted\n * {@link LRUCache#fetch} calls will resolve immediately to their stale\n * cached value or `undefined`, and will continue to process and eventually\n * update the cache when they resolve, as long as the resulting value is\n * not `undefined`, thus supporting a \"return stale on timeout while\n * refreshing\" mechanism by passing `AbortSignal.timeout(n)` as the signal.\n *\n * **Note**: regardless of this setting, an `abort` event _is still\n * emitted on the `AbortSignal` object_, so may result in invalid results\n * when passed to other underlying APIs that use AbortSignals.\n *\n * This may be overridden in the {@link OptionsBase.fetchMethod} or the\n * call to {@link LRUCache#fetch}.\n */\n ignoreFetchAbort?: boolean\n }\n\n export interface OptionsMaxLimit\n extends OptionsBase {\n max: Count\n }\n export interface OptionsTTLLimit\n extends OptionsBase {\n ttl: Milliseconds\n ttlAutopurge: boolean\n }\n export interface OptionsSizeLimit\n extends OptionsBase {\n maxSize: Size\n }\n\n /**\n * The valid safe options for the {@link LRUCache} constructor\n */\n export type Options =\n | OptionsMaxLimit\n | OptionsSizeLimit\n | OptionsTTLLimit\n\n /**\n * Entry objects used by {@link LRUCache#load} and {@link LRUCache#dump}\n */\n export interface Entry {\n value: V\n ttl?: Milliseconds\n size?: Size\n start?: Milliseconds\n }\n}\n\n/**\n * Default export, the thing you're using this module to get.\n *\n * All properties from the options object (with the exception of\n * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as\n * normal public members. (`max` and `maxBase` are read-only getters.)\n * Changing any of these will alter the defaults for subsequent method calls,\n * but is otherwise safe.\n */\nexport class LRUCache {\n // properties coming in from the options of these, only max and maxSize\n // really *need* to be protected. 
The rest can be modified, as they just\n // set defaults for various methods.\n readonly #max: LRUCache.Count\n readonly #maxSize: LRUCache.Size\n readonly #dispose?: LRUCache.Disposer\n readonly #disposeAfter?: LRUCache.Disposer\n readonly #fetchMethod?: LRUCache.Fetcher\n\n /**\n * {@link LRUCache.OptionsBase.ttl}\n */\n ttl: LRUCache.Milliseconds\n\n /**\n * {@link LRUCache.OptionsBase.ttlResolution}\n */\n ttlResolution: LRUCache.Milliseconds\n /**\n * {@link LRUCache.OptionsBase.ttlAutopurge}\n */\n ttlAutopurge: boolean\n /**\n * {@link LRUCache.OptionsBase.updateAgeOnGet}\n */\n updateAgeOnGet: boolean\n /**\n * {@link LRUCache.OptionsBase.updateAgeOnHas}\n */\n updateAgeOnHas: boolean\n /**\n * {@link LRUCache.OptionsBase.allowStale}\n */\n allowStale: boolean\n\n /**\n * {@link LRUCache.OptionsBase.noDisposeOnSet}\n */\n noDisposeOnSet: boolean\n /**\n * {@link LRUCache.OptionsBase.noUpdateTTL}\n */\n noUpdateTTL: boolean\n /**\n * {@link LRUCache.OptionsBase.maxEntrySize}\n */\n maxEntrySize: LRUCache.Size\n /**\n * {@link LRUCache.OptionsBase.sizeCalculation}\n */\n sizeCalculation?: LRUCache.SizeCalculator\n /**\n * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}\n */\n noDeleteOnFetchRejection: boolean\n /**\n * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}\n */\n noDeleteOnStaleGet: boolean\n /**\n * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}\n */\n allowStaleOnFetchAbort: boolean\n /**\n * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}\n */\n allowStaleOnFetchRejection: boolean\n /**\n * {@link LRUCache.OptionsBase.ignoreFetchAbort}\n */\n ignoreFetchAbort: boolean\n\n // computed properties\n #size: LRUCache.Count\n #calculatedSize: LRUCache.Size\n #keyMap: Map\n #keyList: (K | undefined)[]\n #valList: (V | BackgroundFetch | undefined)[]\n #next: NumberArray\n #prev: NumberArray\n #head: Index\n #tail: Index\n #free: StackLike\n #disposed?: DisposeTask[]\n #sizes?: ZeroArray\n #starts?: ZeroArray\n #ttls?: ZeroArray\n\n #hasDispose: boolean\n #hasFetchMethod: boolean\n #hasDisposeAfter: boolean\n\n /**\n * Do not call this method unless you need to inspect the\n * inner workings of the cache. 
If anything returned by this\n * object is modified in any way, strange breakage may occur.\n *\n * These fields are private for a reason!\n *\n * @internal\n */\n static unsafeExposeInternals<\n K extends {},\n V extends {},\n FC extends unknown = unknown\n >(c: LRUCache) {\n return {\n // properties\n starts: c.#starts,\n ttls: c.#ttls,\n sizes: c.#sizes,\n keyMap: c.#keyMap as Map,\n keyList: c.#keyList,\n valList: c.#valList,\n next: c.#next,\n prev: c.#prev,\n get head() {\n return c.#head\n },\n get tail() {\n return c.#tail\n },\n free: c.#free,\n // methods\n isBackgroundFetch: (p: any) => c.#isBackgroundFetch(p),\n backgroundFetch: (\n k: K,\n index: number | undefined,\n options: LRUCache.FetchOptions,\n context: any\n ): BackgroundFetch =>\n c.#backgroundFetch(\n k,\n index as Index | undefined,\n options,\n context\n ),\n moveToTail: (index: number): void =>\n c.#moveToTail(index as Index),\n indexes: (options?: { allowStale: boolean }) =>\n c.#indexes(options),\n rindexes: (options?: { allowStale: boolean }) =>\n c.#rindexes(options),\n isStale: (index: number | undefined) =>\n c.#isStale(index as Index),\n }\n }\n\n // Protected read-only members\n\n /**\n * {@link LRUCache.OptionsBase.max} (read-only)\n */\n get max(): LRUCache.Count {\n return this.#max\n }\n /**\n * {@link LRUCache.OptionsBase.maxSize} (read-only)\n */\n get maxSize(): LRUCache.Count {\n return this.#maxSize\n }\n /**\n * The total computed size of items in the cache (read-only)\n */\n get calculatedSize(): LRUCache.Size {\n return this.#calculatedSize\n }\n /**\n * The number of items stored in the cache (read-only)\n */\n get size(): LRUCache.Count {\n return this.#size\n }\n /**\n * {@link LRUCache.OptionsBase.fetchMethod} (read-only)\n */\n get fetchMethod(): LRUCache.Fetcher | undefined {\n return this.#fetchMethod\n }\n /**\n * {@link LRUCache.OptionsBase.dispose} (read-only)\n */\n get dispose() {\n return this.#dispose\n }\n /**\n * {@link LRUCache.OptionsBase.disposeAfter} (read-only)\n */\n get disposeAfter() {\n return this.#disposeAfter\n }\n\n constructor(\n options: LRUCache.Options | LRUCache\n ) {\n const {\n max = 0,\n ttl,\n ttlResolution = 1,\n ttlAutopurge,\n updateAgeOnGet,\n updateAgeOnHas,\n allowStale,\n dispose,\n disposeAfter,\n noDisposeOnSet,\n noUpdateTTL,\n maxSize = 0,\n maxEntrySize = 0,\n sizeCalculation,\n fetchMethod,\n noDeleteOnFetchRejection,\n noDeleteOnStaleGet,\n allowStaleOnFetchRejection,\n allowStaleOnFetchAbort,\n ignoreFetchAbort,\n } = options\n\n if (max !== 0 && !isPosInt(max)) {\n throw new TypeError('max option must be a nonnegative integer')\n }\n\n const UintArray = max ? 
getUintArray(max) : Array\n if (!UintArray) {\n throw new Error('invalid max value: ' + max)\n }\n\n this.#max = max\n this.#maxSize = maxSize\n this.maxEntrySize = maxEntrySize || this.#maxSize\n this.sizeCalculation = sizeCalculation\n if (this.sizeCalculation) {\n if (!this.#maxSize && !this.maxEntrySize) {\n throw new TypeError(\n 'cannot set sizeCalculation without setting maxSize or maxEntrySize'\n )\n }\n if (typeof this.sizeCalculation !== 'function') {\n throw new TypeError('sizeCalculation set to non-function')\n }\n }\n\n if (\n fetchMethod !== undefined &&\n typeof fetchMethod !== 'function'\n ) {\n throw new TypeError(\n 'fetchMethod must be a function if specified'\n )\n }\n this.#fetchMethod = fetchMethod\n this.#hasFetchMethod = !!fetchMethod\n\n this.#keyMap = new Map()\n this.#keyList = new Array(max).fill(undefined)\n this.#valList = new Array(max).fill(undefined)\n this.#next = new UintArray(max)\n this.#prev = new UintArray(max)\n this.#head = 0 as Index\n this.#tail = 0 as Index\n this.#free = Stack.create(max)\n this.#size = 0\n this.#calculatedSize = 0\n\n if (typeof dispose === 'function') {\n this.#dispose = dispose\n }\n if (typeof disposeAfter === 'function') {\n this.#disposeAfter = disposeAfter\n this.#disposed = []\n } else {\n this.#disposeAfter = undefined\n this.#disposed = undefined\n }\n this.#hasDispose = !!this.#dispose\n this.#hasDisposeAfter = !!this.#disposeAfter\n\n this.noDisposeOnSet = !!noDisposeOnSet\n this.noUpdateTTL = !!noUpdateTTL\n this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection\n this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection\n this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort\n this.ignoreFetchAbort = !!ignoreFetchAbort\n\n // NB: maxEntrySize is set to maxSize if it's set\n if (this.maxEntrySize !== 0) {\n if (this.#maxSize !== 0) {\n if (!isPosInt(this.#maxSize)) {\n throw new TypeError(\n 'maxSize must be a positive integer if specified'\n )\n }\n }\n if (!isPosInt(this.maxEntrySize)) {\n throw new TypeError(\n 'maxEntrySize must be a positive integer if specified'\n )\n }\n this.#initializeSizeTracking()\n }\n\n this.allowStale = !!allowStale\n this.noDeleteOnStaleGet = !!noDeleteOnStaleGet\n this.updateAgeOnGet = !!updateAgeOnGet\n this.updateAgeOnHas = !!updateAgeOnHas\n this.ttlResolution =\n isPosInt(ttlResolution) || ttlResolution === 0\n ? ttlResolution\n : 1\n this.ttlAutopurge = !!ttlAutopurge\n this.ttl = ttl || 0\n if (this.ttl) {\n if (!isPosInt(this.ttl)) {\n throw new TypeError(\n 'ttl must be a positive integer if specified'\n )\n }\n this.#initializeTTLTracking()\n }\n\n // do not allow completely unbounded caches\n if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {\n throw new TypeError(\n 'At least one of max, maxSize, or ttl is required'\n )\n }\n if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {\n const code = 'LRU_CACHE_UNBOUNDED'\n if (shouldWarn(code)) {\n warned.add(code)\n const msg =\n 'TTL caching without ttlAutopurge, max, or maxSize can ' +\n 'result in unbounded memory consumption.'\n emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)\n }\n }\n }\n\n /**\n * Return the remaining TTL time for a given entry key\n */\n getRemainingTTL(key: K) {\n return this.#keyMap.has(key) ? Infinity : 0\n }\n\n #initializeTTLTracking() {\n const ttls = new ZeroArray(this.#max)\n const starts = new ZeroArray(this.#max)\n this.#ttls = ttls\n this.#starts = starts\n\n this.#setItemTTL = (index, ttl, start = perf.now()) => {\n starts[index] = ttl !== 0 ? 
start : 0\n ttls[index] = ttl\n if (ttl !== 0 && this.ttlAutopurge) {\n const t = setTimeout(() => {\n if (this.#isStale(index)) {\n this.delete(this.#keyList[index] as K)\n }\n }, ttl + 1)\n // unref() not supported on all platforms\n /* c8 ignore start */\n if (t.unref) {\n t.unref()\n }\n /* c8 ignore stop */\n }\n }\n\n this.#updateItemAge = index => {\n starts[index] = ttls[index] !== 0 ? perf.now() : 0\n }\n\n this.#statusTTL = (status, index) => {\n if (ttls[index]) {\n const ttl = ttls[index]\n const start = starts[index]\n status.ttl = ttl\n status.start = start\n status.now = cachedNow || getNow()\n const age = status.now - start\n status.remainingTTL = ttl - age\n }\n }\n\n // debounce calls to perf.now() to 1s so we're not hitting\n // that costly call repeatedly.\n let cachedNow = 0\n const getNow = () => {\n const n = perf.now()\n if (this.ttlResolution > 0) {\n cachedNow = n\n const t = setTimeout(\n () => (cachedNow = 0),\n this.ttlResolution\n )\n // not available on all platforms\n /* c8 ignore start */\n if (t.unref) {\n t.unref()\n }\n /* c8 ignore stop */\n }\n return n\n }\n\n this.getRemainingTTL = key => {\n const index = this.#keyMap.get(key)\n if (index === undefined) {\n return 0\n }\n const ttl = ttls[index]\n const start = starts[index]\n if (ttl === 0 || start === 0) {\n return Infinity\n }\n const age = (cachedNow || getNow()) - start\n return ttl - age\n }\n\n this.#isStale = index => {\n return (\n ttls[index] !== 0 &&\n starts[index] !== 0 &&\n (cachedNow || getNow()) - starts[index] > ttls[index]\n )\n }\n }\n\n // conditionally set private methods related to TTL\n #updateItemAge: (index: Index) => void = () => {}\n #statusTTL: (status: LRUCache.Status, index: Index) => void =\n () => {}\n #setItemTTL: (\n index: Index,\n ttl: LRUCache.Milliseconds,\n start?: LRUCache.Milliseconds\n // ignore because we never call this if we're not already in TTL mode\n /* c8 ignore start */\n ) => void = () => {}\n /* c8 ignore stop */\n\n #isStale: (index: Index) => boolean = () => false\n\n #initializeSizeTracking() {\n const sizes = new ZeroArray(this.#max)\n this.#calculatedSize = 0\n this.#sizes = sizes\n this.#removeItemSize = index => {\n this.#calculatedSize -= sizes[index]\n sizes[index] = 0\n }\n this.#requireSize = (k, v, size, sizeCalculation) => {\n // provisionally accept background fetches.\n // actual value size will be checked when they return.\n if (this.#isBackgroundFetch(v)) {\n return 0\n }\n if (!isPosInt(size)) {\n if (sizeCalculation) {\n if (typeof sizeCalculation !== 'function') {\n throw new TypeError('sizeCalculation must be a function')\n }\n size = sizeCalculation(v, k)\n if (!isPosInt(size)) {\n throw new TypeError(\n 'sizeCalculation return invalid (expect positive integer)'\n )\n }\n } else {\n throw new TypeError(\n 'invalid size value (must be positive integer). 
' +\n 'When maxSize or maxEntrySize is used, sizeCalculation ' +\n 'or size must be set.'\n )\n }\n }\n return size\n }\n this.#addItemSize = (\n index: Index,\n size: LRUCache.Size,\n status?: LRUCache.Status\n ) => {\n sizes[index] = size\n if (this.#maxSize) {\n const maxSize = this.#maxSize - sizes[index]\n while (this.#calculatedSize > maxSize) {\n this.#evict(true)\n }\n }\n this.#calculatedSize += sizes[index]\n if (status) {\n status.entrySize = size\n status.totalCalculatedSize = this.#calculatedSize\n }\n }\n }\n\n #removeItemSize: (index: Index) => void = _i => {}\n #addItemSize: (\n index: Index,\n size: LRUCache.Size,\n status?: LRUCache.Status\n ) => void = (_i, _s, _st) => {}\n #requireSize: (\n k: K,\n v: V | BackgroundFetch,\n size?: LRUCache.Size,\n sizeCalculation?: LRUCache.SizeCalculator\n ) => LRUCache.Size = (\n _k: K,\n _v: V | BackgroundFetch,\n size?: LRUCache.Size,\n sizeCalculation?: LRUCache.SizeCalculator\n ) => {\n if (size || sizeCalculation) {\n throw new TypeError(\n 'cannot set size without setting maxSize or maxEntrySize on cache'\n )\n }\n return 0\n };\n\n *#indexes({ allowStale = this.allowStale } = {}) {\n if (this.#size) {\n for (let i = this.#tail; true; ) {\n if (!this.#isValidIndex(i)) {\n break\n }\n if (allowStale || !this.#isStale(i)) {\n yield i\n }\n if (i === this.#head) {\n break\n } else {\n i = this.#prev[i] as Index\n }\n }\n }\n }\n\n *#rindexes({ allowStale = this.allowStale } = {}) {\n if (this.#size) {\n for (let i = this.#head; true; ) {\n if (!this.#isValidIndex(i)) {\n break\n }\n if (allowStale || !this.#isStale(i)) {\n yield i\n }\n if (i === this.#tail) {\n break\n } else {\n i = this.#next[i] as Index\n }\n }\n }\n }\n\n #isValidIndex(index: Index) {\n return (\n index !== undefined &&\n this.#keyMap.get(this.#keyList[index] as K) === index\n )\n }\n\n /**\n * Return a generator yielding `[key, value]` pairs,\n * in order from most recently used to least recently used.\n */\n *entries() {\n for (const i of this.#indexes()) {\n if (\n this.#valList[i] !== undefined &&\n this.#keyList[i] !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield [this.#keyList[i], this.#valList[i]]\n }\n }\n }\n\n /**\n * Inverse order version of {@link LRUCache.entries}\n *\n * Return a generator yielding `[key, value]` pairs,\n * in order from least recently used to most recently used.\n */\n *rentries() {\n for (const i of this.#rindexes()) {\n if (\n this.#valList[i] !== undefined &&\n this.#keyList[i] !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield [this.#keyList[i], this.#valList[i]]\n }\n }\n }\n\n /**\n * Return a generator yielding the keys in the cache,\n * in order from most recently used to least recently used.\n */\n *keys() {\n for (const i of this.#indexes()) {\n const k = this.#keyList[i]\n if (\n k !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield k\n }\n }\n }\n\n /**\n * Inverse order version of {@link LRUCache.keys}\n *\n * Return a generator yielding the keys in the cache,\n * in order from least recently used to most recently used.\n */\n *rkeys() {\n for (const i of this.#rindexes()) {\n const k = this.#keyList[i]\n if (\n k !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield k\n }\n }\n }\n\n /**\n * Return a generator yielding the values in the cache,\n * in order from most recently used to least recently used.\n */\n *values() {\n for (const i of this.#indexes()) {\n const v = this.#valList[i]\n if (\n v !== undefined &&\n 
!this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield this.#valList[i]\n }\n }\n }\n\n /**\n * Inverse order version of {@link LRUCache.values}\n *\n * Return a generator yielding the values in the cache,\n * in order from least recently used to most recently used.\n */\n *rvalues() {\n for (const i of this.#rindexes()) {\n const v = this.#valList[i]\n if (\n v !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield this.#valList[i]\n }\n }\n }\n\n /**\n * Iterating over the cache itself yields the same results as\n * {@link LRUCache.entries}\n */\n [Symbol.iterator]() {\n return this.entries()\n }\n\n /**\n * Find a value for which the supplied fn method returns a truthy value,\n * similar to Array.find(). fn is called as fn(value, key, cache).\n */\n find(\n fn: (v: V, k: K, self: LRUCache) => boolean,\n getOptions: LRUCache.GetOptions = {}\n ) {\n for (const i of this.#indexes()) {\n const v = this.#valList[i]\n const value = this.#isBackgroundFetch(v)\n ? v.__staleWhileFetching\n : v\n if (value === undefined) continue\n if (fn(value, this.#keyList[i] as K, this)) {\n return this.get(this.#keyList[i] as K, getOptions)\n }\n }\n }\n\n /**\n * Call the supplied function on each item in the cache, in order from\n * most recently used to least recently used. fn is called as\n * fn(value, key, cache). Does not update age or recenty of use.\n * Does not iterate over stale values.\n */\n forEach(\n fn: (v: V, k: K, self: LRUCache) => any,\n thisp: any = this\n ) {\n for (const i of this.#indexes()) {\n const v = this.#valList[i]\n const value = this.#isBackgroundFetch(v)\n ? v.__staleWhileFetching\n : v\n if (value === undefined) continue\n fn.call(thisp, value, this.#keyList[i] as K, this)\n }\n }\n\n /**\n * The same as {@link LRUCache.forEach} but items are iterated over in\n * reverse order. (ie, less recently used items are iterated over first.)\n */\n rforEach(\n fn: (v: V, k: K, self: LRUCache) => any,\n thisp: any = this\n ) {\n for (const i of this.#rindexes()) {\n const v = this.#valList[i]\n const value = this.#isBackgroundFetch(v)\n ? v.__staleWhileFetching\n : v\n if (value === undefined) continue\n fn.call(thisp, value, this.#keyList[i] as K, this)\n }\n }\n\n /**\n * Delete any stale entries. Returns true if anything was removed,\n * false otherwise.\n */\n purgeStale() {\n let deleted = false\n for (const i of this.#rindexes({ allowStale: true })) {\n if (this.#isStale(i)) {\n this.delete(this.#keyList[i] as K)\n deleted = true\n }\n }\n return deleted\n }\n\n /**\n * Return an array of [key, {@link LRUCache.Entry}] tuples which can be\n * passed to cache.load()\n */\n dump() {\n const arr: [K, LRUCache.Entry][] = []\n for (const i of this.#indexes({ allowStale: true })) {\n const key = this.#keyList[i]\n const v = this.#valList[i]\n const value: V | undefined = this.#isBackgroundFetch(v)\n ? 
v.__staleWhileFetching\n : v\n if (value === undefined || key === undefined) continue\n const entry: LRUCache.Entry = { value }\n if (this.#ttls && this.#starts) {\n entry.ttl = this.#ttls[i]\n // always dump the start relative to a portable timestamp\n // it's ok for this to be a bit slow, it's a rare operation.\n const age = perf.now() - this.#starts[i]\n entry.start = Math.floor(Date.now() - age)\n }\n if (this.#sizes) {\n entry.size = this.#sizes[i]\n }\n arr.unshift([key, entry])\n }\n return arr\n }\n\n /**\n * Reset the cache and load in the items in entries in the order listed.\n * Note that the shape of the resulting cache may be different if the\n * same options are not used in both caches.\n */\n load(arr: [K, LRUCache.Entry][]) {\n this.clear()\n for (const [key, entry] of arr) {\n if (entry.start) {\n // entry.start is a portable timestamp, but we may be using\n // node's performance.now(), so calculate the offset, so that\n // we get the intended remaining TTL, no matter how long it's\n // been on ice.\n //\n // it's ok for this to be a bit slow, it's a rare operation.\n const age = Date.now() - entry.start\n entry.start = perf.now() - age\n }\n this.set(key, entry.value, entry)\n }\n }\n\n /**\n * Add a value to the cache.\n *\n * Note: if `undefined` is specified as a value, this is an alias for\n * {@link LRUCache#delete}\n */\n set(\n k: K,\n v: V | BackgroundFetch | undefined,\n setOptions: LRUCache.SetOptions = {}\n ) {\n if (v === undefined) {\n this.delete(k)\n return this\n }\n const {\n ttl = this.ttl,\n start,\n noDisposeOnSet = this.noDisposeOnSet,\n sizeCalculation = this.sizeCalculation,\n status,\n } = setOptions\n let { noUpdateTTL = this.noUpdateTTL } = setOptions\n\n const size = this.#requireSize(\n k,\n v,\n setOptions.size || 0,\n sizeCalculation\n )\n // if the item doesn't fit, don't do anything\n // NB: maxEntrySize set to maxSize by default\n if (this.maxEntrySize && size > this.maxEntrySize) {\n if (status) {\n status.set = 'miss'\n status.maxEntrySizeExceeded = true\n }\n // have to delete, in case something is there already.\n this.delete(k)\n return this\n }\n let index = this.#size === 0 ? undefined : this.#keyMap.get(k)\n if (index === undefined) {\n // addition\n index = (\n this.#size === 0\n ? this.#tail\n : this.#free.length !== 0\n ? this.#free.pop()\n : this.#size === this.#max\n ? this.#evict(false)\n : this.#size\n ) as Index\n this.#keyList[index] = k\n this.#valList[index] = v\n this.#keyMap.set(k, index)\n this.#next[this.#tail] = index\n this.#prev[index] = this.#tail\n this.#tail = index\n this.#size++\n this.#addItemSize(index, size, status)\n if (status) status.set = 'add'\n noUpdateTTL = false\n } else {\n // update\n this.#moveToTail(index)\n const oldVal = this.#valList[index] as V | BackgroundFetch\n if (v !== oldVal) {\n if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {\n oldVal.__abortController.abort(new Error('replaced'))\n } else if (!noDisposeOnSet) {\n if (this.#hasDispose) {\n this.#dispose?.(oldVal as V, k, 'set')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([oldVal as V, k, 'set'])\n }\n }\n this.#removeItemSize(index)\n this.#addItemSize(index, size, status)\n this.#valList[index] = v\n if (status) {\n status.set = 'replace'\n const oldValue =\n oldVal && this.#isBackgroundFetch(oldVal)\n ? 
oldVal.__staleWhileFetching\n : oldVal\n if (oldValue !== undefined) status.oldValue = oldValue\n }\n } else if (status) {\n status.set = 'update'\n }\n }\n if (ttl !== 0 && !this.#ttls) {\n this.#initializeTTLTracking()\n }\n if (this.#ttls) {\n if (!noUpdateTTL) {\n this.#setItemTTL(index, ttl, start)\n }\n if (status) this.#statusTTL(status, index)\n }\n if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n return this\n }\n\n /**\n * Evict the least recently used item, returning its value or\n * `undefined` if cache is empty.\n */\n pop(): V | undefined {\n try {\n while (this.#size) {\n const val = this.#valList[this.#head]\n this.#evict(true)\n if (this.#isBackgroundFetch(val)) {\n if (val.__staleWhileFetching) {\n return val.__staleWhileFetching\n }\n } else if (val !== undefined) {\n return val\n }\n }\n } finally {\n if (this.#hasDisposeAfter && this.#disposed) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n }\n }\n\n #evict(free: boolean) {\n const head = this.#head\n const k = this.#keyList[head] as K\n const v = this.#valList[head] as V\n if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {\n v.__abortController.abort(new Error('evicted'))\n } else if (this.#hasDispose || this.#hasDisposeAfter) {\n if (this.#hasDispose) {\n this.#dispose?.(v, k, 'evict')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([v, k, 'evict'])\n }\n }\n this.#removeItemSize(head)\n // if we aren't about to use the index, then null these out\n if (free) {\n this.#keyList[head] = undefined\n this.#valList[head] = undefined\n this.#free.push(head)\n }\n if (this.#size === 1) {\n this.#head = this.#tail = 0 as Index\n this.#free.length = 0\n } else {\n this.#head = this.#next[head] as Index\n }\n this.#keyMap.delete(k)\n this.#size--\n return head\n }\n\n /**\n * Check if a key is in the cache, without updating the recency of use.\n * Will return false if the item is stale, even though it is technically\n * in the cache.\n *\n * Will not update item age unless\n * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.\n */\n has(k: K, hasOptions: LRUCache.HasOptions = {}) {\n const { updateAgeOnHas = this.updateAgeOnHas, status } =\n hasOptions\n const index = this.#keyMap.get(k)\n if (index !== undefined) {\n const v = this.#valList[index]\n if (\n this.#isBackgroundFetch(v) &&\n v.__staleWhileFetching === undefined\n ) {\n return false\n }\n if (!this.#isStale(index)) {\n if (updateAgeOnHas) {\n this.#updateItemAge(index)\n }\n if (status) {\n status.has = 'hit'\n this.#statusTTL(status, index)\n }\n return true\n } else if (status) {\n status.has = 'stale'\n this.#statusTTL(status, index)\n }\n } else if (status) {\n status.has = 'miss'\n }\n return false\n }\n\n /**\n * Like {@link LRUCache#get} but doesn't update recency or delete stale\n * items.\n *\n * Returns `undefined` if the item is stale, unless\n * {@link LRUCache.OptionsBase.allowStale} is set.\n */\n peek(k: K, peekOptions: LRUCache.PeekOptions = {}) {\n const { allowStale = this.allowStale } = peekOptions\n const index = this.#keyMap.get(k)\n if (\n index !== undefined &&\n (allowStale || !this.#isStale(index))\n ) {\n const v = this.#valList[index]\n // either stale and allowed, or forcing a refresh of non-stale value\n return this.#isBackgroundFetch(v) ? 
v.__staleWhileFetching : v\n }\n }\n\n #backgroundFetch(\n k: K,\n index: Index | undefined,\n options: LRUCache.FetchOptions,\n context: any\n ): BackgroundFetch {\n const v = index === undefined ? undefined : this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n return v\n }\n\n const ac = new AC()\n const { signal } = options\n // when/if our AC signals, then stop listening to theirs.\n signal?.addEventListener('abort', () => ac.abort(signal.reason), {\n signal: ac.signal,\n })\n\n const fetchOpts = {\n signal: ac.signal,\n options,\n context,\n }\n\n const cb = (\n v: V | void | undefined,\n updateCache = false\n ): V | undefined | void => {\n const { aborted } = ac.signal\n const ignoreAbort = options.ignoreFetchAbort && v !== undefined\n if (options.status) {\n if (aborted && !updateCache) {\n options.status.fetchAborted = true\n options.status.fetchError = ac.signal.reason\n if (ignoreAbort) options.status.fetchAbortIgnored = true\n } else {\n options.status.fetchResolved = true\n }\n }\n if (aborted && !ignoreAbort && !updateCache) {\n return fetchFail(ac.signal.reason)\n }\n // either we didn't abort, and are still here, or we did, and ignored\n const bf = p as BackgroundFetch\n if (this.#valList[index as Index] === p) {\n if (v === undefined) {\n if (bf.__staleWhileFetching) {\n this.#valList[index as Index] = bf.__staleWhileFetching\n } else {\n this.delete(k)\n }\n } else {\n if (options.status) options.status.fetchUpdated = true\n this.set(k, v, fetchOpts.options)\n }\n }\n return v\n }\n\n const eb = (er: any) => {\n if (options.status) {\n options.status.fetchRejected = true\n options.status.fetchError = er\n }\n return fetchFail(er)\n }\n\n const fetchFail = (er: any): V | undefined => {\n const { aborted } = ac.signal\n const allowStaleAborted =\n aborted && options.allowStaleOnFetchAbort\n const allowStale =\n allowStaleAborted || options.allowStaleOnFetchRejection\n const noDelete = allowStale || options.noDeleteOnFetchRejection\n const bf = p as BackgroundFetch\n if (this.#valList[index as Index] === p) {\n // if we allow stale on fetch rejections, then we need to ensure that\n // the stale value is not removed from the cache when the fetch fails.\n const del = !noDelete || bf.__staleWhileFetching === undefined\n if (del) {\n this.delete(k)\n } else if (!allowStaleAborted) {\n // still replace the *promise* with the stale value,\n // since we are done with the promise at this point.\n // leave it untouched if we're still waiting for an\n // aborted background fetch that hasn't yet returned.\n this.#valList[index as Index] = bf.__staleWhileFetching\n }\n }\n if (allowStale) {\n if (options.status && bf.__staleWhileFetching !== undefined) {\n options.status.returnedStale = true\n }\n return bf.__staleWhileFetching\n } else if (bf.__returned === bf) {\n throw er\n }\n }\n\n const pcall = (\n res: (v: V | void | undefined) => void,\n rej: (e: any) => void\n ) => {\n const fmp = this.#fetchMethod?.(k, v, fetchOpts)\n if (fmp && fmp instanceof Promise) {\n fmp.then(v => res(v), rej)\n }\n // ignored, we go until we finish, regardless.\n // defer check until we are actually aborting,\n // so fetchMethod can override.\n ac.signal.addEventListener('abort', () => {\n if (\n !options.ignoreFetchAbort ||\n options.allowStaleOnFetchAbort\n ) {\n res()\n // when it eventually resolves, update the cache.\n if (options.allowStaleOnFetchAbort) {\n res = v => cb(v, true)\n }\n }\n })\n }\n\n if (options.status) options.status.fetchDispatched = true\n const p = new 
Promise(pcall).then(cb, eb)\n const bf = Object.assign(p, {\n __abortController: ac,\n __staleWhileFetching: v,\n __returned: undefined,\n })\n\n if (index === undefined) {\n // internal, don't expose status.\n this.set(k, bf, { ...fetchOpts.options, status: undefined })\n index = this.#keyMap.get(k)\n } else {\n this.#valList[index] = bf\n }\n return bf\n }\n\n #isBackgroundFetch(p: any): p is BackgroundFetch {\n if (!this.#hasFetchMethod) return false\n const b = p as BackgroundFetch\n return (\n !!b &&\n b instanceof Promise &&\n b.hasOwnProperty('__staleWhileFetching') &&\n b.__abortController instanceof AC\n )\n }\n\n /**\n * Make an asynchronous cached fetch using the\n * {@link LRUCache.OptionsBase.fetchMethod} function.\n *\n * If multiple fetches for the same key are issued, then they will all be\n * coalesced into a single call to fetchMethod.\n *\n * Note that this means that handling options such as\n * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort},\n * {@link LRUCache.FetchOptions.signal},\n * and {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} will be\n * determined by the FIRST fetch() call for a given key.\n *\n * This is a known (fixable) shortcoming which will be addresed on when\n * someone complains about it, as the fix would involve added complexity and\n * may not be worth the costs for this edge case.\n */\n fetch(\n k: K,\n fetchOptions: unknown extends FC\n ? LRUCache.FetchOptions\n : FC extends undefined | void\n ? LRUCache.FetchOptionsNoContext\n : LRUCache.FetchOptionsWithContext\n ): Promise\n // this overload not allowed if context is required\n fetch(\n k: unknown extends FC\n ? K\n : FC extends undefined | void\n ? K\n : never,\n fetchOptions?: unknown extends FC\n ? LRUCache.FetchOptions\n : FC extends undefined | void\n ? LRUCache.FetchOptionsNoContext\n : never\n ): Promise\n async fetch(\n k: K,\n fetchOptions: LRUCache.FetchOptions = {}\n ): Promise {\n const {\n // get options\n allowStale = this.allowStale,\n updateAgeOnGet = this.updateAgeOnGet,\n noDeleteOnStaleGet = this.noDeleteOnStaleGet,\n // set options\n ttl = this.ttl,\n noDisposeOnSet = this.noDisposeOnSet,\n size = 0,\n sizeCalculation = this.sizeCalculation,\n noUpdateTTL = this.noUpdateTTL,\n // fetch exclusive options\n noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,\n allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,\n ignoreFetchAbort = this.ignoreFetchAbort,\n allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,\n context,\n forceRefresh = false,\n status,\n signal,\n } = fetchOptions\n\n if (!this.#hasFetchMethod) {\n if (status) status.fetch = 'get'\n return this.get(k, {\n allowStale,\n updateAgeOnGet,\n noDeleteOnStaleGet,\n status,\n })\n }\n\n const options = {\n allowStale,\n updateAgeOnGet,\n noDeleteOnStaleGet,\n ttl,\n noDisposeOnSet,\n size,\n sizeCalculation,\n noUpdateTTL,\n noDeleteOnFetchRejection,\n allowStaleOnFetchRejection,\n allowStaleOnFetchAbort,\n ignoreFetchAbort,\n status,\n signal,\n }\n\n let index = this.#keyMap.get(k)\n if (index === undefined) {\n if (status) status.fetch = 'miss'\n const p = this.#backgroundFetch(k, index, options, context)\n return (p.__returned = p)\n } else {\n // in cache, maybe already fetching\n const v = this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n const stale =\n allowStale && v.__staleWhileFetching !== undefined\n if (status) {\n status.fetch = 'inflight'\n if (stale) status.returnedStale = true\n }\n return stale ? 
v.__staleWhileFetching : (v.__returned = v)\n }\n\n // if we force a refresh, that means do NOT serve the cached value,\n // unless we are already in the process of refreshing the cache.\n const isStale = this.#isStale(index)\n if (!forceRefresh && !isStale) {\n if (status) status.fetch = 'hit'\n this.#moveToTail(index)\n if (updateAgeOnGet) {\n this.#updateItemAge(index)\n }\n if (status) this.#statusTTL(status, index)\n return v\n }\n\n // ok, it is stale or a forced refresh, and not already fetching.\n // refresh the cache.\n const p = this.#backgroundFetch(k, index, options, context)\n const hasStale = p.__staleWhileFetching !== undefined\n const staleVal = hasStale && allowStale\n if (status) {\n status.fetch = isStale ? 'stale' : 'refresh'\n if (staleVal && isStale) status.returnedStale = true\n }\n return staleVal ? p.__staleWhileFetching : (p.__returned = p)\n }\n }\n\n /**\n * Return a value from the cache. Will update the recency of the cache\n * entry found.\n *\n * If the key is not found, get() will return `undefined`.\n */\n get(k: K, getOptions: LRUCache.GetOptions = {}) {\n const {\n allowStale = this.allowStale,\n updateAgeOnGet = this.updateAgeOnGet,\n noDeleteOnStaleGet = this.noDeleteOnStaleGet,\n status,\n } = getOptions\n const index = this.#keyMap.get(k)\n if (index !== undefined) {\n const value = this.#valList[index]\n const fetching = this.#isBackgroundFetch(value)\n if (status) this.#statusTTL(status, index)\n if (this.#isStale(index)) {\n if (status) status.get = 'stale'\n // delete only if not an in-flight background fetch\n if (!fetching) {\n if (!noDeleteOnStaleGet) {\n this.delete(k)\n }\n if (status && allowStale) status.returnedStale = true\n return allowStale ? value : undefined\n } else {\n if (\n status &&\n allowStale &&\n value.__staleWhileFetching !== undefined\n ) {\n status.returnedStale = true\n }\n return allowStale ? 
value.__staleWhileFetching : undefined\n }\n } else {\n if (status) status.get = 'hit'\n // if we're currently fetching it, we don't actually have it yet\n // it's not stale, which means this isn't a staleWhileRefetching.\n // If it's not stale, and fetching, AND has a __staleWhileFetching\n // value, then that means the user fetched with {forceRefresh:true},\n // so it's safe to return that value.\n if (fetching) {\n return value.__staleWhileFetching\n }\n this.#moveToTail(index)\n if (updateAgeOnGet) {\n this.#updateItemAge(index)\n }\n return value\n }\n } else if (status) {\n status.get = 'miss'\n }\n }\n\n #connect(p: Index, n: Index) {\n this.#prev[n] = p\n this.#next[p] = n\n }\n\n #moveToTail(index: Index): void {\n // if tail already, nothing to do\n // if head, move head to next[index]\n // else\n // move next[prev[index]] to next[index] (head has no prev)\n // move prev[next[index]] to prev[index]\n // prev[index] = tail\n // next[tail] = index\n // tail = index\n if (index !== this.#tail) {\n if (index === this.#head) {\n this.#head = this.#next[index] as Index\n } else {\n this.#connect(\n this.#prev[index] as Index,\n this.#next[index] as Index\n )\n }\n this.#connect(this.#tail, index)\n this.#tail = index\n }\n }\n\n /**\n * Deletes a key out of the cache.\n * Returns true if the key was deleted, false otherwise.\n */\n delete(k: K) {\n let deleted = false\n if (this.#size !== 0) {\n const index = this.#keyMap.get(k)\n if (index !== undefined) {\n deleted = true\n if (this.#size === 1) {\n this.clear()\n } else {\n this.#removeItemSize(index)\n const v = this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n v.__abortController.abort(new Error('deleted'))\n } else if (this.#hasDispose || this.#hasDisposeAfter) {\n if (this.#hasDispose) {\n this.#dispose?.(v as V, k, 'delete')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([v as V, k, 'delete'])\n }\n }\n this.#keyMap.delete(k)\n this.#keyList[index] = undefined\n this.#valList[index] = undefined\n if (index === this.#tail) {\n this.#tail = this.#prev[index] as Index\n } else if (index === this.#head) {\n this.#head = this.#next[index] as Index\n } else {\n this.#next[this.#prev[index]] = this.#next[index]\n this.#prev[this.#next[index]] = this.#prev[index]\n }\n this.#size--\n this.#free.push(index)\n }\n }\n }\n if (this.#hasDisposeAfter && this.#disposed?.length) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n return deleted\n }\n\n /**\n * Clear the cache entirely, throwing away all values.\n */\n clear() {\n for (const index of this.#rindexes({ allowStale: true })) {\n const v = this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n v.__abortController.abort(new Error('deleted'))\n } else {\n const k = this.#keyList[index]\n if (this.#hasDispose) {\n this.#dispose?.(v as V, k as K, 'delete')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([v as V, k as K, 'delete'])\n }\n }\n }\n\n this.#keyMap.clear()\n this.#valList.fill(undefined)\n this.#keyList.fill(undefined)\n if (this.#ttls && this.#starts) {\n this.#ttls.fill(0)\n this.#starts.fill(0)\n }\n if (this.#sizes) {\n this.#sizes.fill(0)\n }\n this.#head = 0 as Index\n this.#tail = 0 as Index\n this.#free.length = 0\n this.#calculatedSize = 0\n this.#size = 0\n if (this.#hasDisposeAfter && this.#disposed) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n 
}\n}\n"], + "mappings": "+aAMA,IAAMA,EACJ,OAAO,aAAgB,UACvB,aACA,OAAO,YAAY,KAAQ,WACvB,YACA,KAEAC,EAAS,IAAI,IAMbC,EACJ,OAAO,SAAY,UAAc,QAAU,QAAU,CAAA,EAIjDC,EAAc,CAClBC,EACAC,EACAC,EACAC,IACE,CACF,OAAOL,EAAQ,aAAgB,WAC3BA,EAAQ,YAAYE,EAAKC,EAAMC,EAAMC,CAAE,EACvC,QAAQ,MAAM,IAAID,MAASD,MAASD,GAAK,CAC/C,EAEII,EAAK,WAAW,gBAChBC,EAAK,WAAW,YAGpB,GAAI,OAAOD,EAAO,IAAa,CAE7BC,EAAK,KAAiB,CACpB,QACA,SAAqC,CAAA,EACrC,OACA,QAAmB,GACnB,iBAAiBC,EAAWH,EAAwB,CAClD,KAAK,SAAS,KAAKA,CAAE,CACvB,GAGFC,EAAK,KAAqB,CACxB,aAAA,CACEG,EAAc,CAChB,CACA,OAAS,IAAIF,EACb,MAAMG,EAAW,CACf,GAAI,MAAK,OAAO,QAEhB,MAAK,OAAO,OAASA,EAErB,KAAK,OAAO,QAAU,GAEtB,QAAWL,KAAM,KAAK,OAAO,SAC3BA,EAAGK,CAAM,EAEX,KAAK,OAAO,UAAUA,CAAM,EAC9B,GAEF,IAAIC,EACFX,EAAQ,KAAK,8BAAgC,IACzCS,EAAiB,IAAK,CACrBE,IACLA,EAAyB,GACzBV,EACE,maAOA,sBACA,UACAQ,CAAc,EAElB,EAIF,IAAMG,EAAcR,GAAiB,CAACL,EAAO,IAAIK,CAAI,EAE/CS,EAAO,OAAO,MAAM,EAIpBC,EAAYC,GAChBA,GAAKA,IAAM,KAAK,MAAMA,CAAC,GAAKA,EAAI,GAAK,SAASA,CAAC,EAc3CC,EAAgBC,GACnBH,EAASG,CAAG,EAETA,GAAO,KAAK,IAAI,EAAG,CAAC,EACpB,WACAA,GAAO,KAAK,IAAI,EAAG,EAAE,EACrB,YACAA,GAAO,KAAK,IAAI,EAAG,EAAE,EACrB,YACAA,GAAO,OAAO,iBACdC,EACA,KATA,KAYAA,EAAN,cAAwB,KAAa,CACnC,YAAYC,EAAY,CACtB,MAAMA,CAAI,EACV,KAAK,KAAK,CAAC,CACb,KAIIC,EAAN,KAAW,CACT,KACA,OAGA,OAAO,OAAOH,EAAW,CACvB,IAAMI,EAAUL,EAAaC,CAAG,EAChC,GAAI,CAACI,EAAS,MAAO,CAAA,EACrBC,EAAAF,EAAMG,EAAgB,IACtB,IAAMC,EAAI,IAAIJ,EAAMH,EAAKI,CAAO,EAChC,OAAAC,EAAAF,EAAMG,EAAgB,IACfC,CACT,CACA,YACEP,EACAI,EAAyC,CAGzC,GAAI,CAACI,EAAAL,EAAMG,GACT,MAAM,IAAI,UAAU,yCAAyC,EAG/D,KAAK,KAAO,IAAIF,EAAQJ,CAAG,EAC3B,KAAK,OAAS,CAChB,CACA,KAAKF,EAAQ,CACX,KAAK,KAAK,KAAK,QAAQ,EAAIA,CAC7B,CACA,KAAG,CACD,OAAO,KAAK,KAAK,EAAE,KAAK,MAAM,CAChC,GA9BIW,EAANN,EAISG,EAAA,YAAPI,EAJID,EAIGH,EAAyB,IAkqBlC,IAAaK,EAAb,KAAqB,CAIVC,GACAC,GACAC,GACAC,GACAC,GAKT,IAKA,cAIA,aAIA,eAIA,eAIA,WAKA,eAIA,YAIA,aAIA,gBAIA,yBAIA,mBAIA,uBAIA,2BAIA,iBAGAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GAEAC,GACAC,GACAC,GAWA,OAAO,sBAILC,EAAqB,CACrB,MAAO,CAEL,OAAQA,EAAEL,GACV,KAAMK,EAAEJ,GACR,MAAOI,EAAEN,GACT,OAAQM,EAAEf,GACV,QAASe,EAAEd,GACX,QAASc,EAAEb,GACX,KAAMa,EAAEZ,GACR,KAAMY,EAAEX,GACR,IAAI,MAAI,CACN,OAAOW,EAAEV,EACX,EACA,IAAI,MAAI,CACN,OAAOU,EAAET,EACX,EACA,KAAMS,EAAER,GAER,kBAAoBS,GAAWD,EAAEE,GAAmBD,CAAC,EACrD,gBAAiB,CACfE,EACAC,EACAC,EACAC,IAEAN,EAAEO,GACAJ,EACAC,EACAC,EACAC,CAAO,EAEX,WAAaF,GACXJ,EAAEQ,GAAYJ,CAAc,EAC9B,QAAUC,GACRL,EAAES,GAASJ,CAAO,EACpB,SAAWA,GACTL,EAAEU,GAAUL,CAAO,EACrB,QAAUD,GACRJ,EAAEW,GAASP,CAAc,EAE/B,CAOA,IAAI,KAAG,CACL,OAAO,KAAK1B,EACd,CAIA,IAAI,SAAO,CACT,OAAO,KAAKC,EACd,CAIA,IAAI,gBAAc,CAChB,OAAO,KAAKK,EACd,CAIA,IAAI,MAAI,CACN,OAAO,KAAKD,EACd,CAIA,IAAI,aAAW,CACb,OAAO,KAAKD,EACd,CAIA,IAAI,SAAO,CACT,OAAO,KAAKF,EACd,CAIA,IAAI,cAAY,CACd,OAAO,KAAKC,EACd,CAEA,YACEwB,EAAwD,CAExD,GAAM,CACJ,IAAAvC,EAAM,EACN,IAAA8C,EACA,cAAAC,EAAgB,EAChB,aAAAC,EACA,eAAAC,EACA,eAAAC,EACA,WAAAC,EACA,QAAAC,EACA,aAAAC,EACA,eAAAC,EACA,YAAAC,EACA,QAAAC,EAAU,EACV,aAAAC,EAAe,EACf,gBAAAC,EACA,YAAAC,EACA,yBAAAC,EACA,mBAAAC,EACA,2BAAAC,EACA,uBAAAC,EACA,iBAAAC,CAAgB,EACdzB,EAEJ,GAAIvC,IAAQ,GAAK,CAACH,EAASG,CAAG,EAC5B,MAAM,IAAI,UAAU,0CAA0C,EAGhE,IAAMiE,EAAYjE,EAAMD,EAAaC,CAAG,EAAI,MAC5C,GAAI,CAACiE,EACH,MAAM,IAAI,MAAM,sBAAwBjE,CAAG,EAO7C,GAJA,KAAKY,GAAOZ,EACZ,KAAKa,GAAW2C,EAChB,KAAK,aAAeC,GAAgB,KAAK5C,GACzC,KAAK,gBAAkB6C,EACnB,KAAK,gBAAiB,CACxB,GAAI,CAAC,KAAK7C,IAAY,CAAC,KAAK,aAC1B,MAAM,IAAI,UACR,oEAAoE,EAGxE,GAAI,OAAO,KAAK,iBAAoB,WAClC,MAAM,IAAI,UAAU,qCAAqC,EAI7D,GACE8C,IAAgB,QAChB,OAAOA,GAAgB,WAEvB,MAAM,IAAI,UACR,6CAA6C,EAsCjD,GAnCA,KAAK3C,GAAe2C,EACpB,KAAK3B,GAAkB,CAAC,CAAC2B,EAEzB,KAAKxC,GAAU,IAAI,IACnB,KAAK
C,GAAW,IAAI,MAAMpB,CAAG,EAAE,KAAK,MAAS,EAC7C,KAAKqB,GAAW,IAAI,MAAMrB,CAAG,EAAE,KAAK,MAAS,EAC7C,KAAKsB,GAAQ,IAAI2C,EAAUjE,CAAG,EAC9B,KAAKuB,GAAQ,IAAI0C,EAAUjE,CAAG,EAC9B,KAAKwB,GAAQ,EACb,KAAKC,GAAQ,EACb,KAAKC,GAAQjB,EAAM,OAAOT,CAAG,EAC7B,KAAKiB,GAAQ,EACb,KAAKC,GAAkB,EAEnB,OAAOkC,GAAY,aACrB,KAAKtC,GAAWsC,GAEd,OAAOC,GAAiB,YAC1B,KAAKtC,GAAgBsC,EACrB,KAAK1B,GAAY,CAAA,IAEjB,KAAKZ,GAAgB,OACrB,KAAKY,GAAY,QAEnB,KAAKI,GAAc,CAAC,CAAC,KAAKjB,GAC1B,KAAKmB,GAAmB,CAAC,CAAC,KAAKlB,GAE/B,KAAK,eAAiB,CAAC,CAACuC,EACxB,KAAK,YAAc,CAAC,CAACC,EACrB,KAAK,yBAA2B,CAAC,CAACK,EAClC,KAAK,2BAA6B,CAAC,CAACE,EACpC,KAAK,uBAAyB,CAAC,CAACC,EAChC,KAAK,iBAAmB,CAAC,CAACC,EAGtB,KAAK,eAAiB,EAAG,CAC3B,GAAI,KAAKnD,KAAa,GAChB,CAAChB,EAAS,KAAKgB,EAAQ,EACzB,MAAM,IAAI,UACR,iDAAiD,EAIvD,GAAI,CAAChB,EAAS,KAAK,YAAY,EAC7B,MAAM,IAAI,UACR,sDAAsD,EAG1D,KAAKqE,GAAuB,EAa9B,GAVA,KAAK,WAAa,CAAC,CAACf,EACpB,KAAK,mBAAqB,CAAC,CAACU,EAC5B,KAAK,eAAiB,CAAC,CAACZ,EACxB,KAAK,eAAiB,CAAC,CAACC,EACxB,KAAK,cACHrD,EAASkD,CAAa,GAAKA,IAAkB,EACzCA,EACA,EACN,KAAK,aAAe,CAAC,CAACC,EACtB,KAAK,IAAMF,GAAO,EACd,KAAK,IAAK,CACZ,GAAI,CAACjD,EAAS,KAAK,GAAG,EACpB,MAAM,IAAI,UACR,6CAA6C,EAGjD,KAAKsE,GAAsB,EAI7B,GAAI,KAAKvD,KAAS,GAAK,KAAK,MAAQ,GAAK,KAAKC,KAAa,EACzD,MAAM,IAAI,UACR,kDAAkD,EAGtD,GAAI,CAAC,KAAK,cAAgB,CAAC,KAAKD,IAAQ,CAAC,KAAKC,GAAU,CACtD,IAAM1B,EAAO,sBACTQ,EAAWR,CAAI,IACjBL,EAAO,IAAIK,CAAI,EAIfH,EAFE,gGAEe,wBAAyBG,EAAMwB,CAAQ,GAG9D,CAKA,gBAAgByD,EAAM,CACpB,OAAO,KAAKjD,GAAQ,IAAIiD,CAAG,EAAI,IAAW,CAC5C,CAEAD,IAAsB,CACpB,IAAME,EAAO,IAAIpE,EAAU,KAAKW,EAAI,EAC9B0D,EAAS,IAAIrE,EAAU,KAAKW,EAAI,EACtC,KAAKkB,GAAQuC,EACb,KAAKxC,GAAUyC,EAEf,KAAKC,GAAc,CAACjC,EAAOQ,EAAK0B,EAAQ3F,EAAK,IAAG,IAAM,CAGpD,GAFAyF,EAAOhC,CAAK,EAAIQ,IAAQ,EAAI0B,EAAQ,EACpCH,EAAK/B,CAAK,EAAIQ,EACVA,IAAQ,GAAK,KAAK,aAAc,CAClC,IAAM2B,EAAI,WAAW,IAAK,CACpB,KAAK5B,GAASP,CAAK,GACrB,KAAK,OAAO,KAAKlB,GAASkB,CAAK,CAAM,CAEzC,EAAGQ,EAAM,CAAC,EAGN2B,EAAE,OACJA,EAAE,MAAK,EAIb,EAEA,KAAKC,GAAiBpC,GAAQ,CAC5BgC,EAAOhC,CAAK,EAAI+B,EAAK/B,CAAK,IAAM,EAAIzD,EAAK,IAAG,EAAK,CACnD,EAEA,KAAK8F,GAAa,CAACC,EAAQtC,IAAS,CAClC,GAAI+B,EAAK/B,CAAK,EAAG,CACf,IAAMQ,EAAMuB,EAAK/B,CAAK,EAChBkC,EAAQF,EAAOhC,CAAK,EAC1BsC,EAAO,IAAM9B,EACb8B,EAAO,MAAQJ,EACfI,EAAO,IAAMC,GAAaC,EAAM,EAChC,IAAMC,EAAMH,EAAO,IAAMJ,EACzBI,EAAO,aAAe9B,EAAMiC,EAEhC,EAIA,IAAIF,EAAY,EACVC,EAAS,IAAK,CAClB,IAAM,EAAIjG,EAAK,IAAG,EAClB,GAAI,KAAK,cAAgB,EAAG,CAC1BgG,EAAY,EACZ,IAAMJ,EAAI,WACR,IAAOI,EAAY,EACnB,KAAK,aAAa,EAIhBJ,EAAE,OACJA,EAAE,MAAK,EAIX,OAAO,CACT,EAEA,KAAK,gBAAkBL,GAAM,CAC3B,IAAM9B,EAAQ,KAAKnB,GAAQ,IAAIiD,CAAG,EAClC,GAAI9B,IAAU,OACZ,MAAO,GAET,IAAMQ,EAAMuB,EAAK/B,CAAK,EAChBkC,EAAQF,EAAOhC,CAAK,EAC1B,GAAIQ,IAAQ,GAAK0B,IAAU,EACzB,MAAO,KAET,IAAMO,GAAOF,GAAaC,EAAM,GAAMN,EACtC,OAAO1B,EAAMiC,CACf,EAEA,KAAKlC,GAAWP,GAEZ+B,EAAK/B,CAAK,IAAM,GAChBgC,EAAOhC,CAAK,IAAM,IACjBuC,GAAaC,EAAM,GAAMR,EAAOhC,CAAK,EAAI+B,EAAK/B,CAAK,CAG1D,CAGAoC,GAAyC,IAAK,CAAE,EAChDC,GACE,IAAK,CAAE,EACTJ,GAMY,IAAK,CAAE,EAGnB1B,GAAsC,IAAM,GAE5CqB,IAAuB,CACrB,IAAMc,EAAQ,IAAI/E,EAAU,KAAKW,EAAI,EACrC,KAAKM,GAAkB,EACvB,KAAKU,GAASoD,EACd,KAAKC,GAAkB3C,GAAQ,CAC7B,KAAKpB,IAAmB8D,EAAM1C,CAAK,EACnC0C,EAAM1C,CAAK,EAAI,CACjB,EACA,KAAK4C,GAAe,CAAC7C,EAAG8C,EAAGjF,EAAMwD,IAAmB,CAGlD,GAAI,KAAKtB,GAAmB+C,CAAC,EAC3B,MAAO,GAET,GAAI,CAACtF,EAASK,CAAI,EAChB,GAAIwD,EAAiB,CACnB,GAAI,OAAOA,GAAoB,WAC7B,MAAM,IAAI,UAAU,oCAAoC,EAG1D,GADAxD,EAAOwD,EAAgByB,EAAG9C,CAAC,EACvB,CAACxC,EAASK,CAAI,EAChB,MAAM,IAAI,UACR,0DAA0D,MAI9D,OAAM,IAAI,UACR,2HAEwB,EAI9B,OAAOA,CACT,EACA,KAAKkF,GAAe,CAClB9C,EACApC,EACA0E,IACE,CAEF,GADAI,EAAM1C,CAAK,EAAIpC,EACX,KAAKW,GAAU,CACjB,IAAM2C,EAAU,KAAK3C,GAAWmE,EAAM1C,CAAK,EAC3C,KAAO,KAAKpB,GAAkBsC,GAC5B,KAA
K6B,GAAO,EAAI,EAGpB,KAAKnE,IAAmB8D,EAAM1C,CAAK,EAC/BsC,IACFA,EAAO,UAAY1E,EACnB0E,EAAO,oBAAsB,KAAK1D,GAEtC,CACF,CAEA+D,GAA0CK,GAAK,CAAE,EACjDF,GAIY,CAACE,EAAIC,EAAIC,IAAO,CAAE,EAC9BN,GAKqB,CACnBO,EACAC,EACAxF,EACAwD,IACE,CACF,GAAIxD,GAAQwD,EACV,MAAM,IAAI,UACR,kEAAkE,EAGtE,MAAO,EACT,EAEA,CAACf,GAAS,CAAE,WAAAQ,EAAa,KAAK,UAAU,EAAK,CAAA,EAAE,CAC7C,GAAI,KAAKlC,GACP,QAAS0E,EAAI,KAAKlE,GACZ,GAAC,KAAKmE,GAAcD,CAAC,KAGrBxC,GAAc,CAAC,KAAKN,GAAS8C,CAAC,KAChC,MAAMA,GAEJA,IAAM,KAAKnE,MAGbmE,EAAI,KAAKpE,GAAMoE,CAAC,CAIxB,CAEA,CAAC/C,GAAU,CAAE,WAAAO,EAAa,KAAK,UAAU,EAAK,CAAA,EAAE,CAC9C,GAAI,KAAKlC,GACP,QAAS0E,EAAI,KAAKnE,GACZ,GAAC,KAAKoE,GAAcD,CAAC,KAGrBxC,GAAc,CAAC,KAAKN,GAAS8C,CAAC,KAChC,MAAMA,GAEJA,IAAM,KAAKlE,MAGbkE,EAAI,KAAKrE,GAAMqE,CAAC,CAIxB,CAEAC,GAActD,EAAY,CACxB,OACEA,IAAU,QACV,KAAKnB,GAAQ,IAAI,KAAKC,GAASkB,CAAK,CAAM,IAAMA,CAEpD,CAMA,CAAC,SAAO,CACN,QAAWqD,KAAK,KAAKhD,GAAQ,EAEzB,KAAKtB,GAASsE,CAAC,IAAM,QACrB,KAAKvE,GAASuE,CAAC,IAAM,QACrB,CAAC,KAAKvD,GAAmB,KAAKf,GAASsE,CAAC,CAAC,IAEzC,KAAM,CAAC,KAAKvE,GAASuE,CAAC,EAAG,KAAKtE,GAASsE,CAAC,CAAC,EAG/C,CAQA,CAAC,UAAQ,CACP,QAAWA,KAAK,KAAK/C,GAAS,EAE1B,KAAKvB,GAASsE,CAAC,IAAM,QACrB,KAAKvE,GAASuE,CAAC,IAAM,QACrB,CAAC,KAAKvD,GAAmB,KAAKf,GAASsE,CAAC,CAAC,IAEzC,KAAM,CAAC,KAAKvE,GAASuE,CAAC,EAAG,KAAKtE,GAASsE,CAAC,CAAC,EAG/C,CAMA,CAAC,MAAI,CACH,QAAWA,KAAK,KAAKhD,GAAQ,EAAI,CAC/B,IAAMN,EAAI,KAAKjB,GAASuE,CAAC,EAEvBtD,IAAM,QACN,CAAC,KAAKD,GAAmB,KAAKf,GAASsE,CAAC,CAAC,IAEzC,MAAMtD,GAGZ,CAQA,CAAC,OAAK,CACJ,QAAWsD,KAAK,KAAK/C,GAAS,EAAI,CAChC,IAAMP,EAAI,KAAKjB,GAASuE,CAAC,EAEvBtD,IAAM,QACN,CAAC,KAAKD,GAAmB,KAAKf,GAASsE,CAAC,CAAC,IAEzC,MAAMtD,GAGZ,CAMA,CAAC,QAAM,CACL,QAAWsD,KAAK,KAAKhD,GAAQ,EACjB,KAAKtB,GAASsE,CAAC,IAEjB,QACN,CAAC,KAAKvD,GAAmB,KAAKf,GAASsE,CAAC,CAAC,IAEzC,MAAM,KAAKtE,GAASsE,CAAC,EAG3B,CAQA,CAAC,SAAO,CACN,QAAWA,KAAK,KAAK/C,GAAS,EAClB,KAAKvB,GAASsE,CAAC,IAEjB,QACN,CAAC,KAAKvD,GAAmB,KAAKf,GAASsE,CAAC,CAAC,IAEzC,MAAM,KAAKtE,GAASsE,CAAC,EAG3B,CAMA,CAAC,OAAO,QAAQ,GAAC,CACf,OAAO,KAAK,QAAO,CACrB,CAMA,KACEvG,EACAyG,EAA4C,CAAA,EAAE,CAE9C,QAAW,KAAK,KAAKlD,GAAQ,EAAI,CAC/B,IAAMwC,EAAI,KAAK9D,GAAS,CAAC,EACnByE,EAAQ,KAAK1D,GAAmB+C,CAAC,EACnCA,EAAE,qBACFA,EACJ,GAAIW,IAAU,QACV1G,EAAG0G,EAAO,KAAK1E,GAAS,CAAC,EAAQ,IAAI,EACvC,OAAO,KAAK,IAAI,KAAKA,GAAS,CAAC,EAAQyE,CAAU,EAGvD,CAQA,QACEzG,EACA2G,EAAa,KAAI,CAEjB,QAAW,KAAK,KAAKpD,GAAQ,EAAI,CAC/B,IAAMwC,EAAI,KAAK9D,GAAS,CAAC,EACnByE,EAAQ,KAAK1D,GAAmB+C,CAAC,EACnCA,EAAE,qBACFA,EACAW,IAAU,QACd1G,EAAG,KAAK2G,EAAOD,EAAO,KAAK1E,GAAS,CAAC,EAAQ,IAAI,EAErD,CAMA,SACEhC,EACA2G,EAAa,KAAI,CAEjB,QAAW,KAAK,KAAKnD,GAAS,EAAI,CAChC,IAAMuC,EAAI,KAAK9D,GAAS,CAAC,EACnByE,EAAQ,KAAK1D,GAAmB+C,CAAC,EACnCA,EAAE,qBACFA,EACAW,IAAU,QACd1G,EAAG,KAAK2G,EAAOD,EAAO,KAAK1E,GAAS,CAAC,EAAQ,IAAI,EAErD,CAMA,YAAU,CACR,IAAI4E,EAAU,GACd,QAAWL,KAAK,KAAK/C,GAAU,CAAE,WAAY,EAAI,CAAE,EAC7C,KAAKC,GAAS8C,CAAC,IACjB,KAAK,OAAO,KAAKvE,GAASuE,CAAC,CAAM,EACjCK,EAAU,IAGd,OAAOA,CACT,CAMA,MAAI,CACF,IAAMC,EAAgC,CAAA,EACtC,QAAWN,KAAK,KAAKhD,GAAS,CAAE,WAAY,EAAI,CAAE,EAAG,CACnD,IAAMyB,EAAM,KAAKhD,GAASuE,CAAC,EACrBR,EAAI,KAAK9D,GAASsE,CAAC,EACnBG,EAAuB,KAAK1D,GAAmB+C,CAAC,EAClDA,EAAE,qBACFA,EACJ,GAAIW,IAAU,QAAa1B,IAAQ,OAAW,SAC9C,IAAM8B,EAA2B,CAAE,MAAAJ,CAAK,EACxC,GAAI,KAAKhE,IAAS,KAAKD,GAAS,CAC9BqE,EAAM,IAAM,KAAKpE,GAAM6D,CAAC,EAGxB,IAAMZ,EAAMlG,EAAK,IAAG,EAAK,KAAKgD,GAAQ8D,CAAC,EACvCO,EAAM,MAAQ,KAAK,MAAM,KAAK,IAAG,EAAKnB,CAAG,EAEvC,KAAKnD,KACPsE,EAAM,KAAO,KAAKtE,GAAO+D,CAAC,GAE5BM,EAAI,QAAQ,CAAC7B,EAAK8B,CAAK,CAAC,EAE1B,OAAOD,CACT,CAOA,KAAKA,EAA6B,CAChC,KAAK,MAAK,EACV,OAAW,CAAC7B,EAAK8B,CAAK,IAAKD,EAAK,CAC9B,GAAIC,EAAM,MAAO,CAOf,IAAMnB,EAAM,KAAK,IAAG,EAAKmB,EAAM,MAC/BA,EAAM,MAAQrH,EA
AK,IAAG,EAAKkG,EAE7B,KAAK,IAAIX,EAAK8B,EAAM,MAAOA,CAAK,EAEpC,CAQA,IACE7D,EACA8C,EACAgB,EAA4C,CAAA,EAAE,CAE9C,GAAIhB,IAAM,OACR,YAAK,OAAO9C,CAAC,EACN,KAET,GAAM,CACJ,IAAAS,EAAM,KAAK,IACX,MAAA0B,EACA,eAAAlB,EAAiB,KAAK,eACtB,gBAAAI,EAAkB,KAAK,gBACvB,OAAAkB,CAAM,EACJuB,EACA,CAAE,YAAA5C,EAAc,KAAK,WAAW,EAAK4C,EAEnCjG,EAAO,KAAKgF,GAChB7C,EACA8C,EACAgB,EAAW,MAAQ,EACnBzC,CAAe,EAIjB,GAAI,KAAK,cAAgBxD,EAAO,KAAK,aACnC,OAAI0E,IACFA,EAAO,IAAM,OACbA,EAAO,qBAAuB,IAGhC,KAAK,OAAOvC,CAAC,EACN,KAET,IAAIC,EAAQ,KAAKrB,KAAU,EAAI,OAAY,KAAKE,GAAQ,IAAIkB,CAAC,EAC7D,GAAIC,IAAU,OAEZA,EACE,KAAKrB,KAAU,EACX,KAAKQ,GACL,KAAKC,GAAM,SAAW,EACtB,KAAKA,GAAM,IAAG,EACd,KAAKT,KAAU,KAAKL,GACpB,KAAKyE,GAAO,EAAK,EACjB,KAAKpE,GAEX,KAAKG,GAASkB,CAAK,EAAID,EACvB,KAAKhB,GAASiB,CAAK,EAAI6C,EACvB,KAAKhE,GAAQ,IAAIkB,EAAGC,CAAK,EACzB,KAAKhB,GAAM,KAAKG,EAAK,EAAIa,EACzB,KAAKf,GAAMe,CAAK,EAAI,KAAKb,GACzB,KAAKA,GAAQa,EACb,KAAKrB,KACL,KAAKmE,GAAa9C,EAAOpC,EAAM0E,CAAM,EACjCA,IAAQA,EAAO,IAAM,OACzBrB,EAAc,OACT,CAEL,KAAKb,GAAYJ,CAAK,EACtB,IAAM8D,EAAS,KAAK/E,GAASiB,CAAK,EAClC,GAAI6C,IAAMiB,GAcR,GAbI,KAAKpE,IAAmB,KAAKI,GAAmBgE,CAAM,EACxDA,EAAO,kBAAkB,MAAM,IAAI,MAAM,UAAU,CAAC,EAC1C9C,IACN,KAAKvB,IACP,KAAKjB,KAAWsF,EAAa/D,EAAG,KAAK,EAEnC,KAAKJ,IACP,KAAKN,IAAW,KAAK,CAACyE,EAAa/D,EAAG,KAAK,CAAC,GAGhD,KAAK4C,GAAgB3C,CAAK,EAC1B,KAAK8C,GAAa9C,EAAOpC,EAAM0E,CAAM,EACrC,KAAKvD,GAASiB,CAAK,EAAI6C,EACnBP,EAAQ,CACVA,EAAO,IAAM,UACb,IAAMyB,EACJD,GAAU,KAAKhE,GAAmBgE,CAAM,EACpCA,EAAO,qBACPA,EACFC,IAAa,SAAWzB,EAAO,SAAWyB,SAEvCzB,IACTA,EAAO,IAAM,UAYjB,GATI9B,IAAQ,GAAK,CAAC,KAAKhB,IACrB,KAAKqC,GAAsB,EAEzB,KAAKrC,KACFyB,GACH,KAAKgB,GAAYjC,EAAOQ,EAAK0B,CAAK,EAEhCI,GAAQ,KAAKD,GAAWC,EAAQtC,CAAK,GAEvC,CAACgB,GAAkB,KAAKrB,IAAoB,KAAKN,GAAW,CAC9D,IAAM2E,EAAK,KAAK3E,GACZ4E,EACJ,KAAQA,EAAOD,GAAI,MAAK,GACtB,KAAKvF,KAAgB,GAAGwF,CAAI,EAGhC,OAAO,IACT,CAMA,KAAG,CACD,GAAI,CACF,KAAO,KAAKtF,IAAO,CACjB,IAAMuF,EAAM,KAAKnF,GAAS,KAAKG,EAAK,EAEpC,GADA,KAAK6D,GAAO,EAAI,EACZ,KAAKjD,GAAmBoE,CAAG,GAC7B,GAAIA,EAAI,qBACN,OAAOA,EAAI,6BAEJA,IAAQ,OACjB,OAAOA,WAIX,GAAI,KAAKvE,IAAoB,KAAKN,GAAW,CAC3C,IAAM2E,EAAK,KAAK3E,GACZ4E,EACJ,KAAQA,EAAOD,GAAI,MAAK,GACtB,KAAKvF,KAAgB,GAAGwF,CAAI,GAIpC,CAEAlB,GAAOoB,EAAa,CAClB,IAAMC,EAAO,KAAKlF,GACZa,EAAI,KAAKjB,GAASsF,CAAI,EACtBvB,EAAI,KAAK9D,GAASqF,CAAI,EAC5B,OAAI,KAAK1E,IAAmB,KAAKI,GAAmB+C,CAAC,EACnDA,EAAE,kBAAkB,MAAM,IAAI,MAAM,SAAS,CAAC,GACrC,KAAKpD,IAAe,KAAKE,MAC9B,KAAKF,IACP,KAAKjB,KAAWqE,EAAG9C,EAAG,OAAO,EAE3B,KAAKJ,IACP,KAAKN,IAAW,KAAK,CAACwD,EAAG9C,EAAG,OAAO,CAAC,GAGxC,KAAK4C,GAAgByB,CAAI,EAErBD,IACF,KAAKrF,GAASsF,CAAI,EAAI,OACtB,KAAKrF,GAASqF,CAAI,EAAI,OACtB,KAAKhF,GAAM,KAAKgF,CAAI,GAElB,KAAKzF,KAAU,GACjB,KAAKO,GAAQ,KAAKC,GAAQ,EAC1B,KAAKC,GAAM,OAAS,GAEpB,KAAKF,GAAQ,KAAKF,GAAMoF,CAAI,EAE9B,KAAKvF,GAAQ,OAAOkB,CAAC,EACrB,KAAKpB,KACEyF,CACT,CAUA,IAAIrE,EAAMsE,EAA4C,CAAA,EAAE,CACtD,GAAM,CAAE,eAAAzD,EAAiB,KAAK,eAAgB,OAAA0B,CAAM,EAClD+B,EACIrE,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAChC,GAAIC,IAAU,OAAW,CACvB,IAAM6C,EAAI,KAAK9D,GAASiB,CAAK,EAC7B,GACE,KAAKF,GAAmB+C,CAAC,GACzBA,EAAE,uBAAyB,OAE3B,MAAO,GAET,GAAK,KAAKtC,GAASP,CAAK,EASbsC,IACTA,EAAO,IAAM,QACb,KAAKD,GAAWC,EAAQtC,CAAK,OAV7B,QAAIY,GACF,KAAKwB,GAAepC,CAAK,EAEvBsC,IACFA,EAAO,IAAM,MACb,KAAKD,GAAWC,EAAQtC,CAAK,GAExB,QAKAsC,IACTA,EAAO,IAAM,QAEf,MAAO,EACT,CASA,KAAKvC,EAAMuE,EAA8C,CAAA,EAAE,CACzD,GAAM,CAAE,WAAAzD,EAAa,KAAK,UAAU,EAAKyD,EACnCtE,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAChC,GACEC,IAAU,SACTa,GAAc,CAAC,KAAKN,GAASP,CAAK,GACnC,CACA,IAAM6C,EAAI,KAAK9D,GAASiB,CAAK,EAE7B,OAAO,KAAKF,GAAmB+C,CAAC,EAAIA,EAAE,qBAAuBA,EAEjE,CAEA1C,GACEJ,EACAC,EACAC,EACAC,EAAY,CAEZ,IAAM2C,EAAI7C,IAAU,OAAY,OAAY,KAAKjB,GAASiB,CAAK,EAC/D,GAAI,KAAKF,GAAmB+C
,CAAC,EAC3B,OAAOA,EAGT,IAAM0B,EAAK,IAAIxH,EACT,CAAE,OAAAyH,CAAM,EAAKvE,EAEnBuE,GAAQ,iBAAiB,QAAS,IAAMD,EAAG,MAAMC,EAAO,MAAM,EAAG,CAC/D,OAAQD,EAAG,OACZ,EAED,IAAME,EAAY,CAChB,OAAQF,EAAG,OACX,QAAAtE,EACA,QAAAC,GAGIwE,EAAK,CACT7B,EACA8B,EAAc,KACU,CACxB,GAAM,CAAE,QAAAC,CAAO,EAAKL,EAAG,OACjBM,EAAc5E,EAAQ,kBAAoB4C,IAAM,OAUtD,GATI5C,EAAQ,SACN2E,GAAW,CAACD,GACd1E,EAAQ,OAAO,aAAe,GAC9BA,EAAQ,OAAO,WAAasE,EAAG,OAAO,OAClCM,IAAa5E,EAAQ,OAAO,kBAAoB,KAEpDA,EAAQ,OAAO,cAAgB,IAG/B2E,GAAW,CAACC,GAAe,CAACF,EAC9B,OAAOG,EAAUP,EAAG,OAAO,MAAM,EAGnC,IAAMQ,EAAK,EACX,OAAI,KAAKhG,GAASiB,CAAc,IAAM,IAChC6C,IAAM,OACJkC,EAAG,qBACL,KAAKhG,GAASiB,CAAc,EAAI+E,EAAG,qBAEnC,KAAK,OAAOhF,CAAC,GAGXE,EAAQ,SAAQA,EAAQ,OAAO,aAAe,IAClD,KAAK,IAAIF,EAAG8C,EAAG4B,EAAU,OAAO,IAG7B5B,CACT,EAEMmC,EAAMC,IACNhF,EAAQ,SACVA,EAAQ,OAAO,cAAgB,GAC/BA,EAAQ,OAAO,WAAagF,GAEvBH,EAAUG,CAAE,GAGfH,EAAaG,GAA0B,CAC3C,GAAM,CAAE,QAAAL,CAAO,EAAKL,EAAG,OACjBW,EACJN,GAAW3E,EAAQ,uBACfY,EACJqE,GAAqBjF,EAAQ,2BACzBkF,EAAWtE,GAAcZ,EAAQ,yBACjC8E,EAAK,EAeX,GAdI,KAAKhG,GAASiB,CAAc,IAAM,IAGxB,CAACmF,GAAYJ,EAAG,uBAAyB,OAEnD,KAAK,OAAOhF,CAAC,EACHmF,IAKV,KAAKnG,GAASiB,CAAc,EAAI+E,EAAG,uBAGnClE,EACF,OAAIZ,EAAQ,QAAU8E,EAAG,uBAAyB,SAChD9E,EAAQ,OAAO,cAAgB,IAE1B8E,EAAG,qBACL,GAAIA,EAAG,aAAeA,EAC3B,MAAME,CAEV,EAEMG,EAAQ,CACZC,EACAC,IACE,CACF,IAAMC,EAAM,KAAK7G,KAAeqB,EAAG8C,EAAG4B,CAAS,EAC3Cc,GAAOA,aAAe,SACxBA,EAAI,KAAK1C,GAAKwC,EAAIxC,CAAC,EAAGyC,CAAG,EAK3Bf,EAAG,OAAO,iBAAiB,QAAS,IAAK,EAErC,CAACtE,EAAQ,kBACTA,EAAQ,0BAERoF,EAAG,EAECpF,EAAQ,yBACVoF,EAAMxC,GAAK6B,EAAG7B,EAAG,EAAI,GAG3B,CAAC,CACH,EAEI5C,EAAQ,SAAQA,EAAQ,OAAO,gBAAkB,IACrD,IAAM,EAAI,IAAI,QAAQmF,CAAK,EAAE,KAAKV,EAAIM,CAAE,EAClCD,EAAK,OAAO,OAAO,EAAG,CAC1B,kBAAmBR,EACnB,qBAAsB1B,EACtB,WAAY,OACb,EAED,OAAI7C,IAAU,QAEZ,KAAK,IAAID,EAAGgF,EAAI,CAAE,GAAGN,EAAU,QAAS,OAAQ,MAAS,CAAE,EAC3DzE,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,GAE1B,KAAKhB,GAASiB,CAAK,EAAI+E,EAElBA,CACT,CAEAjF,GAAmBD,EAAM,CACvB,GAAI,CAAC,KAAKH,GAAiB,MAAO,GAClC,IAAM8F,EAAI3F,EACV,MACE,CAAC,CAAC2F,GACFA,aAAa,SACbA,EAAE,eAAe,sBAAsB,GACvCA,EAAE,6BAA6BzI,CAEnC,CAwCA,MAAM,MACJgD,EACA0F,EAAgD,CAAA,EAAE,CAElD,GAAM,CAEJ,WAAA5E,EAAa,KAAK,WAClB,eAAAF,EAAiB,KAAK,eACtB,mBAAAY,EAAqB,KAAK,mBAE1B,IAAAf,EAAM,KAAK,IACX,eAAAQ,EAAiB,KAAK,eACtB,KAAApD,EAAO,EACP,gBAAAwD,EAAkB,KAAK,gBACvB,YAAAH,EAAc,KAAK,YAEnB,yBAAAK,EAA2B,KAAK,yBAChC,2BAAAE,EAA6B,KAAK,2BAClC,iBAAAE,EAAmB,KAAK,iBACxB,uBAAAD,EAAyB,KAAK,uBAC9B,QAAAvB,EACA,aAAAwF,EAAe,GACf,OAAApD,EACA,OAAAkC,CAAM,EACJiB,EAEJ,GAAI,CAAC,KAAK/F,GACR,OAAI4C,IAAQA,EAAO,MAAQ,OACpB,KAAK,IAAIvC,EAAG,CACjB,WAAAc,EACA,eAAAF,EACA,mBAAAY,EACA,OAAAe,EACD,EAGH,IAAMrC,EAAU,CACd,WAAAY,EACA,eAAAF,EACA,mBAAAY,EACA,IAAAf,EACA,eAAAQ,EACA,KAAApD,EACA,gBAAAwD,EACA,YAAAH,EACA,yBAAAK,EACA,2BAAAE,EACA,uBAAAC,EACA,iBAAAC,EACA,OAAAY,EACA,OAAAkC,GAGExE,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAC9B,GAAIC,IAAU,OAAW,CACnBsC,IAAQA,EAAO,MAAQ,QAC3B,IAAMzC,EAAI,KAAKM,GAAiBJ,EAAGC,EAAOC,EAASC,CAAO,EAC1D,OAAQL,EAAE,WAAaA,MAClB,CAEL,IAAMgD,EAAI,KAAK9D,GAASiB,CAAK,EAC7B,GAAI,KAAKF,GAAmB+C,CAAC,EAAG,CAC9B,IAAM8C,EACJ9E,GAAcgC,EAAE,uBAAyB,OAC3C,OAAIP,IACFA,EAAO,MAAQ,WACXqD,IAAOrD,EAAO,cAAgB,KAE7BqD,EAAQ9C,EAAE,qBAAwBA,EAAE,WAAaA,EAK1D,IAAM+C,EAAU,KAAKrF,GAASP,CAAK,EACnC,GAAI,CAAC0F,GAAgB,CAACE,EACpB,OAAItD,IAAQA,EAAO,MAAQ,OAC3B,KAAKlC,GAAYJ,CAAK,EAClBW,GACF,KAAKyB,GAAepC,CAAK,EAEvBsC,GAAQ,KAAKD,GAAWC,EAAQtC,CAAK,EAClC6C,EAKT,IAAMhD,EAAI,KAAKM,GAAiBJ,EAAGC,EAAOC,EAASC,CAAO,EAEpD2F,EADWhG,EAAE,uBAAyB,QACfgB,EAC7B,OAAIyB,IACFA,EAAO,MAAQsD,EAAU,QAAU,UAC/BC,GAAYD,IAAStD,EAAO,cAAgB,KAE3CuD,EAAWhG,EAAE,qBAAwBA,EAAE,WAAaA,EAE/D,CAQA,IAAIE,EAAMwD,EAA4C,CAAA,EAAE,CACtD,GAAM,CACJ,WAAA1C,EAAa,KAAK,WAClB,eAAAF,EAAiB,
KAAK,eACtB,mBAAAY,EAAqB,KAAK,mBAC1B,OAAAe,CAAM,EACJiB,EACEvD,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAChC,GAAIC,IAAU,OAAW,CACvB,IAAMwD,EAAQ,KAAKzE,GAASiB,CAAK,EAC3B8F,EAAW,KAAKhG,GAAmB0D,CAAK,EAE9C,OADIlB,GAAQ,KAAKD,GAAWC,EAAQtC,CAAK,EACrC,KAAKO,GAASP,CAAK,GACjBsC,IAAQA,EAAO,IAAM,SAEpBwD,GAQDxD,GACAzB,GACA2C,EAAM,uBAAyB,SAE/BlB,EAAO,cAAgB,IAElBzB,EAAa2C,EAAM,qBAAuB,SAb5CjC,GACH,KAAK,OAAOxB,CAAC,EAEXuC,GAAUzB,IAAYyB,EAAO,cAAgB,IAC1CzB,EAAa2C,EAAQ,UAY1BlB,IAAQA,EAAO,IAAM,OAMrBwD,EACKtC,EAAM,sBAEf,KAAKpD,GAAYJ,CAAK,EAClBW,GACF,KAAKyB,GAAepC,CAAK,EAEpBwD,SAEAlB,IACTA,EAAO,IAAM,OAEjB,CAEAyD,GAASlG,EAAUrC,EAAQ,CACzB,KAAKyB,GAAMzB,CAAC,EAAIqC,EAChB,KAAKb,GAAMa,CAAC,EAAIrC,CAClB,CAEA4C,GAAYJ,EAAY,CASlBA,IAAU,KAAKb,KACba,IAAU,KAAKd,GACjB,KAAKA,GAAQ,KAAKF,GAAMgB,CAAK,EAE7B,KAAK+F,GACH,KAAK9G,GAAMe,CAAK,EAChB,KAAKhB,GAAMgB,CAAK,CAAU,EAG9B,KAAK+F,GAAS,KAAK5G,GAAOa,CAAK,EAC/B,KAAKb,GAAQa,EAEjB,CAMA,OAAOD,EAAI,CACT,IAAI2D,EAAU,GACd,GAAI,KAAK/E,KAAU,EAAG,CACpB,IAAMqB,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAChC,GAAIC,IAAU,OAEZ,GADA0D,EAAU,GACN,KAAK/E,KAAU,EACjB,KAAK,MAAK,MACL,CACL,KAAKgE,GAAgB3C,CAAK,EAC1B,IAAM6C,EAAI,KAAK9D,GAASiB,CAAK,EACzB,KAAKF,GAAmB+C,CAAC,EAC3BA,EAAE,kBAAkB,MAAM,IAAI,MAAM,SAAS,CAAC,GACrC,KAAKpD,IAAe,KAAKE,MAC9B,KAAKF,IACP,KAAKjB,KAAWqE,EAAQ9C,EAAG,QAAQ,EAEjC,KAAKJ,IACP,KAAKN,IAAW,KAAK,CAACwD,EAAQ9C,EAAG,QAAQ,CAAC,GAG9C,KAAKlB,GAAQ,OAAOkB,CAAC,EACrB,KAAKjB,GAASkB,CAAK,EAAI,OACvB,KAAKjB,GAASiB,CAAK,EAAI,OACnBA,IAAU,KAAKb,GACjB,KAAKA,GAAQ,KAAKF,GAAMe,CAAK,EACpBA,IAAU,KAAKd,GACxB,KAAKA,GAAQ,KAAKF,GAAMgB,CAAK,GAE7B,KAAKhB,GAAM,KAAKC,GAAMe,CAAK,CAAC,EAAI,KAAKhB,GAAMgB,CAAK,EAChD,KAAKf,GAAM,KAAKD,GAAMgB,CAAK,CAAC,EAAI,KAAKf,GAAMe,CAAK,GAElD,KAAKrB,KACL,KAAKS,GAAM,KAAKY,CAAK,GAI3B,GAAI,KAAKL,IAAoB,KAAKN,IAAW,OAAQ,CACnD,IAAM2E,EAAK,KAAK3E,GACZ4E,EACJ,KAAQA,EAAOD,GAAI,MAAK,GACtB,KAAKvF,KAAgB,GAAGwF,CAAI,EAGhC,OAAOP,CACT,CAKA,OAAK,CACH,QAAW1D,KAAS,KAAKM,GAAU,CAAE,WAAY,EAAI,CAAE,EAAG,CACxD,IAAMuC,EAAI,KAAK9D,GAASiB,CAAK,EAC7B,GAAI,KAAKF,GAAmB+C,CAAC,EAC3BA,EAAE,kBAAkB,MAAM,IAAI,MAAM,SAAS,CAAC,MACzC,CACL,IAAM9C,EAAI,KAAKjB,GAASkB,CAAK,EACzB,KAAKP,IACP,KAAKjB,KAAWqE,EAAQ9C,EAAQ,QAAQ,EAEtC,KAAKJ,IACP,KAAKN,IAAW,KAAK,CAACwD,EAAQ9C,EAAQ,QAAQ,CAAC,GAoBrD,GAfA,KAAKlB,GAAQ,MAAK,EAClB,KAAKE,GAAS,KAAK,MAAS,EAC5B,KAAKD,GAAS,KAAK,MAAS,EACxB,KAAKU,IAAS,KAAKD,KACrB,KAAKC,GAAM,KAAK,CAAC,EACjB,KAAKD,GAAQ,KAAK,CAAC,GAEjB,KAAKD,IACP,KAAKA,GAAO,KAAK,CAAC,EAEpB,KAAKJ,GAAQ,EACb,KAAKC,GAAQ,EACb,KAAKC,GAAM,OAAS,EACpB,KAAKR,GAAkB,EACvB,KAAKD,GAAQ,EACT,KAAKgB,IAAoB,KAAKN,GAAW,CAC3C,IAAM2E,EAAK,KAAK3E,GACZ4E,EACJ,KAAQA,EAAOD,GAAI,MAAK,GACtB,KAAKvF,KAAgB,GAAGwF,CAAI,EAGlC,GA59CF,QAAA,SAAA5F", + "names": ["perf", "warned", "PROCESS", "emitWarning", "msg", "type", "code", "fn", "AC", "AS", "_", "warnACPolyfill", "reason", "printACPolyfillWarning", "shouldWarn", "TYPE", "isPosInt", "n", "getUintArray", "max", "ZeroArray", "size", "_Stack", "HeapCls", "__privateSet", "_constructing", "s", "__privateGet", "Stack", "__privateAdd", "LRUCache", "#max", "#maxSize", "#dispose", "#disposeAfter", "#fetchMethod", "#size", "#calculatedSize", "#keyMap", "#keyList", "#valList", "#next", "#prev", "#head", "#tail", "#free", "#disposed", "#sizes", "#starts", "#ttls", "#hasDispose", "#hasFetchMethod", "#hasDisposeAfter", "c", "p", "#isBackgroundFetch", "k", "index", "options", "context", "#backgroundFetch", "#moveToTail", "#indexes", "#rindexes", "#isStale", "ttl", "ttlResolution", "ttlAutopurge", "updateAgeOnGet", "updateAgeOnHas", "allowStale", "dispose", "disposeAfter", "noDisposeOnSet", "noUpdateTTL", "maxSize", "maxEntrySize", "sizeCalculation", 
"fetchMethod", "noDeleteOnFetchRejection", "noDeleteOnStaleGet", "allowStaleOnFetchRejection", "allowStaleOnFetchAbort", "ignoreFetchAbort", "UintArray", "#initializeSizeTracking", "#initializeTTLTracking", "key", "ttls", "starts", "#setItemTTL", "start", "t", "#updateItemAge", "#statusTTL", "status", "cachedNow", "getNow", "age", "sizes", "#removeItemSize", "#requireSize", "v", "#addItemSize", "#evict", "_i", "_s", "_st", "_k", "_v", "i", "#isValidIndex", "getOptions", "value", "thisp", "deleted", "arr", "entry", "setOptions", "oldVal", "oldValue", "dt", "task", "val", "free", "head", "hasOptions", "peekOptions", "ac", "signal", "fetchOpts", "cb", "updateCache", "aborted", "ignoreAbort", "fetchFail", "bf", "eb", "er", "allowStaleAborted", "noDelete", "pcall", "res", "rej", "fmp", "b", "fetchOptions", "forceRefresh", "stale", "isStale", "staleVal", "fetching", "#connect"] +} diff --git a/dist/node_modules/lru-cache/dist/cjs/package.json b/dist/node_modules/lru-cache/dist/cjs/package.json new file mode 100644 index 0000000..5bbefff --- /dev/null +++ b/dist/node_modules/lru-cache/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/dist/node_modules/lru-cache/dist/mjs/index.d.ts b/dist/node_modules/lru-cache/dist/mjs/index.d.ts new file mode 100644 index 0000000..9a333a8 --- /dev/null +++ b/dist/node_modules/lru-cache/dist/mjs/index.d.ts @@ -0,0 +1,834 @@ +/** + * @module LRUCache + */ +declare const TYPE: unique symbol; +type Index = number & { + [TYPE]: 'LRUCache Index'; +}; +type UintArray = Uint8Array | Uint16Array | Uint32Array; +type NumberArray = UintArray | number[]; +declare class ZeroArray extends Array { + constructor(size: number); +} +type StackLike = Stack | Index[]; +declare class Stack { + #private; + heap: NumberArray; + length: number; + static create(max: number): StackLike; + constructor(max: number, HeapCls: { + new (n: number): NumberArray; + }); + push(n: Index): void; + pop(): Index; +} +/** + * Promise representing an in-progress {@link LRUCache#fetch} call + */ +export type BackgroundFetch = Promise & { + __returned: BackgroundFetch | undefined; + __abortController: AbortController; + __staleWhileFetching: V | undefined; +}; +export declare namespace LRUCache { + /** + * An integer greater than 0, reflecting the calculated size of items + */ + type Size = number; + /** + * Integer greater than 0, representing some number of milliseconds, or the + * time at which a TTL started counting from. + */ + type Milliseconds = number; + /** + * An integer greater than 0, reflecting a number of items + */ + type Count = number; + /** + * The reason why an item was removed from the cache, passed + * to the {@link Disposer} methods. + */ + type DisposeReason = 'evict' | 'set' | 'delete'; + /** + * A method called upon item removal, passed as the + * {@link OptionsBase.dispose} and/or + * {@link OptionsBase.disposeAfter} options. + */ + type Disposer = (value: V, key: K, reason: DisposeReason) => void; + /** + * A function that returns the effective calculated size + * of an entry in the cache. + */ + type SizeCalculator = (value: V, key: K) => Size; + /** + * Options provided to the + * {@link OptionsBase.fetchMethod} function. 
+ */ + interface FetcherOptions { + signal: AbortSignal; + options: FetcherFetchOptions; + /** + * Object provided in the {@link FetchOptions.context} option to + * {@link LRUCache#fetch} + */ + context: FC; + } + /** + * Status object that may be passed to {@link LRUCache#fetch}, + * {@link LRUCache#get}, {@link LRUCache#set}, and {@link LRUCache#has}. + */ + interface Status { + /** + * The status of a set() operation. + * + * - add: the item was not found in the cache, and was added + * - update: the item was in the cache, with the same value provided + * - replace: the item was in the cache, and replaced + * - miss: the item was not added to the cache for some reason + */ + set?: 'add' | 'update' | 'replace' | 'miss'; + /** + * the ttl stored for the item, or undefined if ttls are not used. + */ + ttl?: Milliseconds; + /** + * the start time for the item, or undefined if ttls are not used. + */ + start?: Milliseconds; + /** + * The timestamp used for TTL calculation + */ + now?: Milliseconds; + /** + * the remaining ttl for the item, or undefined if ttls are not used. + */ + remainingTTL?: Milliseconds; + /** + * The calculated size for the item, if sizes are used. + */ + entrySize?: Size; + /** + * The total calculated size of the cache, if sizes are used. + */ + totalCalculatedSize?: Size; + /** + * A flag indicating that the item was not stored, due to exceeding the + * {@link OptionsBase.maxEntrySize} + */ + maxEntrySizeExceeded?: true; + /** + * The old value, specified in the case of `set:'update'` or + * `set:'replace'` + */ + oldValue?: V; + /** + * The results of a {@link LRUCache#has} operation + * + * - hit: the item was found in the cache + * - stale: the item was found in the cache, but is stale + * - miss: the item was not found in the cache + */ + has?: 'hit' | 'stale' | 'miss'; + /** + * The status of a {@link LRUCache#fetch} operation. + * Note that this can change as the underlying fetch() moves through + * various states. + * + * - inflight: there is another fetch() for this key which is in process + * - get: there is no fetchMethod, so {@link LRUCache#get} was called. + * - miss: the item is not in cache, and will be fetched. + * - hit: the item is in the cache, and was resolved immediately. + * - stale: the item is in the cache, but stale. + * - refresh: the item is in the cache, and not stale, but + * {@link FetchOptions.forceRefresh} was specified. + */ + fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh'; + /** + * The {@link OptionsBase.fetchMethod} was called + */ + fetchDispatched?: true; + /** + * The cached value was updated after a successful call to + * {@link OptionsBase.fetchMethod} + */ + fetchUpdated?: true; + /** + * The reason for a fetch() rejection. Either the error raised by the + * {@link OptionsBase.fetchMethod}, or the reason for an + * AbortSignal. + */ + fetchError?: Error; + /** + * The fetch received an abort signal + */ + fetchAborted?: true; + /** + * The abort signal received was ignored, and the fetch was allowed to + * continue. + */ + fetchAbortIgnored?: true; + /** + * The fetchMethod promise resolved successfully + */ + fetchResolved?: true; + /** + * The fetchMethod promise was rejected + */ + fetchRejected?: true; + /** + * The status of a {@link LRUCache#get} operation. + * + * - fetching: The item is currently being fetched. If a previous value + * is present and allowed, that will be returned. + * - stale: The item is in the cache, and is stale. 
+ * - hit: the item is in the cache + * - miss: the item is not in the cache + */ + get?: 'stale' | 'hit' | 'miss'; + /** + * A fetch or get operation returned a stale value. + */ + returnedStale?: true; + } + /** + * options which override the options set in the LRUCache constructor + * when calling {@link LRUCache#fetch}. + * + * This is the union of {@link GetOptions} and {@link SetOptions}, plus + * {@link OptionsBase.noDeleteOnFetchRejection}, + * {@link OptionsBase.allowStaleOnFetchRejection}, + * {@link FetchOptions.forceRefresh}, and + * {@link OptionsBase.context} + * + * Any of these may be modified in the {@link OptionsBase.fetchMethod} + * function, but the {@link GetOptions} fields will of course have no + * effect, as the {@link LRUCache#get} call already happened by the time + * the fetchMethod is called. + */ + interface FetcherFetchOptions extends Pick, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet' | 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL' | 'noDeleteOnFetchRejection' | 'allowStaleOnFetchRejection' | 'ignoreFetchAbort' | 'allowStaleOnFetchAbort'> { + status?: Status; + size?: Size; + } + /** + * Options that may be passed to the {@link LRUCache#fetch} method. + */ + interface FetchOptions extends FetcherFetchOptions { + /** + * Set to true to force a re-load of the existing data, even if it + * is not yet stale. + */ + forceRefresh?: boolean; + /** + * Context provided to the {@link OptionsBase.fetchMethod} as + * the {@link FetcherOptions.context} param. + * + * If the FC type is specified as unknown (the default), + * undefined or void, then this is optional. Otherwise, it will + * be required. + */ + context?: FC; + signal?: AbortSignal; + status?: Status; + } + /** + * Options provided to {@link LRUCache#fetch} when the FC type is something + * other than `unknown`, `undefined`, or `void` + */ + interface FetchOptionsWithContext extends FetchOptions { + context: FC; + } + /** + * Options provided to {@link LRUCache#fetch} when the FC type is + * `undefined` or `void` + */ + interface FetchOptionsNoContext extends FetchOptions { + context?: undefined; + } + /** + * Options that may be passed to the {@link LRUCache#has} method. + */ + interface HasOptions extends Pick, 'updateAgeOnHas'> { + status?: Status; + } + /** + * Options that may be passed to the {@link LRUCache#get} method. + */ + interface GetOptions extends Pick, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet'> { + status?: Status; + } + /** + * Options that may be passed to the {@link LRUCache#peek} method. + */ + interface PeekOptions extends Pick, 'allowStale'> { + } + /** + * Options that may be passed to the {@link LRUCache#set} method. + */ + interface SetOptions extends Pick, 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'> { + /** + * If size tracking is enabled, then setting an explicit size + * in the {@link LRUCache#set} call will prevent calling the + * {@link OptionsBase.sizeCalculation} function. + */ + size?: Size; + /** + * If TTL tracking is enabled, then setting an explicit start + * time in the {@link LRUCache#set} call will override the + * default time from `performance.now()` or `Date.now()`. + * + * Note that it must be a valid value for whichever time-tracking + * method is in use. + */ + start?: Milliseconds; + status?: Status; + } + /** + * The type signature for the {@link OptionsBase.fetchMethod} option. 
+ */ + type Fetcher = (key: K, staleValue: V | undefined, options: FetcherOptions) => Promise | V | void | undefined; + /** + * Options which may be passed to the {@link LRUCache} constructor. + * + * Most of these may be overridden in the various options that use + * them. + * + * Despite all being technically optional, the constructor requires that + * a cache is at minimum limited by one or more of {@link OptionsBase.max}, + * {@link OptionsBase.ttl}, or {@link OptionsBase.maxSize}. + * + * If {@link OptionsBase.ttl} is used alone, then it is strongly advised + * (and in fact required by the type definitions here) that the cache + * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially + * unbounded storage. + */ + interface OptionsBase { + /** + * The maximum number of items to store in the cache before evicting + * old entries. This is read-only on the {@link LRUCache} instance, + * and may not be overridden. + * + * If set, then storage space will be pre-allocated at construction + * time, and the cache will perform significantly faster. + * + * Note that significantly fewer items may be stored, if + * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also + * set. + */ + max?: Count; + /** + * Max time in milliseconds for items to live in cache before they are + * considered stale. Note that stale items are NOT preemptively removed + * by default, and MAY live in the cache long after they have expired. + * + * Also, as this cache is optimized for LRU/MRU operations, some of + * the staleness/TTL checks will reduce performance, as they will incur + * overhead by deleting items. + * + * Must be an integer number of ms. If set to 0, this indicates "no TTL" + * + * @default 0 + */ + ttl?: Milliseconds; + /** + * Minimum amount of time in ms in which to check for staleness. + * Defaults to 1, which means that the current time is checked + * at most once per millisecond. + * + * Set to 0 to check the current time every time staleness is tested. + * (This reduces performance, and is theoretically unnecessary.) + * + * Setting this to a higher value will improve performance somewhat + * while using ttl tracking, albeit at the expense of keeping stale + * items around a bit longer than their TTLs would indicate. + * + * @default 1 + */ + ttlResolution?: Milliseconds; + /** + * Preemptively remove stale items from the cache. + * Note that this may significantly degrade performance, + * especially if the cache is storing a large number of items. + * It is almost always best to just leave the stale items in + * the cache, and let them fall out as new items are added. + * + * Note that this means that {@link OptionsBase.allowStale} is a bit + * pointless, as stale items will be deleted almost as soon as they + * expire. + * + * @default false + */ + ttlAutopurge?: boolean; + /** + * Update the age of items on {@link LRUCache#get}, renewing their TTL + * + * Has no effect if {@link OptionsBase.ttl} is not set. + * + * @default false + */ + updateAgeOnGet?: boolean; + /** + * Update the age of items on {@link LRUCache#has}, renewing their TTL + * + * Has no effect if {@link OptionsBase.ttl} is not set. + * + * @default false + */ + updateAgeOnHas?: boolean; + /** + * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return + * stale data, if available. + */ + allowStale?: boolean; + /** + * Function that is called on items when they are dropped from the cache. 
+ * This can be handy if you want to close file descriptors or do other + * cleanup tasks when items are no longer accessible. Called with `key, + * value`. It's called before actually removing the item from the + * internal cache, so it is *NOT* safe to re-add them. + * + * Use {@link OptionsBase.disposeAfter} if you wish to dispose items after + * they have been full removed, when it is safe to add them back to the + * cache. + */ + dispose?: Disposer; + /** + * The same as {@link OptionsBase.dispose}, but called *after* the entry + * is completely removed and the cache is once again in a clean state. + * It is safe to add an item right back into the cache at this point. + * However, note that it is *very* easy to inadvertently create infinite + * recursion this way. + */ + disposeAfter?: Disposer; + /** + * Set to true to suppress calling the + * {@link OptionsBase.dispose} function if the entry key is + * still accessible within the cache. + * This may be overridden by passing an options object to + * {@link LRUCache#set}. + */ + noDisposeOnSet?: boolean; + /** + * Boolean flag to tell the cache to not update the TTL when + * setting a new value for an existing key (ie, when updating a value + * rather than inserting a new value). Note that the TTL value is + * _always_ set (if provided) when adding a new entry into the cache. + * + * Has no effect if a {@link OptionsBase.ttl} is not set. + */ + noUpdateTTL?: boolean; + /** + * If you wish to track item size, you must provide a maxSize + * note that we still will only keep up to max *actual items*, + * if max is set, so size tracking may cause fewer than max items + * to be stored. At the extreme, a single item of maxSize size + * will cause everything else in the cache to be dropped when it + * is added. Use with caution! + * + * Note also that size tracking can negatively impact performance, + * though for most cases, only minimally. + */ + maxSize?: Size; + /** + * The maximum allowed size for any single item in the cache. + * + * If a larger item is passed to {@link LRUCache#set} or returned by a + * {@link OptionsBase.fetchMethod}, then it will not be stored in the + * cache. + */ + maxEntrySize?: Size; + /** + * A function that returns a number indicating the item's size. + * + * If not provided, and {@link OptionsBase.maxSize} or + * {@link OptionsBase.maxEntrySize} are set, then all + * {@link LRUCache#set} calls **must** provide an explicit + * {@link SetOptions.size} or sizeCalculation param. + */ + sizeCalculation?: SizeCalculator; + /** + * Method that provides the implementation for {@link LRUCache#fetch} + */ + fetchMethod?: Fetcher; + /** + * Set to true to suppress the deletion of stale data when a + * {@link OptionsBase.fetchMethod} returns a rejected promise. + */ + noDeleteOnFetchRejection?: boolean; + /** + * Do not delete stale items when they are retrieved with + * {@link LRUCache#get}. + * + * Note that the `get` return value will still be `undefined` + * unless {@link OptionsBase.allowStale} is true. + */ + noDeleteOnStaleGet?: boolean; + /** + * Set to true to allow returning stale data when a + * {@link OptionsBase.fetchMethod} throws an error or returns a rejected + * promise. + * + * This differs from using {@link OptionsBase.allowStale} in that stale + * data will ONLY be returned in the case that the + * {@link LRUCache#fetch} fails, not any other times. 
+ */ + allowStaleOnFetchRejection?: boolean; + /** + * Set to true to return a stale value from the cache when the + * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches an `'abort'` + * event, whether user-triggered, or due to internal cache behavior. + * + * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying + * {@link OptionsBase.fetchMethod} will still be considered canceled, and + * any value it returns will be ignored and not cached. + * + * Caveat: since fetches are aborted when a new value is explicitly + * set in the cache, this can lead to fetch returning a stale value, + * since that was the fallback value _at the moment the `fetch()` was + * initiated_, even though the new updated value is now present in + * the cache. + * + * For example: + * + * ```ts + * const cache = new LRUCache({ + * ttl: 100, + * fetchMethod: async (url, oldValue, { signal }) => { + * const res = await fetch(url, { signal }) + * return await res.json() + * } + * }) + * cache.set('https://example.com/', { some: 'data' }) + * // 100ms go by... + * const result = cache.fetch('https://example.com/') + * cache.set('https://example.com/', { other: 'thing' }) + * console.log(await result) // { some: 'data' } + * console.log(cache.get('https://example.com/')) // { other: 'thing' } + * ``` + */ + allowStaleOnFetchAbort?: boolean; + /** + * Set to true to ignore the `abort` event emitted by the `AbortSignal` + * object passed to {@link OptionsBase.fetchMethod}, and still cache the + * resulting resolution value, as long as it is not `undefined`. + * + * When used on its own, this means aborted {@link LRUCache#fetch} calls are not + * immediately resolved or rejected when they are aborted, and instead + * take the full time to await. + * + * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted + * {@link LRUCache#fetch} calls will resolve immediately to their stale + * cached value or `undefined`, and will continue to process and eventually + * update the cache when they resolve, as long as the resulting value is + * not `undefined`, thus supporting a "return stale on timeout while + * refreshing" mechanism by passing `AbortSignal.timeout(n)` as the signal. + * + * **Note**: regardless of this setting, an `abort` event _is still + * emitted on the `AbortSignal` object_, so may result in invalid results + * when passed to other underlying APIs that use AbortSignals. + * + * This may be overridden in the {@link OptionsBase.fetchMethod} or the + * call to {@link LRUCache#fetch}. + */ + ignoreFetchAbort?: boolean; + } + interface OptionsMaxLimit extends OptionsBase { + max: Count; + } + interface OptionsTTLLimit extends OptionsBase { + ttl: Milliseconds; + ttlAutopurge: boolean; + } + interface OptionsSizeLimit extends OptionsBase { + maxSize: Size; + } + /** + * The valid safe options for the {@link LRUCache} constructor + */ + type Options = OptionsMaxLimit | OptionsSizeLimit | OptionsTTLLimit; + /** + * Entry objects used by {@link LRUCache#load} and {@link LRUCache#dump} + */ + interface Entry { + value: V; + ttl?: Milliseconds; + size?: Size; + start?: Milliseconds; + } +} +/** + * Default export, the thing you're using this module to get. + * + * All properties from the options object (with the exception of + * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as + * normal public members. (`max` and `maxBase` are read-only getters.) + * Changing any of these will alter the defaults for subsequent method calls, + * but is otherwise safe. 
+ */ +export declare class LRUCache { + #private; + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl: LRUCache.Milliseconds; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution: LRUCache.Milliseconds; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge: boolean; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet: boolean; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas: boolean; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale: boolean; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet: boolean; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL: boolean; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize: LRUCache.Size; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation?: LRUCache.SizeCalculator; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection: boolean; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet: boolean; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort: boolean; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection: boolean; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort: boolean; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! + * + * @internal + */ + static unsafeExposeInternals(c: LRUCache): { + starts: ZeroArray | undefined; + ttls: ZeroArray | undefined; + sizes: ZeroArray | undefined; + keyMap: Map; + keyList: (K | undefined)[]; + valList: (V | BackgroundFetch | undefined)[]; + next: NumberArray; + prev: NumberArray; + readonly head: Index; + readonly tail: Index; + free: StackLike; + isBackgroundFetch: (p: any) => boolean; + backgroundFetch: (k: K, index: number | undefined, options: LRUCache.FetchOptions, context: any) => BackgroundFetch; + moveToTail: (index: number) => void; + indexes: (options?: { + allowStale: boolean; + }) => Generator; + rindexes: (options?: { + allowStale: boolean; + }) => Generator; + isStale: (index: number | undefined) => boolean; + }; + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max(): LRUCache.Count; + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize(): LRUCache.Count; + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize(): LRUCache.Size; + /** + * The number of items stored in the cache (read-only) + */ + get size(): LRUCache.Count; + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod(): LRUCache.Fetcher | undefined; + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose(): LRUCache.Disposer | undefined; + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter(): LRUCache.Disposer | undefined; + constructor(options: LRUCache.Options | LRUCache); + /** + * Return the remaining TTL time for a given entry key + */ + getRemainingTTL(key: K): number; + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. 
+ */ + entries(): Generator<(K | V | BackgroundFetch | undefined)[], void, unknown>; + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + rentries(): Generator<(K | V | BackgroundFetch | undefined)[], void, unknown>; + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + keys(): Generator; + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + rkeys(): Generator; + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + values(): Generator | undefined, void, unknown>; + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. + */ + rvalues(): Generator | undefined, void, unknown>; + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator](): Generator<(K | V | BackgroundFetch | undefined)[], void, unknown>; + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to Array.find(). fn is called as fn(value, key, cache). + */ + find(fn: (v: V, k: K, self: LRUCache) => boolean, getOptions?: LRUCache.GetOptions): V | undefined; + /** + * Call the supplied function on each item in the cache, in order from + * most recently used to least recently used. fn is called as + * fn(value, key, cache). Does not update age or recenty of use. + * Does not iterate over stale values. + */ + forEach(fn: (v: V, k: K, self: LRUCache) => any, thisp?: any): void; + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn: (v: V, k: K, self: LRUCache) => any, thisp?: any): void; + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale(): boolean; + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to cache.load() + */ + dump(): [K, LRUCache.Entry][]; + /** + * Reset the cache and load in the items in entries in the order listed. + * Note that the shape of the resulting cache may be different if the + * same options are not used in both caches. + */ + load(arr: [K, LRUCache.Entry][]): void; + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + */ + set(k: K, v: V | BackgroundFetch | undefined, setOptions?: LRUCache.SetOptions): this; + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. + */ + pop(): V | undefined; + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k: K, hasOptions?: LRUCache.HasOptions): boolean; + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. 
+ */ + peek(k: K, peekOptions?: LRUCache.PeekOptions): V | undefined; + /** + * Make an asynchronous cached fetch using the + * {@link LRUCache.OptionsBase.fetchMethod} function. + * + * If multiple fetches for the same key are issued, then they will all be + * coalesced into a single call to fetchMethod. + * + * Note that this means that handling options such as + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}, + * {@link LRUCache.FetchOptions.signal}, + * and {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} will be + * determined by the FIRST fetch() call for a given key. + * + * This is a known (fixable) shortcoming which will be addresed on when + * someone complains about it, as the fix would involve added complexity and + * may not be worth the costs for this edge case. + */ + fetch(k: K, fetchOptions: unknown extends FC ? LRUCache.FetchOptions : FC extends undefined | void ? LRUCache.FetchOptionsNoContext : LRUCache.FetchOptionsWithContext): Promise; + fetch(k: unknown extends FC ? K : FC extends undefined | void ? K : never, fetchOptions?: unknown extends FC ? LRUCache.FetchOptions : FC extends undefined | void ? LRUCache.FetchOptionsNoContext : never): Promise; + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. + * + * If the key is not found, get() will return `undefined`. + */ + get(k: K, getOptions?: LRUCache.GetOptions): V | undefined; + /** + * Deletes a key out of the cache. + * Returns true if the key was deleted, false otherwise. + */ + delete(k: K): boolean; + /** + * Clear the cache entirely, throwing away all values. + */ + clear(): void; +} +export {}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/dist/node_modules/lru-cache/dist/mjs/index.d.ts.map b/dist/node_modules/lru-cache/dist/mjs/index.d.ts.map new file mode 100644 index 0000000..568b5e5 --- /dev/null +++ b/dist/node_modules/lru-cache/dist/mjs/index.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA;;GAEG;AA0FH,QAAA,MAAM,IAAI,eAAiB,CAAA;AAE3B,KAAK,KAAK,GAAG,MAAM,GAAG;IAAE,CAAC,IAAI,CAAC,EAAE,gBAAgB,CAAA;CAAE,CAAA;AAKlD,KAAK,SAAS,GAAG,UAAU,GAAG,WAAW,GAAG,WAAW,CAAA;AACvD,KAAK,WAAW,GAAG,SAAS,GAAG,MAAM,EAAE,CAAA;AAyBvC,cAAM,SAAU,SAAQ,KAAK,CAAC,MAAM,CAAC;gBACvB,IAAI,EAAE,MAAM;CAIzB;AAED,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK,EAAE,CAAA;AAChC,cAAM,KAAK;;IACT,IAAI,EAAE,WAAW,CAAA;IACjB,MAAM,EAAE,MAAM,CAAA;IAGd,MAAM,CAAC,MAAM,CAAC,GAAG,EAAE,MAAM,GAAG,SAAS;gBASnC,GAAG,EAAE,MAAM,EACX,OAAO,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,GAAG,WAAW,CAAA;KAAE;IAU3C,IAAI,CAAC,CAAC,EAAE,KAAK;IAGb,GAAG,IAAI,KAAK;CAGb;AAED;;GAEG;AACH,MAAM,MAAM,eAAe,CAAC,CAAC,IAAI,OAAO,CAAC,CAAC,GAAG,SAAS,GAAG,IAAI,CAAC,GAAG;IAC/D,UAAU,EAAE,eAAe,CAAC,CAAC,CAAC,GAAG,SAAS,CAAA;IAC1C,iBAAiB,EAAE,eAAe,CAAA;IAClC,oBAAoB,EAAE,CAAC,GAAG,SAAS,CAAA;CACpC,CAAA;AAQD,yBAAiB,QAAQ,CAAC;IACxB;;OAEG;IACH,KAAY,IAAI,GAAG,MAAM,CAAA;IAEzB;;;OAGG;IACH,KAAY,YAAY,GAAG,MAAM,CAAA;IAEjC;;OAEG;IACH,KAAY,KAAK,GAAG,MAAM,CAAA;IAE1B;;;OAGG;IACH,KAAY,aAAa,GAAG,OAAO,GAAG,KAAK,GAAG,QAAQ,CAAA;IACtD;;;;OAIG;IACH,KAAY,QAAQ,CAAC,CAAC,EAAE,CAAC,IAAI,CAC3B,KAAK,EAAE,CAAC,EACR,GAAG,EAAE,CAAC,EACN,MAAM,EAAE,aAAa,KAClB,IAAI,CAAA;IAET;;;OAGG;IACH,KAAY,cAAc,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,KAAK,IAAI,CAAA;IAE7D;;;OAGG;IACH,UAAiB,cAAc,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,GAAG,OAAO;QAChD,MAAM,EAAE,WAAW,CAAA;QACnB,OAAO,EAAE,mBAAmB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAA;QACtC;;;WAGG;QACH,OAAO,EAAE,EAAE,CAAA;KACZ;IAED;;;OAGG;IACH,UAAiB,MAAM,CAAC,CAAC;QACvB;;;;;;;WAOG;QACH,GAAG,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,SAAS,GAAG,MAAM,CAAA;QAE3C;;WAEG;QACH,GAAG,CAAC,EAAE,YAAY,CAAA;QAElB;;WAEG;QACH,KAAK,CAAC,EAAE,YAAY,CAAA;QAEpB;;WAEG;QACH,GAAG,CAAC,EAAE,YAAY,CAAA;QAElB;;WAEG;QACH,YAAY,CAAC,EAAE,YAAY,CAAA;QAE3B;;WAEG;QACH,SAAS,CAAC,EAAE,IAAI,CAAA;QAEhB;;WAEG;QACH,mBAAmB,CAAC,EAAE,IAAI,CAAA;QAE1B;;;WAGG;QACH,oBAAoB,CAAC,EAAE,IAAI,CAAA;QAE3B;;;WAGG;QACH,QAAQ,CAAC,EAAE,CAAC,CAAA;QAEZ;;;;;;WAMG;QACH,GAAG,CAAC,EAAE,KAAK,GAAG,OAAO,GAAG,MAAM,CAAA;QAE9B;;;;;;;;;;;;WAYG;QACH,KAAK,CAAC,EAAE,KAAK,GAAG,UAAU,GAAG,MAAM,GAAG,KAAK,GAAG,OAAO,GAAG,SAAS,CAAA;QAEjE;;WAEG;QACH,eAAe,CAAC,EAAE,IAAI,CAAA;QAEtB;;;WAGG;QACH,YAAY,CAAC,EAAE,IAAI,CAAA;QAEnB;;;;WAIG;QACH,UAAU,CAAC,EAAE,KAAK,CAAA;QAElB;;WAEG;QACH,YAAY,CAAC,EAAE,IAAI,CAAA;QAEnB;;;WAGG;QACH,iBAAiB,CAAC,EAAE,IAAI,CAAA;QAExB;;WAEG;QACH,aAAa,CAAC,EAAE,IAAI,CAAA;QAEpB;;WAEG;QACH,aAAa,CAAC,EAAE,IAAI,CAAA;QAEpB;;;;;;;;WAQG;QACH,GAAG,CAAC,EAAE,OAAO,GAAG,KAAK,GAAG,MAAM,CAAA;QAE9B;;WAEG;QACH,aAAa,CAAC,EAAE,IAAI,CAAA;KACrB;IAED;;;;;;;;;;;;;;OAcG;IACH,UAAiB,mBAAmB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,GAAG,OAAO,CACrD,SAAQ,IAAI,CACV,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,EACnB,YAAY,GACZ,gBAAgB,GAChB,oBAAoB,GACpB,iBAAiB,GACjB,KAAK,GACL,gBAAgB,GAChB,aAAa,GACb,0BAA0B,GAC1B,4BAA4B,GAC5B,kBAAkB,GAClB,wBAAwB,CAC3B;QACD,MAAM,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAA;QAClB,IAAI,CAAC,EAAE,IAAI,CAAA;KACZ;IAED;;OAEG;IACH,UAAiB,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACpC,SAAQ,mBAAmB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QACrC;;;WAGG;QACH,YAAY,CAAC,EAAE,OAAO,CAAA;QACtB;;;;;;;WAOG;QACH,OAAO,CAAC,EAAE,EAAE,CAAA;QACZ,MAAM,CAAC,EAAE,WAAW,CAAA;QACpB,MAAM,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAA;KACnB;IACD;;;OAGG;IACH,UAAiB,uBAAuB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAC/C,SAAQ,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC9B,OAAO,EAAE,EAAE,CAAA;KACZ;IACD;;;OAGG;IACH,UAAiB,qBAAqB,CAAC,CAAC,EAAE,CAAC,CACzC,SAAQ,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC;QACrC,OAAO,CAAC,EAAE,SAAS,CAAA;KACpB;IAED;;OAEG;IACH,UAAiB,UAA
U,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAClC,SAAQ,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE,gBAAgB,CAAC;QACrD,MAAM,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAA;KACnB;IAED;;OAEG;IACH,UAAiB,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAClC,SAAQ,IAAI,CACV,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,EACrB,YAAY,GAAG,gBAAgB,GAAG,oBAAoB,CACvD;QACD,MAAM,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAA;KACnB;IAED;;OAEG;IACH,UAAiB,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACnC,SAAQ,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE,YAAY,CAAC;KAAG;IAEtD;;OAEG;IACH,UAAiB,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAClC,SAAQ,IAAI,CACV,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,EACrB,iBAAiB,GAAG,KAAK,GAAG,gBAAgB,GAAG,aAAa,CAC7D;QACD;;;;WAIG;QACH,IAAI,CAAC,EAAE,IAAI,CAAA;QACX;;;;;;;WAOG;QACH,KAAK,CAAC,EAAE,YAAY,CAAA;QACpB,MAAM,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAA;KACnB;IAED;;OAEG;IACH,KAAY,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,GAAG,OAAO,IAAI,CACxC,GAAG,EAAE,CAAC,EACN,UAAU,EAAE,CAAC,GAAG,SAAS,EACzB,OAAO,EAAE,cAAc,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,KAC9B,OAAO,CAAC,CAAC,GAAG,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,GAAG,IAAI,GAAG,SAAS,CAAA;IAEzD;;;;;;;;;;;;;;OAcG;IACH,UAAiB,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;QACnC;;;;;;;;;;;WAWG;QACH,GAAG,CAAC,EAAE,KAAK,CAAA;QAEX;;;;;;;;;;;;WAYG;QACH,GAAG,CAAC,EAAE,YAAY,CAAA;QAElB;;;;;;;;;;;;;WAaG;QACH,aAAa,CAAC,EAAE,YAAY,CAAA;QAE5B;;;;;;;;;;;;WAYG;QACH,YAAY,CAAC,EAAE,OAAO,CAAA;QAEtB;;;;;;WAMG;QACH,cAAc,CAAC,EAAE,OAAO,CAAA;QAExB;;;;;;WAMG;QACH,cAAc,CAAC,EAAE,OAAO,CAAA;QAExB;;;WAGG;QACH,UAAU,CAAC,EAAE,OAAO,CAAA;QAEpB;;;;;;;;;;WAUG;QACH,OAAO,CAAC,EAAE,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;QAExB;;;;;;WAMG;QACH,YAAY,CAAC,EAAE,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;QAE7B;;;;;;WAMG;QACH,cAAc,CAAC,EAAE,OAAO,CAAA;QAExB;;;;;;;WAOG;QACH,WAAW,CAAC,EAAE,OAAO,CAAA;QAErB;;;;;;;;;;WAUG;QACH,OAAO,CAAC,EAAE,IAAI,CAAA;QAEd;;;;;;WAMG;QACH,YAAY,CAAC,EAAE,IAAI,CAAA;QAEnB;;;;;;;WAOG;QACH,eAAe,CAAC,EAAE,cAAc,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;QAEtC;;WAEG;QACH,WAAW,CAAC,EAAE,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAA;QAE/B;;;WAGG;QACH,wBAAwB,CAAC,EAAE,OAAO,CAAA;QAElC;;;;;;WAMG;QACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;QAE5B;;;;;;;;WAQG;QACH,0BAA0B,CAAC,EAAE,OAAO,CAAA;QAEpC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;WAgCG;QACH,sBAAsB,CAAC,EAAE,OAAO,CAAA;QAEhC;;;;;;;;;;;;;;;;;;;;;;WAsBG;QACH,gBAAgB,CAAC,EAAE,OAAO,CAAA;KAC3B;IAED,UAAiB,eAAe,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACvC,SAAQ,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC7B,GAAG,EAAE,KAAK,CAAA;KACX;IACD,UAAiB,eAAe,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACvC,SAAQ,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC7B,GAAG,EAAE,YAAY,CAAA;QACjB,YAAY,EAAE,OAAO,CAAA;KACtB;IACD,UAAiB,gBAAgB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CACxC,SAAQ,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC7B,OAAO,EAAE,IAAI,CAAA;KACd;IAED;;OAEG;IACH,KAAY,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,IACxB,eAAe,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GACzB,gBAAgB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAC1B,eAAe,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAA;IAE7B;;OAEG;IACH,UAAiB,KAAK,CAAC,CAAC;QACtB,KAAK,EAAE,CAAC,CAAA;QACR,GAAG,CAAC,EAAE,YAAY,CAAA;QAClB,IAAI,CAAC,EAAE,IAAI,CAAA;QACX,KAAK,CAAC,EAAE,YAAY,CAAA;KACrB;CACF;AAED;;;;;;;;GAQG;AACH,qBAAa,QAAQ,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,SAAS,EAAE,EAAE,EAAE,GAAG,OAAO;;IAU5D;;OAEG;IACH,GAAG,EAAE,QAAQ,CAAC,YAAY,CAAA;IAE1B;;OAEG;IACH,aAAa,EAAE,QAAQ,CAAC,YAAY,CAAA;IACpC;;OAEG;IACH,YAAY,EAAE,OAAO,CAAA;IACrB;;OAEG;IACH,cAAc,EAAE,OAAO,CAAA;IACvB;;OAEG;IACH,cAAc,EAAE,OAAO,CAAA;IACvB;;OAEG;IACH,UAAU,EAAE,OAAO,CAAA;IAEnB;;OAEG;IACH,cAAc,EAAE,OAAO,CAAA;IACvB;;OAEG;IACH,WAAW,EAAE,OAAO,CAAA;IACpB;;OAEG;IACH,YAAY,EAAE,QAAQ,CAAC,IAAI,CAAA;IAC3B;;OAEG;IACH,eAAe,CAAC,EAAE,QAAQ,CAAC,cAAc,CAAC,CA
AC,EAAE,CAAC,CAAC,CAAA;IAC/C;;OAEG;IACH,wBAAwB,EAAE,OAAO,CAAA;IACjC;;OAEG;IACH,kBAAkB,EAAE,OAAO,CAAA;IAC3B;;OAEG;IACH,sBAAsB,EAAE,OAAO,CAAA;IAC/B;;OAEG;IACH,0BAA0B,EAAE,OAAO,CAAA;IACnC;;OAEG;IACH,gBAAgB,EAAE,OAAO,CAAA;IAsBzB;;;;;;;;OAQG;IACH,MAAM,CAAC,qBAAqB,CAC1B,CAAC,SAAS,EAAE,EACZ,CAAC,SAAS,EAAE,EACZ,EAAE,SAAS,OAAO,GAAG,OAAO,EAC5B,CAAC,EAAE,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;;;;;;;;;;;;+BAmBI,GAAG;6BAErB,CAAC,SACG,MAAM,GAAG,SAAS,WAChB,SAAS,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,WAC/B,GAAG,KACX,gBAAgB,CAAC,CAAC;4BAOD,MAAM,KAAG,IAAI;4BAEb;YAAE,UAAU,EAAE,OAAO,CAAA;SAAE;6BAEtB;YAAE,UAAU,EAAE,OAAO,CAAA;SAAE;yBAE3B,MAAM,GAAG,SAAS;;IAOvC;;OAEG;IACH,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAExB;IACD;;OAEG;IACH,IAAI,OAAO,IAAI,QAAQ,CAAC,KAAK,CAE5B;IACD;;OAEG;IACH,IAAI,cAAc,IAAI,QAAQ,CAAC,IAAI,CAElC;IACD;;OAEG;IACH,IAAI,IAAI,IAAI,QAAQ,CAAC,KAAK,CAEzB;IACD;;OAEG;IACH,IAAI,WAAW,IAAI,QAAQ,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,SAAS,CAExD;IACD;;OAEG;IACH,IAAI,OAAO,wCAEV;IACD;;OAEG;IACH,IAAI,YAAY,wCAEf;gBAGC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;IAiJ1D;;OAEG;IACH,eAAe,CAAC,GAAG,EAAE,CAAC;IAkOtB;;;OAGG;IACF,OAAO;IAYR;;;;;OAKG;IACF,QAAQ;IAYT;;;OAGG;IACF,IAAI;IAYL;;;;;OAKG;IACF,KAAK;IAYN;;;OAGG;IACF,MAAM;IAYP;;;;;OAKG;IACF,OAAO;IAYR;;;OAGG;IACH,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,IAAI,CACF,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,OAAO,EACrD,UAAU,GAAE,QAAQ,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAM;IAchD;;;;;OAKG;IACH,OAAO,CACL,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,GAAG,EACjD,KAAK,GAAE,GAAU;IAYnB;;;OAGG;IACH,QAAQ,CACN,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,GAAG,EACjD,KAAK,GAAE,GAAU;IAYnB;;;OAGG;IACH,UAAU;IAWV;;;OAGG;IACH,IAAI;IAyBJ;;;;OAIG;IACH,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE;IAiBlC;;;;;OAKG;IACH,GAAG,CACD,CAAC,EAAE,CAAC,EACJ,CAAC,EAAE,CAAC,GAAG,eAAe,CAAC,CAAC,CAAC,GAAG,SAAS,EACrC,UAAU,GAAE,QAAQ,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAM;IAuGhD;;;OAGG;IACH,GAAG,IAAI,CAAC,GAAG,SAAS;IAwDpB;;;;;;;OAOG;IACH,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,UAAU,GAAE,QAAQ,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAM;IA+BxD;;;;;;OAMG;IACH,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,GAAE,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAM;IAsK3D;;;;;;;;;;;;;;;;OAgBG;IACH,KAAK,CACH,CAAC,EAAE,CAAC,EACJ,YAAY,EAAE,OAAO,SAAS,EAAE,GAC5B,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAC/B,EAAE,SAAS,SAAS,GAAG,IAAI,GAC3B,QAAQ,CAAC,qBAAqB,CAAC,CAAC,EAAE,CAAC,CAAC,GACpC,QAAQ,CAAC,uBAAuB,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAC7C,OAAO,CAAC,IAAI,GAAG,CAAC,CAAC;IAEpB,KAAK,CACH,CAAC,EAAE,OAAO,SAAS,EAAE,GACjB,CAAC,GACD,EAAE,SAAS,SAAS,GAAG,IAAI,GAC3B,CAAC,GACD,KAAK,EACT,YAAY,CAAC,EAAE,OAAO,SAAS,EAAE,GAC7B,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAC/B,EAAE,SAAS,SAAS,GAAG,IAAI,GAC3B,QAAQ,CAAC,qBAAqB,CAAC,CAAC,EAAE,CAAC,CAAC,GACpC,KAAK,GACR,OAAO,CAAC,IAAI,GAAG,CAAC,CAAC;IAkGpB;;;;;OAKG;IACH,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,UAAU,GAAE,QAAQ,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAM;IAgFxD;;;OAGG;IACH,MAAM,CAAC,CAAC,EAAE,CAAC;IA+CX;;OAEG;IACH,KAAK;CAuCN"} \ No newline at end of file diff --git a/dist/node_modules/lru-cache/dist/mjs/index.js b/dist/node_modules/lru-cache/dist/mjs/index.js new file mode 100644 index 0000000..8af17c0 --- /dev/null +++ b/dist/node_modules/lru-cache/dist/mjs/index.js @@ -0,0 +1,1391 @@ +/** + 
* @module LRUCache + */ +const perf = typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date; +const warned = new Set(); +/* c8 ignore start */ +const PROCESS = (typeof process === 'object' && !!process ? process : {}); +/* c8 ignore start */ +const emitWarning = (msg, type, code, fn) => { + typeof PROCESS.emitWarning === 'function' + ? PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`); +}; +let AC = globalThis.AbortController; +let AS = globalThis.AbortSignal; +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort; + _onabort = []; + reason; + aborted = false; + addEventListener(_, fn) { + this._onabort.push(fn); + } + }; + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill(); + } + signal = new AS(); + abort(reason) { + if (this.signal.aborted) + return; + //@ts-ignore + this.signal.reason = reason; + //@ts-ignore + this.signal.aborted = true; + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason); + } + this.signal.onabort?.(reason); + } + }; + let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; + const warnACPolyfill = () => { + if (!printACPolyfillWarning) + return; + printACPolyfillWarning = false; + emitWarning('AbortController is not defined. If using lru-cache in ' + + 'node 14, load an AbortController polyfill from the ' + + '`node-abort-controller` package. A minimal polyfill is ' + + 'provided for use by LRUCache.fetch(), but it should not be ' + + 'relied upon in other contexts (eg, passing it to other APIs that ' + + 'use AbortController/AbortSignal might have undesirable effects). ' + + 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); + }; +} +/* c8 ignore stop */ +const shouldWarn = (code) => !warned.has(code); +const TYPE = Symbol('type'); +const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); +/* c8 ignore start */ +// This is a little bit ridiculous, tbh. +// The maximum array length is 2^32-1 or thereabouts on most JS impls. +// And well before that point, you're caching the entire world, I mean, +// that's ~32GB of just integers for the next/prev links, plus whatever +// else to hold that many keys and values. Just filling the memory with +// zeroes at init time is brutal when you get that big. +// But why not be complete? +// Maybe in the future, these limits will have expanded. +const getUintArray = (max) => !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? 
ZeroArray + : null; +/* c8 ignore stop */ +class ZeroArray extends Array { + constructor(size) { + super(size); + this.fill(0); + } +} +class Stack { + heap; + length; + // private constructor + static #constructing = false; + static create(max) { + const HeapCls = getUintArray(max); + if (!HeapCls) + return []; + Stack.#constructing = true; + const s = new Stack(max, HeapCls); + Stack.#constructing = false; + return s; + } + constructor(max, HeapCls) { + /* c8 ignore start */ + if (!Stack.#constructing) { + throw new TypeError('instantiate Stack using Stack.create(n)'); + } + /* c8 ignore stop */ + this.heap = new HeapCls(max); + this.length = 0; + } + push(n) { + this.heap[this.length++] = n; + } + pop() { + return this.heap[--this.length]; + } +} +/** + * Default export, the thing you're using this module to get. + * + * All properties from the options object (with the exception of + * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as + * normal public members. (`max` and `maxBase` are read-only getters.) + * Changing any of these will alter the defaults for subsequent method calls, + * but is otherwise safe. + */ +export class LRUCache { + // properties coming in from the options of these, only max and maxSize + // really *need* to be protected. The rest can be modified, as they just + // set defaults for various methods. + #max; + #maxSize; + #dispose; + #disposeAfter; + #fetchMethod; + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort; + // computed properties + #size; + #calculatedSize; + #keyMap; + #keyList; + #valList; + #next; + #prev; + #head; + #tail; + #free; + #disposed; + #sizes; + #starts; + #ttls; + #hasDispose; + #hasFetchMethod; + #hasDisposeAfter; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! 
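+ *
+ * A white-box testing sketch (assuming an LRUCache instance named `cache`
+ * that holds a key `'k'`):
+ *
+ *     const internals = LRUCache.unsafeExposeInternals(cache)
+ *     const index = internals.keyMap.get('k')
+ *     internals.isStale(index) // check staleness without touching recency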
+ * + * @internal + */ + static unsafeExposeInternals(c) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head; + }, + get tail() { + return c.#tail; + }, + free: c.#free, + // methods + isBackgroundFetch: (p) => c.#isBackgroundFetch(p), + backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), + moveToTail: (index) => c.#moveToTail(index), + indexes: (options) => c.#indexes(options), + rindexes: (options) => c.#rindexes(options), + isStale: (index) => c.#isStale(index), + }; + } + // Protected read-only members + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max() { + return this.#max; + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize() { + return this.#maxSize; + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize() { + return this.#calculatedSize; + } + /** + * The number of items stored in the cache (read-only) + */ + get size() { + return this.#size; + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod() { + return this.#fetchMethod; + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose() { + return this.#dispose; + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter; + } + constructor(options) { + const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer'); + } + const UintArray = max ? 
getUintArray(max) : Array; + if (!UintArray) { + throw new Error('invalid max value: ' + max); + } + this.#max = max; + this.#maxSize = maxSize; + this.maxEntrySize = maxEntrySize || this.#maxSize; + this.sizeCalculation = sizeCalculation; + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function'); + } + } + if (fetchMethod !== undefined && + typeof fetchMethod !== 'function') { + throw new TypeError('fetchMethod must be a function if specified'); + } + this.#fetchMethod = fetchMethod; + this.#hasFetchMethod = !!fetchMethod; + this.#keyMap = new Map(); + this.#keyList = new Array(max).fill(undefined); + this.#valList = new Array(max).fill(undefined); + this.#next = new UintArray(max); + this.#prev = new UintArray(max); + this.#head = 0; + this.#tail = 0; + this.#free = Stack.create(max); + this.#size = 0; + this.#calculatedSize = 0; + if (typeof dispose === 'function') { + this.#dispose = dispose; + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter; + this.#disposed = []; + } + else { + this.#disposeAfter = undefined; + this.#disposed = undefined; + } + this.#hasDispose = !!this.#dispose; + this.#hasDisposeAfter = !!this.#disposeAfter; + this.noDisposeOnSet = !!noDisposeOnSet; + this.noUpdateTTL = !!noUpdateTTL; + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; + this.ignoreFetchAbort = !!ignoreFetchAbort; + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified'); + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError('maxEntrySize must be a positive integer if specified'); + } + this.#initializeSizeTracking(); + } + this.allowStale = !!allowStale; + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; + this.updateAgeOnGet = !!updateAgeOnGet; + this.updateAgeOnHas = !!updateAgeOnHas; + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1; + this.ttlAutopurge = !!ttlAutopurge; + this.ttl = ttl || 0; + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified'); + } + this.#initializeTTLTracking(); + } + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError('At least one of max, maxSize, or ttl is required'); + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED'; + if (shouldWarn(code)) { + warned.add(code); + const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.'; + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); + } + } + } + /** + * Return the remaining TTL time for a given entry key + */ + getRemainingTTL(key) { + return this.#keyMap.has(key) ? Infinity : 0; + } + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max); + const starts = new ZeroArray(this.#max); + this.#ttls = ttls; + this.#starts = starts; + this.#setItemTTL = (index, ttl, start = perf.now()) => { + starts[index] = ttl !== 0 ? 
start : 0; + ttls[index] = ttl; + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.delete(this.#keyList[index]); + } + }, ttl + 1); + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + }; + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? perf.now() : 0; + }; + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index]; + const start = starts[index]; + status.ttl = ttl; + status.start = start; + status.now = cachedNow || getNow(); + const age = status.now - start; + status.remainingTTL = ttl - age; + } + }; + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0; + const getNow = () => { + const n = perf.now(); + if (this.ttlResolution > 0) { + cachedNow = n; + const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + return n; + }; + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key); + if (index === undefined) { + return 0; + } + const ttl = ttls[index]; + const start = starts[index]; + if (ttl === 0 || start === 0) { + return Infinity; + } + const age = (cachedNow || getNow()) - start; + return ttl - age; + }; + this.#isStale = index => { + return (ttls[index] !== 0 && + starts[index] !== 0 && + (cachedNow || getNow()) - starts[index] > ttls[index]); + }; + } + // conditionally set private methods related to TTL + #updateItemAge = () => { }; + #statusTTL = () => { }; + #setItemTTL = () => { }; + /* c8 ignore stop */ + #isStale = () => false; + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max); + this.#calculatedSize = 0; + this.#sizes = sizes; + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index]; + sizes[index] = 0; + }; + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.#isBackgroundFetch(v)) { + return 0; + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function'); + } + size = sizeCalculation(v, k); + if (!isPosInt(size)) { + throw new TypeError('sizeCalculation return invalid (expect positive integer)'); + } + } + else { + throw new TypeError('invalid size value (must be positive integer). 
' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.'); + } + } + return size; + }; + this.#addItemSize = (index, size, status) => { + sizes[index] = size; + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index]; + while (this.#calculatedSize > maxSize) { + this.#evict(true); + } + } + this.#calculatedSize += sizes[index]; + if (status) { + status.entrySize = size; + status.totalCalculatedSize = this.#calculatedSize; + } + }; + } + #removeItemSize = _i => { }; + #addItemSize = (_i, _s, _st) => { }; + #requireSize = (_k, _v, size, sizeCalculation) => { + if (size || sizeCalculation) { + throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); + } + return 0; + }; + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#head) { + break; + } + else { + i = this.#prev[i]; + } + } + } + } + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#tail) { + break; + } + else { + i = this.#next[i]; + } + } + } + } + #isValidIndex(index) { + return (index !== undefined && + this.#keyMap.get(this.#keyList[index]) === index); + } + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. 
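+ *
+ * A usage sketch (assuming an LRUCache instance named `cache`):
+ *
+ *     for (const value of cache.rvalues()) {
+ *       // least recently used values come first; in-flight background
+ *       // fetches and empty slots are skipped
+ *     }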
+ */ + *rvalues() { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator]() { + return this.entries(); + } + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to Array.find(). fn is called as fn(value, key, cache). + */ + find(fn, getOptions = {}) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + if (fn(value, this.#keyList[i], this)) { + return this.get(this.#keyList[i], getOptions); + } + } + } + /** + * Call the supplied function on each item in the cache, in order from + * most recently used to least recently used. fn is called as + * fn(value, key, cache). Does not update age or recenty of use. + * Does not iterate over stale values. + */ + forEach(fn, thisp = this) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn, thisp = this) { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale() { + let deleted = false; + for (const i of this.#rindexes({ allowStale: true })) { + if (this.#isStale(i)) { + this.delete(this.#keyList[i]); + deleted = true; + } + } + return deleted; + } + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to cache.load() + */ + dump() { + const arr = []; + for (const i of this.#indexes({ allowStale: true })) { + const key = this.#keyList[i]; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined || key === undefined) + continue; + const entry = { value }; + if (this.#ttls && this.#starts) { + entry.ttl = this.#ttls[i]; + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.#starts[i]; + entry.start = Math.floor(Date.now() - age); + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + arr.unshift([key, entry]); + } + return arr; + } + /** + * Reset the cache and load in the items in entries in the order listed. + * Note that the shape of the resulting cache may be different if the + * same options are not used in both caches. + */ + load(arr) { + this.clear(); + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset, so that + // we get the intended remaining TTL, no matter how long it's + // been on ice. + // + // it's ok for this to be a bit slow, it's a rare operation. 
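+ // In effect this reverses what dump() did:
+ //   dump(): entry.start = Date.now() - (perf.now() - starts[i])   // perf clock -> wall clock
+ //   load(): start       = perf.now() - (Date.now() - entry.start) // wall clock -> perf clock
+ // so an entry dumped with, say, 30s of TTL remaining is restored with
+ // roughly 30s remaining, whichever clock backs `perf` in this runtime.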
+ const age = Date.now() - entry.start; + entry.start = perf.now() - age; + } + this.set(key, entry.value, entry); + } + } + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + */ + set(k, v, setOptions = {}) { + if (v === undefined) { + this.delete(k); + return this; + } + const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; + let { noUpdateTTL = this.noUpdateTTL } = setOptions; + const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss'; + status.maxEntrySizeExceeded = true; + } + // have to delete, in case something is there already. + this.delete(k); + return this; + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k); + if (index === undefined) { + // addition + index = (this.#size === 0 + ? this.#tail + : this.#free.length !== 0 + ? this.#free.pop() + : this.#size === this.#max + ? this.#evict(false) + : this.#size); + this.#keyList[index] = k; + this.#valList[index] = v; + this.#keyMap.set(k, index); + this.#next[this.#tail] = index; + this.#prev[index] = this.#tail; + this.#tail = index; + this.#size++; + this.#addItemSize(index, size, status); + if (status) + status.set = 'add'; + noUpdateTTL = false; + } + else { + // update + this.#moveToTail(index); + const oldVal = this.#valList[index]; + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')); + } + else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal, k, 'set']); + } + } + this.#removeItemSize(index); + this.#addItemSize(index, size, status); + this.#valList[index] = v; + if (status) { + status.set = 'replace'; + const oldValue = oldVal && this.#isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal; + if (oldValue !== undefined) + status.oldValue = oldValue; + } + } + else if (status) { + status.set = 'update'; + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking(); + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start); + } + if (status) + this.#statusTTL(status, index); + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return this; + } + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
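+ *
+ * A drain-loop sketch (assuming an LRUCache instance named `cache`):
+ *
+ *     let v
+ *     while ((v = cache.pop()) !== undefined) {
+ *       console.log(v) // oldest (least recently used) values come out first
+ *     }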
+ */ + pop() { + try { + while (this.#size) { + const val = this.#valList[this.#head]; + this.#evict(true); + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching; + } + } + else if (val !== undefined) { + return val; + } + } + } + finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } + } + #evict(free) { + const head = this.#head; + const k = this.#keyList[head]; + const v = this.#valList[head]; + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']); + } + } + this.#removeItemSize(head); + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined; + this.#valList[head] = undefined; + this.#free.push(head); + } + if (this.#size === 1) { + this.#head = this.#tail = 0; + this.#free.length = 0; + } + else { + this.#head = this.#next[head]; + } + this.#keyMap.delete(k); + this.#size--; + return head; + } + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k, hasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined) { + return false; + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index); + } + if (status) { + status.has = 'hit'; + this.#statusTTL(status, index); + } + return true; + } + else if (status) { + status.has = 'stale'; + this.#statusTTL(status, index); + } + } + else if (status) { + status.has = 'miss'; + } + return false; + } + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k, peekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions; + const index = this.#keyMap.get(k); + if (index !== undefined && + (allowStale || !this.#isStale(index))) { + const v = this.#valList[index]; + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + } + } + #backgroundFetch(k, index, options, context) { + const v = index === undefined ? undefined : this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + return v; + } + const ac = new AC(); + const { signal } = options; + // when/if our AC signals, then stop listening to theirs. 
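+ // That is: if the caller's signal aborts, abort our own controller with
+ // the same reason, and register that listener with `{ signal: ac.signal }`
+ // so it is removed automatically once `ac` itself aborts. No manual
+ // removeEventListener bookkeeping is needed.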
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }); + const fetchOpts = { + signal: ac.signal, + options, + context, + }; + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal; + const ignoreAbort = options.ignoreFetchAbort && v !== undefined; + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true; + options.status.fetchError = ac.signal.reason; + if (ignoreAbort) + options.status.fetchAbortIgnored = true; + } + else { + options.status.fetchResolved = true; + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason); + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p; + if (this.#valList[index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching) { + this.#valList[index] = bf.__staleWhileFetching; + } + else { + this.delete(k); + } + } + else { + if (options.status) + options.status.fetchUpdated = true; + this.set(k, v, fetchOpts.options); + } + } + return v; + }; + const eb = (er) => { + if (options.status) { + options.status.fetchRejected = true; + options.status.fetchError = er; + } + return fetchFail(er); + }; + const fetchFail = (er) => { + const { aborted } = ac.signal; + const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; + const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; + const noDelete = allowStale || options.noDeleteOnFetchRejection; + const bf = p; + if (this.#valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || bf.__staleWhileFetching === undefined; + if (del) { + this.delete(k); + } + else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index] = bf.__staleWhileFetching; + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true; + } + return bf.__staleWhileFetching; + } + else if (bf.__returned === bf) { + throw er; + } + }; + const pcall = (res, rej) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts); + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v), rej); + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if (!options.ignoreFetchAbort || + options.allowStaleOnFetchAbort) { + res(); + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true); + } + } + }); + }; + if (options.status) + options.status.fetchDispatched = true; + const p = new Promise(pcall).then(cb, eb); + const bf = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }); + if (index === undefined) { + // internal, don't expose status. 
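+ // (The background-fetch placeholder is stored through set() so it gets a
+ // real index and obeys max/maxSize/ttl, but with `status: undefined` so
+ // this internal set() does not clobber the status object the caller
+ // passed to fetch().)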
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }); + index = this.#keyMap.get(k); + } + else { + this.#valList[index] = bf; + } + return bf; + } + #isBackgroundFetch(p) { + if (!this.#hasFetchMethod) + return false; + const b = p; + return (!!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC); + } + async fetch(k, fetchOptions = {}) { + const { + // get options + allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; + if (!this.#hasFetchMethod) { + if (status) + status.fetch = 'get'; + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }); + } + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + }; + let index = this.#keyMap.get(k); + if (index === undefined) { + if (status) + status.fetch = 'miss'; + const p = this.#backgroundFetch(k, index, options, context); + return (p.__returned = p); + } + else { + // in cache, maybe already fetching + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + const stale = allowStale && v.__staleWhileFetching !== undefined; + if (status) { + status.fetch = 'inflight'; + if (stale) + status.returnedStale = true; + } + return stale ? v.__staleWhileFetching : (v.__returned = v); + } + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.#isStale(index); + if (!forceRefresh && !isStale) { + if (status) + status.fetch = 'hit'; + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + if (status) + this.#statusTTL(status, index); + return v; + } + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context); + const hasStale = p.__staleWhileFetching !== undefined; + const staleVal = hasStale && allowStale; + if (status) { + status.fetch = isStale ? 'stale' : 'refresh'; + if (staleVal && isStale) + status.returnedStale = true; + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p); + } + } + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. + * + * If the key is not found, get() will return `undefined`. 
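+ *
+ * A usage sketch (assuming an LRUCache instance named `cache` with a ttl set):
+ *
+ *     cache.get('a')                            // undefined on a miss or a stale hit
+ *     cache.get('a', { allowStale: true })      // a stale hit returns the old value instead
+ *     cache.get('a', { updateAgeOnGet: true })  // a fresh hit also restarts the entry's TTL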
+ */ + get(k, getOptions = {}) { + const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const value = this.#valList[index]; + const fetching = this.#isBackgroundFetch(value); + if (status) + this.#statusTTL(status, index); + if (this.#isStale(index)) { + if (status) + status.get = 'stale'; + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k); + } + if (status && allowStale) + status.returnedStale = true; + return allowStale ? value : undefined; + } + else { + if (status && + allowStale && + value.__staleWhileFetching !== undefined) { + status.returnedStale = true; + } + return allowStale ? value.__staleWhileFetching : undefined; + } + } + else { + if (status) + status.get = 'hit'; + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching; + } + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + return value; + } + } + else if (status) { + status.get = 'miss'; + } + } + #connect(p, n) { + this.#prev[n] = p; + this.#next[p] = n; + } + #moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#connect(this.#prev[index], this.#next[index]); + } + this.#connect(this.#tail, index); + this.#tail = index; + } + } + /** + * Deletes a key out of the cache. + * Returns true if the key was deleted, false otherwise. + */ + delete(k) { + let deleted = false; + if (this.#size !== 0) { + const index = this.#keyMap.get(k); + if (index !== undefined) { + deleted = true; + if (this.#size === 1) { + this.clear(); + } + else { + this.#removeItemSize(index); + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + this.#keyMap.delete(k); + this.#keyList[index] = undefined; + this.#valList[index] = undefined; + if (index === this.#tail) { + this.#tail = this.#prev[index]; + } + else if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#next[this.#prev[index]] = this.#next[index]; + this.#prev[this.#next[index]] = this.#prev[index]; + } + this.#size--; + this.#free.push(index); + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return deleted; + } + /** + * Clear the cache entirely, throwing away all values. 
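+ *
+ * Any configured `dispose` / `disposeAfter` callbacks still run for each
+ * discarded value (in-flight fetches are aborted instead), with `'delete'`
+ * as the reason.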
+ */ + clear() { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else { + const k = this.#keyList[index]; + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + } + this.#keyMap.clear(); + this.#valList.fill(undefined); + this.#keyList.fill(undefined); + if (this.#ttls && this.#starts) { + this.#ttls.fill(0); + this.#starts.fill(0); + } + if (this.#sizes) { + this.#sizes.fill(0); + } + this.#head = 0; + this.#tail = 0; + this.#free.length = 0; + this.#calculatedSize = 0; + this.#size = 0; + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/node_modules/lru-cache/dist/mjs/index.js.map b/dist/node_modules/lru-cache/dist/mjs/index.js.map new file mode 100644 index 0000000..404bb11 --- /dev/null +++ b/dist/node_modules/lru-cache/dist/mjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAIH,MAAM,IAAI,GACR,OAAO,WAAW,KAAK,QAAQ;IAC/B,WAAW;IACX,OAAO,WAAW,CAAC,GAAG,KAAK,UAAU;IACnC,CAAC,CAAC,WAAW;IACb,CAAC,CAAC,IAAI,CAAA;AAEV,MAAM,MAAM,GAAG,IAAI,GAAG,EAAU,CAAA;AAKhC,qBAAqB;AACrB,MAAM,OAAO,GAAG,CACd,OAAO,OAAO,KAAK,QAAQ,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAChC,CAAA;AACzB,qBAAqB;AAErB,MAAM,WAAW,GAAG,CAClB,GAAW,EACX,IAAY,EACZ,IAAY,EACZ,EAAQ,EACR,EAAE;IACF,OAAO,OAAO,CAAC,WAAW,KAAK,UAAU;QACvC,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,EAAE,CAAC;QAC1C,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,IAAI,KAAK,GAAG,EAAE,CAAC,CAAA;AAChD,CAAC,CAAA;AAED,IAAI,EAAE,GAAG,UAAU,CAAC,eAAe,CAAA;AACnC,IAAI,EAAE,GAAG,UAAU,CAAC,WAAW,CAAA;AAE/B,qBAAqB;AACrB,IAAI,OAAO,EAAE,KAAK,WAAW,EAAE;IAC7B,YAAY;IACZ,EAAE,GAAG,MAAM,WAAW;QACpB,OAAO,CAAuB;QAC9B,QAAQ,GAA6B,EAAE,CAAA;QACvC,MAAM,CAAM;QACZ,OAAO,GAAY,KAAK,CAAA;QACxB,gBAAgB,CAAC,CAAS,EAAE,EAAwB;YAClD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;QACxB,CAAC;KACF,CAAA;IACD,YAAY;IACZ,EAAE,GAAG,MAAM,eAAe;QACxB;YACE,cAAc,EAAE,CAAA;QAClB,CAAC;QACD,MAAM,GAAG,IAAI,EAAE,EAAE,CAAA;QACjB,KAAK,CAAC,MAAW;YACf,IAAI,IAAI,CAAC,MAAM,CAAC,OAAO;gBAAE,OAAM;YAC/B,YAAY;YACZ,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,MAAM,CAAA;YAC3B,YAAY;YACZ,IAAI,CAAC,MAAM,CAAC,OAAO,GAAG,IAAI,CAAA;YAC1B,YAAY;YACZ,KAAK,MAAM,EAAE,IAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrC,EAAE,CAAC,MAAM,CAAC,CAAA;aACX;YACD,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC,MAAM,CAAC,CAAA;QAC/B,CAAC;KACF,CAAA;IACD,IAAI,sBAAsB,GACxB,OAAO,CAAC,GAAG,EAAE,2BAA2B,KAAK,GAAG,CAAA;IAClD,MAAM,cAAc,GAAG,GAAG,EAAE;QAC1B,IAAI,CAAC,sBAAsB;YAAE,OAAM;QACnC,sBAAsB,GAAG,KAAK,CAAA;QAC9B,WAAW,CACT,wDAAwD;YACtD,qDAAqD;YACrD,yDAAyD;YACzD,6DAA6D;YAC7D,mEAAmE;YACnE,mEAAmE;YACnE,qEAAqE,EACvE,qBAAqB,EACrB,SAAS,EACT,cAAc,CACf,CAAA;IACH,CAAC,CAAA;CACF;AACD,oBAAoB;AAEpB,MAAM,UAAU,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;AAEtD,MAAM,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,CAAA;AAI3B,MAAM,QAAQ,GAAG,CAAC,CAAM,EAAe,EAAE,CACvC,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAA;AAKlD,qBAAqB;AACrB,wCAAwC;AACxC,sEAAsE;AACtE,uEAAuE;AACvE,uEAAuE;AACvE,wEAAwE;AACxE,uDAAuD;AACvD,2BAA2B;AAC3B,wDAAwD;AACxD,MAAM,YAAY,GAAG,CAAC,GAAW,EAAE,EAAE,CACnC,CAAC,QAAQ,CAAC,GAAG,CAAC;IACZ,CAAC,CAAC,I
AAI;IACN,CAAC,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;QACvB,CAAC,CAAC,UAAU;QACZ,CAAC,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC;YACxB,CAAC,CAAC,WAAW;YACb,CAAC,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,CAAC;gBACxB,CAAC,CAAC,WAAW;gBACb,CAAC,CAAC,GAAG,IAAI,MAAM,CAAC,gBAAgB;oBAChC,CAAC,CAAC,SAAS;oBACX,CAAC,CAAC,IAAI,CAAA;AACV,oBAAoB;AAEpB,MAAM,SAAU,SAAQ,KAAa;IACnC,YAAY,IAAY;QACtB,KAAK,CAAC,IAAI,CAAC,CAAA;QACX,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;IACd,CAAC;CACF;AAGD,MAAM,KAAK;IACT,IAAI,CAAa;IACjB,MAAM,CAAQ;IACd,sBAAsB;IACtB,MAAM,CAAC,aAAa,GAAY,KAAK,CAAA;IACrC,MAAM,CAAC,MAAM,CAAC,GAAW;QACvB,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,CAAA;QACjC,IAAI,CAAC,OAAO;YAAE,OAAO,EAAE,CAAA;QACvB,KAAK,CAAC,aAAa,GAAG,IAAI,CAAA;QAC1B,MAAM,CAAC,GAAG,IAAI,KAAK,CAAC,GAAG,EAAE,OAAO,CAAC,CAAA;QACjC,KAAK,CAAC,aAAa,GAAG,KAAK,CAAA;QAC3B,OAAO,CAAC,CAAA;IACV,CAAC;IACD,YACE,GAAW,EACX,OAAyC;QAEzC,qBAAqB;QACrB,IAAI,CAAC,KAAK,CAAC,aAAa,EAAE;YACxB,MAAM,IAAI,SAAS,CAAC,yCAAyC,CAAC,CAAA;SAC/D;QACD,oBAAoB;QACpB,IAAI,CAAC,IAAI,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,CAAA;QAC5B,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;IACjB,CAAC;IACD,IAAI,CAAC,CAAQ;QACX,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAA;IAC9B,CAAC;IACD,GAAG;QACD,OAAO,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,MAAM,CAAU,CAAA;IAC1C,CAAC;;AA+nBH;;;;;;;;GAQG;AACH,MAAM,OAAO,QAAQ;IACnB,uEAAuE;IACvE,wEAAwE;IACxE,oCAAoC;IAC3B,IAAI,CAAgB;IACpB,QAAQ,CAAe;IACvB,QAAQ,CAA0B;IAClC,aAAa,CAA0B;IACvC,YAAY,CAA6B;IAElD;;OAEG;IACH,GAAG,CAAuB;IAE1B;;OAEG;IACH,aAAa,CAAuB;IACpC;;OAEG;IACH,YAAY,CAAS;IACrB;;OAEG;IACH,cAAc,CAAS;IACvB;;OAEG;IACH,cAAc,CAAS;IACvB;;OAEG;IACH,UAAU,CAAS;IAEnB;;OAEG;IACH,cAAc,CAAS;IACvB;;OAEG;IACH,WAAW,CAAS;IACpB;;OAEG;IACH,YAAY,CAAe;IAC3B;;OAEG;IACH,eAAe,CAAgC;IAC/C;;OAEG;IACH,wBAAwB,CAAS;IACjC;;OAEG;IACH,kBAAkB,CAAS;IAC3B;;OAEG;IACH,sBAAsB,CAAS;IAC/B;;OAEG;IACH,0BAA0B,CAAS;IACnC;;OAEG;IACH,gBAAgB,CAAS;IAEzB,sBAAsB;IACtB,KAAK,CAAgB;IACrB,eAAe,CAAe;IAC9B,OAAO,CAAe;IACtB,QAAQ,CAAmB;IAC3B,QAAQ,CAAwC;IAChD,KAAK,CAAa;IAClB,KAAK,CAAa;IAClB,KAAK,CAAO;IACZ,KAAK,CAAO;IACZ,KAAK,CAAW;IAChB,SAAS,CAAsB;IAC/B,MAAM,CAAY;IAClB,OAAO,CAAY;IACnB,KAAK,CAAY;IAEjB,WAAW,CAAS;IACpB,eAAe,CAAS;IACxB,gBAAgB,CAAS;IAEzB;;;;;;;;OAQG;IACH,MAAM,CAAC,qBAAqB,CAI1B,CAAqB;QACrB,OAAO;YACL,aAAa;YACb,MAAM,EAAE,CAAC,CAAC,OAAO;YACjB,IAAI,EAAE,CAAC,CAAC,KAAK;YACb,KAAK,EAAE,CAAC,CAAC,MAAM;YACf,MAAM,EAAE,CAAC,CAAC,OAAyB;YACnC,OAAO,EAAE,CAAC,CAAC,QAAQ;YACnB,OAAO,EAAE,CAAC,CAAC,QAAQ;YACnB,IAAI,EAAE,CAAC,CAAC,KAAK;YACb,IAAI,EAAE,CAAC,CAAC,KAAK;YACb,IAAI,IAAI;gBACN,OAAO,CAAC,CAAC,KAAK,CAAA;YAChB,CAAC;YACD,IAAI,IAAI;gBACN,OAAO,CAAC,CAAC,KAAK,CAAA;YAChB,CAAC;YACD,IAAI,EAAE,CAAC,CAAC,KAAK;YACb,UAAU;YACV,iBAAiB,EAAE,CAAC,CAAM,EAAE,EAAE,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC;YACtD,eAAe,EAAE,CACf,CAAI,EACJ,KAAyB,EACzB,OAAwC,EACxC,OAAY,EACQ,EAAE,CACtB,CAAC,CAAC,gBAAgB,CAChB,CAAC,EACD,KAA0B,EAC1B,OAAO,EACP,OAAO,CACR;YACH,UAAU,EAAE,CAAC,KAAa,EAAQ,EAAE,CAClC,CAAC,CAAC,WAAW,CAAC,KAAc,CAAC;YAC/B,OAAO,EAAE,CAAC,OAAiC,EAAE,EAAE,CAC7C,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC;YACrB,QAAQ,EAAE,CAAC,OAAiC,EAAE,EAAE,CAC9C,CAAC,CAAC,SAAS,CAAC,OAAO,CAAC;YACtB,OAAO,EAAE,CAAC,KAAyB,EAAE,EAAE,CACrC,CAAC,CAAC,QAAQ,CAAC,KAAc,CAAC;SAC7B,CAAA;IACH,CAAC;IAED,8BAA8B;IAE9B;;OAEG;IACH,IAAI,GAAG;QACL,OAAO,IAAI,CAAC,IAAI,CAAA;IAClB,CAAC;IACD;;OAEG;IACH,IAAI,OAAO;QACT,OAAO,IAAI,CAAC,QAAQ,CAAA;IACtB,CAAC;IACD;;OAEG;IACH,IAAI,cAAc;QAChB,OAAO,IAAI,CAAC,eAAe,CAAA;IAC7B,CAAC;IACD;;OAEG;IACH,IAAI,IAAI;QACN,OAAO,IAAI,CAAC,KAAK,CAAA;IACnB,CAAC;IACD;;OAEG;IACH,IAAI,WAAW;QACb,OAAO,IAAI,CAAC,YAAY,CAAA;IAC1B,CAAC;IACD;;OAEG;IACH,IAAI,OAAO;QACT,OAAO,IAAI,CAAC,QAAQ,C
AAA;IACtB,CAAC;IACD;;OAEG;IACH,IAAI,YAAY;QACd,OAAO,IAAI,CAAC,aAAa,CAAA;IAC3B,CAAC;IAED,YACE,OAAwD;QAExD,MAAM,EACJ,GAAG,GAAG,CAAC,EACP,GAAG,EACH,aAAa,GAAG,CAAC,EACjB,YAAY,EACZ,cAAc,EACd,cAAc,EACd,UAAU,EACV,OAAO,EACP,YAAY,EACZ,cAAc,EACd,WAAW,EACX,OAAO,GAAG,CAAC,EACX,YAAY,GAAG,CAAC,EAChB,eAAe,EACf,WAAW,EACX,wBAAwB,EACxB,kBAAkB,EAClB,0BAA0B,EAC1B,sBAAsB,EACtB,gBAAgB,GACjB,GAAG,OAAO,CAAA;QAEX,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YAC/B,MAAM,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAA;SAChE;QAED,MAAM,SAAS,GAAG,GAAG,CAAC,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAA;QACjD,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,qBAAqB,GAAG,GAAG,CAAC,CAAA;SAC7C;QAED,IAAI,CAAC,IAAI,GAAG,GAAG,CAAA;QACf,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAA;QACvB,IAAI,CAAC,YAAY,GAAG,YAAY,IAAI,IAAI,CAAC,QAAQ,CAAA;QACjD,IAAI,CAAC,eAAe,GAAG,eAAe,CAAA;QACtC,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,IAAI,CAAC,IAAI,CAAC,QAAQ,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE;gBACxC,MAAM,IAAI,SAAS,CACjB,oEAAoE,CACrE,CAAA;aACF;YACD,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,UAAU,EAAE;gBAC9C,MAAM,IAAI,SAAS,CAAC,qCAAqC,CAAC,CAAA;aAC3D;SACF;QAED,IACE,WAAW,KAAK,SAAS;YACzB,OAAO,WAAW,KAAK,UAAU,EACjC;YACA,MAAM,IAAI,SAAS,CACjB,6CAA6C,CAC9C,CAAA;SACF;QACD,IAAI,CAAC,YAAY,GAAG,WAAW,CAAA;QAC/B,IAAI,CAAC,eAAe,GAAG,CAAC,CAAC,WAAW,CAAA;QAEpC,IAAI,CAAC,OAAO,GAAG,IAAI,GAAG,EAAE,CAAA;QACxB,IAAI,CAAC,QAAQ,GAAG,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAC9C,IAAI,CAAC,QAAQ,GAAG,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAC9C,IAAI,CAAC,KAAK,GAAG,IAAI,SAAS,CAAC,GAAG,CAAC,CAAA;QAC/B,IAAI,CAAC,KAAK,GAAG,IAAI,SAAS,CAAC,GAAG,CAAC,CAAA;QAC/B,IAAI,CAAC,KAAK,GAAG,CAAU,CAAA;QACvB,IAAI,CAAC,KAAK,GAAG,CAAU,CAAA;QACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QAC9B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAA;QACd,IAAI,CAAC,eAAe,GAAG,CAAC,CAAA;QAExB,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;YACjC,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAA;SACxB;QACD,IAAI,OAAO,YAAY,KAAK,UAAU,EAAE;YACtC,IAAI,CAAC,aAAa,GAAG,YAAY,CAAA;YACjC,IAAI,CAAC,SAAS,GAAG,EAAE,CAAA;SACpB;aAAM;YACL,IAAI,CAAC,aAAa,GAAG,SAAS,CAAA;YAC9B,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;SAC3B;QACD,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAClC,IAAI,CAAC,gBAAgB,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QAE5C,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,cAAc,CAAA;QACtC,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,WAAW,CAAA;QAChC,IAAI,CAAC,wBAAwB,GAAG,CAAC,CAAC,wBAAwB,CAAA;QAC1D,IAAI,CAAC,0BAA0B,GAAG,CAAC,CAAC,0BAA0B,CAAA;QAC9D,IAAI,CAAC,sBAAsB,GAAG,CAAC,CAAC,sBAAsB,CAAA;QACtD,IAAI,CAAC,gBAAgB,GAAG,CAAC,CAAC,gBAAgB,CAAA;QAE1C,iDAAiD;QACjD,IAAI,IAAI,CAAC,YAAY,KAAK,CAAC,EAAE;YAC3B,IAAI,IAAI,CAAC,QAAQ,KAAK,CAAC,EAAE;gBACvB,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;oBAC5B,MAAM,IAAI,SAAS,CACjB,iDAAiD,CAClD,CAAA;iBACF;aACF;YACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;gBAChC,MAAM,IAAI,SAAS,CACjB,sDAAsD,CACvD,CAAA;aACF;YACD,IAAI,CAAC,uBAAuB,EAAE,CAAA;SAC/B;QAED,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,UAAU,CAAA;QAC9B,IAAI,CAAC,kBAAkB,GAAG,CAAC,CAAC,kBAAkB,CAAA;QAC9C,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,cAAc,CAAA;QACtC,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,cAAc,CAAA;QACtC,IAAI,CAAC,aAAa;YAChB,QAAQ,CAAC,aAAa,CAAC,IAAI,aAAa,KAAK,CAAC;gBAC5C,CAAC,CAAC,aAAa;gBACf,CAAC,CAAC,CAAC,CAAA;QACP,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,YAAY,CAAA;QAClC,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,CAAC,CAAA;QACnB,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;gBACvB,MAAM,IAAI,SAAS,CACjB,6CAA6C,CAC9C,CAAA;aACF;YACD,IAAI,CAAC,sBAAsB,EAAE,CAAA;SAC9B;QAED,2CAA2C;QAC3C,IAAI,IAAI,CAAC,IAAI,KAAK,CAAC,IAAI,IAAI,CAAC,GAAG,KAAK,CAAC,IAAI,IAAI,CAAC,QAAQ,KAAK,CAAC,EAAE;YAC5D,MAAM,IAAI,SAAS,CACjB,kDAAkD,CACnD,CAAA;SACF;QACD,IAAI,CAAC,IAAI,CAAC,YAA
Y,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;YACtD,MAAM,IAAI,GAAG,qBAAqB,CAAA;YAClC,IAAI,UAAU,CAAC,IAAI,CAAC,EAAE;gBACpB,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;gBAChB,MAAM,GAAG,GACP,wDAAwD;oBACxD,yCAAyC,CAAA;gBAC3C,WAAW,CAAC,GAAG,EAAE,uBAAuB,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;aAC1D;SACF;IACH,CAAC;IAED;;OAEG;IACH,eAAe,CAAC,GAAM;QACpB,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;IAC7C,CAAC;IAED,sBAAsB;QACpB,MAAM,IAAI,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QACrC,MAAM,MAAM,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QACvC,IAAI,CAAC,KAAK,GAAG,IAAI,CAAA;QACjB,IAAI,CAAC,OAAO,GAAG,MAAM,CAAA;QAErB,IAAI,CAAC,WAAW,GAAG,CAAC,KAAK,EAAE,GAAG,EAAE,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,EAAE,EAAE;YACpD,MAAM,CAAC,KAAK,CAAC,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;YACrC,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,CAAA;YACjB,IAAI,GAAG,KAAK,CAAC,IAAI,IAAI,CAAC,YAAY,EAAE;gBAClC,MAAM,CAAC,GAAG,UAAU,CAAC,GAAG,EAAE;oBACxB,IAAI,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;wBACxB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAM,CAAC,CAAA;qBACvC;gBACH,CAAC,EAAE,GAAG,GAAG,CAAC,CAAC,CAAA;gBACX,yCAAyC;gBACzC,qBAAqB;gBACrB,IAAI,CAAC,CAAC,KAAK,EAAE;oBACX,CAAC,CAAC,KAAK,EAAE,CAAA;iBACV;gBACD,oBAAoB;aACrB;QACH,CAAC,CAAA;QAED,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC,EAAE;YAC5B,MAAM,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;QACpD,CAAC,CAAA;QAED,IAAI,CAAC,UAAU,GAAG,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE;YAClC,IAAI,IAAI,CAAC,KAAK,CAAC,EAAE;gBACf,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,CAAA;gBACvB,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,CAAA;gBAC3B,MAAM,CAAC,GAAG,GAAG,GAAG,CAAA;gBAChB,MAAM,CAAC,KAAK,GAAG,KAAK,CAAA;gBACpB,MAAM,CAAC,GAAG,GAAG,SAAS,IAAI,MAAM,EAAE,CAAA;gBAClC,MAAM,GAAG,GAAG,MAAM,CAAC,GAAG,GAAG,KAAK,CAAA;gBAC9B,MAAM,CAAC,YAAY,GAAG,GAAG,GAAG,GAAG,CAAA;aAChC;QACH,CAAC,CAAA;QAED,0DAA0D;QAC1D,+BAA+B;QAC/B,IAAI,SAAS,GAAG,CAAC,CAAA;QACjB,MAAM,MAAM,GAAG,GAAG,EAAE;YAClB,MAAM,CAAC,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;YACpB,IAAI,IAAI,CAAC,aAAa,GAAG,CAAC,EAAE;gBAC1B,SAAS,GAAG,CAAC,CAAA;gBACb,MAAM,CAAC,GAAG,UAAU,CAClB,GAAG,EAAE,CAAC,CAAC,SAAS,GAAG,CAAC,CAAC,EACrB,IAAI,CAAC,aAAa,CACnB,CAAA;gBACD,iCAAiC;gBACjC,qBAAqB;gBACrB,IAAI,CAAC,CAAC,KAAK,EAAE;oBACX,CAAC,CAAC,KAAK,EAAE,CAAA;iBACV;gBACD,oBAAoB;aACrB;YACD,OAAO,CAAC,CAAA;QACV,CAAC,CAAA;QAED,IAAI,CAAC,eAAe,GAAG,GAAG,CAAC,EAAE;YAC3B,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA;YACnC,IAAI,KAAK,KAAK,SAAS,EAAE;gBACvB,OAAO,CAAC,CAAA;aACT;YACD,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,CAAA;YACvB,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,CAAA;YAC3B,IAAI,GAAG,KAAK,CAAC,IAAI,KAAK,KAAK,CAAC,EAAE;gBAC5B,OAAO,QAAQ,CAAA;aAChB;YACD,MAAM,GAAG,GAAG,CAAC,SAAS,IAAI,MAAM,EAAE,CAAC,GAAG,KAAK,CAAA;YAC3C,OAAO,GAAG,GAAG,GAAG,CAAA;QAClB,CAAC,CAAA;QAED,IAAI,CAAC,QAAQ,GAAG,KAAK,CAAC,EAAE;YACtB,OAAO,CACL,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC;gBACjB,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC;gBACnB,CAAC,SAAS,IAAI,MAAM,EAAE,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,CACtD,CAAA;QACH,CAAC,CAAA;IACH,CAAC;IAED,mDAAmD;IACnD,cAAc,GAA2B,GAAG,EAAE,GAAE,CAAC,CAAA;IACjD,UAAU,GACR,GAAG,EAAE,GAAE,CAAC,CAAA;IACV,WAAW,GAMC,GAAG,EAAE,GAAE,CAAC,CAAA;IACpB,oBAAoB;IAEpB,QAAQ,GAA8B,GAAG,EAAE,CAAC,KAAK,CAAA;IAEjD,uBAAuB;QACrB,MAAM,KAAK,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QACtC,IAAI,CAAC,eAAe,GAAG,CAAC,CAAA;QACxB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,eAAe,GAAG,KAAK,CAAC,EAAE;YAC7B,IAAI,CAAC,eAAe,IAAI,KAAK,CAAC,KAAK,CAAC,CAAA;YACpC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QAClB,CAAC,CAAA;QACD,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,eAA
e,EAAE,EAAE;YAClD,2CAA2C;YAC3C,sDAAsD;YACtD,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;gBAC9B,OAAO,CAAC,CAAA;aACT;YACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;gBACnB,IAAI,eAAe,EAAE;oBACnB,IAAI,OAAO,eAAe,KAAK,UAAU,EAAE;wBACzC,MAAM,IAAI,SAAS,CAAC,oCAAoC,CAAC,CAAA;qBAC1D;oBACD,IAAI,GAAG,eAAe,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;oBAC5B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;wBACnB,MAAM,IAAI,SAAS,CACjB,0DAA0D,CAC3D,CAAA;qBACF;iBACF;qBAAM;oBACL,MAAM,IAAI,SAAS,CACjB,iDAAiD;wBAC/C,wDAAwD;wBACxD,sBAAsB,CACzB,CAAA;iBACF;aACF;YACD,OAAO,IAAI,CAAA;QACb,CAAC,CAAA;QACD,IAAI,CAAC,YAAY,GAAG,CAClB,KAAY,EACZ,IAAmB,EACnB,MAA2B,EAC3B,EAAE;YACF,KAAK,CAAC,KAAK,CAAC,GAAG,IAAI,CAAA;YACnB,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACjB,MAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,GAAG,KAAK,CAAC,KAAK,CAAC,CAAA;gBAC5C,OAAO,IAAI,CAAC,eAAe,GAAG,OAAO,EAAE;oBACrC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;iBAClB;aACF;YACD,IAAI,CAAC,eAAe,IAAI,KAAK,CAAC,KAAK,CAAC,CAAA;YACpC,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,SAAS,GAAG,IAAI,CAAA;gBACvB,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,eAAe,CAAA;aAClD;QACH,CAAC,CAAA;IACH,CAAC;IAED,eAAe,GAA2B,EAAE,CAAC,EAAE,GAAE,CAAC,CAAA;IAClD,YAAY,GAIA,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,EAAE,EAAE,GAAE,CAAC,CAAA;IAC/B,YAAY,GAKS,CACnB,EAAK,EACL,EAA0B,EAC1B,IAAoB,EACpB,eAA+C,EAC/C,EAAE;QACF,IAAI,IAAI,IAAI,eAAe,EAAE;YAC3B,MAAM,IAAI,SAAS,CACjB,kEAAkE,CACnE,CAAA;SACF;QACD,OAAO,CAAC,CAAA;IACV,CAAC,CAAC;IAEF,CAAC,QAAQ,CAAC,EAAE,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE,GAAG,EAAE;QAC7C,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,KAAK,EAAE,IAAI,GAAI;gBAC/B,IAAI,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,EAAE;oBAC1B,MAAK;iBACN;gBACD,IAAI,UAAU,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;oBACnC,MAAM,CAAC,CAAA;iBACR;gBACD,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,EAAE;oBACpB,MAAK;iBACN;qBAAM;oBACL,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAU,CAAA;iBAC3B;aACF;SACF;IACH,CAAC;IAED,CAAC,SAAS,CAAC,EAAE,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE,GAAG,EAAE;QAC9C,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,KAAK,EAAE,IAAI,GAAI;gBAC/B,IAAI,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,EAAE;oBAC1B,MAAK;iBACN;gBACD,IAAI,UAAU,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;oBACnC,MAAM,CAAC,CAAA;iBACR;gBACD,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,EAAE;oBACpB,MAAK;iBACN;qBAAM;oBACL,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAU,CAAA;iBAC3B;aACF;SACF;IACH,CAAC;IAED,aAAa,CAAC,KAAY;QACxB,OAAO,CACL,KAAK,KAAK,SAAS;YACnB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAM,CAAC,KAAK,KAAK,CACtD,CAAA;IACH,CAAC;IAED;;;OAGG;IACH,CAAC,OAAO;QACN,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAE;YAC/B,IACE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,SAAS;gBAC9B,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,SAAS;gBAC9B,CAAC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAC1C;gBACA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;aAC3C;SACF;IACH,CAAC;IAED;;;;;OAKG;IACH,CAAC,QAAQ;QACP,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,SAAS,EAAE,EAAE;YAChC,IACE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,SAAS;gBAC9B,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,SAAS;gBAC9B,CAAC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAC1C;gBACA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;aAC3C;SACF;IACH,CAAC;IAED;;;OAGG;IACH,CAAC,IAAI;QACH,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAE;YAC/B,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,IACE,CAAC,KAAK,SAAS;gBACf,CAAC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAC1C;gBACA,MAAM,CAAC,CAAA;aACR;SACF;IACH,CAAC;IAED;;;;;OAKG;IACH,CAAC,KAAK;QACJ,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,SAAS,EAAE,EAAE;YAChC,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,IACE,CAAC,KAAK,SAAS;
gBACf,CAAC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAC1C;gBACA,MAAM,CAAC,CAAA;aACR;SACF;IACH,CAAC;IAED;;;OAGG;IACH,CAAC,MAAM;QACL,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAE;YAC/B,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,IACE,CAAC,KAAK,SAAS;gBACf,CAAC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAC1C;gBACA,MAAM,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;aACvB;SACF;IACH,CAAC;IAED;;;;;OAKG;IACH,CAAC,OAAO;QACN,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,SAAS,EAAE,EAAE;YAChC,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,IACE,CAAC,KAAK,SAAS;gBACf,CAAC,IAAI,CAAC,kBAAkB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAC1C;gBACA,MAAM,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;aACvB;SACF;IACH,CAAC;IAED;;;OAGG;IACH,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;IAED;;;OAGG;IACH,IAAI,CACF,EAAqD,EACrD,aAA4C,EAAE;QAE9C,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAE;YAC/B,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,MAAM,KAAK,GAAG,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC;gBACtC,CAAC,CAAC,CAAC,CAAC,oBAAoB;gBACxB,CAAC,CAAC,CAAC,CAAA;YACL,IAAI,KAAK,KAAK,SAAS;gBAAE,SAAQ;YACjC,IAAI,EAAE,CAAC,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAM,EAAE,IAAI,CAAC,EAAE;gBAC1C,OAAO,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAM,EAAE,UAAU,CAAC,CAAA;aACnD;SACF;IACH,CAAC;IAED;;;;;OAKG;IACH,OAAO,CACL,EAAiD,EACjD,QAAa,IAAI;QAEjB,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,EAAE;YAC/B,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,MAAM,KAAK,GAAG,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC;gBACtC,CAAC,CAAC,CAAC,CAAC,oBAAoB;gBACxB,CAAC,CAAC,CAAC,CAAA;YACL,IAAI,KAAK,KAAK,SAAS;gBAAE,SAAQ;YACjC,EAAE,CAAC,IAAI,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAM,EAAE,IAAI,CAAC,CAAA;SACnD;IACH,CAAC;IAED;;;OAGG;IACH,QAAQ,CACN,EAAiD,EACjD,QAAa,IAAI;QAEjB,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,SAAS,EAAE,EAAE;YAChC,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,MAAM,KAAK,GAAG,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC;gBACtC,CAAC,CAAC,CAAC,CAAC,oBAAoB;gBACxB,CAAC,CAAC,CAAC,CAAA;YACL,IAAI,KAAK,KAAK,SAAS;gBAAE,SAAQ;YACjC,EAAE,CAAC,IAAI,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAM,EAAE,IAAI,CAAC,CAAA;SACnD;IACH,CAAC;IAED;;;OAGG;IACH,UAAU;QACR,IAAI,OAAO,GAAG,KAAK,CAAA;QACnB,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,SAAS,CAAC,EAAE,UAAU,EAAE,IAAI,EAAE,CAAC,EAAE;YACpD,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;gBACpB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAM,CAAC,CAAA;gBAClC,OAAO,GAAG,IAAI,CAAA;aACf;SACF;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAED;;;OAGG;IACH,IAAI;QACF,MAAM,GAAG,GAA6B,EAAE,CAAA;QACxC,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,EAAE,UAAU,EAAE,IAAI,EAAE,CAAC,EAAE;YACnD,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC5B,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;YAC1B,MAAM,KAAK,GAAkB,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC;gBACrD,CAAC,CAAC,CAAC,CAAC,oBAAoB;gBACxB,CAAC,CAAC,CAAC,CAAA;YACL,IAAI,KAAK,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS;gBAAE,SAAQ;YACtD,MAAM,KAAK,GAAsB,EAAE,KAAK,EAAE,CAAA;YAC1C,IAAI,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,KAAK,CAAC,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;gBACzB,yDAAyD;gBACzD,4DAA4D;gBAC5D,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;gBACxC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC,CAAA;aAC3C;YACD,IAAI,IAAI,CAAC,MAAM,EAAE;gBACf,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;aAC5B;YACD,GAAG,CAAC,OAAO,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAA;SAC1B;QACD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED;;;;OAIG;IACH,IAAI,CAAC,GAA6B;QAChC,IAAI,CAAC,KAAK,EAAE,CAAA;QACZ,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,GAAG,EAAE;YAC9B,IAAI,KAAK,CAA
C,KAAK,EAAE;gBACf,2DAA2D;gBAC3D,6DAA6D;gBAC7D,6DAA6D;gBAC7D,eAAe;gBACf,EAAE;gBACF,4DAA4D;gBAC5D,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC,KAAK,CAAA;gBACpC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,GAAG,CAAA;aAC/B;YACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;SAClC;IACH,CAAC;IAED;;;;;OAKG;IACH,GAAG,CACD,CAAI,EACJ,CAAqC,EACrC,aAA4C,EAAE;QAE9C,IAAI,CAAC,KAAK,SAAS,EAAE;YACnB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;YACd,OAAO,IAAI,CAAA;SACZ;QACD,MAAM,EACJ,GAAG,GAAG,IAAI,CAAC,GAAG,EACd,KAAK,EACL,cAAc,GAAG,IAAI,CAAC,cAAc,EACpC,eAAe,GAAG,IAAI,CAAC,eAAe,EACtC,MAAM,GACP,GAAG,UAAU,CAAA;QACd,IAAI,EAAE,WAAW,GAAG,IAAI,CAAC,WAAW,EAAE,GAAG,UAAU,CAAA;QAEnD,MAAM,IAAI,GAAG,IAAI,CAAC,YAAY,CAC5B,CAAC,EACD,CAAC,EACD,UAAU,CAAC,IAAI,IAAI,CAAC,EACpB,eAAe,CAChB,CAAA;QACD,6CAA6C;QAC7C,6CAA6C;QAC7C,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,GAAG,IAAI,CAAC,YAAY,EAAE;YACjD,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,GAAG,GAAG,MAAM,CAAA;gBACnB,MAAM,CAAC,oBAAoB,GAAG,IAAI,CAAA;aACnC;YACD,sDAAsD;YACtD,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;YACd,OAAO,IAAI,CAAA;SACZ;QACD,IAAI,KAAK,GAAG,IAAI,CAAC,KAAK,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QAC9D,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,WAAW;YACX,KAAK,GAAG,CACN,IAAI,CAAC,KAAK,KAAK,CAAC;gBACd,CAAC,CAAC,IAAI,CAAC,KAAK;gBACZ,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;oBACzB,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,EAAE;oBAClB,CAAC,CAAC,IAAI,CAAC,KAAK,KAAK,IAAI,CAAC,IAAI;wBAC1B,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;wBACpB,CAAC,CAAC,IAAI,CAAC,KAAK,CACN,CAAA;YACV,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;YACxB,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;YACxB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;YAC1B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,KAAK,CAAA;YAC9B,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,KAAK,CAAA;YAC9B,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;YAClB,IAAI,CAAC,KAAK,EAAE,CAAA;YACZ,IAAI,CAAC,YAAY,CAAC,KAAK,EAAE,IAAI,EAAE,MAAM,CAAC,CAAA;YACtC,IAAI,MAAM;gBAAE,MAAM,CAAC,GAAG,GAAG,KAAK,CAAA;YAC9B,WAAW,GAAG,KAAK,CAAA;SACpB;aAAM;YACL,SAAS;YACT,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAA;YACvB,MAAM,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAA2B,CAAA;YAC7D,IAAI,CAAC,KAAK,MAAM,EAAE;gBAChB,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,EAAE;oBAC3D,MAAM,CAAC,iBAAiB,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,UAAU,CAAC,CAAC,CAAA;iBACtD;qBAAM,IAAI,CAAC,cAAc,EAAE;oBAC1B,IAAI,IAAI,CAAC,WAAW,EAAE;wBACpB,IAAI,CAAC,QAAQ,EAAE,CAAC,MAAW,EAAE,CAAC,EAAE,KAAK,CAAC,CAAA;qBACvC;oBACD,IAAI,IAAI,CAAC,gBAAgB,EAAE;wBACzB,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC,MAAW,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC,CAAA;qBAC9C;iBACF;gBACD,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,CAAA;gBAC3B,IAAI,CAAC,YAAY,CAAC,KAAK,EAAE,IAAI,EAAE,MAAM,CAAC,CAAA;gBACtC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;gBACxB,IAAI,MAAM,EAAE;oBACV,MAAM,CAAC,GAAG,GAAG,SAAS,CAAA;oBACtB,MAAM,QAAQ,GACZ,MAAM,IAAI,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC;wBACvC,CAAC,CAAC,MAAM,CAAC,oBAAoB;wBAC7B,CAAC,CAAC,MAAM,CAAA;oBACZ,IAAI,QAAQ,KAAK,SAAS;wBAAE,MAAM,CAAC,QAAQ,GAAG,QAAQ,CAAA;iBACvD;aACF;iBAAM,IAAI,MAAM,EAAE;gBACjB,MAAM,CAAC,GAAG,GAAG,QAAQ,CAAA;aACtB;SACF;QACD,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YAC5B,IAAI,CAAC,sBAAsB,EAAE,CAAA;SAC9B;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,WAAW,EAAE;gBAChB,IAAI,CAAC,WAAW,CAAC,KAAK,EAAE,GAAG,EAAE,KAAK,CAAC,CAAA;aACpC;YACD,IAAI,MAAM;gBAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;SAC3C;QACD,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,SAAS,EAAE;YAC9D,MAAM,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;YACzB,IAAI,IAAmC,CAAA;YACvC,OAAO,CAAC,IAAI,GAAG,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE;gBAC3B,IAAI,CAAC,aAAa,E
AAE,CAAC,GAAG,IAAI,CAAC,CAAA;aAC9B;SACF;QACD,OAAO,IAAI,CAAA;IACb,CAAC;IAED;;;OAGG;IACH,GAAG;QACD,IAAI;YACF,OAAO,IAAI,CAAC,KAAK,EAAE;gBACjB,MAAM,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;gBACrC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;gBACjB,IAAI,IAAI,CAAC,kBAAkB,CAAC,GAAG,CAAC,EAAE;oBAChC,IAAI,GAAG,CAAC,oBAAoB,EAAE;wBAC5B,OAAO,GAAG,CAAC,oBAAoB,CAAA;qBAChC;iBACF;qBAAM,IAAI,GAAG,KAAK,SAAS,EAAE;oBAC5B,OAAO,GAAG,CAAA;iBACX;aACF;SACF;gBAAS;YACR,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,SAAS,EAAE;gBAC3C,MAAM,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;gBACzB,IAAI,IAAmC,CAAA;gBACvC,OAAO,CAAC,IAAI,GAAG,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE;oBAC3B,IAAI,CAAC,aAAa,EAAE,CAAC,GAAG,IAAI,CAAC,CAAA;iBAC9B;aACF;SACF;IACH,CAAC;IAED,MAAM,CAAC,IAAa;QAClB,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAA;QACvB,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAM,CAAA;QAClC,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAM,CAAA;QAClC,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;YACtD,CAAC,CAAC,iBAAiB,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,SAAS,CAAC,CAAC,CAAA;SAChD;aAAM,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,gBAAgB,EAAE;YACpD,IAAI,IAAI,CAAC,WAAW,EAAE;gBACpB,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;aAC/B;YACD,IAAI,IAAI,CAAC,gBAAgB,EAAE;gBACzB,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC,CAAA;aACtC;SACF;QACD,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,CAAA;QAC1B,2DAA2D;QAC3D,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,SAAS,CAAA;YAC/B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,SAAS,CAAA;YAC/B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;SACtB;QACD,IAAI,IAAI,CAAC,KAAK,KAAK,CAAC,EAAE;YACpB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,CAAU,CAAA;YACpC,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAA;SACtB;aAAM;YACL,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAU,CAAA;SACvC;QACD,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACtB,IAAI,CAAC,KAAK,EAAE,CAAA;QACZ,OAAO,IAAI,CAAA;IACb,CAAC;IAED;;;;;;;OAOG;IACH,GAAG,CAAC,CAAI,EAAE,aAA4C,EAAE;QACtD,MAAM,EAAE,cAAc,GAAG,IAAI,CAAC,cAAc,EAAE,MAAM,EAAE,GACpD,UAAU,CAAA;QACZ,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QACjC,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;YAC9B,IACE,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC;gBAC1B,CAAC,CAAC,oBAAoB,KAAK,SAAS,EACpC;gBACA,OAAO,KAAK,CAAA;aACb;YACD,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;gBACzB,IAAI,cAAc,EAAE;oBAClB,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,CAAA;iBAC3B;gBACD,IAAI,MAAM,EAAE;oBACV,MAAM,CAAC,GAAG,GAAG,KAAK,CAAA;oBAClB,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;iBAC/B;gBACD,OAAO,IAAI,CAAA;aACZ;iBAAM,IAAI,MAAM,EAAE;gBACjB,MAAM,CAAC,GAAG,GAAG,OAAO,CAAA;gBACpB,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;aAC/B;SACF;aAAM,IAAI,MAAM,EAAE;YACjB,MAAM,CAAC,GAAG,GAAG,MAAM,CAAA;SACpB;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAED;;;;;;OAMG;IACH,IAAI,CAAC,CAAI,EAAE,cAA8C,EAAE;QACzD,MAAM,EAAE,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE,GAAG,WAAW,CAAA;QACpD,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QACjC,IACE,KAAK,KAAK,SAAS;YACnB,CAAC,UAAU,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,EACrC;YACA,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;YAC9B,oEAAoE;YACpE,OAAO,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAA;SAC/D;IACH,CAAC;IAED,gBAAgB,CACd,CAAI,EACJ,KAAwB,EACxB,OAAwC,EACxC,OAAY;QAEZ,MAAM,CAAC,GAAG,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;QAChE,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;YAC9B,OAAO,CAAC,CAAA;SACT;QAED,MAAM,EAAE,GAAG,IAAI,EAAE,EAAE,CAAA;QACnB,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,CAAA;QAC1B,yDAAyD;QACzD,MAAM,EAAE,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,
CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE;YAC/D,MAAM,EAAE,EAAE,CAAC,MAAM;SAClB,CAAC,CAAA;QAEF,MAAM,SAAS,GAAG;YAChB,MAAM,EAAE,EAAE,CAAC,MAAM;YACjB,OAAO;YACP,OAAO;SACR,CAAA;QAED,MAAM,EAAE,GAAG,CACT,CAAuB,EACvB,WAAW,GAAG,KAAK,EACG,EAAE;YACxB,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,CAAC,MAAM,CAAA;YAC7B,MAAM,WAAW,GAAG,OAAO,CAAC,gBAAgB,IAAI,CAAC,KAAK,SAAS,CAAA;YAC/D,IAAI,OAAO,CAAC,MAAM,EAAE;gBAClB,IAAI,OAAO,IAAI,CAAC,WAAW,EAAE;oBAC3B,OAAO,CAAC,MAAM,CAAC,YAAY,GAAG,IAAI,CAAA;oBAClC,OAAO,CAAC,MAAM,CAAC,UAAU,GAAG,EAAE,CAAC,MAAM,CAAC,MAAM,CAAA;oBAC5C,IAAI,WAAW;wBAAE,OAAO,CAAC,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAA;iBACzD;qBAAM;oBACL,OAAO,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAA;iBACpC;aACF;YACD,IAAI,OAAO,IAAI,CAAC,WAAW,IAAI,CAAC,WAAW,EAAE;gBAC3C,OAAO,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,CAAA;aACnC;YACD,qEAAqE;YACrE,MAAM,EAAE,GAAG,CAAuB,CAAA;YAClC,IAAI,IAAI,CAAC,QAAQ,CAAC,KAAc,CAAC,KAAK,CAAC,EAAE;gBACvC,IAAI,CAAC,KAAK,SAAS,EAAE;oBACnB,IAAI,EAAE,CAAC,oBAAoB,EAAE;wBAC3B,IAAI,CAAC,QAAQ,CAAC,KAAc,CAAC,GAAG,EAAE,CAAC,oBAAoB,CAAA;qBACxD;yBAAM;wBACL,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;qBACf;iBACF;qBAAM;oBACL,IAAI,OAAO,CAAC,MAAM;wBAAE,OAAO,CAAC,MAAM,CAAC,YAAY,GAAG,IAAI,CAAA;oBACtD,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,OAAO,CAAC,CAAA;iBAClC;aACF;YACD,OAAO,CAAC,CAAA;QACV,CAAC,CAAA;QAED,MAAM,EAAE,GAAG,CAAC,EAAO,EAAE,EAAE;YACrB,IAAI,OAAO,CAAC,MAAM,EAAE;gBAClB,OAAO,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAA;gBACnC,OAAO,CAAC,MAAM,CAAC,UAAU,GAAG,EAAE,CAAA;aAC/B;YACD,OAAO,SAAS,CAAC,EAAE,CAAC,CAAA;QACtB,CAAC,CAAA;QAED,MAAM,SAAS,GAAG,CAAC,EAAO,EAAiB,EAAE;YAC3C,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,CAAC,MAAM,CAAA;YAC7B,MAAM,iBAAiB,GACrB,OAAO,IAAI,OAAO,CAAC,sBAAsB,CAAA;YAC3C,MAAM,UAAU,GACd,iBAAiB,IAAI,OAAO,CAAC,0BAA0B,CAAA;YACzD,MAAM,QAAQ,GAAG,UAAU,IAAI,OAAO,CAAC,wBAAwB,CAAA;YAC/D,MAAM,EAAE,GAAG,CAAuB,CAAA;YAClC,IAAI,IAAI,CAAC,QAAQ,CAAC,KAAc,CAAC,KAAK,CAAC,EAAE;gBACvC,qEAAqE;gBACrE,sEAAsE;gBACtE,MAAM,GAAG,GAAG,CAAC,QAAQ,IAAI,EAAE,CAAC,oBAAoB,KAAK,SAAS,CAAA;gBAC9D,IAAI,GAAG,EAAE;oBACP,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;iBACf;qBAAM,IAAI,CAAC,iBAAiB,EAAE;oBAC7B,oDAAoD;oBACpD,oDAAoD;oBACpD,mDAAmD;oBACnD,qDAAqD;oBACrD,IAAI,CAAC,QAAQ,CAAC,KAAc,CAAC,GAAG,EAAE,CAAC,oBAAoB,CAAA;iBACxD;aACF;YACD,IAAI,UAAU,EAAE;gBACd,IAAI,OAAO,CAAC,MAAM,IAAI,EAAE,CAAC,oBAAoB,KAAK,SAAS,EAAE;oBAC3D,OAAO,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAA;iBACpC;gBACD,OAAO,EAAE,CAAC,oBAAoB,CAAA;aAC/B;iBAAM,IAAI,EAAE,CAAC,UAAU,KAAK,EAAE,EAAE;gBAC/B,MAAM,EAAE,CAAA;aACT;QACH,CAAC,CAAA;QAED,MAAM,KAAK,GAAG,CACZ,GAAsC,EACtC,GAAqB,EACrB,EAAE;YACF,MAAM,GAAG,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAA;YAChD,IAAI,GAAG,IAAI,GAAG,YAAY,OAAO,EAAE;gBACjC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;aAC3B;YACD,8CAA8C;YAC9C,8CAA8C;YAC9C,+BAA+B;YAC/B,EAAE,CAAC,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE;gBACvC,IACE,CAAC,OAAO,CAAC,gBAAgB;oBACzB,OAAO,CAAC,sBAAsB,EAC9B;oBACA,GAAG,EAAE,CAAA;oBACL,iDAAiD;oBACjD,IAAI,OAAO,CAAC,sBAAsB,EAAE;wBAClC,GAAG,GAAG,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;qBACvB;iBACF;YACH,CAAC,CAAC,CAAA;QACJ,CAAC,CAAA;QAED,IAAI,OAAO,CAAC,MAAM;YAAE,OAAO,CAAC,MAAM,CAAC,eAAe,GAAG,IAAI,CAAA;QACzD,MAAM,CAAC,GAAG,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,EAAE,EAAE,EAAE,CAAC,CAAA;QACzC,MAAM,EAAE,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE;YAC1B,iBAAiB,EAAE,EAAE;YACrB,oBAAoB,EAAE,CAAC;YACvB,UAAU,EAAE,SAAS;SACtB,CAAC,CAAA;QAEF,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,iCAAiC;YACjC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,SAAS,CAAC,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,CAAC,CAAA;YAC5D,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;SAC5B;aAA
M;YACL,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,EAAE,CAAA;SAC1B;QACD,OAAO,EAAE,CAAA;IACX,CAAC;IAED,kBAAkB,CAAC,CAAM;QACvB,IAAI,CAAC,IAAI,CAAC,eAAe;YAAE,OAAO,KAAK,CAAA;QACvC,MAAM,CAAC,GAAG,CAAuB,CAAA;QACjC,OAAO,CACL,CAAC,CAAC,CAAC;YACH,CAAC,YAAY,OAAO;YACpB,CAAC,CAAC,cAAc,CAAC,sBAAsB,CAAC;YACxC,CAAC,CAAC,iBAAiB,YAAY,EAAE,CAClC,CAAA;IACH,CAAC;IAwCD,KAAK,CAAC,KAAK,CACT,CAAI,EACJ,eAAgD,EAAE;QAElD,MAAM;QACJ,cAAc;QACd,UAAU,GAAG,IAAI,CAAC,UAAU,EAC5B,cAAc,GAAG,IAAI,CAAC,cAAc,EACpC,kBAAkB,GAAG,IAAI,CAAC,kBAAkB;QAC5C,cAAc;QACd,GAAG,GAAG,IAAI,CAAC,GAAG,EACd,cAAc,GAAG,IAAI,CAAC,cAAc,EACpC,IAAI,GAAG,CAAC,EACR,eAAe,GAAG,IAAI,CAAC,eAAe,EACtC,WAAW,GAAG,IAAI,CAAC,WAAW;QAC9B,0BAA0B;QAC1B,wBAAwB,GAAG,IAAI,CAAC,wBAAwB,EACxD,0BAA0B,GAAG,IAAI,CAAC,0BAA0B,EAC5D,gBAAgB,GAAG,IAAI,CAAC,gBAAgB,EACxC,sBAAsB,GAAG,IAAI,CAAC,sBAAsB,EACpD,OAAO,EACP,YAAY,GAAG,KAAK,EACpB,MAAM,EACN,MAAM,GACP,GAAG,YAAY,CAAA;QAEhB,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE;YACzB,IAAI,MAAM;gBAAE,MAAM,CAAC,KAAK,GAAG,KAAK,CAAA;YAChC,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE;gBACjB,UAAU;gBACV,cAAc;gBACd,kBAAkB;gBAClB,MAAM;aACP,CAAC,CAAA;SACH;QAED,MAAM,OAAO,GAAG;YACd,UAAU;YACV,cAAc;YACd,kBAAkB;YAClB,GAAG;YACH,cAAc;YACd,IAAI;YACJ,eAAe;YACf,WAAW;YACX,wBAAwB;YACxB,0BAA0B;YAC1B,sBAAsB;YACtB,gBAAgB;YAChB,MAAM;YACN,MAAM;SACP,CAAA;QAED,IAAI,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QAC/B,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,IAAI,MAAM;gBAAE,MAAM,CAAC,KAAK,GAAG,MAAM,CAAA;YACjC,MAAM,CAAC,GAAG,IAAI,CAAC,gBAAgB,CAAC,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;YAC3D,OAAO,CAAC,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,CAAA;SAC1B;aAAM;YACL,mCAAmC;YACnC,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;YAC9B,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;gBAC9B,MAAM,KAAK,GACT,UAAU,IAAI,CAAC,CAAC,oBAAoB,KAAK,SAAS,CAAA;gBACpD,IAAI,MAAM,EAAE;oBACV,MAAM,CAAC,KAAK,GAAG,UAAU,CAAA;oBACzB,IAAI,KAAK;wBAAE,MAAM,CAAC,aAAa,GAAG,IAAI,CAAA;iBACvC;gBACD,OAAO,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,CAAA;aAC3D;YAED,mEAAmE;YACnE,gEAAgE;YAChE,MAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;YACpC,IAAI,CAAC,YAAY,IAAI,CAAC,OAAO,EAAE;gBAC7B,IAAI,MAAM;oBAAE,MAAM,CAAC,KAAK,GAAG,KAAK,CAAA;gBAChC,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAA;gBACvB,IAAI,cAAc,EAAE;oBAClB,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,CAAA;iBAC3B;gBACD,IAAI,MAAM;oBAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;gBAC1C,OAAO,CAAC,CAAA;aACT;YAED,iEAAiE;YACjE,qBAAqB;YACrB,MAAM,CAAC,GAAG,IAAI,CAAC,gBAAgB,CAAC,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;YAC3D,MAAM,QAAQ,GAAG,CAAC,CAAC,oBAAoB,KAAK,SAAS,CAAA;YACrD,MAAM,QAAQ,GAAG,QAAQ,IAAI,UAAU,CAAA;YACvC,IAAI,MAAM,EAAE;gBACV,MAAM,CAAC,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAA;gBAC5C,IAAI,QAAQ,IAAI,OAAO;oBAAE,MAAM,CAAC,aAAa,GAAG,IAAI,CAAA;aACrD;YACD,OAAO,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,oBAAoB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,GAAG,CAAC,CAAC,CAAA;SAC9D;IACH,CAAC;IAED;;;;;OAKG;IACH,GAAG,CAAC,CAAI,EAAE,aAA4C,EAAE;QACtD,MAAM,EACJ,UAAU,GAAG,IAAI,CAAC,UAAU,EAC5B,cAAc,GAAG,IAAI,CAAC,cAAc,EACpC,kBAAkB,GAAG,IAAI,CAAC,kBAAkB,EAC5C,MAAM,GACP,GAAG,UAAU,CAAA;QACd,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QACjC,IAAI,KAAK,KAAK,SAAS,EAAE;YACvB,MAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;YAClC,MAAM,QAAQ,GAAG,IAAI,CAAC,kBAAkB,CAAC,KAAK,CAAC,CAAA;YAC/C,IAAI,MAAM;gBAAE,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;YAC1C,IAAI,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;gBACxB,IAAI,MAAM;oBAAE,MAAM,CAAC,GAAG,GAAG,OAAO,CAAA;gBAChC,mDAAmD;gBACnD,IAAI,CAAC,QAAQ,EAAE;oBACb,IAAI,CAAC,kBAAkB,EAAE;wBACvB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;qBACf;oBACD,IAAI,MAAM,IAAI,UAAU;wBAAE,MAAM
,CAAC,aAAa,GAAG,IAAI,CAAA;oBACrD,OAAO,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAA;iBACtC;qBAAM;oBACL,IACE,MAAM;wBACN,UAAU;wBACV,KAAK,CAAC,oBAAoB,KAAK,SAAS,EACxC;wBACA,MAAM,CAAC,aAAa,GAAG,IAAI,CAAA;qBAC5B;oBACD,OAAO,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,oBAAoB,CAAC,CAAC,CAAC,SAAS,CAAA;iBAC3D;aACF;iBAAM;gBACL,IAAI,MAAM;oBAAE,MAAM,CAAC,GAAG,GAAG,KAAK,CAAA;gBAC9B,gEAAgE;gBAChE,iEAAiE;gBACjE,kEAAkE;gBAClE,oEAAoE;gBACpE,qCAAqC;gBACrC,IAAI,QAAQ,EAAE;oBACZ,OAAO,KAAK,CAAC,oBAAoB,CAAA;iBAClC;gBACD,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAA;gBACvB,IAAI,cAAc,EAAE;oBAClB,IAAI,CAAC,cAAc,CAAC,KAAK,CAAC,CAAA;iBAC3B;gBACD,OAAO,KAAK,CAAA;aACb;SACF;aAAM,IAAI,MAAM,EAAE;YACjB,MAAM,CAAC,GAAG,GAAG,MAAM,CAAA;SACpB;IACH,CAAC;IAED,QAAQ,CAAC,CAAQ,EAAE,CAAQ;QACzB,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAA;QACjB,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAA;IACnB,CAAC;IAED,WAAW,CAAC,KAAY;QACtB,iCAAiC;QACjC,oCAAoC;QACpC,OAAO;QACP,6DAA6D;QAC7D,0CAA0C;QAC1C,qBAAqB;QACrB,qBAAqB;QACrB,eAAe;QACf,IAAI,KAAK,KAAK,IAAI,CAAC,KAAK,EAAE;YACxB,IAAI,KAAK,KAAK,IAAI,CAAC,KAAK,EAAE;gBACxB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAU,CAAA;aACxC;iBAAM;gBACL,IAAI,CAAC,QAAQ,CACX,IAAI,CAAC,KAAK,CAAC,KAAK,CAAU,EAC1B,IAAI,CAAC,KAAK,CAAC,KAAK,CAAU,CAC3B,CAAA;aACF;YACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;YAChC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;SACnB;IACH,CAAC;IAED;;;OAGG;IACH,MAAM,CAAC,CAAI;QACT,IAAI,OAAO,GAAG,KAAK,CAAA;QACnB,IAAI,IAAI,CAAC,KAAK,KAAK,CAAC,EAAE;YACpB,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;YACjC,IAAI,KAAK,KAAK,SAAS,EAAE;gBACvB,OAAO,GAAG,IAAI,CAAA;gBACd,IAAI,IAAI,CAAC,KAAK,KAAK,CAAC,EAAE;oBACpB,IAAI,CAAC,KAAK,EAAE,CAAA;iBACb;qBAAM;oBACL,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,CAAA;oBAC3B,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;oBAC9B,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;wBAC9B,CAAC,CAAC,iBAAiB,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,SAAS,CAAC,CAAC,CAAA;qBAChD;yBAAM,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,gBAAgB,EAAE;wBACpD,IAAI,IAAI,CAAC,WAAW,EAAE;4BACpB,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAM,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAA;yBACrC;wBACD,IAAI,IAAI,CAAC,gBAAgB,EAAE;4BACzB,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC,CAAM,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAA;yBAC5C;qBACF;oBACD,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;oBACtB,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,SAAS,CAAA;oBAChC,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,SAAS,CAAA;oBAChC,IAAI,KAAK,KAAK,IAAI,CAAC,KAAK,EAAE;wBACxB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAU,CAAA;qBACxC;yBAAM,IAAI,KAAK,KAAK,IAAI,CAAC,KAAK,EAAE;wBAC/B,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAU,CAAA;qBACxC;yBAAM;wBACL,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAA;wBACjD,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAA;qBAClD;oBACD,IAAI,CAAC,KAAK,EAAE,CAAA;oBACZ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;iBACvB;aACF;SACF;QACD,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,SAAS,EAAE,MAAM,EAAE;YACnD,MAAM,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;YACzB,IAAI,IAAmC,CAAA;YACvC,OAAO,CAAC,IAAI,GAAG,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE;gBAC3B,IAAI,CAAC,aAAa,EAAE,CAAC,GAAG,IAAI,CAAC,CAAA;aAC9B;SACF;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAED;;OAEG;IACH,KAAK;QACH,KAAK,MAAM,KAAK,IAAI,IAAI,CAAC,SAAS,CAAC,EAAE,UAAU,EAAE,IAAI,EAAE,CAAC,EAAE;YACxD,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;YAC9B,IAAI,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;gBAC9B,CAAC,CAAC,iBAAiB,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,SAAS,CAAC,CAAC,CAAA;aAChD;iBAAM;gBACL,MAAM,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;gBAC9B,IAAI,IAAI,CAAC,WAAW,EAAE;oBACpB,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAM,EAAE,CAAM
,EAAE,QAAQ,CAAC,CAAA;iBAC1C;gBACD,IAAI,IAAI,CAAC,gBAAgB,EAAE;oBACzB,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC,CAAM,EAAE,CAAM,EAAE,QAAQ,CAAC,CAAC,CAAA;iBACjD;aACF;SACF;QAED,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,CAAA;QACpB,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAC7B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,OAAO,EAAE;YAC9B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;YAClB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;SACrB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;SACpB;QACD,IAAI,CAAC,KAAK,GAAG,CAAU,CAAA;QACvB,IAAI,CAAC,KAAK,GAAG,CAAU,CAAA;QACvB,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAA;QACrB,IAAI,CAAC,eAAe,GAAG,CAAC,CAAA;QACxB,IAAI,CAAC,KAAK,GAAG,CAAC,CAAA;QACd,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,SAAS,EAAE;YAC3C,MAAM,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;YACzB,IAAI,IAAmC,CAAA;YACvC,OAAO,CAAC,IAAI,GAAG,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE;gBAC3B,IAAI,CAAC,aAAa,EAAE,CAAC,GAAG,IAAI,CAAC,CAAA;aAC9B;SACF;IACH,CAAC;CACF","sourcesContent":["/**\n * @module LRUCache\n */\n\n// module-private names and types\ntype Perf = { now: () => number }\nconst perf: Perf =\n typeof performance === 'object' &&\n performance &&\n typeof performance.now === 'function'\n ? performance\n : Date\n\nconst warned = new Set()\n\n// either a function or a class\ntype ForC = ((...a: any[]) => any) | { new (...a: any[]): any }\n\n/* c8 ignore start */\nconst PROCESS = (\n typeof process === 'object' && !!process ? process : {}\n) as { [k: string]: any }\n/* c8 ignore start */\n\nconst emitWarning = (\n msg: string,\n type: string,\n code: string,\n fn: ForC\n) => {\n typeof PROCESS.emitWarning === 'function'\n ? PROCESS.emitWarning(msg, type, code, fn)\n : console.error(`[${code}] ${type}: ${msg}`)\n}\n\nlet AC = globalThis.AbortController\nlet AS = globalThis.AbortSignal\n\n/* c8 ignore start */\nif (typeof AC === 'undefined') {\n //@ts-ignore\n AS = class AbortSignal {\n onabort?: (...a: any[]) => any\n _onabort: ((...a: any[]) => any)[] = []\n reason?: any\n aborted: boolean = false\n addEventListener(_: string, fn: (...a: any[]) => any) {\n this._onabort.push(fn)\n }\n }\n //@ts-ignore\n AC = class AbortController {\n constructor() {\n warnACPolyfill()\n }\n signal = new AS()\n abort(reason: any) {\n if (this.signal.aborted) return\n //@ts-ignore\n this.signal.reason = reason\n //@ts-ignore\n this.signal.aborted = true\n //@ts-ignore\n for (const fn of this.signal._onabort) {\n fn(reason)\n }\n this.signal.onabort?.(reason)\n }\n }\n let printACPolyfillWarning =\n PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'\n const warnACPolyfill = () => {\n if (!printACPolyfillWarning) return\n printACPolyfillWarning = false\n emitWarning(\n 'AbortController is not defined. If using lru-cache in ' +\n 'node 14, load an AbortController polyfill from the ' +\n '`node-abort-controller` package. A minimal polyfill is ' +\n 'provided for use by LRUCache.fetch(), but it should not be ' +\n 'relied upon in other contexts (eg, passing it to other APIs that ' +\n 'use AbortController/AbortSignal might have undesirable effects). 
' +\n 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.',\n 'NO_ABORT_CONTROLLER',\n 'ENOTSUP',\n warnACPolyfill\n )\n }\n}\n/* c8 ignore stop */\n\nconst shouldWarn = (code: string) => !warned.has(code)\n\nconst TYPE = Symbol('type')\ntype PosInt = number & { [TYPE]: 'Positive Integer' }\ntype Index = number & { [TYPE]: 'LRUCache Index' }\n\nconst isPosInt = (n: any): n is PosInt =>\n n && n === Math.floor(n) && n > 0 && isFinite(n)\n\ntype UintArray = Uint8Array | Uint16Array | Uint32Array\ntype NumberArray = UintArray | number[]\n\n/* c8 ignore start */\n// This is a little bit ridiculous, tbh.\n// The maximum array length is 2^32-1 or thereabouts on most JS impls.\n// And well before that point, you're caching the entire world, I mean,\n// that's ~32GB of just integers for the next/prev links, plus whatever\n// else to hold that many keys and values. Just filling the memory with\n// zeroes at init time is brutal when you get that big.\n// But why not be complete?\n// Maybe in the future, these limits will have expanded.\nconst getUintArray = (max: number) =>\n !isPosInt(max)\n ? null\n : max <= Math.pow(2, 8)\n ? Uint8Array\n : max <= Math.pow(2, 16)\n ? Uint16Array\n : max <= Math.pow(2, 32)\n ? Uint32Array\n : max <= Number.MAX_SAFE_INTEGER\n ? ZeroArray\n : null\n/* c8 ignore stop */\n\nclass ZeroArray extends Array {\n constructor(size: number) {\n super(size)\n this.fill(0)\n }\n}\n\ntype StackLike = Stack | Index[]\nclass Stack {\n heap: NumberArray\n length: number\n // private constructor\n static #constructing: boolean = false\n static create(max: number): StackLike {\n const HeapCls = getUintArray(max)\n if (!HeapCls) return []\n Stack.#constructing = true\n const s = new Stack(max, HeapCls)\n Stack.#constructing = false\n return s\n }\n constructor(\n max: number,\n HeapCls: { new (n: number): NumberArray }\n ) {\n /* c8 ignore start */\n if (!Stack.#constructing) {\n throw new TypeError('instantiate Stack using Stack.create(n)')\n }\n /* c8 ignore stop */\n this.heap = new HeapCls(max)\n this.length = 0\n }\n push(n: Index) {\n this.heap[this.length++] = n\n }\n pop(): Index {\n return this.heap[--this.length] as Index\n }\n}\n\n/**\n * Promise representing an in-progress {@link LRUCache#fetch} call\n */\nexport type BackgroundFetch = Promise & {\n __returned: BackgroundFetch | undefined\n __abortController: AbortController\n __staleWhileFetching: V | undefined\n}\n\ntype DisposeTask = [\n value: V,\n key: K,\n reason: LRUCache.DisposeReason\n]\n\nexport namespace LRUCache {\n /**\n * An integer greater than 0, reflecting the calculated size of items\n */\n export type Size = number\n\n /**\n * Integer greater than 0, representing some number of milliseconds, or the\n * time at which a TTL started counting from.\n */\n export type Milliseconds = number\n\n /**\n * An integer greater than 0, reflecting a number of items\n */\n export type Count = number\n\n /**\n * The reason why an item was removed from the cache, passed\n * to the {@link Disposer} methods.\n */\n export type DisposeReason = 'evict' | 'set' | 'delete'\n /**\n * A method called upon item removal, passed as the\n * {@link OptionsBase.dispose} and/or\n * {@link OptionsBase.disposeAfter} options.\n */\n export type Disposer = (\n value: V,\n key: K,\n reason: DisposeReason\n ) => void\n\n /**\n * A function that returns the effective calculated size\n * of an entry in the cache.\n */\n export type SizeCalculator = (value: V, key: K) => Size\n\n /**\n * Options provided to the\n * {@link 
OptionsBase.fetchMethod} function.\n */\n export interface FetcherOptions {\n signal: AbortSignal\n options: FetcherFetchOptions\n /**\n * Object provided in the {@link FetchOptions.context} option to\n * {@link LRUCache#fetch}\n */\n context: FC\n }\n\n /**\n * Status object that may be passed to {@link LRUCache#fetch},\n * {@link LRUCache#get}, {@link LRUCache#set}, and {@link LRUCache#has}.\n */\n export interface Status {\n /**\n * The status of a set() operation.\n *\n * - add: the item was not found in the cache, and was added\n * - update: the item was in the cache, with the same value provided\n * - replace: the item was in the cache, and replaced\n * - miss: the item was not added to the cache for some reason\n */\n set?: 'add' | 'update' | 'replace' | 'miss'\n\n /**\n * the ttl stored for the item, or undefined if ttls are not used.\n */\n ttl?: Milliseconds\n\n /**\n * the start time for the item, or undefined if ttls are not used.\n */\n start?: Milliseconds\n\n /**\n * The timestamp used for TTL calculation\n */\n now?: Milliseconds\n\n /**\n * the remaining ttl for the item, or undefined if ttls are not used.\n */\n remainingTTL?: Milliseconds\n\n /**\n * The calculated size for the item, if sizes are used.\n */\n entrySize?: Size\n\n /**\n * The total calculated size of the cache, if sizes are used.\n */\n totalCalculatedSize?: Size\n\n /**\n * A flag indicating that the item was not stored, due to exceeding the\n * {@link OptionsBase.maxEntrySize}\n */\n maxEntrySizeExceeded?: true\n\n /**\n * The old value, specified in the case of `set:'update'` or\n * `set:'replace'`\n */\n oldValue?: V\n\n /**\n * The results of a {@link LRUCache#has} operation\n *\n * - hit: the item was found in the cache\n * - stale: the item was found in the cache, but is stale\n * - miss: the item was not found in the cache\n */\n has?: 'hit' | 'stale' | 'miss'\n\n /**\n * The status of a {@link LRUCache#fetch} operation.\n * Note that this can change as the underlying fetch() moves through\n * various states.\n *\n * - inflight: there is another fetch() for this key which is in process\n * - get: there is no fetchMethod, so {@link LRUCache#get} was called.\n * - miss: the item is not in cache, and will be fetched.\n * - hit: the item is in the cache, and was resolved immediately.\n * - stale: the item is in the cache, but stale.\n * - refresh: the item is in the cache, and not stale, but\n * {@link FetchOptions.forceRefresh} was specified.\n */\n fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh'\n\n /**\n * The {@link OptionsBase.fetchMethod} was called\n */\n fetchDispatched?: true\n\n /**\n * The cached value was updated after a successful call to\n * {@link OptionsBase.fetchMethod}\n */\n fetchUpdated?: true\n\n /**\n * The reason for a fetch() rejection. Either the error raised by the\n * {@link OptionsBase.fetchMethod}, or the reason for an\n * AbortSignal.\n */\n fetchError?: Error\n\n /**\n * The fetch received an abort signal\n */\n fetchAborted?: true\n\n /**\n * The abort signal received was ignored, and the fetch was allowed to\n * continue.\n */\n fetchAbortIgnored?: true\n\n /**\n * The fetchMethod promise resolved successfully\n */\n fetchResolved?: true\n\n /**\n * The fetchMethod promise was rejected\n */\n fetchRejected?: true\n\n /**\n * The status of a {@link LRUCache#get} operation.\n *\n * - fetching: The item is currently being fetched. 
If a previous value\n * is present and allowed, that will be returned.\n * - stale: The item is in the cache, and is stale.\n * - hit: the item is in the cache\n * - miss: the item is not in the cache\n */\n get?: 'stale' | 'hit' | 'miss'\n\n /**\n * A fetch or get operation returned a stale value.\n */\n returnedStale?: true\n }\n\n /**\n * options which override the options set in the LRUCache constructor\n * when calling {@link LRUCache#fetch}.\n *\n * This is the union of {@link GetOptions} and {@link SetOptions}, plus\n * {@link OptionsBase.noDeleteOnFetchRejection},\n * {@link OptionsBase.allowStaleOnFetchRejection},\n * {@link FetchOptions.forceRefresh}, and\n * {@link OptionsBase.context}\n *\n * Any of these may be modified in the {@link OptionsBase.fetchMethod}\n * function, but the {@link GetOptions} fields will of course have no\n * effect, as the {@link LRUCache#get} call already happened by the time\n * the fetchMethod is called.\n */\n export interface FetcherFetchOptions\n extends Pick<\n OptionsBase,\n | 'allowStale'\n | 'updateAgeOnGet'\n | 'noDeleteOnStaleGet'\n | 'sizeCalculation'\n | 'ttl'\n | 'noDisposeOnSet'\n | 'noUpdateTTL'\n | 'noDeleteOnFetchRejection'\n | 'allowStaleOnFetchRejection'\n | 'ignoreFetchAbort'\n | 'allowStaleOnFetchAbort'\n > {\n status?: Status\n size?: Size\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#fetch} method.\n */\n export interface FetchOptions\n extends FetcherFetchOptions {\n /**\n * Set to true to force a re-load of the existing data, even if it\n * is not yet stale.\n */\n forceRefresh?: boolean\n /**\n * Context provided to the {@link OptionsBase.fetchMethod} as\n * the {@link FetcherOptions.context} param.\n *\n * If the FC type is specified as unknown (the default),\n * undefined or void, then this is optional. 
Otherwise, it will\n * be required.\n */\n context?: FC\n signal?: AbortSignal\n status?: Status\n }\n /**\n * Options provided to {@link LRUCache#fetch} when the FC type is something\n * other than `unknown`, `undefined`, or `void`\n */\n export interface FetchOptionsWithContext\n extends FetchOptions {\n context: FC\n }\n /**\n * Options provided to {@link LRUCache#fetch} when the FC type is\n * `undefined` or `void`\n */\n export interface FetchOptionsNoContext\n extends FetchOptions {\n context?: undefined\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#has} method.\n */\n export interface HasOptions\n extends Pick, 'updateAgeOnHas'> {\n status?: Status\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#get} method.\n */\n export interface GetOptions\n extends Pick<\n OptionsBase,\n 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet'\n > {\n status?: Status\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#peek} method.\n */\n export interface PeekOptions\n extends Pick, 'allowStale'> {}\n\n /**\n * Options that may be passed to the {@link LRUCache#set} method.\n */\n export interface SetOptions\n extends Pick<\n OptionsBase,\n 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'\n > {\n /**\n * If size tracking is enabled, then setting an explicit size\n * in the {@link LRUCache#set} call will prevent calling the\n * {@link OptionsBase.sizeCalculation} function.\n */\n size?: Size\n /**\n * If TTL tracking is enabled, then setting an explicit start\n * time in the {@link LRUCache#set} call will override the\n * default time from `performance.now()` or `Date.now()`.\n *\n * Note that it must be a valid value for whichever time-tracking\n * method is in use.\n */\n start?: Milliseconds\n status?: Status\n }\n\n /**\n * The type signature for the {@link OptionsBase.fetchMethod} option.\n */\n export type Fetcher = (\n key: K,\n staleValue: V | undefined,\n options: FetcherOptions\n ) => Promise | V | void | undefined\n\n /**\n * Options which may be passed to the {@link LRUCache} constructor.\n *\n * Most of these may be overridden in the various options that use\n * them.\n *\n * Despite all being technically optional, the constructor requires that\n * a cache is at minimum limited by one or more of {@link OptionsBase.max},\n * {@link OptionsBase.ttl}, or {@link OptionsBase.maxSize}.\n *\n * If {@link OptionsBase.ttl} is used alone, then it is strongly advised\n * (and in fact required by the type definitions here) that the cache\n * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially\n * unbounded storage.\n */\n export interface OptionsBase {\n /**\n * The maximum number of items to store in the cache before evicting\n * old entries. This is read-only on the {@link LRUCache} instance,\n * and may not be overridden.\n *\n * If set, then storage space will be pre-allocated at construction\n * time, and the cache will perform significantly faster.\n *\n * Note that significantly fewer items may be stored, if\n * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also\n * set.\n */\n max?: Count\n\n /**\n * Max time in milliseconds for items to live in cache before they are\n * considered stale. 
Note that stale items are NOT preemptively removed\n * by default, and MAY live in the cache long after they have expired.\n *\n * Also, as this cache is optimized for LRU/MRU operations, some of\n * the staleness/TTL checks will reduce performance, as they will incur\n * overhead by deleting items.\n *\n * Must be an integer number of ms. If set to 0, this indicates \"no TTL\"\n *\n * @default 0\n */\n ttl?: Milliseconds\n\n /**\n * Minimum amount of time in ms in which to check for staleness.\n * Defaults to 1, which means that the current time is checked\n * at most once per millisecond.\n *\n * Set to 0 to check the current time every time staleness is tested.\n * (This reduces performance, and is theoretically unnecessary.)\n *\n * Setting this to a higher value will improve performance somewhat\n * while using ttl tracking, albeit at the expense of keeping stale\n * items around a bit longer than their TTLs would indicate.\n *\n * @default 1\n */\n ttlResolution?: Milliseconds\n\n /**\n * Preemptively remove stale items from the cache.\n * Note that this may significantly degrade performance,\n * especially if the cache is storing a large number of items.\n * It is almost always best to just leave the stale items in\n * the cache, and let them fall out as new items are added.\n *\n * Note that this means that {@link OptionsBase.allowStale} is a bit\n * pointless, as stale items will be deleted almost as soon as they\n * expire.\n *\n * @default false\n */\n ttlAutopurge?: boolean\n\n /**\n * Update the age of items on {@link LRUCache#get}, renewing their TTL\n *\n * Has no effect if {@link OptionsBase.ttl} is not set.\n *\n * @default false\n */\n updateAgeOnGet?: boolean\n\n /**\n * Update the age of items on {@link LRUCache#has}, renewing their TTL\n *\n * Has no effect if {@link OptionsBase.ttl} is not set.\n *\n * @default false\n */\n updateAgeOnHas?: boolean\n\n /**\n * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return\n * stale data, if available.\n */\n allowStale?: boolean\n\n /**\n * Function that is called on items when they are dropped from the cache.\n * This can be handy if you want to close file descriptors or do other\n * cleanup tasks when items are no longer accessible. Called with `key,\n * value`. It's called before actually removing the item from the\n * internal cache, so it is *NOT* safe to re-add them.\n *\n * Use {@link OptionsBase.disposeAfter} if you wish to dispose items after\n * they have been full removed, when it is safe to add them back to the\n * cache.\n */\n dispose?: Disposer\n\n /**\n * The same as {@link OptionsBase.dispose}, but called *after* the entry\n * is completely removed and the cache is once again in a clean state.\n * It is safe to add an item right back into the cache at this point.\n * However, note that it is *very* easy to inadvertently create infinite\n * recursion this way.\n */\n disposeAfter?: Disposer\n\n /**\n * Set to true to suppress calling the\n * {@link OptionsBase.dispose} function if the entry key is\n * still accessible within the cache.\n * This may be overridden by passing an options object to\n * {@link LRUCache#set}.\n */\n noDisposeOnSet?: boolean\n\n /**\n * Boolean flag to tell the cache to not update the TTL when\n * setting a new value for an existing key (ie, when updating a value\n * rather than inserting a new value). 
Note that the TTL value is\n * _always_ set (if provided) when adding a new entry into the cache.\n *\n * Has no effect if a {@link OptionsBase.ttl} is not set.\n */\n noUpdateTTL?: boolean\n\n /**\n * If you wish to track item size, you must provide a maxSize\n * note that we still will only keep up to max *actual items*,\n * if max is set, so size tracking may cause fewer than max items\n * to be stored. At the extreme, a single item of maxSize size\n * will cause everything else in the cache to be dropped when it\n * is added. Use with caution!\n *\n * Note also that size tracking can negatively impact performance,\n * though for most cases, only minimally.\n */\n maxSize?: Size\n\n /**\n * The maximum allowed size for any single item in the cache.\n *\n * If a larger item is passed to {@link LRUCache#set} or returned by a\n * {@link OptionsBase.fetchMethod}, then it will not be stored in the\n * cache.\n */\n maxEntrySize?: Size\n\n /**\n * A function that returns a number indicating the item's size.\n *\n * If not provided, and {@link OptionsBase.maxSize} or\n * {@link OptionsBase.maxEntrySize} are set, then all\n * {@link LRUCache#set} calls **must** provide an explicit\n * {@link SetOptions.size} or sizeCalculation param.\n */\n sizeCalculation?: SizeCalculator\n\n /**\n * Method that provides the implementation for {@link LRUCache#fetch}\n */\n fetchMethod?: Fetcher\n\n /**\n * Set to true to suppress the deletion of stale data when a\n * {@link OptionsBase.fetchMethod} returns a rejected promise.\n */\n noDeleteOnFetchRejection?: boolean\n\n /**\n * Do not delete stale items when they are retrieved with\n * {@link LRUCache#get}.\n *\n * Note that the `get` return value will still be `undefined`\n * unless {@link OptionsBase.allowStale} is true.\n */\n noDeleteOnStaleGet?: boolean\n\n /**\n * Set to true to allow returning stale data when a\n * {@link OptionsBase.fetchMethod} throws an error or returns a rejected\n * promise.\n *\n * This differs from using {@link OptionsBase.allowStale} in that stale\n * data will ONLY be returned in the case that the\n * {@link LRUCache#fetch} fails, not any other times.\n */\n allowStaleOnFetchRejection?: boolean\n\n /**\n * Set to true to return a stale value from the cache when the\n * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches an `'abort'`\n * event, whether user-triggered, or due to internal cache behavior.\n *\n * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying\n * {@link OptionsBase.fetchMethod} will still be considered canceled, and\n * any value it returns will be ignored and not cached.\n *\n * Caveat: since fetches are aborted when a new value is explicitly\n * set in the cache, this can lead to fetch returning a stale value,\n * since that was the fallback value _at the moment the `fetch()` was\n * initiated_, even though the new updated value is now present in\n * the cache.\n *\n * For example:\n *\n * ```ts\n * const cache = new LRUCache({\n * ttl: 100,\n * fetchMethod: async (url, oldValue, { signal }) => {\n * const res = await fetch(url, { signal })\n * return await res.json()\n * }\n * })\n * cache.set('https://example.com/', { some: 'data' })\n * // 100ms go by...\n * const result = cache.fetch('https://example.com/')\n * cache.set('https://example.com/', { other: 'thing' })\n * console.log(await result) // { some: 'data' }\n * console.log(cache.get('https://example.com/')) // { other: 'thing' }\n * ```\n */\n allowStaleOnFetchAbort?: boolean\n\n /**\n * Set to true to ignore 
the `abort` event emitted by the `AbortSignal`\n * object passed to {@link OptionsBase.fetchMethod}, and still cache the\n * resulting resolution value, as long as it is not `undefined`.\n *\n * When used on its own, this means aborted {@link LRUCache#fetch} calls are not\n * immediately resolved or rejected when they are aborted, and instead\n * take the full time to await.\n *\n * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted\n * {@link LRUCache#fetch} calls will resolve immediately to their stale\n * cached value or `undefined`, and will continue to process and eventually\n * update the cache when they resolve, as long as the resulting value is\n * not `undefined`, thus supporting a \"return stale on timeout while\n * refreshing\" mechanism by passing `AbortSignal.timeout(n)` as the signal.\n *\n * **Note**: regardless of this setting, an `abort` event _is still\n * emitted on the `AbortSignal` object_, so may result in invalid results\n * when passed to other underlying APIs that use AbortSignals.\n *\n * This may be overridden in the {@link OptionsBase.fetchMethod} or the\n * call to {@link LRUCache#fetch}.\n */\n ignoreFetchAbort?: boolean\n }\n\n export interface OptionsMaxLimit\n extends OptionsBase {\n max: Count\n }\n export interface OptionsTTLLimit\n extends OptionsBase {\n ttl: Milliseconds\n ttlAutopurge: boolean\n }\n export interface OptionsSizeLimit\n extends OptionsBase {\n maxSize: Size\n }\n\n /**\n * The valid safe options for the {@link LRUCache} constructor\n */\n export type Options =\n | OptionsMaxLimit\n | OptionsSizeLimit\n | OptionsTTLLimit\n\n /**\n * Entry objects used by {@link LRUCache#load} and {@link LRUCache#dump}\n */\n export interface Entry {\n value: V\n ttl?: Milliseconds\n size?: Size\n start?: Milliseconds\n }\n}\n\n/**\n * Default export, the thing you're using this module to get.\n *\n * All properties from the options object (with the exception of\n * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as\n * normal public members. (`max` and `maxBase` are read-only getters.)\n * Changing any of these will alter the defaults for subsequent method calls,\n * but is otherwise safe.\n */\nexport class LRUCache {\n // properties coming in from the options of these, only max and maxSize\n // really *need* to be protected. 
The rest can be modified, as they just\n // set defaults for various methods.\n readonly #max: LRUCache.Count\n readonly #maxSize: LRUCache.Size\n readonly #dispose?: LRUCache.Disposer\n readonly #disposeAfter?: LRUCache.Disposer\n readonly #fetchMethod?: LRUCache.Fetcher\n\n /**\n * {@link LRUCache.OptionsBase.ttl}\n */\n ttl: LRUCache.Milliseconds\n\n /**\n * {@link LRUCache.OptionsBase.ttlResolution}\n */\n ttlResolution: LRUCache.Milliseconds\n /**\n * {@link LRUCache.OptionsBase.ttlAutopurge}\n */\n ttlAutopurge: boolean\n /**\n * {@link LRUCache.OptionsBase.updateAgeOnGet}\n */\n updateAgeOnGet: boolean\n /**\n * {@link LRUCache.OptionsBase.updateAgeOnHas}\n */\n updateAgeOnHas: boolean\n /**\n * {@link LRUCache.OptionsBase.allowStale}\n */\n allowStale: boolean\n\n /**\n * {@link LRUCache.OptionsBase.noDisposeOnSet}\n */\n noDisposeOnSet: boolean\n /**\n * {@link LRUCache.OptionsBase.noUpdateTTL}\n */\n noUpdateTTL: boolean\n /**\n * {@link LRUCache.OptionsBase.maxEntrySize}\n */\n maxEntrySize: LRUCache.Size\n /**\n * {@link LRUCache.OptionsBase.sizeCalculation}\n */\n sizeCalculation?: LRUCache.SizeCalculator\n /**\n * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}\n */\n noDeleteOnFetchRejection: boolean\n /**\n * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}\n */\n noDeleteOnStaleGet: boolean\n /**\n * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}\n */\n allowStaleOnFetchAbort: boolean\n /**\n * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}\n */\n allowStaleOnFetchRejection: boolean\n /**\n * {@link LRUCache.OptionsBase.ignoreFetchAbort}\n */\n ignoreFetchAbort: boolean\n\n // computed properties\n #size: LRUCache.Count\n #calculatedSize: LRUCache.Size\n #keyMap: Map\n #keyList: (K | undefined)[]\n #valList: (V | BackgroundFetch | undefined)[]\n #next: NumberArray\n #prev: NumberArray\n #head: Index\n #tail: Index\n #free: StackLike\n #disposed?: DisposeTask[]\n #sizes?: ZeroArray\n #starts?: ZeroArray\n #ttls?: ZeroArray\n\n #hasDispose: boolean\n #hasFetchMethod: boolean\n #hasDisposeAfter: boolean\n\n /**\n * Do not call this method unless you need to inspect the\n * inner workings of the cache. 
If anything returned by this\n * object is modified in any way, strange breakage may occur.\n *\n * These fields are private for a reason!\n *\n * @internal\n */\n static unsafeExposeInternals<\n K extends {},\n V extends {},\n FC extends unknown = unknown\n >(c: LRUCache) {\n return {\n // properties\n starts: c.#starts,\n ttls: c.#ttls,\n sizes: c.#sizes,\n keyMap: c.#keyMap as Map,\n keyList: c.#keyList,\n valList: c.#valList,\n next: c.#next,\n prev: c.#prev,\n get head() {\n return c.#head\n },\n get tail() {\n return c.#tail\n },\n free: c.#free,\n // methods\n isBackgroundFetch: (p: any) => c.#isBackgroundFetch(p),\n backgroundFetch: (\n k: K,\n index: number | undefined,\n options: LRUCache.FetchOptions,\n context: any\n ): BackgroundFetch =>\n c.#backgroundFetch(\n k,\n index as Index | undefined,\n options,\n context\n ),\n moveToTail: (index: number): void =>\n c.#moveToTail(index as Index),\n indexes: (options?: { allowStale: boolean }) =>\n c.#indexes(options),\n rindexes: (options?: { allowStale: boolean }) =>\n c.#rindexes(options),\n isStale: (index: number | undefined) =>\n c.#isStale(index as Index),\n }\n }\n\n // Protected read-only members\n\n /**\n * {@link LRUCache.OptionsBase.max} (read-only)\n */\n get max(): LRUCache.Count {\n return this.#max\n }\n /**\n * {@link LRUCache.OptionsBase.maxSize} (read-only)\n */\n get maxSize(): LRUCache.Count {\n return this.#maxSize\n }\n /**\n * The total computed size of items in the cache (read-only)\n */\n get calculatedSize(): LRUCache.Size {\n return this.#calculatedSize\n }\n /**\n * The number of items stored in the cache (read-only)\n */\n get size(): LRUCache.Count {\n return this.#size\n }\n /**\n * {@link LRUCache.OptionsBase.fetchMethod} (read-only)\n */\n get fetchMethod(): LRUCache.Fetcher | undefined {\n return this.#fetchMethod\n }\n /**\n * {@link LRUCache.OptionsBase.dispose} (read-only)\n */\n get dispose() {\n return this.#dispose\n }\n /**\n * {@link LRUCache.OptionsBase.disposeAfter} (read-only)\n */\n get disposeAfter() {\n return this.#disposeAfter\n }\n\n constructor(\n options: LRUCache.Options | LRUCache\n ) {\n const {\n max = 0,\n ttl,\n ttlResolution = 1,\n ttlAutopurge,\n updateAgeOnGet,\n updateAgeOnHas,\n allowStale,\n dispose,\n disposeAfter,\n noDisposeOnSet,\n noUpdateTTL,\n maxSize = 0,\n maxEntrySize = 0,\n sizeCalculation,\n fetchMethod,\n noDeleteOnFetchRejection,\n noDeleteOnStaleGet,\n allowStaleOnFetchRejection,\n allowStaleOnFetchAbort,\n ignoreFetchAbort,\n } = options\n\n if (max !== 0 && !isPosInt(max)) {\n throw new TypeError('max option must be a nonnegative integer')\n }\n\n const UintArray = max ? 
getUintArray(max) : Array\n if (!UintArray) {\n throw new Error('invalid max value: ' + max)\n }\n\n this.#max = max\n this.#maxSize = maxSize\n this.maxEntrySize = maxEntrySize || this.#maxSize\n this.sizeCalculation = sizeCalculation\n if (this.sizeCalculation) {\n if (!this.#maxSize && !this.maxEntrySize) {\n throw new TypeError(\n 'cannot set sizeCalculation without setting maxSize or maxEntrySize'\n )\n }\n if (typeof this.sizeCalculation !== 'function') {\n throw new TypeError('sizeCalculation set to non-function')\n }\n }\n\n if (\n fetchMethod !== undefined &&\n typeof fetchMethod !== 'function'\n ) {\n throw new TypeError(\n 'fetchMethod must be a function if specified'\n )\n }\n this.#fetchMethod = fetchMethod\n this.#hasFetchMethod = !!fetchMethod\n\n this.#keyMap = new Map()\n this.#keyList = new Array(max).fill(undefined)\n this.#valList = new Array(max).fill(undefined)\n this.#next = new UintArray(max)\n this.#prev = new UintArray(max)\n this.#head = 0 as Index\n this.#tail = 0 as Index\n this.#free = Stack.create(max)\n this.#size = 0\n this.#calculatedSize = 0\n\n if (typeof dispose === 'function') {\n this.#dispose = dispose\n }\n if (typeof disposeAfter === 'function') {\n this.#disposeAfter = disposeAfter\n this.#disposed = []\n } else {\n this.#disposeAfter = undefined\n this.#disposed = undefined\n }\n this.#hasDispose = !!this.#dispose\n this.#hasDisposeAfter = !!this.#disposeAfter\n\n this.noDisposeOnSet = !!noDisposeOnSet\n this.noUpdateTTL = !!noUpdateTTL\n this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection\n this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection\n this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort\n this.ignoreFetchAbort = !!ignoreFetchAbort\n\n // NB: maxEntrySize is set to maxSize if it's set\n if (this.maxEntrySize !== 0) {\n if (this.#maxSize !== 0) {\n if (!isPosInt(this.#maxSize)) {\n throw new TypeError(\n 'maxSize must be a positive integer if specified'\n )\n }\n }\n if (!isPosInt(this.maxEntrySize)) {\n throw new TypeError(\n 'maxEntrySize must be a positive integer if specified'\n )\n }\n this.#initializeSizeTracking()\n }\n\n this.allowStale = !!allowStale\n this.noDeleteOnStaleGet = !!noDeleteOnStaleGet\n this.updateAgeOnGet = !!updateAgeOnGet\n this.updateAgeOnHas = !!updateAgeOnHas\n this.ttlResolution =\n isPosInt(ttlResolution) || ttlResolution === 0\n ? ttlResolution\n : 1\n this.ttlAutopurge = !!ttlAutopurge\n this.ttl = ttl || 0\n if (this.ttl) {\n if (!isPosInt(this.ttl)) {\n throw new TypeError(\n 'ttl must be a positive integer if specified'\n )\n }\n this.#initializeTTLTracking()\n }\n\n // do not allow completely unbounded caches\n if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {\n throw new TypeError(\n 'At least one of max, maxSize, or ttl is required'\n )\n }\n if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {\n const code = 'LRU_CACHE_UNBOUNDED'\n if (shouldWarn(code)) {\n warned.add(code)\n const msg =\n 'TTL caching without ttlAutopurge, max, or maxSize can ' +\n 'result in unbounded memory consumption.'\n emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)\n }\n }\n }\n\n /**\n * Return the remaining TTL time for a given entry key\n */\n getRemainingTTL(key: K) {\n return this.#keyMap.has(key) ? Infinity : 0\n }\n\n #initializeTTLTracking() {\n const ttls = new ZeroArray(this.#max)\n const starts = new ZeroArray(this.#max)\n this.#ttls = ttls\n this.#starts = starts\n\n this.#setItemTTL = (index, ttl, start = perf.now()) => {\n starts[index] = ttl !== 0 ? 
start : 0\n ttls[index] = ttl\n if (ttl !== 0 && this.ttlAutopurge) {\n const t = setTimeout(() => {\n if (this.#isStale(index)) {\n this.delete(this.#keyList[index] as K)\n }\n }, ttl + 1)\n // unref() not supported on all platforms\n /* c8 ignore start */\n if (t.unref) {\n t.unref()\n }\n /* c8 ignore stop */\n }\n }\n\n this.#updateItemAge = index => {\n starts[index] = ttls[index] !== 0 ? perf.now() : 0\n }\n\n this.#statusTTL = (status, index) => {\n if (ttls[index]) {\n const ttl = ttls[index]\n const start = starts[index]\n status.ttl = ttl\n status.start = start\n status.now = cachedNow || getNow()\n const age = status.now - start\n status.remainingTTL = ttl - age\n }\n }\n\n // debounce calls to perf.now() to 1s so we're not hitting\n // that costly call repeatedly.\n let cachedNow = 0\n const getNow = () => {\n const n = perf.now()\n if (this.ttlResolution > 0) {\n cachedNow = n\n const t = setTimeout(\n () => (cachedNow = 0),\n this.ttlResolution\n )\n // not available on all platforms\n /* c8 ignore start */\n if (t.unref) {\n t.unref()\n }\n /* c8 ignore stop */\n }\n return n\n }\n\n this.getRemainingTTL = key => {\n const index = this.#keyMap.get(key)\n if (index === undefined) {\n return 0\n }\n const ttl = ttls[index]\n const start = starts[index]\n if (ttl === 0 || start === 0) {\n return Infinity\n }\n const age = (cachedNow || getNow()) - start\n return ttl - age\n }\n\n this.#isStale = index => {\n return (\n ttls[index] !== 0 &&\n starts[index] !== 0 &&\n (cachedNow || getNow()) - starts[index] > ttls[index]\n )\n }\n }\n\n // conditionally set private methods related to TTL\n #updateItemAge: (index: Index) => void = () => {}\n #statusTTL: (status: LRUCache.Status, index: Index) => void =\n () => {}\n #setItemTTL: (\n index: Index,\n ttl: LRUCache.Milliseconds,\n start?: LRUCache.Milliseconds\n // ignore because we never call this if we're not already in TTL mode\n /* c8 ignore start */\n ) => void = () => {}\n /* c8 ignore stop */\n\n #isStale: (index: Index) => boolean = () => false\n\n #initializeSizeTracking() {\n const sizes = new ZeroArray(this.#max)\n this.#calculatedSize = 0\n this.#sizes = sizes\n this.#removeItemSize = index => {\n this.#calculatedSize -= sizes[index]\n sizes[index] = 0\n }\n this.#requireSize = (k, v, size, sizeCalculation) => {\n // provisionally accept background fetches.\n // actual value size will be checked when they return.\n if (this.#isBackgroundFetch(v)) {\n return 0\n }\n if (!isPosInt(size)) {\n if (sizeCalculation) {\n if (typeof sizeCalculation !== 'function') {\n throw new TypeError('sizeCalculation must be a function')\n }\n size = sizeCalculation(v, k)\n if (!isPosInt(size)) {\n throw new TypeError(\n 'sizeCalculation return invalid (expect positive integer)'\n )\n }\n } else {\n throw new TypeError(\n 'invalid size value (must be positive integer). 
' +\n 'When maxSize or maxEntrySize is used, sizeCalculation ' +\n 'or size must be set.'\n )\n }\n }\n return size\n }\n this.#addItemSize = (\n index: Index,\n size: LRUCache.Size,\n status?: LRUCache.Status\n ) => {\n sizes[index] = size\n if (this.#maxSize) {\n const maxSize = this.#maxSize - sizes[index]\n while (this.#calculatedSize > maxSize) {\n this.#evict(true)\n }\n }\n this.#calculatedSize += sizes[index]\n if (status) {\n status.entrySize = size\n status.totalCalculatedSize = this.#calculatedSize\n }\n }\n }\n\n #removeItemSize: (index: Index) => void = _i => {}\n #addItemSize: (\n index: Index,\n size: LRUCache.Size,\n status?: LRUCache.Status\n ) => void = (_i, _s, _st) => {}\n #requireSize: (\n k: K,\n v: V | BackgroundFetch,\n size?: LRUCache.Size,\n sizeCalculation?: LRUCache.SizeCalculator\n ) => LRUCache.Size = (\n _k: K,\n _v: V | BackgroundFetch,\n size?: LRUCache.Size,\n sizeCalculation?: LRUCache.SizeCalculator\n ) => {\n if (size || sizeCalculation) {\n throw new TypeError(\n 'cannot set size without setting maxSize or maxEntrySize on cache'\n )\n }\n return 0\n };\n\n *#indexes({ allowStale = this.allowStale } = {}) {\n if (this.#size) {\n for (let i = this.#tail; true; ) {\n if (!this.#isValidIndex(i)) {\n break\n }\n if (allowStale || !this.#isStale(i)) {\n yield i\n }\n if (i === this.#head) {\n break\n } else {\n i = this.#prev[i] as Index\n }\n }\n }\n }\n\n *#rindexes({ allowStale = this.allowStale } = {}) {\n if (this.#size) {\n for (let i = this.#head; true; ) {\n if (!this.#isValidIndex(i)) {\n break\n }\n if (allowStale || !this.#isStale(i)) {\n yield i\n }\n if (i === this.#tail) {\n break\n } else {\n i = this.#next[i] as Index\n }\n }\n }\n }\n\n #isValidIndex(index: Index) {\n return (\n index !== undefined &&\n this.#keyMap.get(this.#keyList[index] as K) === index\n )\n }\n\n /**\n * Return a generator yielding `[key, value]` pairs,\n * in order from most recently used to least recently used.\n */\n *entries() {\n for (const i of this.#indexes()) {\n if (\n this.#valList[i] !== undefined &&\n this.#keyList[i] !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield [this.#keyList[i], this.#valList[i]]\n }\n }\n }\n\n /**\n * Inverse order version of {@link LRUCache.entries}\n *\n * Return a generator yielding `[key, value]` pairs,\n * in order from least recently used to most recently used.\n */\n *rentries() {\n for (const i of this.#rindexes()) {\n if (\n this.#valList[i] !== undefined &&\n this.#keyList[i] !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield [this.#keyList[i], this.#valList[i]]\n }\n }\n }\n\n /**\n * Return a generator yielding the keys in the cache,\n * in order from most recently used to least recently used.\n */\n *keys() {\n for (const i of this.#indexes()) {\n const k = this.#keyList[i]\n if (\n k !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield k\n }\n }\n }\n\n /**\n * Inverse order version of {@link LRUCache.keys}\n *\n * Return a generator yielding the keys in the cache,\n * in order from least recently used to most recently used.\n */\n *rkeys() {\n for (const i of this.#rindexes()) {\n const k = this.#keyList[i]\n if (\n k !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield k\n }\n }\n }\n\n /**\n * Return a generator yielding the values in the cache,\n * in order from most recently used to least recently used.\n */\n *values() {\n for (const i of this.#indexes()) {\n const v = this.#valList[i]\n if (\n v !== undefined &&\n 
!this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield this.#valList[i]\n }\n }\n }\n\n /**\n * Inverse order version of {@link LRUCache.values}\n *\n * Return a generator yielding the values in the cache,\n * in order from least recently used to most recently used.\n */\n *rvalues() {\n for (const i of this.#rindexes()) {\n const v = this.#valList[i]\n if (\n v !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield this.#valList[i]\n }\n }\n }\n\n /**\n * Iterating over the cache itself yields the same results as\n * {@link LRUCache.entries}\n */\n [Symbol.iterator]() {\n return this.entries()\n }\n\n /**\n * Find a value for which the supplied fn method returns a truthy value,\n * similar to Array.find(). fn is called as fn(value, key, cache).\n */\n find(\n fn: (v: V, k: K, self: LRUCache) => boolean,\n getOptions: LRUCache.GetOptions = {}\n ) {\n for (const i of this.#indexes()) {\n const v = this.#valList[i]\n const value = this.#isBackgroundFetch(v)\n ? v.__staleWhileFetching\n : v\n if (value === undefined) continue\n if (fn(value, this.#keyList[i] as K, this)) {\n return this.get(this.#keyList[i] as K, getOptions)\n }\n }\n }\n\n /**\n * Call the supplied function on each item in the cache, in order from\n * most recently used to least recently used. fn is called as\n * fn(value, key, cache). Does not update age or recenty of use.\n * Does not iterate over stale values.\n */\n forEach(\n fn: (v: V, k: K, self: LRUCache) => any,\n thisp: any = this\n ) {\n for (const i of this.#indexes()) {\n const v = this.#valList[i]\n const value = this.#isBackgroundFetch(v)\n ? v.__staleWhileFetching\n : v\n if (value === undefined) continue\n fn.call(thisp, value, this.#keyList[i] as K, this)\n }\n }\n\n /**\n * The same as {@link LRUCache.forEach} but items are iterated over in\n * reverse order. (ie, less recently used items are iterated over first.)\n */\n rforEach(\n fn: (v: V, k: K, self: LRUCache) => any,\n thisp: any = this\n ) {\n for (const i of this.#rindexes()) {\n const v = this.#valList[i]\n const value = this.#isBackgroundFetch(v)\n ? v.__staleWhileFetching\n : v\n if (value === undefined) continue\n fn.call(thisp, value, this.#keyList[i] as K, this)\n }\n }\n\n /**\n * Delete any stale entries. Returns true if anything was removed,\n * false otherwise.\n */\n purgeStale() {\n let deleted = false\n for (const i of this.#rindexes({ allowStale: true })) {\n if (this.#isStale(i)) {\n this.delete(this.#keyList[i] as K)\n deleted = true\n }\n }\n return deleted\n }\n\n /**\n * Return an array of [key, {@link LRUCache.Entry}] tuples which can be\n * passed to cache.load()\n */\n dump() {\n const arr: [K, LRUCache.Entry][] = []\n for (const i of this.#indexes({ allowStale: true })) {\n const key = this.#keyList[i]\n const v = this.#valList[i]\n const value: V | undefined = this.#isBackgroundFetch(v)\n ? 
v.__staleWhileFetching\n : v\n if (value === undefined || key === undefined) continue\n const entry: LRUCache.Entry = { value }\n if (this.#ttls && this.#starts) {\n entry.ttl = this.#ttls[i]\n // always dump the start relative to a portable timestamp\n // it's ok for this to be a bit slow, it's a rare operation.\n const age = perf.now() - this.#starts[i]\n entry.start = Math.floor(Date.now() - age)\n }\n if (this.#sizes) {\n entry.size = this.#sizes[i]\n }\n arr.unshift([key, entry])\n }\n return arr\n }\n\n /**\n * Reset the cache and load in the items in entries in the order listed.\n * Note that the shape of the resulting cache may be different if the\n * same options are not used in both caches.\n */\n load(arr: [K, LRUCache.Entry][]) {\n this.clear()\n for (const [key, entry] of arr) {\n if (entry.start) {\n // entry.start is a portable timestamp, but we may be using\n // node's performance.now(), so calculate the offset, so that\n // we get the intended remaining TTL, no matter how long it's\n // been on ice.\n //\n // it's ok for this to be a bit slow, it's a rare operation.\n const age = Date.now() - entry.start\n entry.start = perf.now() - age\n }\n this.set(key, entry.value, entry)\n }\n }\n\n /**\n * Add a value to the cache.\n *\n * Note: if `undefined` is specified as a value, this is an alias for\n * {@link LRUCache#delete}\n */\n set(\n k: K,\n v: V | BackgroundFetch | undefined,\n setOptions: LRUCache.SetOptions = {}\n ) {\n if (v === undefined) {\n this.delete(k)\n return this\n }\n const {\n ttl = this.ttl,\n start,\n noDisposeOnSet = this.noDisposeOnSet,\n sizeCalculation = this.sizeCalculation,\n status,\n } = setOptions\n let { noUpdateTTL = this.noUpdateTTL } = setOptions\n\n const size = this.#requireSize(\n k,\n v,\n setOptions.size || 0,\n sizeCalculation\n )\n // if the item doesn't fit, don't do anything\n // NB: maxEntrySize set to maxSize by default\n if (this.maxEntrySize && size > this.maxEntrySize) {\n if (status) {\n status.set = 'miss'\n status.maxEntrySizeExceeded = true\n }\n // have to delete, in case something is there already.\n this.delete(k)\n return this\n }\n let index = this.#size === 0 ? undefined : this.#keyMap.get(k)\n if (index === undefined) {\n // addition\n index = (\n this.#size === 0\n ? this.#tail\n : this.#free.length !== 0\n ? this.#free.pop()\n : this.#size === this.#max\n ? this.#evict(false)\n : this.#size\n ) as Index\n this.#keyList[index] = k\n this.#valList[index] = v\n this.#keyMap.set(k, index)\n this.#next[this.#tail] = index\n this.#prev[index] = this.#tail\n this.#tail = index\n this.#size++\n this.#addItemSize(index, size, status)\n if (status) status.set = 'add'\n noUpdateTTL = false\n } else {\n // update\n this.#moveToTail(index)\n const oldVal = this.#valList[index] as V | BackgroundFetch\n if (v !== oldVal) {\n if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {\n oldVal.__abortController.abort(new Error('replaced'))\n } else if (!noDisposeOnSet) {\n if (this.#hasDispose) {\n this.#dispose?.(oldVal as V, k, 'set')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([oldVal as V, k, 'set'])\n }\n }\n this.#removeItemSize(index)\n this.#addItemSize(index, size, status)\n this.#valList[index] = v\n if (status) {\n status.set = 'replace'\n const oldValue =\n oldVal && this.#isBackgroundFetch(oldVal)\n ? 
oldVal.__staleWhileFetching\n : oldVal\n if (oldValue !== undefined) status.oldValue = oldValue\n }\n } else if (status) {\n status.set = 'update'\n }\n }\n if (ttl !== 0 && !this.#ttls) {\n this.#initializeTTLTracking()\n }\n if (this.#ttls) {\n if (!noUpdateTTL) {\n this.#setItemTTL(index, ttl, start)\n }\n if (status) this.#statusTTL(status, index)\n }\n if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n return this\n }\n\n /**\n * Evict the least recently used item, returning its value or\n * `undefined` if cache is empty.\n */\n pop(): V | undefined {\n try {\n while (this.#size) {\n const val = this.#valList[this.#head]\n this.#evict(true)\n if (this.#isBackgroundFetch(val)) {\n if (val.__staleWhileFetching) {\n return val.__staleWhileFetching\n }\n } else if (val !== undefined) {\n return val\n }\n }\n } finally {\n if (this.#hasDisposeAfter && this.#disposed) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n }\n }\n\n #evict(free: boolean) {\n const head = this.#head\n const k = this.#keyList[head] as K\n const v = this.#valList[head] as V\n if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {\n v.__abortController.abort(new Error('evicted'))\n } else if (this.#hasDispose || this.#hasDisposeAfter) {\n if (this.#hasDispose) {\n this.#dispose?.(v, k, 'evict')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([v, k, 'evict'])\n }\n }\n this.#removeItemSize(head)\n // if we aren't about to use the index, then null these out\n if (free) {\n this.#keyList[head] = undefined\n this.#valList[head] = undefined\n this.#free.push(head)\n }\n if (this.#size === 1) {\n this.#head = this.#tail = 0 as Index\n this.#free.length = 0\n } else {\n this.#head = this.#next[head] as Index\n }\n this.#keyMap.delete(k)\n this.#size--\n return head\n }\n\n /**\n * Check if a key is in the cache, without updating the recency of use.\n * Will return false if the item is stale, even though it is technically\n * in the cache.\n *\n * Will not update item age unless\n * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.\n */\n has(k: K, hasOptions: LRUCache.HasOptions = {}) {\n const { updateAgeOnHas = this.updateAgeOnHas, status } =\n hasOptions\n const index = this.#keyMap.get(k)\n if (index !== undefined) {\n const v = this.#valList[index]\n if (\n this.#isBackgroundFetch(v) &&\n v.__staleWhileFetching === undefined\n ) {\n return false\n }\n if (!this.#isStale(index)) {\n if (updateAgeOnHas) {\n this.#updateItemAge(index)\n }\n if (status) {\n status.has = 'hit'\n this.#statusTTL(status, index)\n }\n return true\n } else if (status) {\n status.has = 'stale'\n this.#statusTTL(status, index)\n }\n } else if (status) {\n status.has = 'miss'\n }\n return false\n }\n\n /**\n * Like {@link LRUCache#get} but doesn't update recency or delete stale\n * items.\n *\n * Returns `undefined` if the item is stale, unless\n * {@link LRUCache.OptionsBase.allowStale} is set.\n */\n peek(k: K, peekOptions: LRUCache.PeekOptions = {}) {\n const { allowStale = this.allowStale } = peekOptions\n const index = this.#keyMap.get(k)\n if (\n index !== undefined &&\n (allowStale || !this.#isStale(index))\n ) {\n const v = this.#valList[index]\n // either stale and allowed, or forcing a refresh of non-stale value\n return this.#isBackgroundFetch(v) ? 
v.__staleWhileFetching : v\n }\n }\n\n #backgroundFetch(\n k: K,\n index: Index | undefined,\n options: LRUCache.FetchOptions,\n context: any\n ): BackgroundFetch {\n const v = index === undefined ? undefined : this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n return v\n }\n\n const ac = new AC()\n const { signal } = options\n // when/if our AC signals, then stop listening to theirs.\n signal?.addEventListener('abort', () => ac.abort(signal.reason), {\n signal: ac.signal,\n })\n\n const fetchOpts = {\n signal: ac.signal,\n options,\n context,\n }\n\n const cb = (\n v: V | void | undefined,\n updateCache = false\n ): V | undefined | void => {\n const { aborted } = ac.signal\n const ignoreAbort = options.ignoreFetchAbort && v !== undefined\n if (options.status) {\n if (aborted && !updateCache) {\n options.status.fetchAborted = true\n options.status.fetchError = ac.signal.reason\n if (ignoreAbort) options.status.fetchAbortIgnored = true\n } else {\n options.status.fetchResolved = true\n }\n }\n if (aborted && !ignoreAbort && !updateCache) {\n return fetchFail(ac.signal.reason)\n }\n // either we didn't abort, and are still here, or we did, and ignored\n const bf = p as BackgroundFetch\n if (this.#valList[index as Index] === p) {\n if (v === undefined) {\n if (bf.__staleWhileFetching) {\n this.#valList[index as Index] = bf.__staleWhileFetching\n } else {\n this.delete(k)\n }\n } else {\n if (options.status) options.status.fetchUpdated = true\n this.set(k, v, fetchOpts.options)\n }\n }\n return v\n }\n\n const eb = (er: any) => {\n if (options.status) {\n options.status.fetchRejected = true\n options.status.fetchError = er\n }\n return fetchFail(er)\n }\n\n const fetchFail = (er: any): V | undefined => {\n const { aborted } = ac.signal\n const allowStaleAborted =\n aborted && options.allowStaleOnFetchAbort\n const allowStale =\n allowStaleAborted || options.allowStaleOnFetchRejection\n const noDelete = allowStale || options.noDeleteOnFetchRejection\n const bf = p as BackgroundFetch\n if (this.#valList[index as Index] === p) {\n // if we allow stale on fetch rejections, then we need to ensure that\n // the stale value is not removed from the cache when the fetch fails.\n const del = !noDelete || bf.__staleWhileFetching === undefined\n if (del) {\n this.delete(k)\n } else if (!allowStaleAborted) {\n // still replace the *promise* with the stale value,\n // since we are done with the promise at this point.\n // leave it untouched if we're still waiting for an\n // aborted background fetch that hasn't yet returned.\n this.#valList[index as Index] = bf.__staleWhileFetching\n }\n }\n if (allowStale) {\n if (options.status && bf.__staleWhileFetching !== undefined) {\n options.status.returnedStale = true\n }\n return bf.__staleWhileFetching\n } else if (bf.__returned === bf) {\n throw er\n }\n }\n\n const pcall = (\n res: (v: V | void | undefined) => void,\n rej: (e: any) => void\n ) => {\n const fmp = this.#fetchMethod?.(k, v, fetchOpts)\n if (fmp && fmp instanceof Promise) {\n fmp.then(v => res(v), rej)\n }\n // ignored, we go until we finish, regardless.\n // defer check until we are actually aborting,\n // so fetchMethod can override.\n ac.signal.addEventListener('abort', () => {\n if (\n !options.ignoreFetchAbort ||\n options.allowStaleOnFetchAbort\n ) {\n res()\n // when it eventually resolves, update the cache.\n if (options.allowStaleOnFetchAbort) {\n res = v => cb(v, true)\n }\n }\n })\n }\n\n if (options.status) options.status.fetchDispatched = true\n const p = new 
Promise(pcall).then(cb, eb)\n const bf = Object.assign(p, {\n __abortController: ac,\n __staleWhileFetching: v,\n __returned: undefined,\n })\n\n if (index === undefined) {\n // internal, don't expose status.\n this.set(k, bf, { ...fetchOpts.options, status: undefined })\n index = this.#keyMap.get(k)\n } else {\n this.#valList[index] = bf\n }\n return bf\n }\n\n #isBackgroundFetch(p: any): p is BackgroundFetch {\n if (!this.#hasFetchMethod) return false\n const b = p as BackgroundFetch\n return (\n !!b &&\n b instanceof Promise &&\n b.hasOwnProperty('__staleWhileFetching') &&\n b.__abortController instanceof AC\n )\n }\n\n /**\n * Make an asynchronous cached fetch using the\n * {@link LRUCache.OptionsBase.fetchMethod} function.\n *\n * If multiple fetches for the same key are issued, then they will all be\n * coalesced into a single call to fetchMethod.\n *\n * Note that this means that handling options such as\n * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort},\n * {@link LRUCache.FetchOptions.signal},\n * and {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} will be\n * determined by the FIRST fetch() call for a given key.\n *\n * This is a known (fixable) shortcoming which will be addresed on when\n * someone complains about it, as the fix would involve added complexity and\n * may not be worth the costs for this edge case.\n */\n fetch(\n k: K,\n fetchOptions: unknown extends FC\n ? LRUCache.FetchOptions\n : FC extends undefined | void\n ? LRUCache.FetchOptionsNoContext\n : LRUCache.FetchOptionsWithContext\n ): Promise\n // this overload not allowed if context is required\n fetch(\n k: unknown extends FC\n ? K\n : FC extends undefined | void\n ? K\n : never,\n fetchOptions?: unknown extends FC\n ? LRUCache.FetchOptions\n : FC extends undefined | void\n ? LRUCache.FetchOptionsNoContext\n : never\n ): Promise\n async fetch(\n k: K,\n fetchOptions: LRUCache.FetchOptions = {}\n ): Promise {\n const {\n // get options\n allowStale = this.allowStale,\n updateAgeOnGet = this.updateAgeOnGet,\n noDeleteOnStaleGet = this.noDeleteOnStaleGet,\n // set options\n ttl = this.ttl,\n noDisposeOnSet = this.noDisposeOnSet,\n size = 0,\n sizeCalculation = this.sizeCalculation,\n noUpdateTTL = this.noUpdateTTL,\n // fetch exclusive options\n noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,\n allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,\n ignoreFetchAbort = this.ignoreFetchAbort,\n allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,\n context,\n forceRefresh = false,\n status,\n signal,\n } = fetchOptions\n\n if (!this.#hasFetchMethod) {\n if (status) status.fetch = 'get'\n return this.get(k, {\n allowStale,\n updateAgeOnGet,\n noDeleteOnStaleGet,\n status,\n })\n }\n\n const options = {\n allowStale,\n updateAgeOnGet,\n noDeleteOnStaleGet,\n ttl,\n noDisposeOnSet,\n size,\n sizeCalculation,\n noUpdateTTL,\n noDeleteOnFetchRejection,\n allowStaleOnFetchRejection,\n allowStaleOnFetchAbort,\n ignoreFetchAbort,\n status,\n signal,\n }\n\n let index = this.#keyMap.get(k)\n if (index === undefined) {\n if (status) status.fetch = 'miss'\n const p = this.#backgroundFetch(k, index, options, context)\n return (p.__returned = p)\n } else {\n // in cache, maybe already fetching\n const v = this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n const stale =\n allowStale && v.__staleWhileFetching !== undefined\n if (status) {\n status.fetch = 'inflight'\n if (stale) status.returnedStale = true\n }\n return stale ? 
v.__staleWhileFetching : (v.__returned = v)\n }\n\n // if we force a refresh, that means do NOT serve the cached value,\n // unless we are already in the process of refreshing the cache.\n const isStale = this.#isStale(index)\n if (!forceRefresh && !isStale) {\n if (status) status.fetch = 'hit'\n this.#moveToTail(index)\n if (updateAgeOnGet) {\n this.#updateItemAge(index)\n }\n if (status) this.#statusTTL(status, index)\n return v\n }\n\n // ok, it is stale or a forced refresh, and not already fetching.\n // refresh the cache.\n const p = this.#backgroundFetch(k, index, options, context)\n const hasStale = p.__staleWhileFetching !== undefined\n const staleVal = hasStale && allowStale\n if (status) {\n status.fetch = isStale ? 'stale' : 'refresh'\n if (staleVal && isStale) status.returnedStale = true\n }\n return staleVal ? p.__staleWhileFetching : (p.__returned = p)\n }\n }\n\n /**\n * Return a value from the cache. Will update the recency of the cache\n * entry found.\n *\n * If the key is not found, get() will return `undefined`.\n */\n get(k: K, getOptions: LRUCache.GetOptions = {}) {\n const {\n allowStale = this.allowStale,\n updateAgeOnGet = this.updateAgeOnGet,\n noDeleteOnStaleGet = this.noDeleteOnStaleGet,\n status,\n } = getOptions\n const index = this.#keyMap.get(k)\n if (index !== undefined) {\n const value = this.#valList[index]\n const fetching = this.#isBackgroundFetch(value)\n if (status) this.#statusTTL(status, index)\n if (this.#isStale(index)) {\n if (status) status.get = 'stale'\n // delete only if not an in-flight background fetch\n if (!fetching) {\n if (!noDeleteOnStaleGet) {\n this.delete(k)\n }\n if (status && allowStale) status.returnedStale = true\n return allowStale ? value : undefined\n } else {\n if (\n status &&\n allowStale &&\n value.__staleWhileFetching !== undefined\n ) {\n status.returnedStale = true\n }\n return allowStale ? 
value.__staleWhileFetching : undefined\n }\n } else {\n if (status) status.get = 'hit'\n // if we're currently fetching it, we don't actually have it yet\n // it's not stale, which means this isn't a staleWhileRefetching.\n // If it's not stale, and fetching, AND has a __staleWhileFetching\n // value, then that means the user fetched with {forceRefresh:true},\n // so it's safe to return that value.\n if (fetching) {\n return value.__staleWhileFetching\n }\n this.#moveToTail(index)\n if (updateAgeOnGet) {\n this.#updateItemAge(index)\n }\n return value\n }\n } else if (status) {\n status.get = 'miss'\n }\n }\n\n #connect(p: Index, n: Index) {\n this.#prev[n] = p\n this.#next[p] = n\n }\n\n #moveToTail(index: Index): void {\n // if tail already, nothing to do\n // if head, move head to next[index]\n // else\n // move next[prev[index]] to next[index] (head has no prev)\n // move prev[next[index]] to prev[index]\n // prev[index] = tail\n // next[tail] = index\n // tail = index\n if (index !== this.#tail) {\n if (index === this.#head) {\n this.#head = this.#next[index] as Index\n } else {\n this.#connect(\n this.#prev[index] as Index,\n this.#next[index] as Index\n )\n }\n this.#connect(this.#tail, index)\n this.#tail = index\n }\n }\n\n /**\n * Deletes a key out of the cache.\n * Returns true if the key was deleted, false otherwise.\n */\n delete(k: K) {\n let deleted = false\n if (this.#size !== 0) {\n const index = this.#keyMap.get(k)\n if (index !== undefined) {\n deleted = true\n if (this.#size === 1) {\n this.clear()\n } else {\n this.#removeItemSize(index)\n const v = this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n v.__abortController.abort(new Error('deleted'))\n } else if (this.#hasDispose || this.#hasDisposeAfter) {\n if (this.#hasDispose) {\n this.#dispose?.(v as V, k, 'delete')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([v as V, k, 'delete'])\n }\n }\n this.#keyMap.delete(k)\n this.#keyList[index] = undefined\n this.#valList[index] = undefined\n if (index === this.#tail) {\n this.#tail = this.#prev[index] as Index\n } else if (index === this.#head) {\n this.#head = this.#next[index] as Index\n } else {\n this.#next[this.#prev[index]] = this.#next[index]\n this.#prev[this.#next[index]] = this.#prev[index]\n }\n this.#size--\n this.#free.push(index)\n }\n }\n }\n if (this.#hasDisposeAfter && this.#disposed?.length) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n return deleted\n }\n\n /**\n * Clear the cache entirely, throwing away all values.\n */\n clear() {\n for (const index of this.#rindexes({ allowStale: true })) {\n const v = this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n v.__abortController.abort(new Error('deleted'))\n } else {\n const k = this.#keyList[index]\n if (this.#hasDispose) {\n this.#dispose?.(v as V, k as K, 'delete')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([v as V, k as K, 'delete'])\n }\n }\n }\n\n this.#keyMap.clear()\n this.#valList.fill(undefined)\n this.#keyList.fill(undefined)\n if (this.#ttls && this.#starts) {\n this.#ttls.fill(0)\n this.#starts.fill(0)\n }\n if (this.#sizes) {\n this.#sizes.fill(0)\n }\n this.#head = 0 as Index\n this.#tail = 0 as Index\n this.#free.length = 0\n this.#calculatedSize = 0\n this.#size = 0\n if (this.#hasDisposeAfter && this.#disposed) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n 
}\n}\n"]} \ No newline at end of file diff --git a/dist/node_modules/lru-cache/dist/mjs/index.min.js b/dist/node_modules/lru-cache/dist/mjs/index.min.js new file mode 100644 index 0000000..44bd1c2 --- /dev/null +++ b/dist/node_modules/lru-cache/dist/mjs/index.min.js @@ -0,0 +1,2 @@ +var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},R=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof R>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},R=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),k=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=k(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var W=class{#d;#f;#_;#g;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#p;#n;#i;#t;#l;#c;#o;#h;#w;#r;#m;#F;#S;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#S,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#w,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#p}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#_}get disposeAfter(){return 
this.#g}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:u,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:d,maxSize:p=0,maxEntrySize:F=0,sizeCalculation:c,fetchMethod:w,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:S,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:g,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?k(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=p,this.maxEntrySize=F||this.#f,this.sizeCalculation=c,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=w,this.#T=!!w,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#w=C.create(e),this.#s=0,this.#p=0,typeof u=="function"&&(this.#_=u),typeof b=="function"?(this.#g=b,this.#r=[]):(this.#g=void 0,this.#r=void 0),this.#b=!!this.#_,this.#a=!!this.#g,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!d,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!g,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!S,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,W))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#S=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let u=n.now-r;n.remainingTTL=a-u}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let u=(i||s())-r;return a-u},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#p=0,this.#m=t,this.#E=e=>{this.#p-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#p>n;)this.#W(!0)}this.#p+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#p)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#S&&this.#F){h.ttl=this.#S[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:u=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#w.length!==0?this.#w.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),u=!1;else{this.#v(f);let d=this.#t[f];if(e!==d){if(this.#T&&this.#e(d)?d.__abortController.abort(new Error("replaced")):h||(this.#b&&this.#_?.(d,t,"set"),this.#a&&this.#r?.push([d,t,"set"])),this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let p=d&&this.#e(d)?d.__staleWhileFetching:d;p!==void 0&&(r.oldValue=p)}}else r&&(r.set="update")}if(s!==0&&!this.#S&&this.#L(),this.#S&&(u||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let d=this.#r,p;for(;p=d?.shift();)this.#g?.(...p)}return this}pop(){try{for(;this.#s;){let 
t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#w.push(e)),this.#s===1?(this.#o=this.#h=0,this.#w.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new R,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},u=(c,w=!1)=>{let{aborted:l}=h.signal,S=i.ignoreFetchAbort&&c!==void 0;if(i.status&&(l&&!w?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,S&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!S&&!w)return f(h.signal.reason);let y=p;return this.#t[e]===p&&(c===void 0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,c,r.options))),c},b=c=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=c),f(c)),f=c=>{let{aborted:w}=h.signal,l=w&&i.allowStaleOnFetchAbort,S=l||i.allowStaleOnFetchRejection,y=S||i.noDeleteOnFetchRejection,g=p;if(this.#t[e]===p&&(!y||g.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=g.__staleWhileFetching)),S)return i.status&&g.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),g.__staleWhileFetching;if(g.__returned===g)throw c},d=(c,w)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(S=>c(S),w),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(c(),i.allowStaleOnFetchAbort&&(c=S=>u(S,!0)))})};i.status&&(i.status.fetchDispatched=!0);let p=new Promise(d).then(u,b),F=Object.assign(p,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof R}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:u=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:d=this.allowStaleOnFetchRejection,ignoreFetchAbort:p=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:c,forceRefresh:w=!1,status:l,signal:S}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let 
y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:u,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:d,allowStaleOnFetchAbort:F,ignoreFetchAbort:p,status:l,signal:S},g=this.#n.get(t);if(g===void 0){l&&(l.fetch="miss");let _=this.#D(t,g,y,c);return _.__returned=_}else{let _=this.#t[g];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(g);if(!w&&!O)return l&&(l.fetch="hit"),this.#v(g),s&&this.#z(g),l&&this.#O(l,g),_;let A=this.#D(t,g,y,c),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],u=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),u?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),u?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#_?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#w.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#g?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#_?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#S&&this.#F&&(this.#S.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#w.length=0,this.#p=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#g?.(...e)}}};export{W as LRUCache}; +//# sourceMappingURL=index.min.js.map diff --git a/dist/node_modules/lru-cache/dist/mjs/index.min.js.map b/dist/node_modules/lru-cache/dist/mjs/index.min.js.map new file mode 100644 index 0000000..79bfeaa --- /dev/null +++ b/dist/node_modules/lru-cache/dist/mjs/index.min.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../../src/index.ts"], + "sourcesContent": ["/**\n * @module LRUCache\n */\n\n// module-private names and types\ntype Perf = { now: () => number }\nconst perf: Perf =\n typeof performance === 'object' &&\n performance &&\n typeof performance.now === 'function'\n ? performance\n : Date\n\nconst warned = new Set()\n\n// either a function or a class\ntype ForC = ((...a: any[]) => any) | { new (...a: any[]): any }\n\n/* c8 ignore start */\nconst PROCESS = (\n typeof process === 'object' && !!process ? process : {}\n) as { [k: string]: any }\n/* c8 ignore start */\n\nconst emitWarning = (\n msg: string,\n type: string,\n code: string,\n fn: ForC\n) => {\n typeof PROCESS.emitWarning === 'function'\n ? 
PROCESS.emitWarning(msg, type, code, fn)\n : console.error(`[${code}] ${type}: ${msg}`)\n}\n\nlet AC = globalThis.AbortController\nlet AS = globalThis.AbortSignal\n\n/* c8 ignore start */\nif (typeof AC === 'undefined') {\n //@ts-ignore\n AS = class AbortSignal {\n onabort?: (...a: any[]) => any\n _onabort: ((...a: any[]) => any)[] = []\n reason?: any\n aborted: boolean = false\n addEventListener(_: string, fn: (...a: any[]) => any) {\n this._onabort.push(fn)\n }\n }\n //@ts-ignore\n AC = class AbortController {\n constructor() {\n warnACPolyfill()\n }\n signal = new AS()\n abort(reason: any) {\n if (this.signal.aborted) return\n //@ts-ignore\n this.signal.reason = reason\n //@ts-ignore\n this.signal.aborted = true\n //@ts-ignore\n for (const fn of this.signal._onabort) {\n fn(reason)\n }\n this.signal.onabort?.(reason)\n }\n }\n let printACPolyfillWarning =\n PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'\n const warnACPolyfill = () => {\n if (!printACPolyfillWarning) return\n printACPolyfillWarning = false\n emitWarning(\n 'AbortController is not defined. If using lru-cache in ' +\n 'node 14, load an AbortController polyfill from the ' +\n '`node-abort-controller` package. A minimal polyfill is ' +\n 'provided for use by LRUCache.fetch(), but it should not be ' +\n 'relied upon in other contexts (eg, passing it to other APIs that ' +\n 'use AbortController/AbortSignal might have undesirable effects). ' +\n 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.',\n 'NO_ABORT_CONTROLLER',\n 'ENOTSUP',\n warnACPolyfill\n )\n }\n}\n/* c8 ignore stop */\n\nconst shouldWarn = (code: string) => !warned.has(code)\n\nconst TYPE = Symbol('type')\ntype PosInt = number & { [TYPE]: 'Positive Integer' }\ntype Index = number & { [TYPE]: 'LRUCache Index' }\n\nconst isPosInt = (n: any): n is PosInt =>\n n && n === Math.floor(n) && n > 0 && isFinite(n)\n\ntype UintArray = Uint8Array | Uint16Array | Uint32Array\ntype NumberArray = UintArray | number[]\n\n/* c8 ignore start */\n// This is a little bit ridiculous, tbh.\n// The maximum array length is 2^32-1 or thereabouts on most JS impls.\n// And well before that point, you're caching the entire world, I mean,\n// that's ~32GB of just integers for the next/prev links, plus whatever\n// else to hold that many keys and values. Just filling the memory with\n// zeroes at init time is brutal when you get that big.\n// But why not be complete?\n// Maybe in the future, these limits will have expanded.\nconst getUintArray = (max: number) =>\n !isPosInt(max)\n ? null\n : max <= Math.pow(2, 8)\n ? Uint8Array\n : max <= Math.pow(2, 16)\n ? Uint16Array\n : max <= Math.pow(2, 32)\n ? Uint32Array\n : max <= Number.MAX_SAFE_INTEGER\n ? 
ZeroArray\n : null\n/* c8 ignore stop */\n\nclass ZeroArray extends Array {\n constructor(size: number) {\n super(size)\n this.fill(0)\n }\n}\n\ntype StackLike = Stack | Index[]\nclass Stack {\n heap: NumberArray\n length: number\n // private constructor\n static #constructing: boolean = false\n static create(max: number): StackLike {\n const HeapCls = getUintArray(max)\n if (!HeapCls) return []\n Stack.#constructing = true\n const s = new Stack(max, HeapCls)\n Stack.#constructing = false\n return s\n }\n constructor(\n max: number,\n HeapCls: { new (n: number): NumberArray }\n ) {\n /* c8 ignore start */\n if (!Stack.#constructing) {\n throw new TypeError('instantiate Stack using Stack.create(n)')\n }\n /* c8 ignore stop */\n this.heap = new HeapCls(max)\n this.length = 0\n }\n push(n: Index) {\n this.heap[this.length++] = n\n }\n pop(): Index {\n return this.heap[--this.length] as Index\n }\n}\n\n/**\n * Promise representing an in-progress {@link LRUCache#fetch} call\n */\nexport type BackgroundFetch = Promise & {\n __returned: BackgroundFetch | undefined\n __abortController: AbortController\n __staleWhileFetching: V | undefined\n}\n\ntype DisposeTask = [\n value: V,\n key: K,\n reason: LRUCache.DisposeReason\n]\n\nexport namespace LRUCache {\n /**\n * An integer greater than 0, reflecting the calculated size of items\n */\n export type Size = number\n\n /**\n * Integer greater than 0, representing some number of milliseconds, or the\n * time at which a TTL started counting from.\n */\n export type Milliseconds = number\n\n /**\n * An integer greater than 0, reflecting a number of items\n */\n export type Count = number\n\n /**\n * The reason why an item was removed from the cache, passed\n * to the {@link Disposer} methods.\n */\n export type DisposeReason = 'evict' | 'set' | 'delete'\n /**\n * A method called upon item removal, passed as the\n * {@link OptionsBase.dispose} and/or\n * {@link OptionsBase.disposeAfter} options.\n */\n export type Disposer = (\n value: V,\n key: K,\n reason: DisposeReason\n ) => void\n\n /**\n * A function that returns the effective calculated size\n * of an entry in the cache.\n */\n export type SizeCalculator = (value: V, key: K) => Size\n\n /**\n * Options provided to the\n * {@link OptionsBase.fetchMethod} function.\n */\n export interface FetcherOptions {\n signal: AbortSignal\n options: FetcherFetchOptions\n /**\n * Object provided in the {@link FetchOptions.context} option to\n * {@link LRUCache#fetch}\n */\n context: FC\n }\n\n /**\n * Status object that may be passed to {@link LRUCache#fetch},\n * {@link LRUCache#get}, {@link LRUCache#set}, and {@link LRUCache#has}.\n */\n export interface Status {\n /**\n * The status of a set() operation.\n *\n * - add: the item was not found in the cache, and was added\n * - update: the item was in the cache, with the same value provided\n * - replace: the item was in the cache, and replaced\n * - miss: the item was not added to the cache for some reason\n */\n set?: 'add' | 'update' | 'replace' | 'miss'\n\n /**\n * the ttl stored for the item, or undefined if ttls are not used.\n */\n ttl?: Milliseconds\n\n /**\n * the start time for the item, or undefined if ttls are not used.\n */\n start?: Milliseconds\n\n /**\n * The timestamp used for TTL calculation\n */\n now?: Milliseconds\n\n /**\n * the remaining ttl for the item, or undefined if ttls are not used.\n */\n remainingTTL?: Milliseconds\n\n /**\n * The calculated size for the item, if sizes are used.\n */\n entrySize?: Size\n\n /**\n * The total 
calculated size of the cache, if sizes are used.\n */\n totalCalculatedSize?: Size\n\n /**\n * A flag indicating that the item was not stored, due to exceeding the\n * {@link OptionsBase.maxEntrySize}\n */\n maxEntrySizeExceeded?: true\n\n /**\n * The old value, specified in the case of `set:'update'` or\n * `set:'replace'`\n */\n oldValue?: V\n\n /**\n * The results of a {@link LRUCache#has} operation\n *\n * - hit: the item was found in the cache\n * - stale: the item was found in the cache, but is stale\n * - miss: the item was not found in the cache\n */\n has?: 'hit' | 'stale' | 'miss'\n\n /**\n * The status of a {@link LRUCache#fetch} operation.\n * Note that this can change as the underlying fetch() moves through\n * various states.\n *\n * - inflight: there is another fetch() for this key which is in process\n * - get: there is no fetchMethod, so {@link LRUCache#get} was called.\n * - miss: the item is not in cache, and will be fetched.\n * - hit: the item is in the cache, and was resolved immediately.\n * - stale: the item is in the cache, but stale.\n * - refresh: the item is in the cache, and not stale, but\n * {@link FetchOptions.forceRefresh} was specified.\n */\n fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh'\n\n /**\n * The {@link OptionsBase.fetchMethod} was called\n */\n fetchDispatched?: true\n\n /**\n * The cached value was updated after a successful call to\n * {@link OptionsBase.fetchMethod}\n */\n fetchUpdated?: true\n\n /**\n * The reason for a fetch() rejection. Either the error raised by the\n * {@link OptionsBase.fetchMethod}, or the reason for an\n * AbortSignal.\n */\n fetchError?: Error\n\n /**\n * The fetch received an abort signal\n */\n fetchAborted?: true\n\n /**\n * The abort signal received was ignored, and the fetch was allowed to\n * continue.\n */\n fetchAbortIgnored?: true\n\n /**\n * The fetchMethod promise resolved successfully\n */\n fetchResolved?: true\n\n /**\n * The fetchMethod promise was rejected\n */\n fetchRejected?: true\n\n /**\n * The status of a {@link LRUCache#get} operation.\n *\n * - fetching: The item is currently being fetched. 
If a previous value\n * is present and allowed, that will be returned.\n * - stale: The item is in the cache, and is stale.\n * - hit: the item is in the cache\n * - miss: the item is not in the cache\n */\n get?: 'stale' | 'hit' | 'miss'\n\n /**\n * A fetch or get operation returned a stale value.\n */\n returnedStale?: true\n }\n\n /**\n * options which override the options set in the LRUCache constructor\n * when calling {@link LRUCache#fetch}.\n *\n * This is the union of {@link GetOptions} and {@link SetOptions}, plus\n * {@link OptionsBase.noDeleteOnFetchRejection},\n * {@link OptionsBase.allowStaleOnFetchRejection},\n * {@link FetchOptions.forceRefresh}, and\n * {@link OptionsBase.context}\n *\n * Any of these may be modified in the {@link OptionsBase.fetchMethod}\n * function, but the {@link GetOptions} fields will of course have no\n * effect, as the {@link LRUCache#get} call already happened by the time\n * the fetchMethod is called.\n */\n export interface FetcherFetchOptions\n extends Pick<\n OptionsBase,\n | 'allowStale'\n | 'updateAgeOnGet'\n | 'noDeleteOnStaleGet'\n | 'sizeCalculation'\n | 'ttl'\n | 'noDisposeOnSet'\n | 'noUpdateTTL'\n | 'noDeleteOnFetchRejection'\n | 'allowStaleOnFetchRejection'\n | 'ignoreFetchAbort'\n | 'allowStaleOnFetchAbort'\n > {\n status?: Status\n size?: Size\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#fetch} method.\n */\n export interface FetchOptions\n extends FetcherFetchOptions {\n /**\n * Set to true to force a re-load of the existing data, even if it\n * is not yet stale.\n */\n forceRefresh?: boolean\n /**\n * Context provided to the {@link OptionsBase.fetchMethod} as\n * the {@link FetcherOptions.context} param.\n *\n * If the FC type is specified as unknown (the default),\n * undefined or void, then this is optional. 
Otherwise, it will\n * be required.\n */\n context?: FC\n signal?: AbortSignal\n status?: Status\n }\n /**\n * Options provided to {@link LRUCache#fetch} when the FC type is something\n * other than `unknown`, `undefined`, or `void`\n */\n export interface FetchOptionsWithContext\n extends FetchOptions {\n context: FC\n }\n /**\n * Options provided to {@link LRUCache#fetch} when the FC type is\n * `undefined` or `void`\n */\n export interface FetchOptionsNoContext\n extends FetchOptions {\n context?: undefined\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#has} method.\n */\n export interface HasOptions\n extends Pick, 'updateAgeOnHas'> {\n status?: Status\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#get} method.\n */\n export interface GetOptions\n extends Pick<\n OptionsBase,\n 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet'\n > {\n status?: Status\n }\n\n /**\n * Options that may be passed to the {@link LRUCache#peek} method.\n */\n export interface PeekOptions\n extends Pick, 'allowStale'> {}\n\n /**\n * Options that may be passed to the {@link LRUCache#set} method.\n */\n export interface SetOptions\n extends Pick<\n OptionsBase,\n 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'\n > {\n /**\n * If size tracking is enabled, then setting an explicit size\n * in the {@link LRUCache#set} call will prevent calling the\n * {@link OptionsBase.sizeCalculation} function.\n */\n size?: Size\n /**\n * If TTL tracking is enabled, then setting an explicit start\n * time in the {@link LRUCache#set} call will override the\n * default time from `performance.now()` or `Date.now()`.\n *\n * Note that it must be a valid value for whichever time-tracking\n * method is in use.\n */\n start?: Milliseconds\n status?: Status\n }\n\n /**\n * The type signature for the {@link OptionsBase.fetchMethod} option.\n */\n export type Fetcher = (\n key: K,\n staleValue: V | undefined,\n options: FetcherOptions\n ) => Promise | V | void | undefined\n\n /**\n * Options which may be passed to the {@link LRUCache} constructor.\n *\n * Most of these may be overridden in the various options that use\n * them.\n *\n * Despite all being technically optional, the constructor requires that\n * a cache is at minimum limited by one or more of {@link OptionsBase.max},\n * {@link OptionsBase.ttl}, or {@link OptionsBase.maxSize}.\n *\n * If {@link OptionsBase.ttl} is used alone, then it is strongly advised\n * (and in fact required by the type definitions here) that the cache\n * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially\n * unbounded storage.\n */\n export interface OptionsBase {\n /**\n * The maximum number of items to store in the cache before evicting\n * old entries. This is read-only on the {@link LRUCache} instance,\n * and may not be overridden.\n *\n * If set, then storage space will be pre-allocated at construction\n * time, and the cache will perform significantly faster.\n *\n * Note that significantly fewer items may be stored, if\n * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also\n * set.\n */\n max?: Count\n\n /**\n * Max time in milliseconds for items to live in cache before they are\n * considered stale. 
Note that stale items are NOT preemptively removed\n * by default, and MAY live in the cache long after they have expired.\n *\n * Also, as this cache is optimized for LRU/MRU operations, some of\n * the staleness/TTL checks will reduce performance, as they will incur\n * overhead by deleting items.\n *\n * Must be an integer number of ms. If set to 0, this indicates \"no TTL\"\n *\n * @default 0\n */\n ttl?: Milliseconds\n\n /**\n * Minimum amount of time in ms in which to check for staleness.\n * Defaults to 1, which means that the current time is checked\n * at most once per millisecond.\n *\n * Set to 0 to check the current time every time staleness is tested.\n * (This reduces performance, and is theoretically unnecessary.)\n *\n * Setting this to a higher value will improve performance somewhat\n * while using ttl tracking, albeit at the expense of keeping stale\n * items around a bit longer than their TTLs would indicate.\n *\n * @default 1\n */\n ttlResolution?: Milliseconds\n\n /**\n * Preemptively remove stale items from the cache.\n * Note that this may significantly degrade performance,\n * especially if the cache is storing a large number of items.\n * It is almost always best to just leave the stale items in\n * the cache, and let them fall out as new items are added.\n *\n * Note that this means that {@link OptionsBase.allowStale} is a bit\n * pointless, as stale items will be deleted almost as soon as they\n * expire.\n *\n * @default false\n */\n ttlAutopurge?: boolean\n\n /**\n * Update the age of items on {@link LRUCache#get}, renewing their TTL\n *\n * Has no effect if {@link OptionsBase.ttl} is not set.\n *\n * @default false\n */\n updateAgeOnGet?: boolean\n\n /**\n * Update the age of items on {@link LRUCache#has}, renewing their TTL\n *\n * Has no effect if {@link OptionsBase.ttl} is not set.\n *\n * @default false\n */\n updateAgeOnHas?: boolean\n\n /**\n * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return\n * stale data, if available.\n */\n allowStale?: boolean\n\n /**\n * Function that is called on items when they are dropped from the cache.\n * This can be handy if you want to close file descriptors or do other\n * cleanup tasks when items are no longer accessible. Called with `key,\n * value`. It's called before actually removing the item from the\n * internal cache, so it is *NOT* safe to re-add them.\n *\n * Use {@link OptionsBase.disposeAfter} if you wish to dispose items after\n * they have been full removed, when it is safe to add them back to the\n * cache.\n */\n dispose?: Disposer\n\n /**\n * The same as {@link OptionsBase.dispose}, but called *after* the entry\n * is completely removed and the cache is once again in a clean state.\n * It is safe to add an item right back into the cache at this point.\n * However, note that it is *very* easy to inadvertently create infinite\n * recursion this way.\n */\n disposeAfter?: Disposer\n\n /**\n * Set to true to suppress calling the\n * {@link OptionsBase.dispose} function if the entry key is\n * still accessible within the cache.\n * This may be overridden by passing an options object to\n * {@link LRUCache#set}.\n */\n noDisposeOnSet?: boolean\n\n /**\n * Boolean flag to tell the cache to not update the TTL when\n * setting a new value for an existing key (ie, when updating a value\n * rather than inserting a new value). 
Note that the TTL value is\n * _always_ set (if provided) when adding a new entry into the cache.\n *\n * Has no effect if a {@link OptionsBase.ttl} is not set.\n */\n noUpdateTTL?: boolean\n\n /**\n * If you wish to track item size, you must provide a maxSize\n * note that we still will only keep up to max *actual items*,\n * if max is set, so size tracking may cause fewer than max items\n * to be stored. At the extreme, a single item of maxSize size\n * will cause everything else in the cache to be dropped when it\n * is added. Use with caution!\n *\n * Note also that size tracking can negatively impact performance,\n * though for most cases, only minimally.\n */\n maxSize?: Size\n\n /**\n * The maximum allowed size for any single item in the cache.\n *\n * If a larger item is passed to {@link LRUCache#set} or returned by a\n * {@link OptionsBase.fetchMethod}, then it will not be stored in the\n * cache.\n */\n maxEntrySize?: Size\n\n /**\n * A function that returns a number indicating the item's size.\n *\n * If not provided, and {@link OptionsBase.maxSize} or\n * {@link OptionsBase.maxEntrySize} are set, then all\n * {@link LRUCache#set} calls **must** provide an explicit\n * {@link SetOptions.size} or sizeCalculation param.\n */\n sizeCalculation?: SizeCalculator\n\n /**\n * Method that provides the implementation for {@link LRUCache#fetch}\n */\n fetchMethod?: Fetcher\n\n /**\n * Set to true to suppress the deletion of stale data when a\n * {@link OptionsBase.fetchMethod} returns a rejected promise.\n */\n noDeleteOnFetchRejection?: boolean\n\n /**\n * Do not delete stale items when they are retrieved with\n * {@link LRUCache#get}.\n *\n * Note that the `get` return value will still be `undefined`\n * unless {@link OptionsBase.allowStale} is true.\n */\n noDeleteOnStaleGet?: boolean\n\n /**\n * Set to true to allow returning stale data when a\n * {@link OptionsBase.fetchMethod} throws an error or returns a rejected\n * promise.\n *\n * This differs from using {@link OptionsBase.allowStale} in that stale\n * data will ONLY be returned in the case that the\n * {@link LRUCache#fetch} fails, not any other times.\n */\n allowStaleOnFetchRejection?: boolean\n\n /**\n * Set to true to return a stale value from the cache when the\n * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches an `'abort'`\n * event, whether user-triggered, or due to internal cache behavior.\n *\n * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying\n * {@link OptionsBase.fetchMethod} will still be considered canceled, and\n * any value it returns will be ignored and not cached.\n *\n * Caveat: since fetches are aborted when a new value is explicitly\n * set in the cache, this can lead to fetch returning a stale value,\n * since that was the fallback value _at the moment the `fetch()` was\n * initiated_, even though the new updated value is now present in\n * the cache.\n *\n * For example:\n *\n * ```ts\n * const cache = new LRUCache({\n * ttl: 100,\n * fetchMethod: async (url, oldValue, { signal }) => {\n * const res = await fetch(url, { signal })\n * return await res.json()\n * }\n * })\n * cache.set('https://example.com/', { some: 'data' })\n * // 100ms go by...\n * const result = cache.fetch('https://example.com/')\n * cache.set('https://example.com/', { other: 'thing' })\n * console.log(await result) // { some: 'data' }\n * console.log(cache.get('https://example.com/')) // { other: 'thing' }\n * ```\n */\n allowStaleOnFetchAbort?: boolean\n\n /**\n * Set to true to ignore 
the `abort` event emitted by the `AbortSignal`\n * object passed to {@link OptionsBase.fetchMethod}, and still cache the\n * resulting resolution value, as long as it is not `undefined`.\n *\n * When used on its own, this means aborted {@link LRUCache#fetch} calls are not\n * immediately resolved or rejected when they are aborted, and instead\n * take the full time to await.\n *\n * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted\n * {@link LRUCache#fetch} calls will resolve immediately to their stale\n * cached value or `undefined`, and will continue to process and eventually\n * update the cache when they resolve, as long as the resulting value is\n * not `undefined`, thus supporting a \"return stale on timeout while\n * refreshing\" mechanism by passing `AbortSignal.timeout(n)` as the signal.\n *\n * **Note**: regardless of this setting, an `abort` event _is still\n * emitted on the `AbortSignal` object_, so may result in invalid results\n * when passed to other underlying APIs that use AbortSignals.\n *\n * This may be overridden in the {@link OptionsBase.fetchMethod} or the\n * call to {@link LRUCache#fetch}.\n */\n ignoreFetchAbort?: boolean\n }\n\n export interface OptionsMaxLimit\n extends OptionsBase {\n max: Count\n }\n export interface OptionsTTLLimit\n extends OptionsBase {\n ttl: Milliseconds\n ttlAutopurge: boolean\n }\n export interface OptionsSizeLimit\n extends OptionsBase {\n maxSize: Size\n }\n\n /**\n * The valid safe options for the {@link LRUCache} constructor\n */\n export type Options =\n | OptionsMaxLimit\n | OptionsSizeLimit\n | OptionsTTLLimit\n\n /**\n * Entry objects used by {@link LRUCache#load} and {@link LRUCache#dump}\n */\n export interface Entry {\n value: V\n ttl?: Milliseconds\n size?: Size\n start?: Milliseconds\n }\n}\n\n/**\n * Default export, the thing you're using this module to get.\n *\n * All properties from the options object (with the exception of\n * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as\n * normal public members. (`max` and `maxBase` are read-only getters.)\n * Changing any of these will alter the defaults for subsequent method calls,\n * but is otherwise safe.\n */\nexport class LRUCache {\n // properties coming in from the options of these, only max and maxSize\n // really *need* to be protected. 
The rest can be modified, as they just\n // set defaults for various methods.\n readonly #max: LRUCache.Count\n readonly #maxSize: LRUCache.Size\n readonly #dispose?: LRUCache.Disposer\n readonly #disposeAfter?: LRUCache.Disposer\n readonly #fetchMethod?: LRUCache.Fetcher\n\n /**\n * {@link LRUCache.OptionsBase.ttl}\n */\n ttl: LRUCache.Milliseconds\n\n /**\n * {@link LRUCache.OptionsBase.ttlResolution}\n */\n ttlResolution: LRUCache.Milliseconds\n /**\n * {@link LRUCache.OptionsBase.ttlAutopurge}\n */\n ttlAutopurge: boolean\n /**\n * {@link LRUCache.OptionsBase.updateAgeOnGet}\n */\n updateAgeOnGet: boolean\n /**\n * {@link LRUCache.OptionsBase.updateAgeOnHas}\n */\n updateAgeOnHas: boolean\n /**\n * {@link LRUCache.OptionsBase.allowStale}\n */\n allowStale: boolean\n\n /**\n * {@link LRUCache.OptionsBase.noDisposeOnSet}\n */\n noDisposeOnSet: boolean\n /**\n * {@link LRUCache.OptionsBase.noUpdateTTL}\n */\n noUpdateTTL: boolean\n /**\n * {@link LRUCache.OptionsBase.maxEntrySize}\n */\n maxEntrySize: LRUCache.Size\n /**\n * {@link LRUCache.OptionsBase.sizeCalculation}\n */\n sizeCalculation?: LRUCache.SizeCalculator\n /**\n * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}\n */\n noDeleteOnFetchRejection: boolean\n /**\n * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}\n */\n noDeleteOnStaleGet: boolean\n /**\n * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}\n */\n allowStaleOnFetchAbort: boolean\n /**\n * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}\n */\n allowStaleOnFetchRejection: boolean\n /**\n * {@link LRUCache.OptionsBase.ignoreFetchAbort}\n */\n ignoreFetchAbort: boolean\n\n // computed properties\n #size: LRUCache.Count\n #calculatedSize: LRUCache.Size\n #keyMap: Map\n #keyList: (K | undefined)[]\n #valList: (V | BackgroundFetch | undefined)[]\n #next: NumberArray\n #prev: NumberArray\n #head: Index\n #tail: Index\n #free: StackLike\n #disposed?: DisposeTask[]\n #sizes?: ZeroArray\n #starts?: ZeroArray\n #ttls?: ZeroArray\n\n #hasDispose: boolean\n #hasFetchMethod: boolean\n #hasDisposeAfter: boolean\n\n /**\n * Do not call this method unless you need to inspect the\n * inner workings of the cache. 
If anything returned by this\n * object is modified in any way, strange breakage may occur.\n *\n * These fields are private for a reason!\n *\n * @internal\n */\n static unsafeExposeInternals<\n K extends {},\n V extends {},\n FC extends unknown = unknown\n >(c: LRUCache) {\n return {\n // properties\n starts: c.#starts,\n ttls: c.#ttls,\n sizes: c.#sizes,\n keyMap: c.#keyMap as Map,\n keyList: c.#keyList,\n valList: c.#valList,\n next: c.#next,\n prev: c.#prev,\n get head() {\n return c.#head\n },\n get tail() {\n return c.#tail\n },\n free: c.#free,\n // methods\n isBackgroundFetch: (p: any) => c.#isBackgroundFetch(p),\n backgroundFetch: (\n k: K,\n index: number | undefined,\n options: LRUCache.FetchOptions,\n context: any\n ): BackgroundFetch =>\n c.#backgroundFetch(\n k,\n index as Index | undefined,\n options,\n context\n ),\n moveToTail: (index: number): void =>\n c.#moveToTail(index as Index),\n indexes: (options?: { allowStale: boolean }) =>\n c.#indexes(options),\n rindexes: (options?: { allowStale: boolean }) =>\n c.#rindexes(options),\n isStale: (index: number | undefined) =>\n c.#isStale(index as Index),\n }\n }\n\n // Protected read-only members\n\n /**\n * {@link LRUCache.OptionsBase.max} (read-only)\n */\n get max(): LRUCache.Count {\n return this.#max\n }\n /**\n * {@link LRUCache.OptionsBase.maxSize} (read-only)\n */\n get maxSize(): LRUCache.Count {\n return this.#maxSize\n }\n /**\n * The total computed size of items in the cache (read-only)\n */\n get calculatedSize(): LRUCache.Size {\n return this.#calculatedSize\n }\n /**\n * The number of items stored in the cache (read-only)\n */\n get size(): LRUCache.Count {\n return this.#size\n }\n /**\n * {@link LRUCache.OptionsBase.fetchMethod} (read-only)\n */\n get fetchMethod(): LRUCache.Fetcher | undefined {\n return this.#fetchMethod\n }\n /**\n * {@link LRUCache.OptionsBase.dispose} (read-only)\n */\n get dispose() {\n return this.#dispose\n }\n /**\n * {@link LRUCache.OptionsBase.disposeAfter} (read-only)\n */\n get disposeAfter() {\n return this.#disposeAfter\n }\n\n constructor(\n options: LRUCache.Options | LRUCache\n ) {\n const {\n max = 0,\n ttl,\n ttlResolution = 1,\n ttlAutopurge,\n updateAgeOnGet,\n updateAgeOnHas,\n allowStale,\n dispose,\n disposeAfter,\n noDisposeOnSet,\n noUpdateTTL,\n maxSize = 0,\n maxEntrySize = 0,\n sizeCalculation,\n fetchMethod,\n noDeleteOnFetchRejection,\n noDeleteOnStaleGet,\n allowStaleOnFetchRejection,\n allowStaleOnFetchAbort,\n ignoreFetchAbort,\n } = options\n\n if (max !== 0 && !isPosInt(max)) {\n throw new TypeError('max option must be a nonnegative integer')\n }\n\n const UintArray = max ? 
getUintArray(max) : Array\n if (!UintArray) {\n throw new Error('invalid max value: ' + max)\n }\n\n this.#max = max\n this.#maxSize = maxSize\n this.maxEntrySize = maxEntrySize || this.#maxSize\n this.sizeCalculation = sizeCalculation\n if (this.sizeCalculation) {\n if (!this.#maxSize && !this.maxEntrySize) {\n throw new TypeError(\n 'cannot set sizeCalculation without setting maxSize or maxEntrySize'\n )\n }\n if (typeof this.sizeCalculation !== 'function') {\n throw new TypeError('sizeCalculation set to non-function')\n }\n }\n\n if (\n fetchMethod !== undefined &&\n typeof fetchMethod !== 'function'\n ) {\n throw new TypeError(\n 'fetchMethod must be a function if specified'\n )\n }\n this.#fetchMethod = fetchMethod\n this.#hasFetchMethod = !!fetchMethod\n\n this.#keyMap = new Map()\n this.#keyList = new Array(max).fill(undefined)\n this.#valList = new Array(max).fill(undefined)\n this.#next = new UintArray(max)\n this.#prev = new UintArray(max)\n this.#head = 0 as Index\n this.#tail = 0 as Index\n this.#free = Stack.create(max)\n this.#size = 0\n this.#calculatedSize = 0\n\n if (typeof dispose === 'function') {\n this.#dispose = dispose\n }\n if (typeof disposeAfter === 'function') {\n this.#disposeAfter = disposeAfter\n this.#disposed = []\n } else {\n this.#disposeAfter = undefined\n this.#disposed = undefined\n }\n this.#hasDispose = !!this.#dispose\n this.#hasDisposeAfter = !!this.#disposeAfter\n\n this.noDisposeOnSet = !!noDisposeOnSet\n this.noUpdateTTL = !!noUpdateTTL\n this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection\n this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection\n this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort\n this.ignoreFetchAbort = !!ignoreFetchAbort\n\n // NB: maxEntrySize is set to maxSize if it's set\n if (this.maxEntrySize !== 0) {\n if (this.#maxSize !== 0) {\n if (!isPosInt(this.#maxSize)) {\n throw new TypeError(\n 'maxSize must be a positive integer if specified'\n )\n }\n }\n if (!isPosInt(this.maxEntrySize)) {\n throw new TypeError(\n 'maxEntrySize must be a positive integer if specified'\n )\n }\n this.#initializeSizeTracking()\n }\n\n this.allowStale = !!allowStale\n this.noDeleteOnStaleGet = !!noDeleteOnStaleGet\n this.updateAgeOnGet = !!updateAgeOnGet\n this.updateAgeOnHas = !!updateAgeOnHas\n this.ttlResolution =\n isPosInt(ttlResolution) || ttlResolution === 0\n ? ttlResolution\n : 1\n this.ttlAutopurge = !!ttlAutopurge\n this.ttl = ttl || 0\n if (this.ttl) {\n if (!isPosInt(this.ttl)) {\n throw new TypeError(\n 'ttl must be a positive integer if specified'\n )\n }\n this.#initializeTTLTracking()\n }\n\n // do not allow completely unbounded caches\n if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {\n throw new TypeError(\n 'At least one of max, maxSize, or ttl is required'\n )\n }\n if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {\n const code = 'LRU_CACHE_UNBOUNDED'\n if (shouldWarn(code)) {\n warned.add(code)\n const msg =\n 'TTL caching without ttlAutopurge, max, or maxSize can ' +\n 'result in unbounded memory consumption.'\n emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)\n }\n }\n }\n\n /**\n * Return the remaining TTL time for a given entry key\n */\n getRemainingTTL(key: K) {\n return this.#keyMap.has(key) ? Infinity : 0\n }\n\n #initializeTTLTracking() {\n const ttls = new ZeroArray(this.#max)\n const starts = new ZeroArray(this.#max)\n this.#ttls = ttls\n this.#starts = starts\n\n this.#setItemTTL = (index, ttl, start = perf.now()) => {\n starts[index] = ttl !== 0 ? 
start : 0\n ttls[index] = ttl\n if (ttl !== 0 && this.ttlAutopurge) {\n const t = setTimeout(() => {\n if (this.#isStale(index)) {\n this.delete(this.#keyList[index] as K)\n }\n }, ttl + 1)\n // unref() not supported on all platforms\n /* c8 ignore start */\n if (t.unref) {\n t.unref()\n }\n /* c8 ignore stop */\n }\n }\n\n this.#updateItemAge = index => {\n starts[index] = ttls[index] !== 0 ? perf.now() : 0\n }\n\n this.#statusTTL = (status, index) => {\n if (ttls[index]) {\n const ttl = ttls[index]\n const start = starts[index]\n status.ttl = ttl\n status.start = start\n status.now = cachedNow || getNow()\n const age = status.now - start\n status.remainingTTL = ttl - age\n }\n }\n\n // debounce calls to perf.now() to 1s so we're not hitting\n // that costly call repeatedly.\n let cachedNow = 0\n const getNow = () => {\n const n = perf.now()\n if (this.ttlResolution > 0) {\n cachedNow = n\n const t = setTimeout(\n () => (cachedNow = 0),\n this.ttlResolution\n )\n // not available on all platforms\n /* c8 ignore start */\n if (t.unref) {\n t.unref()\n }\n /* c8 ignore stop */\n }\n return n\n }\n\n this.getRemainingTTL = key => {\n const index = this.#keyMap.get(key)\n if (index === undefined) {\n return 0\n }\n const ttl = ttls[index]\n const start = starts[index]\n if (ttl === 0 || start === 0) {\n return Infinity\n }\n const age = (cachedNow || getNow()) - start\n return ttl - age\n }\n\n this.#isStale = index => {\n return (\n ttls[index] !== 0 &&\n starts[index] !== 0 &&\n (cachedNow || getNow()) - starts[index] > ttls[index]\n )\n }\n }\n\n // conditionally set private methods related to TTL\n #updateItemAge: (index: Index) => void = () => {}\n #statusTTL: (status: LRUCache.Status, index: Index) => void =\n () => {}\n #setItemTTL: (\n index: Index,\n ttl: LRUCache.Milliseconds,\n start?: LRUCache.Milliseconds\n // ignore because we never call this if we're not already in TTL mode\n /* c8 ignore start */\n ) => void = () => {}\n /* c8 ignore stop */\n\n #isStale: (index: Index) => boolean = () => false\n\n #initializeSizeTracking() {\n const sizes = new ZeroArray(this.#max)\n this.#calculatedSize = 0\n this.#sizes = sizes\n this.#removeItemSize = index => {\n this.#calculatedSize -= sizes[index]\n sizes[index] = 0\n }\n this.#requireSize = (k, v, size, sizeCalculation) => {\n // provisionally accept background fetches.\n // actual value size will be checked when they return.\n if (this.#isBackgroundFetch(v)) {\n return 0\n }\n if (!isPosInt(size)) {\n if (sizeCalculation) {\n if (typeof sizeCalculation !== 'function') {\n throw new TypeError('sizeCalculation must be a function')\n }\n size = sizeCalculation(v, k)\n if (!isPosInt(size)) {\n throw new TypeError(\n 'sizeCalculation return invalid (expect positive integer)'\n )\n }\n } else {\n throw new TypeError(\n 'invalid size value (must be positive integer). 
' +\n 'When maxSize or maxEntrySize is used, sizeCalculation ' +\n 'or size must be set.'\n )\n }\n }\n return size\n }\n this.#addItemSize = (\n index: Index,\n size: LRUCache.Size,\n status?: LRUCache.Status\n ) => {\n sizes[index] = size\n if (this.#maxSize) {\n const maxSize = this.#maxSize - sizes[index]\n while (this.#calculatedSize > maxSize) {\n this.#evict(true)\n }\n }\n this.#calculatedSize += sizes[index]\n if (status) {\n status.entrySize = size\n status.totalCalculatedSize = this.#calculatedSize\n }\n }\n }\n\n #removeItemSize: (index: Index) => void = _i => {}\n #addItemSize: (\n index: Index,\n size: LRUCache.Size,\n status?: LRUCache.Status\n ) => void = (_i, _s, _st) => {}\n #requireSize: (\n k: K,\n v: V | BackgroundFetch,\n size?: LRUCache.Size,\n sizeCalculation?: LRUCache.SizeCalculator\n ) => LRUCache.Size = (\n _k: K,\n _v: V | BackgroundFetch,\n size?: LRUCache.Size,\n sizeCalculation?: LRUCache.SizeCalculator\n ) => {\n if (size || sizeCalculation) {\n throw new TypeError(\n 'cannot set size without setting maxSize or maxEntrySize on cache'\n )\n }\n return 0\n };\n\n *#indexes({ allowStale = this.allowStale } = {}) {\n if (this.#size) {\n for (let i = this.#tail; true; ) {\n if (!this.#isValidIndex(i)) {\n break\n }\n if (allowStale || !this.#isStale(i)) {\n yield i\n }\n if (i === this.#head) {\n break\n } else {\n i = this.#prev[i] as Index\n }\n }\n }\n }\n\n *#rindexes({ allowStale = this.allowStale } = {}) {\n if (this.#size) {\n for (let i = this.#head; true; ) {\n if (!this.#isValidIndex(i)) {\n break\n }\n if (allowStale || !this.#isStale(i)) {\n yield i\n }\n if (i === this.#tail) {\n break\n } else {\n i = this.#next[i] as Index\n }\n }\n }\n }\n\n #isValidIndex(index: Index) {\n return (\n index !== undefined &&\n this.#keyMap.get(this.#keyList[index] as K) === index\n )\n }\n\n /**\n * Return a generator yielding `[key, value]` pairs,\n * in order from most recently used to least recently used.\n */\n *entries() {\n for (const i of this.#indexes()) {\n if (\n this.#valList[i] !== undefined &&\n this.#keyList[i] !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield [this.#keyList[i], this.#valList[i]]\n }\n }\n }\n\n /**\n * Inverse order version of {@link LRUCache.entries}\n *\n * Return a generator yielding `[key, value]` pairs,\n * in order from least recently used to most recently used.\n */\n *rentries() {\n for (const i of this.#rindexes()) {\n if (\n this.#valList[i] !== undefined &&\n this.#keyList[i] !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield [this.#keyList[i], this.#valList[i]]\n }\n }\n }\n\n /**\n * Return a generator yielding the keys in the cache,\n * in order from most recently used to least recently used.\n */\n *keys() {\n for (const i of this.#indexes()) {\n const k = this.#keyList[i]\n if (\n k !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield k\n }\n }\n }\n\n /**\n * Inverse order version of {@link LRUCache.keys}\n *\n * Return a generator yielding the keys in the cache,\n * in order from least recently used to most recently used.\n */\n *rkeys() {\n for (const i of this.#rindexes()) {\n const k = this.#keyList[i]\n if (\n k !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield k\n }\n }\n }\n\n /**\n * Return a generator yielding the values in the cache,\n * in order from most recently used to least recently used.\n */\n *values() {\n for (const i of this.#indexes()) {\n const v = this.#valList[i]\n if (\n v !== undefined &&\n 
!this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield this.#valList[i]\n }\n }\n }\n\n /**\n * Inverse order version of {@link LRUCache.values}\n *\n * Return a generator yielding the values in the cache,\n * in order from least recently used to most recently used.\n */\n *rvalues() {\n for (const i of this.#rindexes()) {\n const v = this.#valList[i]\n if (\n v !== undefined &&\n !this.#isBackgroundFetch(this.#valList[i])\n ) {\n yield this.#valList[i]\n }\n }\n }\n\n /**\n * Iterating over the cache itself yields the same results as\n * {@link LRUCache.entries}\n */\n [Symbol.iterator]() {\n return this.entries()\n }\n\n /**\n * Find a value for which the supplied fn method returns a truthy value,\n * similar to Array.find(). fn is called as fn(value, key, cache).\n */\n find(\n fn: (v: V, k: K, self: LRUCache) => boolean,\n getOptions: LRUCache.GetOptions = {}\n ) {\n for (const i of this.#indexes()) {\n const v = this.#valList[i]\n const value = this.#isBackgroundFetch(v)\n ? v.__staleWhileFetching\n : v\n if (value === undefined) continue\n if (fn(value, this.#keyList[i] as K, this)) {\n return this.get(this.#keyList[i] as K, getOptions)\n }\n }\n }\n\n /**\n * Call the supplied function on each item in the cache, in order from\n * most recently used to least recently used. fn is called as\n * fn(value, key, cache). Does not update age or recenty of use.\n * Does not iterate over stale values.\n */\n forEach(\n fn: (v: V, k: K, self: LRUCache) => any,\n thisp: any = this\n ) {\n for (const i of this.#indexes()) {\n const v = this.#valList[i]\n const value = this.#isBackgroundFetch(v)\n ? v.__staleWhileFetching\n : v\n if (value === undefined) continue\n fn.call(thisp, value, this.#keyList[i] as K, this)\n }\n }\n\n /**\n * The same as {@link LRUCache.forEach} but items are iterated over in\n * reverse order. (ie, less recently used items are iterated over first.)\n */\n rforEach(\n fn: (v: V, k: K, self: LRUCache) => any,\n thisp: any = this\n ) {\n for (const i of this.#rindexes()) {\n const v = this.#valList[i]\n const value = this.#isBackgroundFetch(v)\n ? v.__staleWhileFetching\n : v\n if (value === undefined) continue\n fn.call(thisp, value, this.#keyList[i] as K, this)\n }\n }\n\n /**\n * Delete any stale entries. Returns true if anything was removed,\n * false otherwise.\n */\n purgeStale() {\n let deleted = false\n for (const i of this.#rindexes({ allowStale: true })) {\n if (this.#isStale(i)) {\n this.delete(this.#keyList[i] as K)\n deleted = true\n }\n }\n return deleted\n }\n\n /**\n * Return an array of [key, {@link LRUCache.Entry}] tuples which can be\n * passed to cache.load()\n */\n dump() {\n const arr: [K, LRUCache.Entry][] = []\n for (const i of this.#indexes({ allowStale: true })) {\n const key = this.#keyList[i]\n const v = this.#valList[i]\n const value: V | undefined = this.#isBackgroundFetch(v)\n ? 
v.__staleWhileFetching\n : v\n if (value === undefined || key === undefined) continue\n const entry: LRUCache.Entry = { value }\n if (this.#ttls && this.#starts) {\n entry.ttl = this.#ttls[i]\n // always dump the start relative to a portable timestamp\n // it's ok for this to be a bit slow, it's a rare operation.\n const age = perf.now() - this.#starts[i]\n entry.start = Math.floor(Date.now() - age)\n }\n if (this.#sizes) {\n entry.size = this.#sizes[i]\n }\n arr.unshift([key, entry])\n }\n return arr\n }\n\n /**\n * Reset the cache and load in the items in entries in the order listed.\n * Note that the shape of the resulting cache may be different if the\n * same options are not used in both caches.\n */\n load(arr: [K, LRUCache.Entry][]) {\n this.clear()\n for (const [key, entry] of arr) {\n if (entry.start) {\n // entry.start is a portable timestamp, but we may be using\n // node's performance.now(), so calculate the offset, so that\n // we get the intended remaining TTL, no matter how long it's\n // been on ice.\n //\n // it's ok for this to be a bit slow, it's a rare operation.\n const age = Date.now() - entry.start\n entry.start = perf.now() - age\n }\n this.set(key, entry.value, entry)\n }\n }\n\n /**\n * Add a value to the cache.\n *\n * Note: if `undefined` is specified as a value, this is an alias for\n * {@link LRUCache#delete}\n */\n set(\n k: K,\n v: V | BackgroundFetch | undefined,\n setOptions: LRUCache.SetOptions = {}\n ) {\n if (v === undefined) {\n this.delete(k)\n return this\n }\n const {\n ttl = this.ttl,\n start,\n noDisposeOnSet = this.noDisposeOnSet,\n sizeCalculation = this.sizeCalculation,\n status,\n } = setOptions\n let { noUpdateTTL = this.noUpdateTTL } = setOptions\n\n const size = this.#requireSize(\n k,\n v,\n setOptions.size || 0,\n sizeCalculation\n )\n // if the item doesn't fit, don't do anything\n // NB: maxEntrySize set to maxSize by default\n if (this.maxEntrySize && size > this.maxEntrySize) {\n if (status) {\n status.set = 'miss'\n status.maxEntrySizeExceeded = true\n }\n // have to delete, in case something is there already.\n this.delete(k)\n return this\n }\n let index = this.#size === 0 ? undefined : this.#keyMap.get(k)\n if (index === undefined) {\n // addition\n index = (\n this.#size === 0\n ? this.#tail\n : this.#free.length !== 0\n ? this.#free.pop()\n : this.#size === this.#max\n ? this.#evict(false)\n : this.#size\n ) as Index\n this.#keyList[index] = k\n this.#valList[index] = v\n this.#keyMap.set(k, index)\n this.#next[this.#tail] = index\n this.#prev[index] = this.#tail\n this.#tail = index\n this.#size++\n this.#addItemSize(index, size, status)\n if (status) status.set = 'add'\n noUpdateTTL = false\n } else {\n // update\n this.#moveToTail(index)\n const oldVal = this.#valList[index] as V | BackgroundFetch\n if (v !== oldVal) {\n if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {\n oldVal.__abortController.abort(new Error('replaced'))\n } else if (!noDisposeOnSet) {\n if (this.#hasDispose) {\n this.#dispose?.(oldVal as V, k, 'set')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([oldVal as V, k, 'set'])\n }\n }\n this.#removeItemSize(index)\n this.#addItemSize(index, size, status)\n this.#valList[index] = v\n if (status) {\n status.set = 'replace'\n const oldValue =\n oldVal && this.#isBackgroundFetch(oldVal)\n ? 
oldVal.__staleWhileFetching\n : oldVal\n if (oldValue !== undefined) status.oldValue = oldValue\n }\n } else if (status) {\n status.set = 'update'\n }\n }\n if (ttl !== 0 && !this.#ttls) {\n this.#initializeTTLTracking()\n }\n if (this.#ttls) {\n if (!noUpdateTTL) {\n this.#setItemTTL(index, ttl, start)\n }\n if (status) this.#statusTTL(status, index)\n }\n if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n return this\n }\n\n /**\n * Evict the least recently used item, returning its value or\n * `undefined` if cache is empty.\n */\n pop(): V | undefined {\n try {\n while (this.#size) {\n const val = this.#valList[this.#head]\n this.#evict(true)\n if (this.#isBackgroundFetch(val)) {\n if (val.__staleWhileFetching) {\n return val.__staleWhileFetching\n }\n } else if (val !== undefined) {\n return val\n }\n }\n } finally {\n if (this.#hasDisposeAfter && this.#disposed) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n }\n }\n\n #evict(free: boolean) {\n const head = this.#head\n const k = this.#keyList[head] as K\n const v = this.#valList[head] as V\n if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {\n v.__abortController.abort(new Error('evicted'))\n } else if (this.#hasDispose || this.#hasDisposeAfter) {\n if (this.#hasDispose) {\n this.#dispose?.(v, k, 'evict')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([v, k, 'evict'])\n }\n }\n this.#removeItemSize(head)\n // if we aren't about to use the index, then null these out\n if (free) {\n this.#keyList[head] = undefined\n this.#valList[head] = undefined\n this.#free.push(head)\n }\n if (this.#size === 1) {\n this.#head = this.#tail = 0 as Index\n this.#free.length = 0\n } else {\n this.#head = this.#next[head] as Index\n }\n this.#keyMap.delete(k)\n this.#size--\n return head\n }\n\n /**\n * Check if a key is in the cache, without updating the recency of use.\n * Will return false if the item is stale, even though it is technically\n * in the cache.\n *\n * Will not update item age unless\n * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.\n */\n has(k: K, hasOptions: LRUCache.HasOptions = {}) {\n const { updateAgeOnHas = this.updateAgeOnHas, status } =\n hasOptions\n const index = this.#keyMap.get(k)\n if (index !== undefined) {\n const v = this.#valList[index]\n if (\n this.#isBackgroundFetch(v) &&\n v.__staleWhileFetching === undefined\n ) {\n return false\n }\n if (!this.#isStale(index)) {\n if (updateAgeOnHas) {\n this.#updateItemAge(index)\n }\n if (status) {\n status.has = 'hit'\n this.#statusTTL(status, index)\n }\n return true\n } else if (status) {\n status.has = 'stale'\n this.#statusTTL(status, index)\n }\n } else if (status) {\n status.has = 'miss'\n }\n return false\n }\n\n /**\n * Like {@link LRUCache#get} but doesn't update recency or delete stale\n * items.\n *\n * Returns `undefined` if the item is stale, unless\n * {@link LRUCache.OptionsBase.allowStale} is set.\n */\n peek(k: K, peekOptions: LRUCache.PeekOptions = {}) {\n const { allowStale = this.allowStale } = peekOptions\n const index = this.#keyMap.get(k)\n if (\n index !== undefined &&\n (allowStale || !this.#isStale(index))\n ) {\n const v = this.#valList[index]\n // either stale and allowed, or forcing a refresh of non-stale value\n return this.#isBackgroundFetch(v) ? 
v.__staleWhileFetching : v\n }\n }\n\n #backgroundFetch(\n k: K,\n index: Index | undefined,\n options: LRUCache.FetchOptions,\n context: any\n ): BackgroundFetch {\n const v = index === undefined ? undefined : this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n return v\n }\n\n const ac = new AC()\n const { signal } = options\n // when/if our AC signals, then stop listening to theirs.\n signal?.addEventListener('abort', () => ac.abort(signal.reason), {\n signal: ac.signal,\n })\n\n const fetchOpts = {\n signal: ac.signal,\n options,\n context,\n }\n\n const cb = (\n v: V | void | undefined,\n updateCache = false\n ): V | undefined | void => {\n const { aborted } = ac.signal\n const ignoreAbort = options.ignoreFetchAbort && v !== undefined\n if (options.status) {\n if (aborted && !updateCache) {\n options.status.fetchAborted = true\n options.status.fetchError = ac.signal.reason\n if (ignoreAbort) options.status.fetchAbortIgnored = true\n } else {\n options.status.fetchResolved = true\n }\n }\n if (aborted && !ignoreAbort && !updateCache) {\n return fetchFail(ac.signal.reason)\n }\n // either we didn't abort, and are still here, or we did, and ignored\n const bf = p as BackgroundFetch\n if (this.#valList[index as Index] === p) {\n if (v === undefined) {\n if (bf.__staleWhileFetching) {\n this.#valList[index as Index] = bf.__staleWhileFetching\n } else {\n this.delete(k)\n }\n } else {\n if (options.status) options.status.fetchUpdated = true\n this.set(k, v, fetchOpts.options)\n }\n }\n return v\n }\n\n const eb = (er: any) => {\n if (options.status) {\n options.status.fetchRejected = true\n options.status.fetchError = er\n }\n return fetchFail(er)\n }\n\n const fetchFail = (er: any): V | undefined => {\n const { aborted } = ac.signal\n const allowStaleAborted =\n aborted && options.allowStaleOnFetchAbort\n const allowStale =\n allowStaleAborted || options.allowStaleOnFetchRejection\n const noDelete = allowStale || options.noDeleteOnFetchRejection\n const bf = p as BackgroundFetch\n if (this.#valList[index as Index] === p) {\n // if we allow stale on fetch rejections, then we need to ensure that\n // the stale value is not removed from the cache when the fetch fails.\n const del = !noDelete || bf.__staleWhileFetching === undefined\n if (del) {\n this.delete(k)\n } else if (!allowStaleAborted) {\n // still replace the *promise* with the stale value,\n // since we are done with the promise at this point.\n // leave it untouched if we're still waiting for an\n // aborted background fetch that hasn't yet returned.\n this.#valList[index as Index] = bf.__staleWhileFetching\n }\n }\n if (allowStale) {\n if (options.status && bf.__staleWhileFetching !== undefined) {\n options.status.returnedStale = true\n }\n return bf.__staleWhileFetching\n } else if (bf.__returned === bf) {\n throw er\n }\n }\n\n const pcall = (\n res: (v: V | void | undefined) => void,\n rej: (e: any) => void\n ) => {\n const fmp = this.#fetchMethod?.(k, v, fetchOpts)\n if (fmp && fmp instanceof Promise) {\n fmp.then(v => res(v), rej)\n }\n // ignored, we go until we finish, regardless.\n // defer check until we are actually aborting,\n // so fetchMethod can override.\n ac.signal.addEventListener('abort', () => {\n if (\n !options.ignoreFetchAbort ||\n options.allowStaleOnFetchAbort\n ) {\n res()\n // when it eventually resolves, update the cache.\n if (options.allowStaleOnFetchAbort) {\n res = v => cb(v, true)\n }\n }\n })\n }\n\n if (options.status) options.status.fetchDispatched = true\n const p = new 
Promise(pcall).then(cb, eb)\n const bf = Object.assign(p, {\n __abortController: ac,\n __staleWhileFetching: v,\n __returned: undefined,\n })\n\n if (index === undefined) {\n // internal, don't expose status.\n this.set(k, bf, { ...fetchOpts.options, status: undefined })\n index = this.#keyMap.get(k)\n } else {\n this.#valList[index] = bf\n }\n return bf\n }\n\n #isBackgroundFetch(p: any): p is BackgroundFetch {\n if (!this.#hasFetchMethod) return false\n const b = p as BackgroundFetch\n return (\n !!b &&\n b instanceof Promise &&\n b.hasOwnProperty('__staleWhileFetching') &&\n b.__abortController instanceof AC\n )\n }\n\n /**\n * Make an asynchronous cached fetch using the\n * {@link LRUCache.OptionsBase.fetchMethod} function.\n *\n * If multiple fetches for the same key are issued, then they will all be\n * coalesced into a single call to fetchMethod.\n *\n * Note that this means that handling options such as\n * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort},\n * {@link LRUCache.FetchOptions.signal},\n * and {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} will be\n * determined by the FIRST fetch() call for a given key.\n *\n * This is a known (fixable) shortcoming which will be addresed on when\n * someone complains about it, as the fix would involve added complexity and\n * may not be worth the costs for this edge case.\n */\n fetch(\n k: K,\n fetchOptions: unknown extends FC\n ? LRUCache.FetchOptions\n : FC extends undefined | void\n ? LRUCache.FetchOptionsNoContext\n : LRUCache.FetchOptionsWithContext\n ): Promise\n // this overload not allowed if context is required\n fetch(\n k: unknown extends FC\n ? K\n : FC extends undefined | void\n ? K\n : never,\n fetchOptions?: unknown extends FC\n ? LRUCache.FetchOptions\n : FC extends undefined | void\n ? LRUCache.FetchOptionsNoContext\n : never\n ): Promise\n async fetch(\n k: K,\n fetchOptions: LRUCache.FetchOptions = {}\n ): Promise {\n const {\n // get options\n allowStale = this.allowStale,\n updateAgeOnGet = this.updateAgeOnGet,\n noDeleteOnStaleGet = this.noDeleteOnStaleGet,\n // set options\n ttl = this.ttl,\n noDisposeOnSet = this.noDisposeOnSet,\n size = 0,\n sizeCalculation = this.sizeCalculation,\n noUpdateTTL = this.noUpdateTTL,\n // fetch exclusive options\n noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,\n allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,\n ignoreFetchAbort = this.ignoreFetchAbort,\n allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,\n context,\n forceRefresh = false,\n status,\n signal,\n } = fetchOptions\n\n if (!this.#hasFetchMethod) {\n if (status) status.fetch = 'get'\n return this.get(k, {\n allowStale,\n updateAgeOnGet,\n noDeleteOnStaleGet,\n status,\n })\n }\n\n const options = {\n allowStale,\n updateAgeOnGet,\n noDeleteOnStaleGet,\n ttl,\n noDisposeOnSet,\n size,\n sizeCalculation,\n noUpdateTTL,\n noDeleteOnFetchRejection,\n allowStaleOnFetchRejection,\n allowStaleOnFetchAbort,\n ignoreFetchAbort,\n status,\n signal,\n }\n\n let index = this.#keyMap.get(k)\n if (index === undefined) {\n if (status) status.fetch = 'miss'\n const p = this.#backgroundFetch(k, index, options, context)\n return (p.__returned = p)\n } else {\n // in cache, maybe already fetching\n const v = this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n const stale =\n allowStale && v.__staleWhileFetching !== undefined\n if (status) {\n status.fetch = 'inflight'\n if (stale) status.returnedStale = true\n }\n return stale ? 
v.__staleWhileFetching : (v.__returned = v)\n }\n\n // if we force a refresh, that means do NOT serve the cached value,\n // unless we are already in the process of refreshing the cache.\n const isStale = this.#isStale(index)\n if (!forceRefresh && !isStale) {\n if (status) status.fetch = 'hit'\n this.#moveToTail(index)\n if (updateAgeOnGet) {\n this.#updateItemAge(index)\n }\n if (status) this.#statusTTL(status, index)\n return v\n }\n\n // ok, it is stale or a forced refresh, and not already fetching.\n // refresh the cache.\n const p = this.#backgroundFetch(k, index, options, context)\n const hasStale = p.__staleWhileFetching !== undefined\n const staleVal = hasStale && allowStale\n if (status) {\n status.fetch = isStale ? 'stale' : 'refresh'\n if (staleVal && isStale) status.returnedStale = true\n }\n return staleVal ? p.__staleWhileFetching : (p.__returned = p)\n }\n }\n\n /**\n * Return a value from the cache. Will update the recency of the cache\n * entry found.\n *\n * If the key is not found, get() will return `undefined`.\n */\n get(k: K, getOptions: LRUCache.GetOptions = {}) {\n const {\n allowStale = this.allowStale,\n updateAgeOnGet = this.updateAgeOnGet,\n noDeleteOnStaleGet = this.noDeleteOnStaleGet,\n status,\n } = getOptions\n const index = this.#keyMap.get(k)\n if (index !== undefined) {\n const value = this.#valList[index]\n const fetching = this.#isBackgroundFetch(value)\n if (status) this.#statusTTL(status, index)\n if (this.#isStale(index)) {\n if (status) status.get = 'stale'\n // delete only if not an in-flight background fetch\n if (!fetching) {\n if (!noDeleteOnStaleGet) {\n this.delete(k)\n }\n if (status && allowStale) status.returnedStale = true\n return allowStale ? value : undefined\n } else {\n if (\n status &&\n allowStale &&\n value.__staleWhileFetching !== undefined\n ) {\n status.returnedStale = true\n }\n return allowStale ? 
value.__staleWhileFetching : undefined\n }\n } else {\n if (status) status.get = 'hit'\n // if we're currently fetching it, we don't actually have it yet\n // it's not stale, which means this isn't a staleWhileRefetching.\n // If it's not stale, and fetching, AND has a __staleWhileFetching\n // value, then that means the user fetched with {forceRefresh:true},\n // so it's safe to return that value.\n if (fetching) {\n return value.__staleWhileFetching\n }\n this.#moveToTail(index)\n if (updateAgeOnGet) {\n this.#updateItemAge(index)\n }\n return value\n }\n } else if (status) {\n status.get = 'miss'\n }\n }\n\n #connect(p: Index, n: Index) {\n this.#prev[n] = p\n this.#next[p] = n\n }\n\n #moveToTail(index: Index): void {\n // if tail already, nothing to do\n // if head, move head to next[index]\n // else\n // move next[prev[index]] to next[index] (head has no prev)\n // move prev[next[index]] to prev[index]\n // prev[index] = tail\n // next[tail] = index\n // tail = index\n if (index !== this.#tail) {\n if (index === this.#head) {\n this.#head = this.#next[index] as Index\n } else {\n this.#connect(\n this.#prev[index] as Index,\n this.#next[index] as Index\n )\n }\n this.#connect(this.#tail, index)\n this.#tail = index\n }\n }\n\n /**\n * Deletes a key out of the cache.\n * Returns true if the key was deleted, false otherwise.\n */\n delete(k: K) {\n let deleted = false\n if (this.#size !== 0) {\n const index = this.#keyMap.get(k)\n if (index !== undefined) {\n deleted = true\n if (this.#size === 1) {\n this.clear()\n } else {\n this.#removeItemSize(index)\n const v = this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n v.__abortController.abort(new Error('deleted'))\n } else if (this.#hasDispose || this.#hasDisposeAfter) {\n if (this.#hasDispose) {\n this.#dispose?.(v as V, k, 'delete')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([v as V, k, 'delete'])\n }\n }\n this.#keyMap.delete(k)\n this.#keyList[index] = undefined\n this.#valList[index] = undefined\n if (index === this.#tail) {\n this.#tail = this.#prev[index] as Index\n } else if (index === this.#head) {\n this.#head = this.#next[index] as Index\n } else {\n this.#next[this.#prev[index]] = this.#next[index]\n this.#prev[this.#next[index]] = this.#prev[index]\n }\n this.#size--\n this.#free.push(index)\n }\n }\n }\n if (this.#hasDisposeAfter && this.#disposed?.length) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n return deleted\n }\n\n /**\n * Clear the cache entirely, throwing away all values.\n */\n clear() {\n for (const index of this.#rindexes({ allowStale: true })) {\n const v = this.#valList[index]\n if (this.#isBackgroundFetch(v)) {\n v.__abortController.abort(new Error('deleted'))\n } else {\n const k = this.#keyList[index]\n if (this.#hasDispose) {\n this.#dispose?.(v as V, k as K, 'delete')\n }\n if (this.#hasDisposeAfter) {\n this.#disposed?.push([v as V, k as K, 'delete'])\n }\n }\n }\n\n this.#keyMap.clear()\n this.#valList.fill(undefined)\n this.#keyList.fill(undefined)\n if (this.#ttls && this.#starts) {\n this.#ttls.fill(0)\n this.#starts.fill(0)\n }\n if (this.#sizes) {\n this.#sizes.fill(0)\n }\n this.#head = 0 as Index\n this.#tail = 0 as Index\n this.#free.length = 0\n this.#calculatedSize = 0\n this.#size = 0\n if (this.#hasDisposeAfter && this.#disposed) {\n const dt = this.#disposed\n let task: DisposeTask | undefined\n while ((task = dt?.shift())) {\n this.#disposeAfter?.(...task)\n }\n }\n 
}\n}\n"], + "mappings": "mVAMA,IAAMA,EACJ,OAAO,aAAgB,UACvB,aACA,OAAO,YAAY,KAAQ,WACvB,YACA,KAEAC,EAAS,IAAI,IAMbC,EACJ,OAAO,SAAY,UAAc,QAAU,QAAU,CAAA,EAIjDC,EAAc,CAClBC,EACAC,EACAC,EACAC,IACE,CACF,OAAOL,EAAQ,aAAgB,WAC3BA,EAAQ,YAAYE,EAAKC,EAAMC,EAAMC,CAAE,EACvC,QAAQ,MAAM,IAAID,MAASD,MAASD,GAAK,CAC/C,EAEII,EAAK,WAAW,gBAChBC,EAAK,WAAW,YAGpB,GAAI,OAAOD,EAAO,IAAa,CAE7BC,EAAK,KAAiB,CACpB,QACA,SAAqC,CAAA,EACrC,OACA,QAAmB,GACnB,iBAAiBC,EAAWH,EAAwB,CAClD,KAAK,SAAS,KAAKA,CAAE,CACvB,GAGFC,EAAK,KAAqB,CACxB,aAAA,CACEG,EAAc,CAChB,CACA,OAAS,IAAIF,EACb,MAAMG,EAAW,CACf,GAAI,MAAK,OAAO,QAEhB,MAAK,OAAO,OAASA,EAErB,KAAK,OAAO,QAAU,GAEtB,QAAWL,KAAM,KAAK,OAAO,SAC3BA,EAAGK,CAAM,EAEX,KAAK,OAAO,UAAUA,CAAM,EAC9B,GAEF,IAAIC,EACFX,EAAQ,KAAK,8BAAgC,IACzCS,EAAiB,IAAK,CACrBE,IACLA,EAAyB,GACzBV,EACE,maAOA,sBACA,UACAQ,CAAc,EAElB,EAIF,IAAMG,EAAcR,GAAiB,CAACL,EAAO,IAAIK,CAAI,EAE/CS,EAAO,OAAO,MAAM,EAIpBC,EAAYC,GAChBA,GAAKA,IAAM,KAAK,MAAMA,CAAC,GAAKA,EAAI,GAAK,SAASA,CAAC,EAc3CC,EAAgBC,GACnBH,EAASG,CAAG,EAETA,GAAO,KAAK,IAAI,EAAG,CAAC,EACpB,WACAA,GAAO,KAAK,IAAI,EAAG,EAAE,EACrB,YACAA,GAAO,KAAK,IAAI,EAAG,EAAE,EACrB,YACAA,GAAO,OAAO,iBACdC,EACA,KATA,KAYAA,EAAN,cAAwB,KAAa,CACnC,YAAYC,EAAY,CACtB,MAAMA,CAAI,EACV,KAAK,KAAK,CAAC,CACb,GAjIFC,EAqIMC,EAAN,KAAW,CACT,KACA,OAGA,OAAO,OAAOJ,EAAW,CACvB,IAAMK,EAAUN,EAAaC,CAAG,EAChC,GAAI,CAACK,EAAS,MAAO,CAAA,EACrBC,EAAAF,EAAMD,EAAgB,IACtB,IAAMI,EAAI,IAAIH,EAAMJ,EAAKK,CAAO,EAChC,OAAAC,EAAAF,EAAMD,EAAgB,IACfI,CACT,CACA,YACEP,EACAK,EAAyC,CAGzC,GAAI,CAACG,EAAAJ,EAAMD,GACT,MAAM,IAAI,UAAU,yCAAyC,EAG/D,KAAK,KAAO,IAAIE,EAAQL,CAAG,EAC3B,KAAK,OAAS,CAChB,CACA,KAAKF,EAAQ,CACX,KAAK,KAAK,KAAK,QAAQ,EAAIA,CAC7B,CACA,KAAG,CACD,OAAO,KAAK,KAAK,EAAE,KAAK,MAAM,CAChC,GA9BIW,EAANL,EAISD,EAAA,YAAPO,EAJID,EAIGN,EAAyB,IAkqB5B,IAAOQ,EAAP,KAAe,CAIVC,GACAC,GACAC,GACAC,GACAC,GAKT,IAKA,cAIA,aAIA,eAIA,eAIA,WAKA,eAIA,YAIA,aAIA,gBAIA,yBAIA,mBAIA,uBAIA,2BAIA,iBAGAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GAEAC,GACAC,GACAC,GAWA,OAAO,sBAILC,EAAqB,CACrB,MAAO,CAEL,OAAQA,EAAEL,GACV,KAAMK,EAAEJ,GACR,MAAOI,EAAEN,GACT,OAAQM,EAAEf,GACV,QAASe,EAAEd,GACX,QAASc,EAAEb,GACX,KAAMa,EAAEZ,GACR,KAAMY,EAAEX,GACR,IAAI,MAAI,CACN,OAAOW,EAAEV,EACX,EACA,IAAI,MAAI,CACN,OAAOU,EAAET,EACX,EACA,KAAMS,EAAER,GAER,kBAAoBS,GAAWD,EAAEE,GAAmBD,CAAC,EACrD,gBAAiB,CACfE,EACAC,EACAC,EACAC,IAEAN,EAAEO,GACAJ,EACAC,EACAC,EACAC,CAAO,EAEX,WAAaF,GACXJ,EAAEQ,GAAYJ,CAAc,EAC9B,QAAUC,GACRL,EAAES,GAASJ,CAAO,EACpB,SAAWA,GACTL,EAAEU,GAAUL,CAAO,EACrB,QAAUD,GACRJ,EAAEW,GAASP,CAAc,EAE/B,CAOA,IAAI,KAAG,CACL,OAAO,KAAK1B,EACd,CAIA,IAAI,SAAO,CACT,OAAO,KAAKC,EACd,CAIA,IAAI,gBAAc,CAChB,OAAO,KAAKK,EACd,CAIA,IAAI,MAAI,CACN,OAAO,KAAKD,EACd,CAIA,IAAI,aAAW,CACb,OAAO,KAAKD,EACd,CAIA,IAAI,SAAO,CACT,OAAO,KAAKF,EACd,CAIA,IAAI,cAAY,CACd,OAAO,KAAKC,EACd,CAEA,YACEwB,EAAwD,CAExD,GAAM,CACJ,IAAAvC,EAAM,EACN,IAAA8C,EACA,cAAAC,EAAgB,EAChB,aAAAC,EACA,eAAAC,EACA,eAAAC,EACA,WAAAC,EACA,QAAAC,EACA,aAAAC,EACA,eAAAC,EACA,YAAAC,EACA,QAAAC,EAAU,EACV,aAAAC,EAAe,EACf,gBAAAC,EACA,YAAAC,EACA,yBAAAC,EACA,mBAAAC,EACA,2BAAAC,EACA,uBAAAC,EACA,iBAAAC,CAAgB,EACdzB,EAEJ,GAAIvC,IAAQ,GAAK,CAACH,EAASG,CAAG,EAC5B,MAAM,IAAI,UAAU,0CAA0C,EAGhE,IAAMiE,EAAYjE,EAAMD,EAAaC,CAAG,EAAI,MAC5C,GAAI,CAACiE,EACH,MAAM,IAAI,MAAM,sBAAwBjE,CAAG,EAO7C,GAJA,KAAKY,GAAOZ,EACZ,KAAKa,GAAW2C,EAChB,KAAK,aAAeC,GAAgB,KAAK5C,GACzC,KAAK,gBAAkB6C,EACnB,KAAK,gBAAiB,CACxB,GAAI,CAAC,KAAK7C,IAAY,CAAC,KAAK,aAC1B,MAAM,IAAI,UACR,oEAAoE,EAGxE,GAAI,OAAO,KAAK,iBAAoB,WAClC,MAAM,IAAI,UAAU,qCAAqC,EAI7D,GACE8C,IAAgB,QAChB,OAAOA,GAAgB,WAEvB,MAAM,IAAI,UACR,6CAA6C,EAsCjD,GAnCA,KAAK3C,GAAe2C,EACpB,KAAK3B,GAAkB,CAAC,CAAC2B,EAEzB,KAAKxC,GAAU,IAAI,IAC
nB,KAAKC,GAAW,IAAI,MAAMpB,CAAG,EAAE,KAAK,MAAS,EAC7C,KAAKqB,GAAW,IAAI,MAAMrB,CAAG,EAAE,KAAK,MAAS,EAC7C,KAAKsB,GAAQ,IAAI2C,EAAUjE,CAAG,EAC9B,KAAKuB,GAAQ,IAAI0C,EAAUjE,CAAG,EAC9B,KAAKwB,GAAQ,EACb,KAAKC,GAAQ,EACb,KAAKC,GAAQjB,EAAM,OAAOT,CAAG,EAC7B,KAAKiB,GAAQ,EACb,KAAKC,GAAkB,EAEnB,OAAOkC,GAAY,aACrB,KAAKtC,GAAWsC,GAEd,OAAOC,GAAiB,YAC1B,KAAKtC,GAAgBsC,EACrB,KAAK1B,GAAY,CAAA,IAEjB,KAAKZ,GAAgB,OACrB,KAAKY,GAAY,QAEnB,KAAKI,GAAc,CAAC,CAAC,KAAKjB,GAC1B,KAAKmB,GAAmB,CAAC,CAAC,KAAKlB,GAE/B,KAAK,eAAiB,CAAC,CAACuC,EACxB,KAAK,YAAc,CAAC,CAACC,EACrB,KAAK,yBAA2B,CAAC,CAACK,EAClC,KAAK,2BAA6B,CAAC,CAACE,EACpC,KAAK,uBAAyB,CAAC,CAACC,EAChC,KAAK,iBAAmB,CAAC,CAACC,EAGtB,KAAK,eAAiB,EAAG,CAC3B,GAAI,KAAKnD,KAAa,GAChB,CAAChB,EAAS,KAAKgB,EAAQ,EACzB,MAAM,IAAI,UACR,iDAAiD,EAIvD,GAAI,CAAChB,EAAS,KAAK,YAAY,EAC7B,MAAM,IAAI,UACR,sDAAsD,EAG1D,KAAKqE,GAAuB,EAa9B,GAVA,KAAK,WAAa,CAAC,CAACf,EACpB,KAAK,mBAAqB,CAAC,CAACU,EAC5B,KAAK,eAAiB,CAAC,CAACZ,EACxB,KAAK,eAAiB,CAAC,CAACC,EACxB,KAAK,cACHrD,EAASkD,CAAa,GAAKA,IAAkB,EACzCA,EACA,EACN,KAAK,aAAe,CAAC,CAACC,EACtB,KAAK,IAAMF,GAAO,EACd,KAAK,IAAK,CACZ,GAAI,CAACjD,EAAS,KAAK,GAAG,EACpB,MAAM,IAAI,UACR,6CAA6C,EAGjD,KAAKsE,GAAsB,EAI7B,GAAI,KAAKvD,KAAS,GAAK,KAAK,MAAQ,GAAK,KAAKC,KAAa,EACzD,MAAM,IAAI,UACR,kDAAkD,EAGtD,GAAI,CAAC,KAAK,cAAgB,CAAC,KAAKD,IAAQ,CAAC,KAAKC,GAAU,CACtD,IAAM1B,EAAO,sBACTQ,EAAWR,CAAI,IACjBL,EAAO,IAAIK,CAAI,EAIfH,EAFE,gGAEe,wBAAyBG,EAAMwB,CAAQ,GAG9D,CAKA,gBAAgByD,EAAM,CACpB,OAAO,KAAKjD,GAAQ,IAAIiD,CAAG,EAAI,IAAW,CAC5C,CAEAD,IAAsB,CACpB,IAAME,EAAO,IAAIpE,EAAU,KAAKW,EAAI,EAC9B0D,EAAS,IAAIrE,EAAU,KAAKW,EAAI,EACtC,KAAKkB,GAAQuC,EACb,KAAKxC,GAAUyC,EAEf,KAAKC,GAAc,CAACjC,EAAOQ,EAAK0B,EAAQ3F,EAAK,IAAG,IAAM,CAGpD,GAFAyF,EAAOhC,CAAK,EAAIQ,IAAQ,EAAI0B,EAAQ,EACpCH,EAAK/B,CAAK,EAAIQ,EACVA,IAAQ,GAAK,KAAK,aAAc,CAClC,IAAM2B,EAAI,WAAW,IAAK,CACpB,KAAK5B,GAASP,CAAK,GACrB,KAAK,OAAO,KAAKlB,GAASkB,CAAK,CAAM,CAEzC,EAAGQ,EAAM,CAAC,EAGN2B,EAAE,OACJA,EAAE,MAAK,EAIb,EAEA,KAAKC,GAAiBpC,GAAQ,CAC5BgC,EAAOhC,CAAK,EAAI+B,EAAK/B,CAAK,IAAM,EAAIzD,EAAK,IAAG,EAAK,CACnD,EAEA,KAAK8F,GAAa,CAACC,EAAQtC,IAAS,CAClC,GAAI+B,EAAK/B,CAAK,EAAG,CACf,IAAMQ,EAAMuB,EAAK/B,CAAK,EAChBkC,EAAQF,EAAOhC,CAAK,EAC1BsC,EAAO,IAAM9B,EACb8B,EAAO,MAAQJ,EACfI,EAAO,IAAMC,GAAaC,EAAM,EAChC,IAAMC,EAAMH,EAAO,IAAMJ,EACzBI,EAAO,aAAe9B,EAAMiC,EAEhC,EAIA,IAAIF,EAAY,EACVC,EAAS,IAAK,CAClB,IAAM,EAAIjG,EAAK,IAAG,EAClB,GAAI,KAAK,cAAgB,EAAG,CAC1BgG,EAAY,EACZ,IAAMJ,EAAI,WACR,IAAOI,EAAY,EACnB,KAAK,aAAa,EAIhBJ,EAAE,OACJA,EAAE,MAAK,EAIX,OAAO,CACT,EAEA,KAAK,gBAAkBL,GAAM,CAC3B,IAAM9B,EAAQ,KAAKnB,GAAQ,IAAIiD,CAAG,EAClC,GAAI9B,IAAU,OACZ,MAAO,GAET,IAAMQ,EAAMuB,EAAK/B,CAAK,EAChBkC,EAAQF,EAAOhC,CAAK,EAC1B,GAAIQ,IAAQ,GAAK0B,IAAU,EACzB,MAAO,KAET,IAAMO,GAAOF,GAAaC,EAAM,GAAMN,EACtC,OAAO1B,EAAMiC,CACf,EAEA,KAAKlC,GAAWP,GAEZ+B,EAAK/B,CAAK,IAAM,GAChBgC,EAAOhC,CAAK,IAAM,IACjBuC,GAAaC,EAAM,GAAMR,EAAOhC,CAAK,EAAI+B,EAAK/B,CAAK,CAG1D,CAGAoC,GAAyC,IAAK,CAAE,EAChDC,GACE,IAAK,CAAE,EACTJ,GAMY,IAAK,CAAE,EAGnB1B,GAAsC,IAAM,GAE5CqB,IAAuB,CACrB,IAAMc,EAAQ,IAAI/E,EAAU,KAAKW,EAAI,EACrC,KAAKM,GAAkB,EACvB,KAAKU,GAASoD,EACd,KAAKC,GAAkB3C,GAAQ,CAC7B,KAAKpB,IAAmB8D,EAAM1C,CAAK,EACnC0C,EAAM1C,CAAK,EAAI,CACjB,EACA,KAAK4C,GAAe,CAAC7C,EAAG8C,EAAGjF,EAAMwD,IAAmB,CAGlD,GAAI,KAAKtB,GAAmB+C,CAAC,EAC3B,MAAO,GAET,GAAI,CAACtF,EAASK,CAAI,EAChB,GAAIwD,EAAiB,CACnB,GAAI,OAAOA,GAAoB,WAC7B,MAAM,IAAI,UAAU,oCAAoC,EAG1D,GADAxD,EAAOwD,EAAgByB,EAAG9C,CAAC,EACvB,CAACxC,EAASK,CAAI,EAChB,MAAM,IAAI,UACR,0DAA0D,MAI9D,OAAM,IAAI,UACR,2HAEwB,EAI9B,OAAOA,CACT,EACA,KAAKkF,GAAe,CAClB9C,EACApC,EACA0E,IACE,CAEF,GADAI,EAAM1C,CAAK,EAAIpC,EACX,KAAKW,GAAU,CACjB,IAAM2C,EAAU,KAAK3C,GAAWmE,EAAM1C,CAAK,EAC3C,KAAO,KAAKpB,GAAkBsC,GA
C5B,KAAK6B,GAAO,EAAI,EAGpB,KAAKnE,IAAmB8D,EAAM1C,CAAK,EAC/BsC,IACFA,EAAO,UAAY1E,EACnB0E,EAAO,oBAAsB,KAAK1D,GAEtC,CACF,CAEA+D,GAA0CK,GAAK,CAAE,EACjDF,GAIY,CAACE,EAAIC,EAAIC,IAAO,CAAE,EAC9BN,GAKqB,CACnBO,EACAC,EACAxF,EACAwD,IACE,CACF,GAAIxD,GAAQwD,EACV,MAAM,IAAI,UACR,kEAAkE,EAGtE,MAAO,EACT,EAEA,CAACf,GAAS,CAAE,WAAAQ,EAAa,KAAK,UAAU,EAAK,CAAA,EAAE,CAC7C,GAAI,KAAKlC,GACP,QAAS0E,EAAI,KAAKlE,GACZ,GAAC,KAAKmE,GAAcD,CAAC,KAGrBxC,GAAc,CAAC,KAAKN,GAAS8C,CAAC,KAChC,MAAMA,GAEJA,IAAM,KAAKnE,MAGbmE,EAAI,KAAKpE,GAAMoE,CAAC,CAIxB,CAEA,CAAC/C,GAAU,CAAE,WAAAO,EAAa,KAAK,UAAU,EAAK,CAAA,EAAE,CAC9C,GAAI,KAAKlC,GACP,QAAS0E,EAAI,KAAKnE,GACZ,GAAC,KAAKoE,GAAcD,CAAC,KAGrBxC,GAAc,CAAC,KAAKN,GAAS8C,CAAC,KAChC,MAAMA,GAEJA,IAAM,KAAKlE,MAGbkE,EAAI,KAAKrE,GAAMqE,CAAC,CAIxB,CAEAC,GAActD,EAAY,CACxB,OACEA,IAAU,QACV,KAAKnB,GAAQ,IAAI,KAAKC,GAASkB,CAAK,CAAM,IAAMA,CAEpD,CAMA,CAAC,SAAO,CACN,QAAWqD,KAAK,KAAKhD,GAAQ,EAEzB,KAAKtB,GAASsE,CAAC,IAAM,QACrB,KAAKvE,GAASuE,CAAC,IAAM,QACrB,CAAC,KAAKvD,GAAmB,KAAKf,GAASsE,CAAC,CAAC,IAEzC,KAAM,CAAC,KAAKvE,GAASuE,CAAC,EAAG,KAAKtE,GAASsE,CAAC,CAAC,EAG/C,CAQA,CAAC,UAAQ,CACP,QAAWA,KAAK,KAAK/C,GAAS,EAE1B,KAAKvB,GAASsE,CAAC,IAAM,QACrB,KAAKvE,GAASuE,CAAC,IAAM,QACrB,CAAC,KAAKvD,GAAmB,KAAKf,GAASsE,CAAC,CAAC,IAEzC,KAAM,CAAC,KAAKvE,GAASuE,CAAC,EAAG,KAAKtE,GAASsE,CAAC,CAAC,EAG/C,CAMA,CAAC,MAAI,CACH,QAAWA,KAAK,KAAKhD,GAAQ,EAAI,CAC/B,IAAMN,EAAI,KAAKjB,GAASuE,CAAC,EAEvBtD,IAAM,QACN,CAAC,KAAKD,GAAmB,KAAKf,GAASsE,CAAC,CAAC,IAEzC,MAAMtD,GAGZ,CAQA,CAAC,OAAK,CACJ,QAAWsD,KAAK,KAAK/C,GAAS,EAAI,CAChC,IAAMP,EAAI,KAAKjB,GAASuE,CAAC,EAEvBtD,IAAM,QACN,CAAC,KAAKD,GAAmB,KAAKf,GAASsE,CAAC,CAAC,IAEzC,MAAMtD,GAGZ,CAMA,CAAC,QAAM,CACL,QAAWsD,KAAK,KAAKhD,GAAQ,EACjB,KAAKtB,GAASsE,CAAC,IAEjB,QACN,CAAC,KAAKvD,GAAmB,KAAKf,GAASsE,CAAC,CAAC,IAEzC,MAAM,KAAKtE,GAASsE,CAAC,EAG3B,CAQA,CAAC,SAAO,CACN,QAAWA,KAAK,KAAK/C,GAAS,EAClB,KAAKvB,GAASsE,CAAC,IAEjB,QACN,CAAC,KAAKvD,GAAmB,KAAKf,GAASsE,CAAC,CAAC,IAEzC,MAAM,KAAKtE,GAASsE,CAAC,EAG3B,CAMA,CAAC,OAAO,QAAQ,GAAC,CACf,OAAO,KAAK,QAAO,CACrB,CAMA,KACEvG,EACAyG,EAA4C,CAAA,EAAE,CAE9C,QAAW,KAAK,KAAKlD,GAAQ,EAAI,CAC/B,IAAMwC,EAAI,KAAK9D,GAAS,CAAC,EACnByE,EAAQ,KAAK1D,GAAmB+C,CAAC,EACnCA,EAAE,qBACFA,EACJ,GAAIW,IAAU,QACV1G,EAAG0G,EAAO,KAAK1E,GAAS,CAAC,EAAQ,IAAI,EACvC,OAAO,KAAK,IAAI,KAAKA,GAAS,CAAC,EAAQyE,CAAU,EAGvD,CAQA,QACEzG,EACA2G,EAAa,KAAI,CAEjB,QAAW,KAAK,KAAKpD,GAAQ,EAAI,CAC/B,IAAMwC,EAAI,KAAK9D,GAAS,CAAC,EACnByE,EAAQ,KAAK1D,GAAmB+C,CAAC,EACnCA,EAAE,qBACFA,EACAW,IAAU,QACd1G,EAAG,KAAK2G,EAAOD,EAAO,KAAK1E,GAAS,CAAC,EAAQ,IAAI,EAErD,CAMA,SACEhC,EACA2G,EAAa,KAAI,CAEjB,QAAW,KAAK,KAAKnD,GAAS,EAAI,CAChC,IAAMuC,EAAI,KAAK9D,GAAS,CAAC,EACnByE,EAAQ,KAAK1D,GAAmB+C,CAAC,EACnCA,EAAE,qBACFA,EACAW,IAAU,QACd1G,EAAG,KAAK2G,EAAOD,EAAO,KAAK1E,GAAS,CAAC,EAAQ,IAAI,EAErD,CAMA,YAAU,CACR,IAAI4E,EAAU,GACd,QAAWL,KAAK,KAAK/C,GAAU,CAAE,WAAY,EAAI,CAAE,EAC7C,KAAKC,GAAS8C,CAAC,IACjB,KAAK,OAAO,KAAKvE,GAASuE,CAAC,CAAM,EACjCK,EAAU,IAGd,OAAOA,CACT,CAMA,MAAI,CACF,IAAMC,EAAgC,CAAA,EACtC,QAAWN,KAAK,KAAKhD,GAAS,CAAE,WAAY,EAAI,CAAE,EAAG,CACnD,IAAMyB,EAAM,KAAKhD,GAASuE,CAAC,EACrBR,EAAI,KAAK9D,GAASsE,CAAC,EACnBG,EAAuB,KAAK1D,GAAmB+C,CAAC,EAClDA,EAAE,qBACFA,EACJ,GAAIW,IAAU,QAAa1B,IAAQ,OAAW,SAC9C,IAAM8B,EAA2B,CAAE,MAAAJ,CAAK,EACxC,GAAI,KAAKhE,IAAS,KAAKD,GAAS,CAC9BqE,EAAM,IAAM,KAAKpE,GAAM6D,CAAC,EAGxB,IAAMZ,EAAMlG,EAAK,IAAG,EAAK,KAAKgD,GAAQ8D,CAAC,EACvCO,EAAM,MAAQ,KAAK,MAAM,KAAK,IAAG,EAAKnB,CAAG,EAEvC,KAAKnD,KACPsE,EAAM,KAAO,KAAKtE,GAAO+D,CAAC,GAE5BM,EAAI,QAAQ,CAAC7B,EAAK8B,CAAK,CAAC,EAE1B,OAAOD,CACT,CAOA,KAAKA,EAA6B,CAChC,KAAK,MAAK,EACV,OAAW,CAAC7B,EAAK8B,CAAK,IAAKD,EAAK,CAC9B,GAAIC,EAAM,MAAO,CAOf,IAAMnB,EAAM,KAAK,IAAG,EAAKmB,EAAM,MAC/BA,EAAM,MA
AQrH,EAAK,IAAG,EAAKkG,EAE7B,KAAK,IAAIX,EAAK8B,EAAM,MAAOA,CAAK,EAEpC,CAQA,IACE7D,EACA8C,EACAgB,EAA4C,CAAA,EAAE,CAE9C,GAAIhB,IAAM,OACR,YAAK,OAAO9C,CAAC,EACN,KAET,GAAM,CACJ,IAAAS,EAAM,KAAK,IACX,MAAA0B,EACA,eAAAlB,EAAiB,KAAK,eACtB,gBAAAI,EAAkB,KAAK,gBACvB,OAAAkB,CAAM,EACJuB,EACA,CAAE,YAAA5C,EAAc,KAAK,WAAW,EAAK4C,EAEnCjG,EAAO,KAAKgF,GAChB7C,EACA8C,EACAgB,EAAW,MAAQ,EACnBzC,CAAe,EAIjB,GAAI,KAAK,cAAgBxD,EAAO,KAAK,aACnC,OAAI0E,IACFA,EAAO,IAAM,OACbA,EAAO,qBAAuB,IAGhC,KAAK,OAAOvC,CAAC,EACN,KAET,IAAIC,EAAQ,KAAKrB,KAAU,EAAI,OAAY,KAAKE,GAAQ,IAAIkB,CAAC,EAC7D,GAAIC,IAAU,OAEZA,EACE,KAAKrB,KAAU,EACX,KAAKQ,GACL,KAAKC,GAAM,SAAW,EACtB,KAAKA,GAAM,IAAG,EACd,KAAKT,KAAU,KAAKL,GACpB,KAAKyE,GAAO,EAAK,EACjB,KAAKpE,GAEX,KAAKG,GAASkB,CAAK,EAAID,EACvB,KAAKhB,GAASiB,CAAK,EAAI6C,EACvB,KAAKhE,GAAQ,IAAIkB,EAAGC,CAAK,EACzB,KAAKhB,GAAM,KAAKG,EAAK,EAAIa,EACzB,KAAKf,GAAMe,CAAK,EAAI,KAAKb,GACzB,KAAKA,GAAQa,EACb,KAAKrB,KACL,KAAKmE,GAAa9C,EAAOpC,EAAM0E,CAAM,EACjCA,IAAQA,EAAO,IAAM,OACzBrB,EAAc,OACT,CAEL,KAAKb,GAAYJ,CAAK,EACtB,IAAM8D,EAAS,KAAK/E,GAASiB,CAAK,EAClC,GAAI6C,IAAMiB,GAcR,GAbI,KAAKpE,IAAmB,KAAKI,GAAmBgE,CAAM,EACxDA,EAAO,kBAAkB,MAAM,IAAI,MAAM,UAAU,CAAC,EAC1C9C,IACN,KAAKvB,IACP,KAAKjB,KAAWsF,EAAa/D,EAAG,KAAK,EAEnC,KAAKJ,IACP,KAAKN,IAAW,KAAK,CAACyE,EAAa/D,EAAG,KAAK,CAAC,GAGhD,KAAK4C,GAAgB3C,CAAK,EAC1B,KAAK8C,GAAa9C,EAAOpC,EAAM0E,CAAM,EACrC,KAAKvD,GAASiB,CAAK,EAAI6C,EACnBP,EAAQ,CACVA,EAAO,IAAM,UACb,IAAMyB,EACJD,GAAU,KAAKhE,GAAmBgE,CAAM,EACpCA,EAAO,qBACPA,EACFC,IAAa,SAAWzB,EAAO,SAAWyB,SAEvCzB,IACTA,EAAO,IAAM,UAYjB,GATI9B,IAAQ,GAAK,CAAC,KAAKhB,IACrB,KAAKqC,GAAsB,EAEzB,KAAKrC,KACFyB,GACH,KAAKgB,GAAYjC,EAAOQ,EAAK0B,CAAK,EAEhCI,GAAQ,KAAKD,GAAWC,EAAQtC,CAAK,GAEvC,CAACgB,GAAkB,KAAKrB,IAAoB,KAAKN,GAAW,CAC9D,IAAM2E,EAAK,KAAK3E,GACZ4E,EACJ,KAAQA,EAAOD,GAAI,MAAK,GACtB,KAAKvF,KAAgB,GAAGwF,CAAI,EAGhC,OAAO,IACT,CAMA,KAAG,CACD,GAAI,CACF,KAAO,KAAKtF,IAAO,CACjB,IAAMuF,EAAM,KAAKnF,GAAS,KAAKG,EAAK,EAEpC,GADA,KAAK6D,GAAO,EAAI,EACZ,KAAKjD,GAAmBoE,CAAG,GAC7B,GAAIA,EAAI,qBACN,OAAOA,EAAI,6BAEJA,IAAQ,OACjB,OAAOA,WAIX,GAAI,KAAKvE,IAAoB,KAAKN,GAAW,CAC3C,IAAM2E,EAAK,KAAK3E,GACZ4E,EACJ,KAAQA,EAAOD,GAAI,MAAK,GACtB,KAAKvF,KAAgB,GAAGwF,CAAI,GAIpC,CAEAlB,GAAOoB,EAAa,CAClB,IAAMC,EAAO,KAAKlF,GACZa,EAAI,KAAKjB,GAASsF,CAAI,EACtBvB,EAAI,KAAK9D,GAASqF,CAAI,EAC5B,OAAI,KAAK1E,IAAmB,KAAKI,GAAmB+C,CAAC,EACnDA,EAAE,kBAAkB,MAAM,IAAI,MAAM,SAAS,CAAC,GACrC,KAAKpD,IAAe,KAAKE,MAC9B,KAAKF,IACP,KAAKjB,KAAWqE,EAAG9C,EAAG,OAAO,EAE3B,KAAKJ,IACP,KAAKN,IAAW,KAAK,CAACwD,EAAG9C,EAAG,OAAO,CAAC,GAGxC,KAAK4C,GAAgByB,CAAI,EAErBD,IACF,KAAKrF,GAASsF,CAAI,EAAI,OACtB,KAAKrF,GAASqF,CAAI,EAAI,OACtB,KAAKhF,GAAM,KAAKgF,CAAI,GAElB,KAAKzF,KAAU,GACjB,KAAKO,GAAQ,KAAKC,GAAQ,EAC1B,KAAKC,GAAM,OAAS,GAEpB,KAAKF,GAAQ,KAAKF,GAAMoF,CAAI,EAE9B,KAAKvF,GAAQ,OAAOkB,CAAC,EACrB,KAAKpB,KACEyF,CACT,CAUA,IAAIrE,EAAMsE,EAA4C,CAAA,EAAE,CACtD,GAAM,CAAE,eAAAzD,EAAiB,KAAK,eAAgB,OAAA0B,CAAM,EAClD+B,EACIrE,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAChC,GAAIC,IAAU,OAAW,CACvB,IAAM6C,EAAI,KAAK9D,GAASiB,CAAK,EAC7B,GACE,KAAKF,GAAmB+C,CAAC,GACzBA,EAAE,uBAAyB,OAE3B,MAAO,GAET,GAAK,KAAKtC,GAASP,CAAK,EASbsC,IACTA,EAAO,IAAM,QACb,KAAKD,GAAWC,EAAQtC,CAAK,OAV7B,QAAIY,GACF,KAAKwB,GAAepC,CAAK,EAEvBsC,IACFA,EAAO,IAAM,MACb,KAAKD,GAAWC,EAAQtC,CAAK,GAExB,QAKAsC,IACTA,EAAO,IAAM,QAEf,MAAO,EACT,CASA,KAAKvC,EAAMuE,EAA8C,CAAA,EAAE,CACzD,GAAM,CAAE,WAAAzD,EAAa,KAAK,UAAU,EAAKyD,EACnCtE,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAChC,GACEC,IAAU,SACTa,GAAc,CAAC,KAAKN,GAASP,CAAK,GACnC,CACA,IAAM6C,EAAI,KAAK9D,GAASiB,CAAK,EAE7B,OAAO,KAAKF,GAAmB+C,CAAC,EAAIA,EAAE,qBAAuBA,EAEjE,CAEA1C,GACEJ,EACAC,EACAC,EACAC,EAAY,CAEZ,IAAM2C,EAAI7C,IAAU,OAAY,OAAY,KAAKjB,GAASiB,CAAK,EAC/D,GAAI,KAAKF,
GAAmB+C,CAAC,EAC3B,OAAOA,EAGT,IAAM0B,EAAK,IAAIxH,EACT,CAAE,OAAAyH,CAAM,EAAKvE,EAEnBuE,GAAQ,iBAAiB,QAAS,IAAMD,EAAG,MAAMC,EAAO,MAAM,EAAG,CAC/D,OAAQD,EAAG,OACZ,EAED,IAAME,EAAY,CAChB,OAAQF,EAAG,OACX,QAAAtE,EACA,QAAAC,GAGIwE,EAAK,CACT7B,EACA8B,EAAc,KACU,CACxB,GAAM,CAAE,QAAAC,CAAO,EAAKL,EAAG,OACjBM,EAAc5E,EAAQ,kBAAoB4C,IAAM,OAUtD,GATI5C,EAAQ,SACN2E,GAAW,CAACD,GACd1E,EAAQ,OAAO,aAAe,GAC9BA,EAAQ,OAAO,WAAasE,EAAG,OAAO,OAClCM,IAAa5E,EAAQ,OAAO,kBAAoB,KAEpDA,EAAQ,OAAO,cAAgB,IAG/B2E,GAAW,CAACC,GAAe,CAACF,EAC9B,OAAOG,EAAUP,EAAG,OAAO,MAAM,EAGnC,IAAMQ,EAAK,EACX,OAAI,KAAKhG,GAASiB,CAAc,IAAM,IAChC6C,IAAM,OACJkC,EAAG,qBACL,KAAKhG,GAASiB,CAAc,EAAI+E,EAAG,qBAEnC,KAAK,OAAOhF,CAAC,GAGXE,EAAQ,SAAQA,EAAQ,OAAO,aAAe,IAClD,KAAK,IAAIF,EAAG8C,EAAG4B,EAAU,OAAO,IAG7B5B,CACT,EAEMmC,EAAMC,IACNhF,EAAQ,SACVA,EAAQ,OAAO,cAAgB,GAC/BA,EAAQ,OAAO,WAAagF,GAEvBH,EAAUG,CAAE,GAGfH,EAAaG,GAA0B,CAC3C,GAAM,CAAE,QAAAL,CAAO,EAAKL,EAAG,OACjBW,EACJN,GAAW3E,EAAQ,uBACfY,EACJqE,GAAqBjF,EAAQ,2BACzBkF,EAAWtE,GAAcZ,EAAQ,yBACjC8E,EAAK,EAeX,GAdI,KAAKhG,GAASiB,CAAc,IAAM,IAGxB,CAACmF,GAAYJ,EAAG,uBAAyB,OAEnD,KAAK,OAAOhF,CAAC,EACHmF,IAKV,KAAKnG,GAASiB,CAAc,EAAI+E,EAAG,uBAGnClE,EACF,OAAIZ,EAAQ,QAAU8E,EAAG,uBAAyB,SAChD9E,EAAQ,OAAO,cAAgB,IAE1B8E,EAAG,qBACL,GAAIA,EAAG,aAAeA,EAC3B,MAAME,CAEV,EAEMG,EAAQ,CACZC,EACAC,IACE,CACF,IAAMC,EAAM,KAAK7G,KAAeqB,EAAG8C,EAAG4B,CAAS,EAC3Cc,GAAOA,aAAe,SACxBA,EAAI,KAAK1C,GAAKwC,EAAIxC,CAAC,EAAGyC,CAAG,EAK3Bf,EAAG,OAAO,iBAAiB,QAAS,IAAK,EAErC,CAACtE,EAAQ,kBACTA,EAAQ,0BAERoF,EAAG,EAECpF,EAAQ,yBACVoF,EAAMxC,GAAK6B,EAAG7B,EAAG,EAAI,GAG3B,CAAC,CACH,EAEI5C,EAAQ,SAAQA,EAAQ,OAAO,gBAAkB,IACrD,IAAM,EAAI,IAAI,QAAQmF,CAAK,EAAE,KAAKV,EAAIM,CAAE,EAClCD,EAAK,OAAO,OAAO,EAAG,CAC1B,kBAAmBR,EACnB,qBAAsB1B,EACtB,WAAY,OACb,EAED,OAAI7C,IAAU,QAEZ,KAAK,IAAID,EAAGgF,EAAI,CAAE,GAAGN,EAAU,QAAS,OAAQ,MAAS,CAAE,EAC3DzE,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,GAE1B,KAAKhB,GAASiB,CAAK,EAAI+E,EAElBA,CACT,CAEAjF,GAAmBD,EAAM,CACvB,GAAI,CAAC,KAAKH,GAAiB,MAAO,GAClC,IAAM8F,EAAI3F,EACV,MACE,CAAC,CAAC2F,GACFA,aAAa,SACbA,EAAE,eAAe,sBAAsB,GACvCA,EAAE,6BAA6BzI,CAEnC,CAwCA,MAAM,MACJgD,EACA0F,EAAgD,CAAA,EAAE,CAElD,GAAM,CAEJ,WAAA5E,EAAa,KAAK,WAClB,eAAAF,EAAiB,KAAK,eACtB,mBAAAY,EAAqB,KAAK,mBAE1B,IAAAf,EAAM,KAAK,IACX,eAAAQ,EAAiB,KAAK,eACtB,KAAApD,EAAO,EACP,gBAAAwD,EAAkB,KAAK,gBACvB,YAAAH,EAAc,KAAK,YAEnB,yBAAAK,EAA2B,KAAK,yBAChC,2BAAAE,EAA6B,KAAK,2BAClC,iBAAAE,EAAmB,KAAK,iBACxB,uBAAAD,EAAyB,KAAK,uBAC9B,QAAAvB,EACA,aAAAwF,EAAe,GACf,OAAApD,EACA,OAAAkC,CAAM,EACJiB,EAEJ,GAAI,CAAC,KAAK/F,GACR,OAAI4C,IAAQA,EAAO,MAAQ,OACpB,KAAK,IAAIvC,EAAG,CACjB,WAAAc,EACA,eAAAF,EACA,mBAAAY,EACA,OAAAe,EACD,EAGH,IAAMrC,EAAU,CACd,WAAAY,EACA,eAAAF,EACA,mBAAAY,EACA,IAAAf,EACA,eAAAQ,EACA,KAAApD,EACA,gBAAAwD,EACA,YAAAH,EACA,yBAAAK,EACA,2BAAAE,EACA,uBAAAC,EACA,iBAAAC,EACA,OAAAY,EACA,OAAAkC,GAGExE,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAC9B,GAAIC,IAAU,OAAW,CACnBsC,IAAQA,EAAO,MAAQ,QAC3B,IAAMzC,EAAI,KAAKM,GAAiBJ,EAAGC,EAAOC,EAASC,CAAO,EAC1D,OAAQL,EAAE,WAAaA,MAClB,CAEL,IAAMgD,EAAI,KAAK9D,GAASiB,CAAK,EAC7B,GAAI,KAAKF,GAAmB+C,CAAC,EAAG,CAC9B,IAAM8C,EACJ9E,GAAcgC,EAAE,uBAAyB,OAC3C,OAAIP,IACFA,EAAO,MAAQ,WACXqD,IAAOrD,EAAO,cAAgB,KAE7BqD,EAAQ9C,EAAE,qBAAwBA,EAAE,WAAaA,EAK1D,IAAM+C,EAAU,KAAKrF,GAASP,CAAK,EACnC,GAAI,CAAC0F,GAAgB,CAACE,EACpB,OAAItD,IAAQA,EAAO,MAAQ,OAC3B,KAAKlC,GAAYJ,CAAK,EAClBW,GACF,KAAKyB,GAAepC,CAAK,EAEvBsC,GAAQ,KAAKD,GAAWC,EAAQtC,CAAK,EAClC6C,EAKT,IAAMhD,EAAI,KAAKM,GAAiBJ,EAAGC,EAAOC,EAASC,CAAO,EAEpD2F,EADWhG,EAAE,uBAAyB,QACfgB,EAC7B,OAAIyB,IACFA,EAAO,MAAQsD,EAAU,QAAU,UAC/BC,GAAYD,IAAStD,EAAO,cAAgB,KAE3CuD,EAAWhG,EAAE,qBAAwBA,EAAE,WAAaA,EAE/D,CAQA,IAAIE,EAAMwD,EAA4C,CAAA,EAAE,CACtD,GAAM,CACJ,WAAA1C,EAAa,KAAK,WAClB,eAAAF
,EAAiB,KAAK,eACtB,mBAAAY,EAAqB,KAAK,mBAC1B,OAAAe,CAAM,EACJiB,EACEvD,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAChC,GAAIC,IAAU,OAAW,CACvB,IAAMwD,EAAQ,KAAKzE,GAASiB,CAAK,EAC3B8F,EAAW,KAAKhG,GAAmB0D,CAAK,EAE9C,OADIlB,GAAQ,KAAKD,GAAWC,EAAQtC,CAAK,EACrC,KAAKO,GAASP,CAAK,GACjBsC,IAAQA,EAAO,IAAM,SAEpBwD,GAQDxD,GACAzB,GACA2C,EAAM,uBAAyB,SAE/BlB,EAAO,cAAgB,IAElBzB,EAAa2C,EAAM,qBAAuB,SAb5CjC,GACH,KAAK,OAAOxB,CAAC,EAEXuC,GAAUzB,IAAYyB,EAAO,cAAgB,IAC1CzB,EAAa2C,EAAQ,UAY1BlB,IAAQA,EAAO,IAAM,OAMrBwD,EACKtC,EAAM,sBAEf,KAAKpD,GAAYJ,CAAK,EAClBW,GACF,KAAKyB,GAAepC,CAAK,EAEpBwD,SAEAlB,IACTA,EAAO,IAAM,OAEjB,CAEAyD,GAASlG,EAAUrC,EAAQ,CACzB,KAAKyB,GAAMzB,CAAC,EAAIqC,EAChB,KAAKb,GAAMa,CAAC,EAAIrC,CAClB,CAEA4C,GAAYJ,EAAY,CASlBA,IAAU,KAAKb,KACba,IAAU,KAAKd,GACjB,KAAKA,GAAQ,KAAKF,GAAMgB,CAAK,EAE7B,KAAK+F,GACH,KAAK9G,GAAMe,CAAK,EAChB,KAAKhB,GAAMgB,CAAK,CAAU,EAG9B,KAAK+F,GAAS,KAAK5G,GAAOa,CAAK,EAC/B,KAAKb,GAAQa,EAEjB,CAMA,OAAOD,EAAI,CACT,IAAI2D,EAAU,GACd,GAAI,KAAK/E,KAAU,EAAG,CACpB,IAAMqB,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAChC,GAAIC,IAAU,OAEZ,GADA0D,EAAU,GACN,KAAK/E,KAAU,EACjB,KAAK,MAAK,MACL,CACL,KAAKgE,GAAgB3C,CAAK,EAC1B,IAAM6C,EAAI,KAAK9D,GAASiB,CAAK,EACzB,KAAKF,GAAmB+C,CAAC,EAC3BA,EAAE,kBAAkB,MAAM,IAAI,MAAM,SAAS,CAAC,GACrC,KAAKpD,IAAe,KAAKE,MAC9B,KAAKF,IACP,KAAKjB,KAAWqE,EAAQ9C,EAAG,QAAQ,EAEjC,KAAKJ,IACP,KAAKN,IAAW,KAAK,CAACwD,EAAQ9C,EAAG,QAAQ,CAAC,GAG9C,KAAKlB,GAAQ,OAAOkB,CAAC,EACrB,KAAKjB,GAASkB,CAAK,EAAI,OACvB,KAAKjB,GAASiB,CAAK,EAAI,OACnBA,IAAU,KAAKb,GACjB,KAAKA,GAAQ,KAAKF,GAAMe,CAAK,EACpBA,IAAU,KAAKd,GACxB,KAAKA,GAAQ,KAAKF,GAAMgB,CAAK,GAE7B,KAAKhB,GAAM,KAAKC,GAAMe,CAAK,CAAC,EAAI,KAAKhB,GAAMgB,CAAK,EAChD,KAAKf,GAAM,KAAKD,GAAMgB,CAAK,CAAC,EAAI,KAAKf,GAAMe,CAAK,GAElD,KAAKrB,KACL,KAAKS,GAAM,KAAKY,CAAK,GAI3B,GAAI,KAAKL,IAAoB,KAAKN,IAAW,OAAQ,CACnD,IAAM2E,EAAK,KAAK3E,GACZ4E,EACJ,KAAQA,EAAOD,GAAI,MAAK,GACtB,KAAKvF,KAAgB,GAAGwF,CAAI,EAGhC,OAAOP,CACT,CAKA,OAAK,CACH,QAAW1D,KAAS,KAAKM,GAAU,CAAE,WAAY,EAAI,CAAE,EAAG,CACxD,IAAMuC,EAAI,KAAK9D,GAASiB,CAAK,EAC7B,GAAI,KAAKF,GAAmB+C,CAAC,EAC3BA,EAAE,kBAAkB,MAAM,IAAI,MAAM,SAAS,CAAC,MACzC,CACL,IAAM9C,EAAI,KAAKjB,GAASkB,CAAK,EACzB,KAAKP,IACP,KAAKjB,KAAWqE,EAAQ9C,EAAQ,QAAQ,EAEtC,KAAKJ,IACP,KAAKN,IAAW,KAAK,CAACwD,EAAQ9C,EAAQ,QAAQ,CAAC,GAoBrD,GAfA,KAAKlB,GAAQ,MAAK,EAClB,KAAKE,GAAS,KAAK,MAAS,EAC5B,KAAKD,GAAS,KAAK,MAAS,EACxB,KAAKU,IAAS,KAAKD,KACrB,KAAKC,GAAM,KAAK,CAAC,EACjB,KAAKD,GAAQ,KAAK,CAAC,GAEjB,KAAKD,IACP,KAAKA,GAAO,KAAK,CAAC,EAEpB,KAAKJ,GAAQ,EACb,KAAKC,GAAQ,EACb,KAAKC,GAAM,OAAS,EACpB,KAAKR,GAAkB,EACvB,KAAKD,GAAQ,EACT,KAAKgB,IAAoB,KAAKN,GAAW,CAC3C,IAAM2E,EAAK,KAAK3E,GACZ4E,EACJ,KAAQA,EAAOD,GAAI,MAAK,GACtB,KAAKvF,KAAgB,GAAGwF,CAAI,EAGlC", + "names": ["perf", "warned", "PROCESS", "emitWarning", "msg", "type", "code", "fn", "AC", "AS", "_", "warnACPolyfill", "reason", "printACPolyfillWarning", "shouldWarn", "TYPE", "isPosInt", "n", "getUintArray", "max", "ZeroArray", "size", "_constructing", "_Stack", "HeapCls", "__privateSet", "s", "__privateGet", "Stack", "__privateAdd", "LRUCache", "#max", "#maxSize", "#dispose", "#disposeAfter", "#fetchMethod", "#size", "#calculatedSize", "#keyMap", "#keyList", "#valList", "#next", "#prev", "#head", "#tail", "#free", "#disposed", "#sizes", "#starts", "#ttls", "#hasDispose", "#hasFetchMethod", "#hasDisposeAfter", "c", "p", "#isBackgroundFetch", "k", "index", "options", "context", "#backgroundFetch", "#moveToTail", "#indexes", "#rindexes", "#isStale", "ttl", "ttlResolution", "ttlAutopurge", "updateAgeOnGet", "updateAgeOnHas", "allowStale", "dispose", "disposeAfter", "noDisposeOnSet", "noUpdateTTL", "maxSize", "maxEntrySize", "sizeCalculation", "fetchMethod", 
"noDeleteOnFetchRejection", "noDeleteOnStaleGet", "allowStaleOnFetchRejection", "allowStaleOnFetchAbort", "ignoreFetchAbort", "UintArray", "#initializeSizeTracking", "#initializeTTLTracking", "key", "ttls", "starts", "#setItemTTL", "start", "t", "#updateItemAge", "#statusTTL", "status", "cachedNow", "getNow", "age", "sizes", "#removeItemSize", "#requireSize", "v", "#addItemSize", "#evict", "_i", "_s", "_st", "_k", "_v", "i", "#isValidIndex", "getOptions", "value", "thisp", "deleted", "arr", "entry", "setOptions", "oldVal", "oldValue", "dt", "task", "val", "free", "head", "hasOptions", "peekOptions", "ac", "signal", "fetchOpts", "cb", "updateCache", "aborted", "ignoreAbort", "fetchFail", "bf", "eb", "er", "allowStaleAborted", "noDelete", "pcall", "res", "rej", "fmp", "b", "fetchOptions", "forceRefresh", "stale", "isStale", "staleVal", "fetching", "#connect"] +} diff --git a/dist/node_modules/lru-cache/dist/mjs/package.json b/dist/node_modules/lru-cache/dist/mjs/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/dist/node_modules/lru-cache/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/dist/node_modules/lru-cache/package.json b/dist/node_modules/lru-cache/package.json new file mode 100644 index 0000000..56b673d --- /dev/null +++ b/dist/node_modules/lru-cache/package.json @@ -0,0 +1,108 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "9.1.2", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "sideEffects": false, + "scripts": { + "build": "npm run prepare", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", + "postprepare": "bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write .", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", + "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", + "prebenchmark": "npm run prepare", + "benchmark": "make -C benchmark", + "preprofile": "npm run prepare", + "profile": "make -C benchmark profile" + }, + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "exports": { + "./min": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.min.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.min.js" + } + }, + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + } + }, + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", + "@types/node": "^20.2.5", + "@types/tap": "^15.0.6", + "benchmark": "^2.1.4", + "c8": "^7.11.2", + "clock-mock": "^1.0.6", + "esbuild": "^0.17.11", + "eslint-config-prettier": "^8.5.0", + "marked": "^4.2.12", + "mkdirp": "^2.1.5", + "prettier": "^2.6.2", + "size-limit": "^7.0.8", + "tap": "^16.3.4", + "ts-node": "^10.9.1", + "tslib": "^2.4.0", + "typedoc": "^0.24.6", + "typescript": "^5.0.4" + }, + "license": "ISC", + "files": [ + "dist" + ], + "engines": { + "node": "14 || >=16.14" + }, + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + 
"jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--expose-gc", + "-r", + "ts-node/register" + ], + "ts": false + }, + "size-limit": [ + { + "path": "./dist/mjs/index.js" + } + ] +} diff --git a/dist/node_modules/ms/index.js b/dist/node_modules/ms/index.js new file mode 100644 index 0000000..ea734fb --- /dev/null +++ b/dist/node_modules/ms/index.js @@ -0,0 +1,162 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function (val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. + * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 
's' : ''); +} diff --git a/dist/node_modules/ms/license.md b/dist/node_modules/ms/license.md new file mode 100644 index 0000000..fa5d39b --- /dev/null +++ b/dist/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Vercel, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/dist/node_modules/ms/package.json b/dist/node_modules/ms/package.json new file mode 100644 index 0000000..4997189 --- /dev/null +++ b/dist/node_modules/ms/package.json @@ -0,0 +1,38 @@ +{ + "name": "ms", + "version": "2.1.3", + "description": "Tiny millisecond conversion utility", + "repository": "vercel/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "4.18.2", + "expect.js": "0.3.1", + "husky": "0.14.3", + "lint-staged": "5.0.0", + "mocha": "4.0.1", + "prettier": "2.0.5" + } +} diff --git a/dist/node_modules/ms/readme.md b/dist/node_modules/ms/readme.md new file mode 100644 index 0000000..0fc1abb --- /dev/null +++ b/dist/node_modules/ms/readme.md @@ -0,0 +1,59 @@ +# ms + +![CI](https://github.com/vercel/ms/workflows/CI/badge.svg) + +Use this package to easily convert various time formats to milliseconds. 
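The snippets in the Examples section below assume the module has already been loaded. A minimal sketch, assuming a standard CommonJS environment (the `./index` entry point is the one declared in the package.json above):

```js
// Load ms; the single exported function both parses strings and formats numbers
const ms = require('ms');

// String input is parsed to milliseconds; numeric input is formatted to a short string
console.log(ms('2 days')); // 172800000
console.log(ms(60000));    // "1m"
```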
+ +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +ms('-3 days') // -259200000 +ms('-1h') // -3600000 +ms('-200') // -200 +``` + +### Convert from Milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(-3 * 60000) // "-3m" +ms(ms('10 hours')) // "10h" +``` + +### Time Format Written-Out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(-3 * 60000, { long: true }) // "-3 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [Node.js](https://nodejs.org) and in the browser +- If a number is supplied to `ms`, a string with a unit is returned +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) +- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned + +## Related Packages + +- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. + +## Caught a Bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! + +As always, you can run the tests using: `npm test` diff --git a/dist/node_modules/node-fetch/LICENSE.md b/dist/node_modules/node-fetch/LICENSE.md new file mode 100644 index 0000000..660ffec --- /dev/null +++ b/dist/node_modules/node-fetch/LICENSE.md @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ diff --git a/dist/node_modules/node-fetch/README.md b/dist/node_modules/node-fetch/README.md new file mode 100644 index 0000000..55f09b7 --- /dev/null +++ b/dist/node_modules/node-fetch/README.md @@ -0,0 +1,634 @@ +node-fetch +========== + +[![npm version][npm-image]][npm-url] +[![build status][travis-image]][travis-url] +[![coverage status][codecov-image]][codecov-url] +[![install size][install-size-image]][install-size-url] +[![Discord][discord-image]][discord-url] + +A light-weight module that brings `window.fetch` to Node.js + +(We are looking for [v2 maintainers and collaborators](https://github.com/bitinn/node-fetch/issues/567)) + +[![Backers][opencollective-image]][opencollective-url] + + + +- [Motivation](#motivation) +- [Features](#features) +- [Difference from client-side fetch](#difference-from-client-side-fetch) +- [Installation](#installation) +- [Loading and configuring the module](#loading-and-configuring-the-module) +- [Common Usage](#common-usage) + - [Plain text or HTML](#plain-text-or-html) + - [JSON](#json) + - [Simple Post](#simple-post) + - [Post with JSON](#post-with-json) + - [Post with form parameters](#post-with-form-parameters) + - [Handling exceptions](#handling-exceptions) + - [Handling client and server errors](#handling-client-and-server-errors) +- [Advanced Usage](#advanced-usage) + - [Streams](#streams) + - [Buffer](#buffer) + - [Accessing Headers and other Meta data](#accessing-headers-and-other-meta-data) + - [Extract Set-Cookie Header](#extract-set-cookie-header) + - [Post data using a file stream](#post-data-using-a-file-stream) + - [Post with form-data (detect multipart)](#post-with-form-data-detect-multipart) + - [Request cancellation with AbortSignal](#request-cancellation-with-abortsignal) +- [API](#api) + - [fetch(url[, options])](#fetchurl-options) + - [Options](#options) + - [Class: Request](#class-request) + - [Class: Response](#class-response) + - [Class: Headers](#class-headers) + - [Interface: Body](#interface-body) + - [Class: FetchError](#class-fetcherror) +- [License](#license) +- [Acknowledgement](#acknowledgement) + + + +## Motivation + +Instead of implementing `XMLHttpRequest` in Node.js to run browser-specific [Fetch polyfill](https://github.com/github/fetch), why not go from native `http` to `fetch` API directly? Hence, `node-fetch`, minimal code for a `window.fetch` compatible API on Node.js runtime. + +See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorphic-fetch) or Leonardo Quixada's [cross-fetch](https://github.com/lquixada/cross-fetch) for isomorphic usage (exports `node-fetch` for server-side, `whatwg-fetch` for client-side). + +## Features + +- Stay consistent with `window.fetch` API. +- Make conscious trade-off when following [WHATWG fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, document known differences. +- Use native promise but allow substituting it with [insert your favorite promise library]. +- Use native Node streams for body on both request and response. +- Decode content encoding (gzip/deflate) properly and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically. +- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors](ERROR-HANDLING.md) for troubleshooting. + +## Difference from client-side fetch + +- See [Known Differences](LIMITS.md) for details. +- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue. 
+- Pull requests are welcomed too! + +## Installation + +Current stable release (`2.x`) + +```sh +$ npm install node-fetch +``` + +## Loading and configuring the module +We suggest you load the module via `require` until the stabilization of ES modules in node: +```js +const fetch = require('node-fetch'); +``` + +If you are using a Promise library other than native, set it through `fetch.Promise`: +```js +const Bluebird = require('bluebird'); + +fetch.Promise = Bluebird; +``` + +## Common Usage + +NOTE: The documentation below is up-to-date with `2.x` releases; see the [`1.x` readme](https://github.com/bitinn/node-fetch/blob/1.x/README.md), [changelog](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) and [2.x upgrade guide](UPGRADE-GUIDE.md) for the differences. + +#### Plain text or HTML +```js +fetch('https://github.com/') + .then(res => res.text()) + .then(body => console.log(body)); +``` + +#### JSON + +```js + +fetch('https://api.github.com/users/github') + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Simple Post +```js +fetch('https://httpbin.org/post', { method: 'POST', body: 'a=1' }) + .then(res => res.json()) // expecting a json response + .then(json => console.log(json)); +``` + +#### Post with JSON + +```js +const body = { a: 1 }; + +fetch('https://httpbin.org/post', { + method: 'post', + body: JSON.stringify(body), + headers: { 'Content-Type': 'application/json' }, + }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Post with form parameters +`URLSearchParams` is available in Node.js as of v7.5.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods. + +NOTE: The `Content-Type` header is only set automatically to `x-www-form-urlencoded` when an instance of `URLSearchParams` is given as such: + +```js +const { URLSearchParams } = require('url'); + +const params = new URLSearchParams(); +params.append('a', 1); + +fetch('https://httpbin.org/post', { method: 'POST', body: params }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Handling exceptions +NOTE: 3xx-5xx responses are *NOT* exceptions and should be handled in `then()`; see the next section for more information. + +Adding a catch to the fetch promise chain will catch *all* exceptions, such as errors originating from node core libraries, network errors and operational errors, which are instances of FetchError. See the [error handling document](ERROR-HANDLING.md) for more details. + +```js +fetch('https://domain.invalid/') + .catch(err => console.error(err)); +``` + +#### Handling client and server errors +It is common to create a helper function to check that the response contains no client (4xx) or server (5xx) error responses: + +```js +function checkStatus(res) { + if (res.ok) { // res.status >= 200 && res.status < 300 + return res; + } else { + throw MyCustomError(res.statusText); + } +} + +fetch('https://httpbin.org/status/400') + .then(checkStatus) + .then(res => console.log('will not get here...')) +``` + +## Advanced Usage + +#### Streams +The "Node.js way" is to use streams when possible: + +```js +fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') + .then(res => { + const dest = fs.createWriteStream('./octocat.png'); + res.body.pipe(dest); + }); +``` + +In Node.js 14 you can also use async iterators to read `body`; however, be careful to catch +errors -- the longer a response runs, the more likely it is to encounter an error. 
+ +```js +const fetch = require('node-fetch'); +const response = await fetch('https://httpbin.org/stream/3'); +try { + for await (const chunk of response.body) { + console.dir(JSON.parse(chunk.toString())); + } +} catch (err) { + console.error(err.stack); +} +``` + +In Node.js 12 you can also use async iterators to read `body`; however, async iterators with streams +did not mature until Node.js 14, so you need to do some extra work to ensure you handle errors +directly from the stream and wait on it response to fully close. + +```js +const fetch = require('node-fetch'); +const read = async body => { + let error; + body.on('error', err => { + error = err; + }); + for await (const chunk of body) { + console.dir(JSON.parse(chunk.toString())); + } + return new Promise((resolve, reject) => { + body.on('close', () => { + error ? reject(error) : resolve(); + }); + }); +}; +try { + const response = await fetch('https://httpbin.org/stream/3'); + await read(response.body); +} catch (err) { + console.error(err.stack); +} +``` + +#### Buffer +If you prefer to cache binary data in full, use buffer(). (NOTE: `buffer()` is a `node-fetch`-only API) + +```js +const fileType = require('file-type'); + +fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') + .then(res => res.buffer()) + .then(buffer => fileType(buffer)) + .then(type => { /* ... */ }); +``` + +#### Accessing Headers and other Meta data +```js +fetch('https://github.com/') + .then(res => { + console.log(res.ok); + console.log(res.status); + console.log(res.statusText); + console.log(res.headers.raw()); + console.log(res.headers.get('content-type')); + }); +``` + +#### Extract Set-Cookie Header + +Unlike browsers, you can access raw `Set-Cookie` headers manually using `Headers.raw()`. This is a `node-fetch` only API. + +```js +fetch(url).then(res => { + // returns an array of values, instead of a string of comma-separated values + console.log(res.headers.raw()['set-cookie']); +}); +``` + +#### Post data using a file stream + +```js +const { createReadStream } = require('fs'); + +const stream = createReadStream('input.txt'); + +fetch('https://httpbin.org/post', { method: 'POST', body: stream }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Post with form-data (detect multipart) + +```js +const FormData = require('form-data'); + +const form = new FormData(); +form.append('a', 1); + +fetch('https://httpbin.org/post', { method: 'POST', body: form }) + .then(res => res.json()) + .then(json => console.log(json)); + +// OR, using custom headers +// NOTE: getHeaders() is non-standard API + +const form = new FormData(); +form.append('a', 1); + +const options = { + method: 'POST', + body: form, + headers: form.getHeaders() +} + +fetch('https://httpbin.org/post', options) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Request cancellation with AbortSignal + +> NOTE: You may cancel streamed requests only on Node >= v8.0.0 + +You may cancel requests with `AbortController`. A suggested implementation is [`abort-controller`](https://www.npmjs.com/package/abort-controller). 
+ +An example of timing out a request after 150ms could be achieved as the following: + +```js +import AbortController from 'abort-controller'; + +const controller = new AbortController(); +const timeout = setTimeout( + () => { controller.abort(); }, + 150, +); + +fetch(url, { signal: controller.signal }) + .then(res => res.json()) + .then( + data => { + useData(data) + }, + err => { + if (err.name === 'AbortError') { + // request was aborted + } + }, + ) + .finally(() => { + clearTimeout(timeout); + }); +``` + +See [test cases](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for more examples. + + +## API + +### fetch(url[, options]) + +- `url` A string representing the URL for fetching +- `options` [Options](#fetch-options) for the HTTP(S) request +- Returns: Promise<[Response](#class-response)> + +Perform an HTTP(S) fetch. + +`url` should be an absolute url, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected `Promise`. + + +### Options + +The default values are shown after each option key. + +```js +{ + // These properties are part of the Fetch Standard + method: 'GET', + headers: {}, // request headers. format is the identical to that accepted by the Headers constructor (see below) + body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream + redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect + signal: null, // pass an instance of AbortSignal to optionally abort requests + + // The following properties are node-fetch extensions + follow: 20, // maximum redirect count. 0 to not follow redirect + timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). Signal is recommended instead. + compress: true, // support gzip/deflate content encoding. false to disable + size: 0, // maximum response body size in bytes. 0 to disable + agent: null // http(s).Agent instance or function that returns an instance (see below) +} +``` + +##### Default Headers + +If no values are set, the following request headers will be sent automatically: + +Header | Value +------------------- | -------------------------------------------------------- +`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_ +`Accept` | `*/*` +`Content-Length` | _(automatically calculated, if possible)_ +`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_ +`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)` + +Note: when `body` is a `Stream`, `Content-Length` is not set automatically. + +##### Custom Agent + +The `agent` option allows you to specify networking related options which are out of the scope of Fetch, including and not limited to the following: + +- Support self-signed certificate +- Use only IPv4 or IPv6 +- Custom DNS Lookup + +See [`http.Agent`](https://nodejs.org/api/http.html#http_new_agent_options) for more information. + +If no agent is specified, the default agent provided by Node.js is used. Note that [this changed in Node.js 19](https://github.com/nodejs/node/blob/4267b92604ad78584244488e7f7508a690cb80d0/lib/_http_agent.js#L564) to have `keepalive` true by default. If you wish to enable `keepalive` in an earlier version of Node.js, you can override the agent as per the following code sample. 
+ +In addition, the `agent` option accepts a function that returns `http`(s)`.Agent` instance given current [URL](https://nodejs.org/api/url.html), this is useful during a redirection chain across HTTP and HTTPS protocol. + +```js +const httpAgent = new http.Agent({ + keepAlive: true +}); +const httpsAgent = new https.Agent({ + keepAlive: true +}); + +const options = { + agent: function (_parsedURL) { + if (_parsedURL.protocol == 'http:') { + return httpAgent; + } else { + return httpsAgent; + } + } +} +``` + + +### Class: Request + +An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface. + +Due to the nature of Node.js, the following properties are not implemented at this moment: + +- `type` +- `destination` +- `referrer` +- `referrerPolicy` +- `mode` +- `credentials` +- `cache` +- `integrity` +- `keepalive` + +The following node-fetch extension properties are provided: + +- `follow` +- `compress` +- `counter` +- `agent` + +See [options](#fetch-options) for exact meaning of these extensions. + +#### new Request(input[, options]) + +*(spec-compliant)* + +- `input` A string representing a URL, or another `Request` (which will be cloned) +- `options` [Options][#fetch-options] for the HTTP(S) request + +Constructs a new `Request` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request). + +In most cases, directly `fetch(url, options)` is simpler than creating a `Request` object. + + +### Class: Response + +An HTTP(S) response. This class implements the [Body](#iface-body) interface. + +The following properties are not implemented in node-fetch at this moment: + +- `Response.error()` +- `Response.redirect()` +- `type` +- `trailer` + +#### new Response([body[, options]]) + +*(spec-compliant)* + +- `body` A `String` or [`Readable` stream][node-readable] +- `options` A [`ResponseInit`][response-init] options dictionary + +Constructs a new `Response` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Response/Response). + +Because Node.js does not implement service workers (for which this class was designed), one rarely has to construct a `Response` directly. + +#### response.ok + +*(spec-compliant)* + +Convenience property representing if the request ended normally. Will evaluate to true if the response status was greater than or equal to 200 but smaller than 300. + +#### response.redirected + +*(spec-compliant)* + +Convenience property representing if the request has been redirected at least once. Will evaluate to true if the internal redirect counter is greater than 0. + + +### Class: Headers + +This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented. + +#### new Headers([init]) + +*(spec-compliant)* + +- `init` Optional argument to pre-fill the `Headers` object + +Construct a new `Headers` object. `init` can be either `null`, a `Headers` object, an key-value map object or any iterable object. 
+ +```js +// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class + +const meta = { + 'Content-Type': 'text/xml', + 'Breaking-Bad': '<3' +}; +const headers = new Headers(meta); + +// The above is equivalent to +const meta = [ + [ 'Content-Type', 'text/xml' ], + [ 'Breaking-Bad', '<3' ] +]; +const headers = new Headers(meta); + +// You can in fact use any iterable objects, like a Map or even another Headers +const meta = new Map(); +meta.set('Content-Type', 'text/xml'); +meta.set('Breaking-Bad', '<3'); +const headers = new Headers(meta); +const copyOfHeaders = new Headers(headers); +``` + + +### Interface: Body + +`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes. + +The following methods are not yet implemented in node-fetch at this moment: + +- `formData()` + +#### body.body + +*(deviation from spec)* + +* Node.js [`Readable` stream][node-readable] + +Data are encapsulated in the `Body` object. Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch it is a Node.js [`Readable` stream][node-readable]. + +#### body.bodyUsed + +*(spec-compliant)* + +* `Boolean` + +A boolean property for if this body has been consumed. Per the specs, a consumed body cannot be used again. + +#### body.arrayBuffer() +#### body.blob() +#### body.json() +#### body.text() + +*(spec-compliant)* + +* Returns: Promise + +Consume the body and return a promise that will resolve to one of these formats. + +#### body.buffer() + +*(node-fetch extension)* + +* Returns: Promise<Buffer> + +Consume the body and return a promise that will resolve to a Buffer. + +#### body.textConverted() + +*(node-fetch extension)* + +* Returns: Promise<String> + +Identical to `body.text()`, except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8 if possible. + +(This API requires an optional dependency of the npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.) + + +### Class: FetchError + +*(node-fetch extension)* + +An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info. + + +### Class: AbortError + +*(node-fetch extension)* + +An Error thrown when the request is aborted in response to an `AbortSignal`'s `abort` event. It has a `name` property of `AbortError`. See [ERROR-HANDLING.MD][] for more info. + +## Acknowledgement + +Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference. + +`node-fetch` v1 was maintained by [@bitinn](https://github.com/bitinn); v2 was maintained by [@TimothyGu](https://github.com/timothygu), [@bitinn](https://github.com/bitinn) and [@jimmywarting](https://github.com/jimmywarting); v2 readme is written by [@jkantr](https://github.com/jkantr). 
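As a practical footnote to the `FetchError` and `AbortError` classes documented above, the sketch below shows one way to tell the two apart when a fetch promise rejects. It is only a sketch: it relies solely on the documented `name` and `type` properties, and reuses the invalid-domain URL from the exception-handling example earlier in this readme.

```js
const fetch = require('node-fetch');

fetch('https://domain.invalid/')
  .then(res => res.json())
  .catch(err => {
    if (err.name === 'AbortError') {
      // the request was aborted via an AbortSignal
      console.error('request aborted');
    } else if (err.name === 'FetchError') {
      // operational error; err.type distinguishes cases such as 'system' or 'invalid-json'
      console.error(`fetch failed (${err.type}): ${err.message}`);
    } else {
      // programmer errors should not be swallowed
      throw err;
    }
  });
```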
+ +## License + +MIT + +[npm-image]: https://flat.badgen.net/npm/v/node-fetch +[npm-url]: https://www.npmjs.com/package/node-fetch +[travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch +[travis-url]: https://travis-ci.org/bitinn/node-fetch +[codecov-image]: https://flat.badgen.net/codecov/c/github/bitinn/node-fetch/master +[codecov-url]: https://codecov.io/gh/bitinn/node-fetch +[install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch +[install-size-url]: https://packagephobia.now.sh/result?p=node-fetch +[discord-image]: https://img.shields.io/discord/619915844268326952?color=%237289DA&label=Discord&style=flat-square +[discord-url]: https://discord.gg/Zxbndcm +[opencollective-image]: https://opencollective.com/node-fetch/backers.svg +[opencollective-url]: https://opencollective.com/node-fetch +[whatwg-fetch]: https://fetch.spec.whatwg.org/ +[response-init]: https://fetch.spec.whatwg.org/#responseinit +[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams +[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers +[LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md +[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md +[UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md diff --git a/dist/node_modules/node-fetch/browser.js b/dist/node_modules/node-fetch/browser.js new file mode 100644 index 0000000..ee86265 --- /dev/null +++ b/dist/node_modules/node-fetch/browser.js @@ -0,0 +1,25 @@ +"use strict"; + +// ref: https://github.com/tc39/proposal-global +var getGlobal = function () { + // the only reliable means to get the global object is + // `Function('return this')()` + // However, this causes CSP violations in Chrome apps. + if (typeof self !== 'undefined') { return self; } + if (typeof window !== 'undefined') { return window; } + if (typeof global !== 'undefined') { return global; } + throw new Error('unable to locate global object'); +} + +var globalObject = getGlobal(); + +module.exports = exports = globalObject.fetch; + +// Needed for TypeScript and Webpack. +if (globalObject.fetch) { + exports.default = globalObject.fetch.bind(globalObject); +} + +exports.Headers = globalObject.Headers; +exports.Request = globalObject.Request; +exports.Response = globalObject.Response; diff --git a/dist/node_modules/node-fetch/lib/index.es.js b/dist/node_modules/node-fetch/lib/index.es.js new file mode 100644 index 0000000..aae9799 --- /dev/null +++ b/dist/node_modules/node-fetch/lib/index.es.js @@ -0,0 +1,1777 @@ +process.emitWarning("The .es.js file is deprecated. 
Use .mjs instead."); + +import Stream from 'stream'; +import http from 'http'; +import Url from 'url'; +import whatwgUrl from 'whatwg-url'; +import https from 'https'; +import zlib from 'zlib'; + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + 
this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +export default fetch; +export { Headers, Request, Response, FetchError, AbortError }; diff --git a/dist/node_modules/node-fetch/lib/index.js b/dist/node_modules/node-fetch/lib/index.js new file mode 100644 index 0000000..567ff5d --- /dev/null +++ b/dist/node_modules/node-fetch/lib/index.js @@ -0,0 +1,1787 @@ +'use strict'; + 
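+/*
+ * Usage sketch for the CommonJS build that follows (illustrative only; the
+ * URL and header values are placeholders, and it assumes this file is loaded
+ * as `require('node-fetch')`): `module.exports` is the `fetch` function, with
+ * Headers, Request, Response, FetchError and AbortError attached as
+ * properties and named exports.
+ *
+ *   const fetch = require('node-fetch');
+ *
+ *   async function main() {
+ *     const res = await fetch('https://example.com/api', {
+ *       headers: { Accept: 'application/json' },
+ *       // node-fetch extension: reject after 5s with a FetchError of type 'request-timeout'
+ *       timeout: 5000
+ *     });
+ *     if (!res.ok) throw new Error(`HTTP ${res.status} ${res.statusText}`);
+ *     return res.json();
+ *   }
+ *
+ *   main().then(console.log).catch(console.error);
+ */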
+Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var Stream = _interopDefault(require('stream')); +var http = _interopDefault(require('http')); +var Url = _interopDefault(require('url')); +var whatwgUrl = _interopDefault(require('whatwg-url')); +var https = _interopDefault(require('https')); +var zlib = _interopDefault(require('zlib')); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError 
For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; +exports.AbortError = AbortError; diff --git a/dist/node_modules/node-fetch/lib/index.mjs 
b/dist/node_modules/node-fetch/lib/index.mjs new file mode 100644 index 0000000..2863dd9 --- /dev/null +++ b/dist/node_modules/node-fetch/lib/index.mjs @@ -0,0 +1,1775 @@ +import Stream from 'stream'; +import http from 'http'; +import Url from 'url'; +import whatwgUrl from 'whatwg-url'; +import https from 'https'; +import zlib from 'zlib'; + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); 
+ + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. 
content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +export default fetch; +export { Headers, Request, Response, FetchError, AbortError }; diff --git a/dist/node_modules/node-fetch/package.json b/dist/node_modules/node-fetch/package.json new file mode 100644 index 0000000..e0be176 --- /dev/null +++ b/dist/node_modules/node-fetch/package.json @@ -0,0 +1,89 @@ +{ + "name": "node-fetch", + 
"version": "2.7.0", + "description": "A light-weight module that brings window.fetch to node.js", + "main": "lib/index.js", + "browser": "./browser.js", + "module": "lib/index.mjs", + "files": [ + "lib/index.js", + "lib/index.mjs", + "lib/index.es.js", + "browser.js" + ], + "engines": { + "node": "4.x || >=6.0.0" + }, + "scripts": { + "build": "cross-env BABEL_ENV=rollup rollup -c", + "prepare": "npm run build", + "test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js", + "report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js", + "coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json" + }, + "repository": { + "type": "git", + "url": "https://github.com/bitinn/node-fetch.git" + }, + "keywords": [ + "fetch", + "http", + "promise" + ], + "author": "David Frank", + "license": "MIT", + "bugs": { + "url": "https://github.com/bitinn/node-fetch/issues" + }, + "homepage": "https://github.com/bitinn/node-fetch", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + }, + "devDependencies": { + "@ungap/url-search-params": "^0.1.2", + "abort-controller": "^1.1.0", + "abortcontroller-polyfill": "^1.3.0", + "babel-core": "^6.26.3", + "babel-plugin-istanbul": "^4.1.6", + "babel-plugin-transform-async-generator-functions": "^6.24.1", + "babel-polyfill": "^6.26.0", + "babel-preset-env": "1.4.0", + "babel-register": "^6.16.3", + "chai": "^3.5.0", + "chai-as-promised": "^7.1.1", + "chai-iterator": "^1.1.1", + "chai-string": "~1.3.0", + "codecov": "3.3.0", + "cross-env": "^5.2.0", + "form-data": "^2.3.3", + "is-builtin-module": "^1.0.0", + "mocha": "^5.0.0", + "nyc": "11.9.0", + "parted": "^0.1.1", + "promise": "^8.0.3", + "resumer": "0.0.0", + "rollup": "^0.63.4", + "rollup-plugin-babel": "^3.0.7", + "string-to-arraybuffer": "^1.0.2", + "teeny-request": "3.7.0" + }, + "release": { + "branches": [ + "+([0-9]).x", + "main", + "next", + { + "name": "beta", + "prerelease": true + } + ] + } +} diff --git a/dist/node_modules/octokit/LICENSE b/dist/node_modules/octokit/LICENSE new file mode 100644 index 0000000..e0b76da --- /dev/null +++ b/dist/node_modules/octokit/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2023 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/dist/node_modules/octokit/README.md b/dist/node_modules/octokit/README.md new file mode 100644 index 0000000..4a93810 --- /dev/null +++ b/dist/node_modules/octokit/README.md @@ -0,0 +1,922 @@ +# octokit.js + +> The all-batteries-included GitHub SDK for Browsers, Node.js, and Deno. + +The `octokit` package integrates the three main Octokit libraries + +1. **API client** (REST API requests, GraphQL API queries, Authentication) +2. **App client** (GitHub App & installations, Webhooks, OAuth) +3. **Action client** (Pre-authenticated API client for single repository) + +## Table of contents + + + +- [Features](#features) +- [Usage](#usage) +- [`Octokit` API Client](#octokit-api-client) + - [Constructor options](#constructor-options) + - [Authentication](#authentication) + - [Proxy Servers (Node.js only)](#proxy-servers-nodejs-only) + - [REST API](#rest-api) + - [`octokit.rest` endpoint methods](#octokitrest-endpoint-methods) + - [`octokit.request()`](#octokitrequest) + - [Pagination](#pagination) + - [Media Type previews and formats](#media-type-previews-and-formats) + - [Request error handling](#request-error-handling) + - [GraphQL API queries](#graphql-api-queries) + - [Schema previews](#schema-previews) +- [App client](#app-client) + - [GitHub App](#github-app) + - [Webhooks](#webhooks) + - [OAuth](#oauth) + - [App Server](#app-server) + - [OAuth for browser apps](#oauth-for-browser-apps) +- [Action client](#action-client) +- [LICENSE](#license) + + + +## Features + +- **Complete**. All features of GitHub's platform APIs are covered. +- **Prescriptive**. All recommended best practises are implemented. +- **Universal**. Works in all modern browsers, [Node.js](https://nodejs.org/), and [Deno](https://deno.land/). +- **Tested**. All libraries have a 100% test coverage. +- **Typed**. All libraries have extensive TypeScript declarations. +- **Decomposable**. Use only the code you need. You can build your own Octokit in only a few lines of code or use the underlying static methods. Make your own tradeoff between functionality and bundle size. +- **Extendable**. A feature missing? Add functionalities with plugins, hook into the request or webhook lifecycle or implement your own authentication strategy. + +## Usage + + + + + + + + +
+Browsers + +Load octokit directly from esm.sh + +```html +<script type="module"> +  import { Octokit, App } from "https://esm.sh/octokit"; +</script> +``` + +
+Deno + +Load octokit directly from esm.sh + +```ts +import { Octokit, App } from "https://esm.sh/octokit?dts"; +``` + +
+Node 12+ + + +Install with npm/pnpm install octokit, or yarn add octokit + +```js +import { Octokit, App } from "octokit"; +``` + +
+Node 10 and below + + +Install with npm/pnpm install octokit, or yarn add octokit + +```js +const { Octokit, App } = require("octokit"); +``` + +
+ +## `Octokit` API Client + +**standalone minimal Octokit**: [`@octokit/core`](https://github.com/octokit/core.js/#readme). + +The `Octokit` client can be used to send requests to [GitHub's REST API](https://docs.github.com/rest/) and queries to [GitHub's GraphQL API](https://docs.github.com/graphql). + +**Example**: Get the username for the authenticated user. + +```js +// Create a personal access token at https://github.com/settings/tokens/new?scopes=repo +const octokit = new Octokit({ auth: `personal-access-token123` }); + +// Compare: https://docs.github.com/en/rest/reference/users#get-the-authenticated-user +const { + data: { login }, +} = await octokit.rest.users.getAuthenticated(); +console.log("Hello, %s", login); +``` + +### Constructor options + +The most commonly used options are + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ name + + type + + description +
+ userAgent + + String + + +Setting a user agent is required for all requests sent to GitHub's Platform APIs. The user agent defaults to something like this: `octokit.js/v1.2.3 Node.js/v8.9.4 (macOS High Sierra; x64)`. It is recommend to set your own user agent, which will prepend the default one. + +```js +const octokit = new Octokit({ + userAgent: "my-app/v1.2.3", +}); +``` + +
+ authStrategy + + Function + + +Defaults to [`@octokit/auth-token`](https://github.com/octokit/auth-token.js#readme). + +See [Authentication](#authentication) below. + +
+ auth + + String or Object + + +Set to a [personal access token](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) unless you changed the `authStrategy` option. + +See [Authentication](#authentication) below. + +
+ baseUrl + + String + + +When using with GitHub Enterprise Server, set `options.baseUrl` to the root URL of the API. For example, if your GitHub Enterprise Server's hostname is `github.acme-inc.com`, then set `options.baseUrl` to `https://github.acme-inc.com/api/v3`. Example + +```js +const octokit = new Octokit({ + baseUrl: "https://github.acme-inc.com/api/v3", +}); +``` + +
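The common options above can also be combined in a single constructor call. A small sketch with placeholder values, using only the options documented in this section (`auth`, `userAgent`, `baseUrl`):

```js
// Placeholder values; omit any option you don't need.
const octokit = new Octokit({
  auth: "personal-access-token123", // static token (default auth strategy)
  userAgent: "my-app/v1.2.3", // prepended to the default user agent
  baseUrl: "https://github.acme-inc.com/api/v3", // GitHub Enterprise Server only
});
```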
+ +Advanced options + +
+ name + + type + + description +
+ request + + Object + + +- `request.signal`: Use an [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) instance to cancel a request. [`abort-controller`](https://www.npmjs.com/package/abort-controller) is an implementation for Node. +- `request.fetch`: Replacement for [built-in fetch method](https://github.com/bitinn/node-fetch). Useful for testing with [`fetch-mock`](https://github.com/wheresrhys/fetch-mock) + +Node only + +- `request.timeout` sets a request timeout, defaults to 0 +- `request.agent`: A [`http(s).Agent`](https://nodejs.org/api/http.html#http_class_http_agent) e.g. for proxy usage + +The `request` option can also be set on a per-request basis. + +
+ timeZone + + String + + +Sets the `Time-Zone` header which defines a timezone according to the [list of names from the Olson database](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones). + +```js +const octokit = new Octokit({ + timeZone: "America/Los_Angeles", +}); +``` + +The time zone header will determine the timezone used for generating the timestamp when creating commits. See [GitHub's Timezones documentation](https://developer.github.com/v3/#timezones). + +
+ throttle + + Object + + +`Octokit` implements request throttling using [`@octokit/plugin-throttling`](https://github.com/octokit/plugin-throttling.js/#readme) + +By default, requests are retried once and warnings are logged in case of hitting a rate or secondary rate limit. + +```js +{ + onRateLimit: (retryAfter, options, octokit) => { + octokit.log.warn( + `Request quota exhausted for request ${options.method} ${options.url}` + ); + + if (options.request.retryCount === 0) { + // only retries once + octokit.log.info(`Retrying after ${retryAfter} seconds!`); + return true; + } + }, + onSecondaryRateLimit: (retryAfter, options, octokit) => { + octokit.log.warn( + `SecondaryRateLimit detected for request ${options.method} ${options.url}` + ); + + if (options.request.retryCount === 0) { + // only retries once + octokit.log.info(`Retrying after ${retryAfter} seconds!`); + return true; + } + }, +}; +``` + +To opt-out of this feature: + +```js +new Octokit({ throttle: { enabled: false } }); +``` + +Throttling in a cluster is supported using a Redis backend. See [`@octokit/plugin-throttling` Clustering](https://github.com/octokit/plugin-throttling.js/#clustering) + +
+ retry + + Object + + +`Octokit` implements request retries using [`@octokit/plugin-retry`](https://github.com/octokit/plugin-retry.js/#readme) + +To opt-out of this feature: + +```js +new Octokit({ retry: { enabled: false } }); +``` + +
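Putting the advanced options together, a sketch with placeholder values; the throttle handlers mirror the defaults shown above, and `request.timeout` and `timeZone` are the options documented in this table.

```js
// Placeholder values; every option shown is documented in the table above.
const octokit = new Octokit({
  timeZone: "America/Los_Angeles", // sets the Time-Zone header
  request: { timeout: 5000 }, // Node only; defaults to 0 (no timeout)
  throttle: {
    onRateLimit: (retryAfter, options, octokit) => {
      octokit.log.warn(`Request quota exhausted for ${options.method} ${options.url}`);
      return options.request.retryCount === 0; // retry once
    },
    onSecondaryRateLimit: (retryAfter, options, octokit) => {
      octokit.log.warn(`SecondaryRateLimit detected for ${options.method} ${options.url}`);
      return options.request.retryCount === 0; // retry once
    },
  },
  retry: { enabled: false }, // opt out of automatic retries
});
```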
+ +### Authentication + +By default, the `Octokit` API client supports authentication using a static token. + +There are different means of authentication that are supported by GitHub, that are described in detail at [octokit/authentication-strategies.js](https://github.com/octokit/authentication-strategies.js/#readme). You can set each of them as the `authStrategy` constructor option, and pass the strategy options as the `auth` constructor option. + +For example, in order to authenticate as a GitHub App Installation: + +```js +import { createAppAuth } from "@octokit/auth-app"; +const octokit = new Octokit({ + authStrategy: createAppAuth, + auth: { + appId: 1, + privateKey: "-----BEGIN PRIVATE KEY-----\n...", + installationId: 123, + }, +}); + +// authenticates as app based on request URLs +const { + data: { slug }, +} = await octokit.rest.apps.getAuthenticated(); + +// creates an installation access token as needed +// assumes that installationId 123 belongs to @octocat, otherwise the request will fail +await octokit.rest.issues.create({ + owner: "octocat", + repo: "hello-world", + title: "Hello world from " + slug, +}); +``` + +In most cases you can use the [`App`](#github-app) or [`OAuthApp`](#oauth-app) SDK which provide APIs and internal wiring to cover most usecase. + +For example, to implement the above using `App` + +```js +const app = new App({ appId, privateKey }); +const { data: slug } = await app.octokit.rest.apps.getAuthenticated(); +const octokit = await app.getInstallationOctokit(123); +await octokit.rest.issues.create({ + owner: "octocat", + repo: "hello-world", + title: "Hello world from " + slug, +}); +``` + +Learn more about [how authentication strategies work](https://github.com/octokit/authentication-strategies.js/#how-authentication-strategies-work) or how to [create your own](https://github.com/octokit/authentication-strategies.js/#create-your-own-octokit-authentication-strategy-module). + +### Proxy Servers (Node.js only) + +By default, the `Octokit` API client does not make use of the standard proxy server environment variables. To add support for proxy servers you will need to provide an https client that supports them such as [proxy-agent](https://www.npmjs.com/package/proxy-agent). + +For example, this would use a `proxy-agent` generated client that would configure the proxy based on the standard environment variables `http_proxy`, `https_proxy` and `no_proxy`: + +```js +import ProxyAgent from "proxy-agent"; + +const octokit = new Octokit({ + request: { + agent: new ProxyAgent(), + }, +}); +``` + +If you are writing a module that uses `Octokit` and is designed to be used by other people, you should ensure that consumers can provide an alternative agent for your `Octokit` or as a parameter to specific calls such as: + +```js +octokit.rest.repos.get({ + owner, + repo, + request: { agent }, +}); +``` + +### REST API + +There are two ways of using the GitHub REST API, the [`octokit.rest.*` endpoint methods](#octokitrest-endpoint-methods) and [`octokit.request`](#octokitrequest). Both act the same way, the `octokit.rest.*` methods are just added for convenience, they use `octokit.request` internally. 
+ +For example + +```js +await octokit.rest.issues.create({ + owner: "octocat", + repo: "hello-world", + title: "Hello, world!", + body: "I created this issue using Octokit!", +}); +``` + +Is the same as + +```js +await octokit.request("POST /repos/{owner}/{repo}/issues", { + owner: "octocat", + repo: "hello-world", + title: "Hello, world!", + body: "I created this issue using Octokit!", +}); +``` + +In both cases a given request is authenticated, retried, and throttled transparently by the `octokit` instance which also manages the `accept` and `user-agent` headers as needed. + +`octokit.request` can be used to send requests to other domains by passing a full URL and to send requests to endpoints that are not (yet) documented in [GitHub's REST API documentation](https://docs.github.com/rest). + +#### `octokit.rest` endpoint methods + +Every GitHub REST API endpoint has an associated `octokit.rest` endpoint method for better code readability and developer convenience. See [`@octokit/plugin-rest-endpoint-methods`](https://github.com/octokit/plugin-rest-endpoint-methods.js/#readme) for full details. + +Example: [Create an issue](https://docs.github.com/en/rest/reference/issues#create-an-issue) + +```js +await octokit.rest.issues.create({ + owner: "octocat", + repo: "hello-world", + title: "Hello, world!", + body: "I created this issue using Octokit!", +}); +``` + +The `octokit.rest` endpoint methods are generated automatically from [GitHub's OpenAPI specification](https://github.com/github/rest-api-description/). We track operation ID and parameter name changes in order to implement deprecation warnings and reduce the frequency of breaking changes. + +Under the covers, every endpoint method is just `octokit.request` with defaults set, so it supports the same parameters as well as the `.endpoint()` API. + +#### `octokit.request()` + +You can call the GitHub REST API directly using `octokit.request`. The `request` API matches GitHub's REST API documentation 1:1 so anything you see there, you can call using `request`. See [`@octokit/request`](https://github.com/octokit/request.js#readme) for all the details. + +Example: [Create an issue](https://docs.github.com/en/rest/reference/issues#create-an-issue) + +[![Screenshot of REST API reference documentation for Create an issue](assets/create-an-issue-reference.png)](https://docs.github.com/en/rest/reference/issues#create-an-issue) + +The `octokit.request` API call corresponding to that issue creation documentation looks like this: + +```js +// https://docs.github.com/en/rest/reference/issues#create-an-issue +await octokit.request("POST /repos/{owner}/{repo}/issues", { + owner: "octocat", + repo: "hello-world", + title: "Hello, world!", + body: "I created this issue using Octokit!", +}); +``` + +The 1st argument is the REST API route as listed in GitHub's API documentation. The 2nd argument is an object with all parameters, independent of whether they are used in the path, query, or body. + +#### Pagination + +All REST API endpoints that paginate return the first 30 items by default. If you want to retrieve all items, you can use the pagination API. The pagination API expects the REST API route as first argument, but you can also pass any of the `octokit.rest.*.list*` methods for convenience and better code readability. 
+
+Example: iterate through all issues in a repository
+
+```js
+const iterator = octokit.paginate.iterator(octokit.rest.issues.listForRepo, {
+  owner: "octocat",
+  repo: "hello-world",
+  per_page: 100,
+});
+
+// iterate through each response
+for await (const { data: issues } of iterator) {
+  for (const issue of issues) {
+    console.log("Issue #%d: %s", issue.number, issue.title);
+  }
+}
+```
+
+Using the [async iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of) is the most memory-efficient way to iterate through all items. But you can also retrieve all items in a single call:
+
+```js
+const issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
+  owner: "octocat",
+  repo: "hello-world",
+  per_page: 100,
+});
+```
+
+#### Media Type previews and formats
+
+**Note**: The concept of _preview headers_ has been deprecated for REST API endpoints hosted via `api.github.com`, but it still exists in GHES (GitHub Enterprise Server) version 3.2 and below. Instead of _preview headers_, new features are now being tested using beta previews that users have to opt in to.
+
+Media type previews and formats can be set using `mediaType: { format, previews }` on every request. Required API previews are set automatically on the respective REST API endpoint methods.
+
+Example: retrieve the raw content of a `package.json` file
+
+```js
+const { data } = await octokit.rest.repos.getContent({
+  mediaType: {
+    format: "raw",
+  },
+  owner: "octocat",
+  repo: "hello-world",
+  path: "package.json",
+});
+console.log("package name: %s", JSON.parse(data).name);
+```
+
+Example: retrieve a repository with topics
+
+```js
+const { data } = await octokit.rest.repos.get({
+  mediaType: {
+    previews: ["mercy"],
+  },
+  owner: "octocat",
+  repo: "hello-world",
+});
+console.log("topics on octocat/hello-world: %j", data.topics);
+```
+
+Learn more about [Media type formats](https://docs.github.com/en/rest/overview/media-types) and [previews](https://docs.github.com/en/enterprise-server@3.2/rest/overview/api-previews) used on GitHub Enterprise Server.
+
+#### Request error handling
+
+**Standalone module:** [`@octokit/request-error`](https://github.com/octokit/request-error.js/#readme)
+
+For request error handling, import `RequestError` and use a `try...catch` statement.
+
+```typescript
+import { RequestError } from "octokit";
+```
+
+```typescript
+try {
+  // your code here that sends at least one Octokit request
+  await octokit.request("GET /");
+} catch (error) {
+  // Octokit errors always have an `error.status` property with the HTTP response code,
+  // and they are instances of RequestError
+  if (error instanceof RequestError) {
+    // handle Octokit error
+    // error.message; // Oops
+    // error.status; // 500
+    // error.request; // { method, url, headers, body }
+    // error.response; // { url, status, headers, data }
+  } else {
+    // handle all other errors
+    throw error;
+  }
+}
+```
+
+### GraphQL API queries
+
+Octokit also supports GitHub's GraphQL API directly: you can use the same queries shown in the documentation and available in the GraphQL explorer in your calls with `octokit.graphql`.
+
+Example: get the login of the authenticated user
+
+```js
+const {
+  viewer: { login },
+} = await octokit.graphql(`{
+  viewer {
+    login
+  }
+}`);
+```
+
+Variables can be passed as the 2nd argument:
+
+```js
+const { lastIssues } = await octokit.graphql(
+  `
+    query lastIssues($owner: String!, $repo: String!, $num: Int = 3) {
+      repository(owner: $owner, name: $repo) {
+        issues(last: $num) {
+          edges {
+            node {
+              title
+            }
+          }
+        }
+      }
+    }
+  `,
+  {
+    owner: "octokit",
+    repo: "graphql.js",
+  }
+);
+```
+
+#### Schema previews
+
+Previews can be enabled using the `{ mediaType: { previews: [] } }` option.
+
+Example: create a label
+
+```js
+await octokit.graphql(
+  `mutation createLabel($repositoryId: ID!, $name: String!, $color: String!) {
+  createLabel(input: { repositoryId: $repositoryId, name: $name, color: $color }) {
+    label {
+      id
+    }
+  }
+}`,
+  {
+    repositoryId: 1,
+    name: "important",
+    color: "cc0000",
+    mediaType: {
+      previews: ["bane"],
+    },
+  }
+);
+```
+
+Learn more about [GitHub's GraphQL schema previews](https://docs.github.com/en/graphql/overview/schema-previews).
+
+## App client
+
+The `App` client combines features for GitHub Apps, Webhooks, and OAuth.
+
+### GitHub App
+
+**Standalone module**: [`@octokit/app`](https://github.com/octokit/app.js/#readme)
+
+For integrators, GitHub Apps are a means of authentication and authorization. A GitHub App can be registered on a GitHub user or organization account. A GitHub App registration defines a set of permissions and webhook events it wants to receive and provides a set of credentials in return. Users can grant the app access to repositories by installing it.
+
+Some API endpoints require the GitHub App to authenticate as itself using a JSON Web Token (JWT). For requests affecting an installation, an installation access token has to be created using the app's credentials and the installation ID.
+
+The `App` client takes care of all that for you.
+
+Example: Dispatch a repository event in every repository the app is installed on
+
+```js
+import { App } from "octokit";
+
+const app = new App({ appId, privateKey });
+
+for await (const { octokit, repository } of app.eachRepository.iterator()) {
+  // https://docs.github.com/en/rest/reference/repos#create-a-repository-dispatch-event
+  await octokit.rest.repos.createDispatchEvent({
+    owner: repository.owner.login,
+    repo: repository.name,
+    event_type: "my_event",
+    client_payload: {
+      foo: "bar",
+    },
+  });
+  console.log("Event dispatched for %s", repository.full_name);
+}
+```
+
+Example: Get an `octokit` instance authenticated as an installation
+
+```js
+const octokit = await app.getInstallationOctokit(123);
+```
+
+Learn more about [apps](https://docs.github.com/apps).
+
+### Webhooks
+
+**Standalone module**: [`@octokit/webhooks`](https://github.com/octokit/webhooks.js/#readme)
+
+When an app is installed, the events that its registration subscribes to are sent as requests to the webhook URL set in the app's registration.
+
+Webhook event requests are signed using the webhook secret, which is also part of the app's registration. You must verify that signature before handling the request payload.
+
+The `app.webhooks.*` APIs provide methods for receiving, verifying, and handling webhook events.
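+
+As a minimal sketch (assuming an `app` instance configured with a webhook secret, as in the next example), a catch-all handler and an error handler can be registered alongside the event-specific handlers:
+
+```js
+// Log every verified delivery, regardless of the event name.
+app.webhooks.onAny(({ id, name }) => {
+  console.log("received %s event (delivery %s)", name, id);
+});
+
+// Surface errors thrown by any registered event handler.
+app.webhooks.onError((error) => {
+  console.error("webhook handler failed:", error);
+});
+```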
+
+Example: create a comment on new issues
+
+```js
+import { createServer } from "node:http";
+import { App, createNodeMiddleware } from "octokit";
+
+const app = new App({
+  appId,
+  privateKey,
+  webhooks: { secret },
+});
+
+app.webhooks.on("issues.opened", ({ octokit, payload }) => {
+  return octokit.rest.issues.createComment({
+    owner: payload.repository.owner.login,
+    repo: payload.repository.name,
+    issue_number: payload.issue.number,
+    body: "Hello, World!",
+  });
+});
+
+// Your app can now receive webhook events at `/api/github/webhooks`
+createServer(createNodeMiddleware(app)).listen(3000);
+```
+
+For serverless environments, you can explicitly verify and receive an event:
+
+```js
+await app.webhooks.verifyAndReceive({
+  id: request.headers["x-github-delivery"],
+  name: request.headers["x-github-event"],
+  signature: request.headers["x-hub-signature-256"],
+  payload: request.body,
+});
+```
+
+Learn more about [GitHub webhooks](https://docs.github.com/webhooks).
+
+### OAuth
+
+**Standalone module:** [`@octokit/oauth-app`](https://github.com/octokit/oauth-app.js/#readme)
+
+Both OAuth Apps and GitHub Apps support authenticating GitHub users using OAuth; see [Authorizing OAuth Apps](https://docs.github.com/en/developers/apps/authorizing-oauth-apps) and [Identifying and authorizing users for GitHub Apps](https://docs.github.com/en/developers/apps/identifying-and-authorizing-users-for-github-apps).
+
+There are some differences:
+
+- Only OAuth Apps support scopes. GitHub Apps have permissions, and access is granted via installations of the app on repositories.
+- Only GitHub Apps support expiring user tokens.
+- Only GitHub Apps support creating a scoped token to reduce the permissions and repository access.
+
+`App` is for GitHub Apps. If you need OAuth App-specific functionality, use [`OAuthApp` instead](https://github.com/octokit/oauth-app.js/).
+
+Example: Watch a repository when a user logs in using the OAuth web flow
+
+```js
+import { createServer } from "node:http";
+import { App, createNodeMiddleware } from "octokit";
+
+const app = new App({
+  oauth: { clientId, clientSecret },
+});
+
+app.oauth.on("token.created", async ({ token, octokit }) => {
+  await octokit.rest.activity.setRepoSubscription({
+    owner: "octocat",
+    repo: "hello-world",
+    subscribed: true,
+  });
+});
+
+// Your app can receive the OAuth redirect at /api/github/oauth/callback
+// Users can initiate the OAuth web flow by opening /api/github/oauth/login
+createServer(createNodeMiddleware(app)).listen(3000);
+```
+
+For serverless environments, you can explicitly exchange the `code` from the OAuth web flow redirect for an access token.
+`app.oauth.createToken()` returns an authentication object and emits the "token.created" event.
+
+```js
+const { token } = await app.oauth.createToken({
+  code: request.query.code,
+});
+```
+
+Example: create a token using the device flow.
+
+```js
+const { token } = await app.oauth.createToken({
+  async onVerification(verification) {
+    await sendMessageToUser(
+      request.body.phoneNumber,
+      `Your code is ${verification.user_code}. Enter it at ${verification.verification_uri}`
+    );
+  },
+});
+```
+
+Example: Create an OAuth App Server with default scopes
+
+```js
+import { createServer } from "node:http";
+import { OAuthApp, createNodeMiddleware } from "octokit";
+
+const app = new OAuthApp({
+  clientId,
+  clientSecret,
+  defaultScopes: ["repo", "gist"],
+});
+
+app.oauth.on("token", async ({ token, octokit }) => {
+  await octokit.rest.gists.create({
+    description: "I created this gist using Octokit!",
+    public: true,
+    files: {
+      "example.js": `/* some code here */`,
+    },
+  });
+});
+
+// Your app can receive the OAuth redirect at /api/github/oauth/callback
+// Users can initiate the OAuth web flow by opening /api/github/oauth/login
+createServer(createNodeMiddleware(app)).listen(3000);
+```
+
+### App Server
+
+After registering your GitHub app, you need to create and deploy a server which can receive the webhook event requests from GitHub as well as accept redirects from the OAuth user web flow.
+
+The simplest way to create such a server is to use `createNodeMiddleware()`; it works both with Node's [`http.createServer()`](https://nodejs.org/api/http.html#http_http_createserver_options_requestlistener) method and as an [Express middleware](https://expressjs.com/en/guide/using-middleware.html).
+
+The default routes that the middleware exposes are:
+
+| Route | Route Description |
+| ----- | ----------------- |
+| `POST /api/github/webhooks` | Endpoint to receive GitHub Webhook Event requests |
+| `GET /api/github/oauth/login` | Redirects to GitHub's authorization endpoint. Accepts optional `?state` and `?scopes` query parameters. `?scopes` is a comma-separated list of [supported OAuth scope names](https://docs.github.com/en/developers/apps/scopes-for-oauth-apps#available-scopes). |
+| `GET /api/github/oauth/callback` | The client's redirect endpoint. This is where the `token` event gets triggered. |
+| `POST /api/github/oauth/token` | Exchange an authorization code for an OAuth access token. If successful, the `token` event gets triggered. |
+| `GET /api/github/oauth/token` | Check if token is valid. Must authenticate using token in `Authorization` header. Uses GitHub's [`POST /applications/{client_id}/token`](https://docs.github.com/en/rest/reference/apps#check-a-token) endpoint. |
+| `PATCH /api/github/oauth/token` | Resets a token (invalidates the current one, returns a new token). Must authenticate using token in `Authorization` header. Uses GitHub's [`PATCH /applications/{client_id}/token`](https://docs.github.com/en/rest/reference/apps#reset-a-token) endpoint. |
+| `PATCH /api/github/oauth/refresh-token` | Refreshes an expiring token (invalidates the current one, returns a new access token and refresh token). Must authenticate using token in `Authorization` header. Uses GitHub's [`POST https://github.com/login/oauth/access_token`](https://docs.github.com/en/developers/apps/refreshing-user-to-server-access-tokens#renewing-a-user-token-with-a-refresh-token) OAuth endpoint. |
+| `POST /api/github/oauth/token/scoped` | Creates a scoped token (does not invalidate the current one). Must authenticate using token in `Authorization` header. Uses GitHub's [`POST /applications/{client_id}/token/scoped`](https://docs.github.com/en/rest/reference/apps#create-a-scoped-access-token) endpoint. |
+| `DELETE /api/github/oauth/token` | Invalidates the current token, basically the equivalent of a logout. Must authenticate using token in `Authorization` header. |
+| `DELETE /api/github/oauth/grant` | Revokes the user's grant, basically the equivalent of an uninstall. Must authenticate using token in `Authorization` header. |
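+
+For illustration, a client can call these routes directly. Below is a minimal, hypothetical sketch of a logout against the `DELETE /api/github/oauth/token` route, assuming `token` holds the user's OAuth access token:
+
+```js
+// Invalidate the current OAuth token (the equivalent of a logout).
+// The middleware expects the token in the Authorization header.
+await fetch("/api/github/oauth/token", {
+  method: "DELETE",
+  headers: { authorization: `token ${token}` },
+});
+```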
+
+Example: create a GitHub server with Express
+
+```js
+import express from "express";
+import { App, createNodeMiddleware } from "octokit";
+
+const expressApp = express();
+const octokitApp = new App({
+  appId,
+  privateKey,
+  webhooks: { secret },
+  oauth: { clientId, clientSecret },
+});
+
+expressApp.use(createNodeMiddleware(octokitApp));
+
+expressApp.listen(3000, () => {
+  console.log(`Example app listening at http://localhost:3000`);
+});
+```
+
+### OAuth for browser apps
+
+You must not expose your app's client secret to the user, so you cannot use the `App` constructor in the browser. Instead, you have to run a server created with the `App` constructor which exposes the `/api/github/oauth/*` routes, through which you can safely implement an OAuth login for apps running in a web browser.
+
+If you set `(User) Authorization callback URL` to your own app, then you need to read out the `?code=...&state=...` query parameters, compare the `state` parameter to the value returned by `app.oauthLoginUrl()` earlier to protect against forgery attacks, then exchange the `code` for an OAuth Authorization token.
+
+If you run an [app server](#app-server) as described above, the default route to do that is `POST /api/github/oauth/token`.
+
+Once you have successfully retrieved the token, it is also recommended to remove the `?code=...&state=...` query parameters from the browser's URL:
+
+```js
+const code = new URL(location.href).searchParams.get("code");
+if (code) {
+  // remove ?code=... from URL
+  const path =
+    location.pathname +
+    location.search.replace(/\b(code|state)=\w+/g, "").replace(/[?&]+$/, "");
+  history.replaceState({}, "", path);
+
+  // exchange the code for a token with your backend.
+  // If you use https://github.com/octokit/oauth-app.js
+  // the exchange would look something like this
+  const response = await fetch("/api/github/oauth/token", {
+    method: "POST",
+    headers: {
+      "content-type": "application/json",
+    },
+    body: JSON.stringify({ code }),
+  });
+  const { token } = await response.json();
+  // `token` is the OAuth Access Token that can be used
+
+  const { Octokit } = await import("https://esm.sh/@octokit/core");
+  const octokit = new Octokit({ auth: token });
+
+  const {
+    data: { login },
+  } = await octokit.request("GET /user");
+  alert("Hi there, " + login);
+}
+```
+
+🚧 We are working on [`@octokit/auth-oauth-user-client`](https://github.com/octokit/auth-oauth-user-client.js#readme) to provide a simple API for all methods related to OAuth user tokens.
+
+The plan is to add a new `GET /api/github/oauth/octokit.js` route to the node middleware which will return a JavaScript file that can be imported into an HTML file. It will make a pre-authenticated `octokit` instance available.
+
+## Action client
+
+**Standalone module:** [`@octokit/action`](https://github.com/octokit/action.js#readme)
+
+🚧 A fully fledged `Action` client is pending.
You can use [`@actions/github`](https://github.com/actions/toolkit/tree/main/packages/github) for the time being + +## LICENSE + +[MIT](LICENSE) diff --git a/dist/node_modules/octokit/dist-node/index.js b/dist/node_modules/octokit/dist-node/index.js new file mode 100644 index 0000000..303fbf2 --- /dev/null +++ b/dist/node_modules/octokit/dist-node/index.js @@ -0,0 +1,87 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + App: () => App, + OAuthApp: () => OAuthApp, + Octokit: () => Octokit, + RequestError: () => import_request_error.RequestError, + createNodeMiddleware: () => import_app2.createNodeMiddleware +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/octokit.js +var import_core = require("@octokit/core"); +var import_plugin_paginate_rest = require("@octokit/plugin-paginate-rest"); +var import_plugin_rest_endpoint_methods = require("@octokit/plugin-rest-endpoint-methods"); +var import_plugin_retry = require("@octokit/plugin-retry"); +var import_plugin_throttling = require("@octokit/plugin-throttling"); + +// pkg/dist-src/version.js +var VERSION = "2.1.0"; + +// pkg/dist-src/octokit.js +var import_request_error = require("@octokit/request-error"); +var Octokit = import_core.Octokit.plugin( + import_plugin_rest_endpoint_methods.restEndpointMethods, + import_plugin_paginate_rest.paginateRest, + import_plugin_retry.retry, + import_plugin_throttling.throttling +).defaults({ + userAgent: `octokit.js/${VERSION}`, + throttle: { + onRateLimit, + onSecondaryRateLimit + } +}); +function onRateLimit(retryAfter, options, octokit) { + octokit.log.warn( + `Request quota exhausted for request ${options.method} ${options.url}` + ); + if (options.request.retryCount === 0) { + octokit.log.info(`Retrying after ${retryAfter} seconds!`); + return true; + } +} +function onSecondaryRateLimit(retryAfter, options, octokit) { + octokit.log.warn( + `SecondaryRateLimit detected for request ${options.method} ${options.url}` + ); + if (options.request.retryCount === 0) { + octokit.log.info(`Retrying after ${retryAfter} seconds!`); + return true; + } +} + +// pkg/dist-src/app.js +var import_app = require("@octokit/app"); +var import_oauth_app = require("@octokit/oauth-app"); +var import_app2 = require("@octokit/app"); +var App = import_app.App.defaults({ Octokit }); +var OAuthApp = import_oauth_app.OAuthApp.defaults({ Octokit }); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + App, + OAuthApp, + Octokit, + RequestError, + createNodeMiddleware +}); diff --git a/dist/node_modules/octokit/dist-node/index.js.map b/dist/node_modules/octokit/dist-node/index.js.map new file mode 100644 index 0000000..8480567 --- /dev/null +++ 
b/dist/node_modules/octokit/dist-node/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/index.js", "../dist-src/octokit.js", "../dist-src/version.js", "../dist-src/app.js"], + "sourcesContent": ["import { Octokit, RequestError } from \"./octokit\";\nimport { App, OAuthApp, createNodeMiddleware } from \"./app\";\nexport {\n App,\n OAuthApp,\n Octokit,\n RequestError,\n createNodeMiddleware\n};\n", "import { Octokit as OctokitCore } from \"@octokit/core\";\nimport { paginateRest } from \"@octokit/plugin-paginate-rest\";\nimport { restEndpointMethods } from \"@octokit/plugin-rest-endpoint-methods\";\nimport { retry } from \"@octokit/plugin-retry\";\nimport { throttling } from \"@octokit/plugin-throttling\";\nimport { VERSION } from \"./version\";\nimport { RequestError } from \"@octokit/request-error\";\nconst Octokit = OctokitCore.plugin(\n restEndpointMethods,\n paginateRest,\n retry,\n throttling\n).defaults({\n userAgent: `octokit.js/${VERSION}`,\n throttle: {\n onRateLimit,\n onSecondaryRateLimit\n }\n});\nfunction onRateLimit(retryAfter, options, octokit) {\n octokit.log.warn(\n `Request quota exhausted for request ${options.method} ${options.url}`\n );\n if (options.request.retryCount === 0) {\n octokit.log.info(`Retrying after ${retryAfter} seconds!`);\n return true;\n }\n}\nfunction onSecondaryRateLimit(retryAfter, options, octokit) {\n octokit.log.warn(\n `SecondaryRateLimit detected for request ${options.method} ${options.url}`\n );\n if (options.request.retryCount === 0) {\n octokit.log.info(`Retrying after ${retryAfter} seconds!`);\n return true;\n }\n}\nexport {\n Octokit,\n RequestError\n};\n", "const VERSION = \"2.1.0\";\nexport {\n VERSION\n};\n", "import { App as DefaultApp } from \"@octokit/app\";\nimport { OAuthApp as DefaultOAuthApp } from \"@octokit/oauth-app\";\nimport { Octokit } from \"./octokit\";\nconst App = DefaultApp.defaults({ Octokit });\nconst OAuthApp = DefaultOAuthApp.defaults({ Octokit });\nimport { createNodeMiddleware } from \"@octokit/app\";\nexport {\n App,\n OAuthApp,\n createNodeMiddleware\n};\n"], + "mappings": ";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,kBAAuC;AACvC,kCAA6B;AAC7B,0CAAoC;AACpC,0BAAsB;AACtB,+BAA2B;;;ACJ3B,IAAM,UAAU;;;ADMhB,2BAA6B;AAC7B,IAAM,UAAU,YAAAA,QAAY;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,EAAE,SAAS;AAAA,EACT,WAAW,cAAc;AAAA,EACzB,UAAU;AAAA,IACR;AAAA,IACA;AAAA,EACF;AACF,CAAC;AACD,SAAS,YAAY,YAAY,SAAS,SAAS;AACjD,UAAQ,IAAI;AAAA,IACV,uCAAuC,QAAQ,UAAU,QAAQ;AAAA,EACnE;AACA,MAAI,QAAQ,QAAQ,eAAe,GAAG;AACpC,YAAQ,IAAI,KAAK,kBAAkB,qBAAqB;AACxD,WAAO;AAAA,EACT;AACF;AACA,SAAS,qBAAqB,YAAY,SAAS,SAAS;AAC1D,UAAQ,IAAI;AAAA,IACV,2CAA2C,QAAQ,UAAU,QAAQ;AAAA,EACvE;AACA,MAAI,QAAQ,QAAQ,eAAe,GAAG;AACpC,YAAQ,IAAI,KAAK,kBAAkB,qBAAqB;AACxD,WAAO;AAAA,EACT;AACF;;;AEpCA,iBAAkC;AAClC,uBAA4C;AAI5C,IAAAC,cAAqC;AAFrC,IAAM,MAAM,WAAAC,IAAW,SAAS,EAAE,QAAQ,CAAC;AAC3C,IAAM,WAAW,iBAAAC,SAAgB,SAAS,EAAE,QAAQ,CAAC;", + "names": ["OctokitCore", "import_app", "DefaultApp", "DefaultOAuthApp"] +} diff --git a/dist/node_modules/octokit/dist-src/app.js b/dist/node_modules/octokit/dist-src/app.js new file mode 100644 index 0000000..f08f1f7 --- /dev/null +++ b/dist/node_modules/octokit/dist-src/app.js @@ -0,0 +1,11 @@ +import { App as DefaultApp } from "@octokit/app"; +import { OAuthApp as DefaultOAuthApp } from "@octokit/oauth-app"; +import { Octokit } from "./octokit"; +const App = DefaultApp.defaults({ Octokit }); +const OAuthApp = DefaultOAuthApp.defaults({ Octokit }); +import { createNodeMiddleware } from 
"@octokit/app"; +export { + App, + OAuthApp, + createNodeMiddleware +}; diff --git a/dist/node_modules/octokit/dist-src/index.js b/dist/node_modules/octokit/dist-src/index.js new file mode 100644 index 0000000..e3d6c25 --- /dev/null +++ b/dist/node_modules/octokit/dist-src/index.js @@ -0,0 +1,9 @@ +import { Octokit, RequestError } from "./octokit"; +import { App, OAuthApp, createNodeMiddleware } from "./app"; +export { + App, + OAuthApp, + Octokit, + RequestError, + createNodeMiddleware +}; diff --git a/dist/node_modules/octokit/dist-src/octokit.js b/dist/node_modules/octokit/dist-src/octokit.js new file mode 100644 index 0000000..0fac3b1 --- /dev/null +++ b/dist/node_modules/octokit/dist-src/octokit.js @@ -0,0 +1,41 @@ +import { Octokit as OctokitCore } from "@octokit/core"; +import { paginateRest } from "@octokit/plugin-paginate-rest"; +import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods"; +import { retry } from "@octokit/plugin-retry"; +import { throttling } from "@octokit/plugin-throttling"; +import { VERSION } from "./version"; +import { RequestError } from "@octokit/request-error"; +const Octokit = OctokitCore.plugin( + restEndpointMethods, + paginateRest, + retry, + throttling +).defaults({ + userAgent: `octokit.js/${VERSION}`, + throttle: { + onRateLimit, + onSecondaryRateLimit + } +}); +function onRateLimit(retryAfter, options, octokit) { + octokit.log.warn( + `Request quota exhausted for request ${options.method} ${options.url}` + ); + if (options.request.retryCount === 0) { + octokit.log.info(`Retrying after ${retryAfter} seconds!`); + return true; + } +} +function onSecondaryRateLimit(retryAfter, options, octokit) { + octokit.log.warn( + `SecondaryRateLimit detected for request ${options.method} ${options.url}` + ); + if (options.request.retryCount === 0) { + octokit.log.info(`Retrying after ${retryAfter} seconds!`); + return true; + } +} +export { + Octokit, + RequestError +}; diff --git a/dist/node_modules/octokit/dist-src/version.js b/dist/node_modules/octokit/dist-src/version.js new file mode 100644 index 0000000..6fd466e --- /dev/null +++ b/dist/node_modules/octokit/dist-src/version.js @@ -0,0 +1,4 @@ +const VERSION = "2.1.0"; +export { + VERSION +}; diff --git a/dist/node_modules/octokit/dist-types/app.d.ts b/dist/node_modules/octokit/dist-types/app.d.ts new file mode 100644 index 0000000..c7d1f9e --- /dev/null +++ b/dist/node_modules/octokit/dist-types/app.d.ts @@ -0,0 +1,89 @@ +import { App as DefaultApp } from "@octokit/app"; +import { OAuthApp as DefaultOAuthApp } from "@octokit/oauth-app"; +export declare const App: (new (...args: any[]) => { + octokit: import("@octokit/core").Octokit & { + paginate: import("@octokit/plugin-paginate-rest").PaginateInterface; + } & import("@octokit/plugin-rest-endpoint-methods/dist-types/types").Api & { + retry: { + retryRequest: (error: import("@octokit/request-error").RequestError, retries: number, retryAfter: number) => import("@octokit/request-error").RequestError; + }; + }; + webhooks: import("@octokit/webhooks").Webhooks<{ + octokit: import("@octokit/core").Octokit & { + paginate: import("@octokit/plugin-paginate-rest").PaginateInterface; + } & import("@octokit/plugin-rest-endpoint-methods/dist-types/types").Api & { + retry: { + retryRequest: (error: import("@octokit/request-error").RequestError, retries: number, retryAfter: number) => import("@octokit/request-error").RequestError; + }; + }; + }>; + oauth: DefaultOAuthApp<{ + clientType: "github-app"; + Octokit: typeof import("@octokit/core").Octokit & 
import("@octokit/core/dist-types/types.d").Constructor<{ + paginate: import("@octokit/plugin-paginate-rest").PaginateInterface; + } & import("@octokit/plugin-rest-endpoint-methods/dist-types/types").Api & { + retry: { + retryRequest: (error: import("@octokit/request-error").RequestError, retries: number, retryAfter: number) => import("@octokit/request-error").RequestError; + }; + }>; + }>; + getInstallationOctokit: import("@octokit/app/dist-types/types").GetInstallationOctokitInterface import("@octokit/request-error").RequestError; + }; + }>; + eachInstallation: import("@octokit/app/dist-types/types").EachInstallationInterface import("@octokit/request-error").RequestError; + }; + }>; + eachRepository: import("@octokit/app/dist-types/types").EachRepositoryInterface import("@octokit/request-error").RequestError; + }; + }>; + log: { + [key: string]: unknown; + debug: (message: string, additionalInfo?: object | undefined) => void; + info: (message: string, additionalInfo?: object | undefined) => void; + warn: (message: string, additionalInfo?: object | undefined) => void; + error: (message: string, additionalInfo?: object | undefined) => void; + }; +}) & typeof DefaultApp; +export type App = InstanceType; +export declare const OAuthApp: (new (...args: any[]) => { + type: "oauth-app"; + on: import("@octokit/oauth-app/dist-types/types").AddEventHandler<{ + Octokit: typeof import("@octokit/core").Octokit & import("@octokit/core/dist-types/types.d").Constructor<{ + paginate: import("@octokit/plugin-paginate-rest").PaginateInterface; + } & import("@octokit/plugin-rest-endpoint-methods/dist-types/types").Api & { + retry: { + retryRequest: (error: import("@octokit/request-error").RequestError, retries: number, retryAfter: number) => import("@octokit/request-error").RequestError; + }; + }>; + }>; + octokit: import("@octokit/core").Octokit & { + paginate: import("@octokit/plugin-paginate-rest").PaginateInterface; + } & import("@octokit/plugin-rest-endpoint-methods/dist-types/types").Api & { + retry: { + retryRequest: (error: import("@octokit/request-error").RequestError, retries: number, retryAfter: number) => import("@octokit/request-error").RequestError; + }; + }; + getUserOctokit: import("@octokit/oauth-app/dist-types/methods/get-user-octokit").GetUserOctokitWithStateInterface<"oauth-app">; + getWebFlowAuthorizationUrl: import("@octokit/oauth-app/dist-types/methods/get-web-flow-authorization-url").GetWebFlowAuthorizationUrlInterface<"oauth-app">; + createToken: import("@octokit/oauth-app/dist-types/methods/create-token").CreateTokenInterface<"oauth-app">; + checkToken: import("@octokit/oauth-app/dist-types/methods/check-token").CheckTokenInterface<"oauth-app">; + resetToken: import("@octokit/oauth-app/dist-types/methods/reset-token").ResetTokenInterface<"oauth-app">; + refreshToken: import("@octokit/oauth-app/dist-types/methods/refresh-token").RefreshTokenInterface; + scopeToken: import("@octokit/oauth-app/dist-types/methods/scope-token").ScopeTokenInterface; + deleteToken: import("@octokit/oauth-app/dist-types/methods/delete-token").DeleteTokenInterface; + deleteAuthorization: import("@octokit/oauth-app/dist-types/methods/delete-authorization").DeleteAuthorizationInterface; +}) & typeof DefaultOAuthApp; +export type OAuthApp = InstanceType; +export { createNodeMiddleware } from "@octokit/app"; diff --git a/dist/node_modules/octokit/dist-types/index.d.ts b/dist/node_modules/octokit/dist-types/index.d.ts new file mode 100644 index 0000000..0f6f6e6 --- /dev/null +++ 
b/dist/node_modules/octokit/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export { Octokit, RequestError } from "./octokit"; +export { App, OAuthApp, createNodeMiddleware } from "./app"; diff --git a/dist/node_modules/octokit/dist-types/octokit.d.ts b/dist/node_modules/octokit/dist-types/octokit.d.ts new file mode 100644 index 0000000..abb5e5d --- /dev/null +++ b/dist/node_modules/octokit/dist-types/octokit.d.ts @@ -0,0 +1,10 @@ +import { Octokit as OctokitCore } from "@octokit/core"; +export { RequestError } from "@octokit/request-error"; +export declare const Octokit: typeof OctokitCore & import("@octokit/core/dist-types/types.d").Constructor<{ + paginate: import("@octokit/plugin-paginate-rest").PaginateInterface; +} & import("@octokit/plugin-rest-endpoint-methods/dist-types/types").Api & { + retry: { + retryRequest: (error: import("@octokit/request-error").RequestError, retries: number, retryAfter: number) => import("@octokit/request-error").RequestError; + }; +}>; +export type Octokit = InstanceType; diff --git a/dist/node_modules/octokit/dist-types/version.d.ts b/dist/node_modules/octokit/dist-types/version.d.ts new file mode 100644 index 0000000..32ed5b2 --- /dev/null +++ b/dist/node_modules/octokit/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "2.1.0"; diff --git a/dist/node_modules/octokit/dist-web/index.js b/dist/node_modules/octokit/dist-web/index.js new file mode 100644 index 0000000..34ca5c4 --- /dev/null +++ b/dist/node_modules/octokit/dist-web/index.js @@ -0,0 +1,56 @@ +// pkg/dist-src/octokit.js +import { Octokit as OctokitCore } from "@octokit/core"; +import { paginateRest } from "@octokit/plugin-paginate-rest"; +import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods"; +import { retry } from "@octokit/plugin-retry"; +import { throttling } from "@octokit/plugin-throttling"; + +// pkg/dist-src/version.js +var VERSION = "2.1.0"; + +// pkg/dist-src/octokit.js +import { RequestError } from "@octokit/request-error"; +var Octokit = OctokitCore.plugin( + restEndpointMethods, + paginateRest, + retry, + throttling +).defaults({ + userAgent: `octokit.js/${VERSION}`, + throttle: { + onRateLimit, + onSecondaryRateLimit + } +}); +function onRateLimit(retryAfter, options, octokit) { + octokit.log.warn( + `Request quota exhausted for request ${options.method} ${options.url}` + ); + if (options.request.retryCount === 0) { + octokit.log.info(`Retrying after ${retryAfter} seconds!`); + return true; + } +} +function onSecondaryRateLimit(retryAfter, options, octokit) { + octokit.log.warn( + `SecondaryRateLimit detected for request ${options.method} ${options.url}` + ); + if (options.request.retryCount === 0) { + octokit.log.info(`Retrying after ${retryAfter} seconds!`); + return true; + } +} + +// pkg/dist-src/app.js +import { App as DefaultApp } from "@octokit/app"; +import { OAuthApp as DefaultOAuthApp } from "@octokit/oauth-app"; +import { createNodeMiddleware } from "@octokit/app"; +var App = DefaultApp.defaults({ Octokit }); +var OAuthApp = DefaultOAuthApp.defaults({ Octokit }); +export { + App, + OAuthApp, + Octokit, + RequestError, + createNodeMiddleware +}; diff --git a/dist/node_modules/octokit/dist-web/index.js.map b/dist/node_modules/octokit/dist-web/index.js.map new file mode 100644 index 0000000..2f228c4 --- /dev/null +++ b/dist/node_modules/octokit/dist-web/index.js.map @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../dist-src/octokit.js", "../dist-src/version.js", "../dist-src/app.js"], + "sourcesContent": ["import { Octokit as OctokitCore 
} from \"@octokit/core\";\nimport { paginateRest } from \"@octokit/plugin-paginate-rest\";\nimport { restEndpointMethods } from \"@octokit/plugin-rest-endpoint-methods\";\nimport { retry } from \"@octokit/plugin-retry\";\nimport { throttling } from \"@octokit/plugin-throttling\";\nimport { VERSION } from \"./version\";\nimport { RequestError } from \"@octokit/request-error\";\nconst Octokit = OctokitCore.plugin(\n restEndpointMethods,\n paginateRest,\n retry,\n throttling\n).defaults({\n userAgent: `octokit.js/${VERSION}`,\n throttle: {\n onRateLimit,\n onSecondaryRateLimit\n }\n});\nfunction onRateLimit(retryAfter, options, octokit) {\n octokit.log.warn(\n `Request quota exhausted for request ${options.method} ${options.url}`\n );\n if (options.request.retryCount === 0) {\n octokit.log.info(`Retrying after ${retryAfter} seconds!`);\n return true;\n }\n}\nfunction onSecondaryRateLimit(retryAfter, options, octokit) {\n octokit.log.warn(\n `SecondaryRateLimit detected for request ${options.method} ${options.url}`\n );\n if (options.request.retryCount === 0) {\n octokit.log.info(`Retrying after ${retryAfter} seconds!`);\n return true;\n }\n}\nexport {\n Octokit,\n RequestError\n};\n", "const VERSION = \"2.1.0\";\nexport {\n VERSION\n};\n", "import { App as DefaultApp } from \"@octokit/app\";\nimport { OAuthApp as DefaultOAuthApp } from \"@octokit/oauth-app\";\nimport { Octokit } from \"./octokit\";\nconst App = DefaultApp.defaults({ Octokit });\nconst OAuthApp = DefaultOAuthApp.defaults({ Octokit });\nimport { createNodeMiddleware } from \"@octokit/app\";\nexport {\n App,\n OAuthApp,\n createNodeMiddleware\n};\n"], + "mappings": ";AAAA,SAAS,WAAW,mBAAmB;AACvC,SAAS,oBAAoB;AAC7B,SAAS,2BAA2B;AACpC,SAAS,aAAa;AACtB,SAAS,kBAAkB;;;ACJ3B,IAAM,UAAU;;;ADMhB,SAAS,oBAAoB;AAC7B,IAAM,UAAU,YAAY;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,EAAE,SAAS;AAAA,EACT,WAAW,cAAc;AAAA,EACzB,UAAU;AAAA,IACR;AAAA,IACA;AAAA,EACF;AACF,CAAC;AACD,SAAS,YAAY,YAAY,SAAS,SAAS;AACjD,UAAQ,IAAI;AAAA,IACV,uCAAuC,QAAQ,UAAU,QAAQ;AAAA,EACnE;AACA,MAAI,QAAQ,QAAQ,eAAe,GAAG;AACpC,YAAQ,IAAI,KAAK,kBAAkB,qBAAqB;AACxD,WAAO;AAAA,EACT;AACF;AACA,SAAS,qBAAqB,YAAY,SAAS,SAAS;AAC1D,UAAQ,IAAI;AAAA,IACV,2CAA2C,QAAQ,UAAU,QAAQ;AAAA,EACvE;AACA,MAAI,QAAQ,QAAQ,eAAe,GAAG;AACpC,YAAQ,IAAI,KAAK,kBAAkB,qBAAqB;AACxD,WAAO;AAAA,EACT;AACF;;;AEpCA,SAAS,OAAO,kBAAkB;AAClC,SAAS,YAAY,uBAAuB;AAI5C,SAAS,4BAA4B;AAFrC,IAAM,MAAM,WAAW,SAAS,EAAE,QAAQ,CAAC;AAC3C,IAAM,WAAW,gBAAgB,SAAS,EAAE,QAAQ,CAAC;", + "names": [] +} diff --git a/dist/node_modules/octokit/package.json b/dist/node_modules/octokit/package.json new file mode 100644 index 0000000..3ff1ae3 --- /dev/null +++ b/dist/node_modules/octokit/package.json @@ -0,0 +1,54 @@ +{ + "name": "octokit", + "version": "2.1.0", + "description": "The all-batteries-included GitHub SDK for Browsers, Node.js, and Deno", + "keywords": [ + "github", + "api", + "sdk", + "octokit" + ], + "author": "Gregor Martynus (https://github.com/gr2m)", + "repository": "github:octokit/octokit.js", + "license": "MIT", + "dependencies": { + "@octokit/app": "^13.1.5", + "@octokit/core": "^4.2.1", + "@octokit/oauth-app": "^4.2.1", + "@octokit/plugin-paginate-rest": "^6.1.0", + "@octokit/plugin-rest-endpoint-methods": "^7.1.1", + "@octokit/plugin-retry": "^4.1.3", + "@octokit/plugin-throttling": "^5.2.2", + "@octokit/types": "^9.2.2", + "@octokit/request-error": "^v3.0.3" + }, + "devDependencies": { + "@octokit/tsconfig": "^2.0.0", + "@types/jest": "^29.0.0", + "@types/node": "^18.0.0", + "@types/node-fetch": "^2.5.10", + "esbuild": "^0.18.0", + "fetch-mock": 
"^9.11.0", + "glob": "^10.2.4", + "jest": "^29.0.0", + "mockdate": "^3.0.5", + "node-fetch": "^2.6.7", + "prettier": "2.8.8", + "semantic-release": "^21.0.0", + "semantic-release-plugin-update-version-in-files": "^1.1.0", + "ts-jest": "^29.0.0", + "typescript": "^5.0.0" + }, + "engines": { + "node": ">= 14" + }, + "files": [ + "dist-*/**", + "bin/**" + ], + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "source": "dist-src/index.js", + "sideEffects": false +} diff --git a/dist/node_modules/once/LICENSE b/dist/node_modules/once/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/dist/node_modules/once/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/dist/node_modules/once/README.md b/dist/node_modules/once/README.md new file mode 100644 index 0000000..1f1ffca --- /dev/null +++ b/dist/node_modules/once/README.md @@ -0,0 +1,79 @@ +# once + +Only call a function once. + +## usage + +```javascript +var once = require('once') + +function load (file, cb) { + cb = once(cb) + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Or add to the Function.prototype in a responsible way: + +```javascript +// only has to be done once +require('once').proto() + +function load (file, cb) { + cb = cb.once() + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Ironically, the prototype feature makes this module twice as +complicated as necessary. + +To check whether you function has been called, use `fn.called`. Once the +function is called for the first time the return value of the original +function is saved in `fn.value` and subsequent calls will continue to +return this value. + +```javascript +var once = require('once') + +function load (cb) { + cb = once(cb) + var stream = createStream() + stream.once('data', cb) + stream.once('end', function () { + if (!cb.called) cb(new Error('not found')) + }) +} +``` + +## `once.strict(func)` + +Throw an error if the function is called twice. + +Some functions are expected to be called only once. Using `once` for them would +potentially hide logical errors. 
+ +In the example below, the `greet` function has to call the callback only once: + +```javascript +function greet (name, cb) { + // return is missing from the if statement + // when no name is passed, the callback is called twice + if (!name) cb('Hello anonymous') + cb('Hello ' + name) +} + +function log (msg) { + console.log(msg) +} + +// this will print 'Hello anonymous' but the logical error will be missed +greet(null, once(msg)) + +// once.strict will print 'Hello anonymous' and throw an error when the callback will be called the second time +greet(null, once.strict(msg)) +``` diff --git a/dist/node_modules/once/once.js b/dist/node_modules/once/once.js new file mode 100644 index 0000000..2354067 --- /dev/null +++ b/dist/node_modules/once/once.js @@ -0,0 +1,42 @@ +var wrappy = require('wrappy') +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} diff --git a/dist/node_modules/once/package.json b/dist/node_modules/once/package.json new file mode 100644 index 0000000..16815b2 --- /dev/null +++ b/dist/node_modules/once/package.json @@ -0,0 +1,33 @@ +{ + "name": "once", + "version": "1.4.0", + "description": "Run a function exactly one time", + "main": "once.js", + "directories": { + "test": "test" + }, + "dependencies": { + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.0.1" + }, + "scripts": { + "test": "tap test/*.js" + }, + "files": [ + "once.js" + ], + "repository": { + "type": "git", + "url": "git://github.com/isaacs/once" + }, + "keywords": [ + "once", + "function", + "one", + "single" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/dist/node_modules/safe-buffer/LICENSE b/dist/node_modules/safe-buffer/LICENSE new file mode 100644 index 0000000..0c068ce --- /dev/null +++ b/dist/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/dist/node_modules/safe-buffer/README.md b/dist/node_modules/safe-buffer/README.md new file mode 100644 index 0000000..e9a81af --- /dev/null +++ b/dist/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. 
+ +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. 
+ +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. 
+ +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. 
We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. 
But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/dist/node_modules/safe-buffer/index.d.ts b/dist/node_modules/safe-buffer/index.d.ts new file mode 100644 index 0000000..e9fed80 --- /dev/null +++ b/dist/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + 
indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. 
+ * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/dist/node_modules/safe-buffer/index.js b/dist/node_modules/safe-buffer/index.js new file mode 100644 index 0000000..f8d3ec9 --- /dev/null +++ b/dist/node_modules/safe-buffer/index.js @@ -0,0 +1,65 @@ +/*! safe-buffer. MIT License. Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/dist/node_modules/safe-buffer/package.json b/dist/node_modules/safe-buffer/package.json new file mode 100644 index 0000000..f2869e2 --- /dev/null +++ 
b/dist/node_modules/safe-buffer/package.json @@ -0,0 +1,51 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.2.1", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^5.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/dist/node_modules/semver/LICENSE b/dist/node_modules/semver/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/dist/node_modules/semver/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/dist/node_modules/semver/README.md b/dist/node_modules/semver/README.md new file mode 100644 index 0000000..a9d3a27 --- /dev/null +++ b/dist/node_modules/semver/README.md @@ -0,0 +1,641 @@ +semver(1) -- The semantic versioner for npm +=========================================== + +## Install + +```bash +npm install semver +```` + +## Usage + +As a node module: + +```js +const semver = require('semver') + +semver.valid('1.2.3') // '1.2.3' +semver.valid('a.b.c') // null +semver.clean(' =v1.2.3 ') // '1.2.3' +semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true +semver.gt('1.2.3', '9.8.7') // false +semver.lt('1.2.3', '9.8.7') // true +semver.minVersion('>=1.0.0') // '1.0.0' +semver.valid(semver.coerce('v2')) // '2.0.0' +semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' +``` + +You can also just load the module for the function that you care about, if +you'd like to minimize your footprint. 
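+
+For instance, a minimal sketch of loading just one function (reusing the `satisfies` example from above):
+
+```js
+// pull in only the satisfies() function rather than the whole API
+const satisfies = require('semver/functions/satisfies')
+
+satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
+```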
+ +```js +// load the whole API at once in a single object +const semver = require('semver') + +// or just load the bits you need +// all of them listed here, just pick and choose what you want + +// classes +const SemVer = require('semver/classes/semver') +const Comparator = require('semver/classes/comparator') +const Range = require('semver/classes/range') + +// functions for working with versions +const semverParse = require('semver/functions/parse') +const semverValid = require('semver/functions/valid') +const semverClean = require('semver/functions/clean') +const semverInc = require('semver/functions/inc') +const semverDiff = require('semver/functions/diff') +const semverMajor = require('semver/functions/major') +const semverMinor = require('semver/functions/minor') +const semverPatch = require('semver/functions/patch') +const semverPrerelease = require('semver/functions/prerelease') +const semverCompare = require('semver/functions/compare') +const semverRcompare = require('semver/functions/rcompare') +const semverCompareLoose = require('semver/functions/compare-loose') +const semverCompareBuild = require('semver/functions/compare-build') +const semverSort = require('semver/functions/sort') +const semverRsort = require('semver/functions/rsort') + +// low-level comparators between versions +const semverGt = require('semver/functions/gt') +const semverLt = require('semver/functions/lt') +const semverEq = require('semver/functions/eq') +const semverNeq = require('semver/functions/neq') +const semverGte = require('semver/functions/gte') +const semverLte = require('semver/functions/lte') +const semverCmp = require('semver/functions/cmp') +const semverCoerce = require('semver/functions/coerce') + +// working with ranges +const semverSatisfies = require('semver/functions/satisfies') +const semverMaxSatisfying = require('semver/ranges/max-satisfying') +const semverMinSatisfying = require('semver/ranges/min-satisfying') +const semverToComparators = require('semver/ranges/to-comparators') +const semverMinVersion = require('semver/ranges/min-version') +const semverValidRange = require('semver/ranges/valid') +const semverOutside = require('semver/ranges/outside') +const semverGtr = require('semver/ranges/gtr') +const semverLtr = require('semver/ranges/ltr') +const semverIntersects = require('semver/ranges/intersects') +const simplifyRange = require('semver/ranges/simplify') +const rangeSubset = require('semver/ranges/subset') +``` + +As a command-line utility: + +``` +$ semver -h + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] [ [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range + Print versions that match the specified range. + +-i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + +--preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. + +-l --loose + Interpret versions and ranges loosely + +-n <0|1> + This is the base to be used for the prerelease identifier. 
+ +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them. +``` + +## Versions + +A "version" is described by the `v2.0.0` specification found at +. + +A leading `"="` or `"v"` character is stripped off and ignored. + +## Ranges + +A `version range` is a set of `comparators` which specify versions +that satisfy the range. + +A `comparator` is composed of an `operator` and a `version`. The set +of primitive `operators` is: + +* `<` Less than +* `<=` Less than or equal to +* `>` Greater than +* `>=` Greater than or equal to +* `=` Equal. If no operator is specified, then equality is assumed, + so this operator is optional, but MAY be included. + +For example, the comparator `>=1.2.7` would match the versions +`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` +or `1.1.0`. The comparator `>1` is equivalent to `>=2.0.0` and +would match the versions `2.0.0` and `3.1.0`, but not the versions +`1.0.1` or `1.1.0`. + +Comparators can be joined by whitespace to form a `comparator set`, +which is satisfied by the **intersection** of all of the comparators +it includes. + +A range is composed of one or more comparator sets, joined by `||`. A +version matches a range if and only if every comparator in at least +one of the `||`-separated comparator sets is satisfied by the version. + +For example, the range `>=1.2.7 <1.3.0` would match the versions +`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, +or `1.1.0`. + +The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, +`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. + +### Prerelease Tags + +If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then +it will only be allowed to satisfy comparator sets if at least one +comparator with the same `[major, minor, patch]` tuple also has a +prerelease tag. + +For example, the range `>1.2.3-alpha.3` would be allowed to match the +version `1.2.3-alpha.7`, but it would *not* be satisfied by +`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater +than" `1.2.3-alpha.3` according to the SemVer sort rules. The version +range only accepts prerelease tags on the `1.2.3` version. The +version `3.4.5` *would* satisfy the range, because it does not have a +prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. + +The purpose for this behavior is twofold. First, prerelease versions +frequently are updated very quickly, and contain many breaking changes +that are (by the author's design) not yet fit for public consumption. +Therefore, by default, they are excluded from range matching +semantics. + +Second, a user who has opted into using a prerelease version has +clearly indicated the intent to use *that specific* set of +alpha/beta/rc versions. By including a prerelease tag in the range, +the user is indicating that they are aware of the risk. However, it +is still not appropriate to assume that they have opted into taking a +similar risk on the *next* set of prerelease versions. 
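+
+To make this concrete, here is a small sketch of the behavior described above:
+
+```js
+const semver = require('semver')
+
+// Prereleases only satisfy ranges that contain a comparator with a
+// prerelease on the same [major, minor, patch] tuple.
+semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3') // true
+semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3') // false (different tuple)
+semver.satisfies('3.4.5', '>1.2.3-alpha.3')         // true (not a prerelease)
+```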
+ +Note that this behavior can be suppressed (treating all prerelease +versions as if they were normal versions, for the purpose of range +matching) by setting the `includePrerelease` flag on the options +object to any +[functions](https://github.com/npm/node-semver#functions) that do +range matching. + +#### Prerelease Identifiers + +The method `.inc` takes an additional `identifier` string argument that +will append the value of the string as a prerelease identifier: + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta') +// '1.2.4-beta.0' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta +1.2.4-beta.0 +``` + +Which then can be used to increment further: + +```bash +$ semver 1.2.4-beta.0 -i prerelease +1.2.4-beta.1 +``` + +#### Prerelease Identifier Base + +The method `.inc` takes an optional parameter 'identifierBase' string +that will let you let your prerelease number as zero-based or one-based. +Set to `false` to omit the prerelease number altogether. +If you do not specify this parameter, it will default to zero-based. + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta', '1') +// '1.2.4-beta.1' +``` + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta', false) +// '1.2.4-beta' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta -n 1 +1.2.4-beta.1 +``` + +```bash +$ semver 1.2.3 -i prerelease --preid beta -n false +1.2.4-beta +``` + +### Advanced Range Syntax + +Advanced range syntax desugars to primitive comparators in +deterministic ways. + +Advanced ranges may be combined in the same way as primitive +comparators using white space or `||`. + +#### Hyphen Ranges `X.Y.Z - A.B.C` + +Specifies an inclusive set. + +* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` + +If a partial version is provided as the first version in the inclusive +range, then the missing pieces are replaced with zeroes. + +* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` + +If a partial version is provided as the second version in the +inclusive range, then all versions that start with the supplied parts +of the tuple are accepted, but nothing that would be greater than the +provided tuple parts. + +* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0-0` +* `1.2.3 - 2` := `>=1.2.3 <3.0.0-0` + +#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` + +Any of `X`, `x`, or `*` may be used to "stand in" for one of the +numeric values in the `[major, minor, patch]` tuple. + +* `*` := `>=0.0.0` (Any non-prerelease version satisfies, unless + `includePrerelease` is specified, in which case any version at all + satisfies) +* `1.x` := `>=1.0.0 <2.0.0-0` (Matching major version) +* `1.2.x` := `>=1.2.0 <1.3.0-0` (Matching major and minor versions) + +A partial version range is treated as an X-Range, so the special +character is in fact optional. + +* `""` (empty string) := `*` := `>=0.0.0` +* `1` := `1.x.x` := `>=1.0.0 <2.0.0-0` +* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0-0` + +#### Tilde Ranges `~1.2.3` `~1.2` `~1` + +Allows patch-level changes if a minor version is specified on the +comparator. Allows minor-level changes if not. 
+ +* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0-0` +* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0-0` (Same as `1.2.x`) +* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0-0` (Same as `1.x`) +* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0-0` +* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0-0` (Same as `0.2.x`) +* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0-0` (Same as `0.x`) +* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0-0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. + +#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` + +Allows changes that do not modify the left-most non-zero element in the +`[major, minor, patch]` tuple. In other words, this allows patch and +minor updates for versions `1.0.0` and above, patch updates for +versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. + +Many authors treat a `0.x` version as if the `x` were the major +"breaking-change" indicator. + +Caret ranges are ideal when an author may make breaking changes +between `0.2.4` and `0.3.0` releases, which is a common practice. +However, it presumes that there will *not* be breaking changes between +`0.2.4` and `0.2.5`. It allows for changes that are presumed to be +additive (but non-breaking), according to commonly observed practices. + +* `^1.2.3` := `>=1.2.3 <2.0.0-0` +* `^0.2.3` := `>=0.2.3 <0.3.0-0` +* `^0.0.3` := `>=0.0.3 <0.0.4-0` +* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0-0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. +* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4-0` Note that prereleases in the + `0.0.3` version *only* will be allowed, if they are greater than or + equal to `beta`. So, `0.0.3-pr.2` would be allowed. + +When parsing caret ranges, a missing `patch` value desugars to the +number `0`, but will allow flexibility within that value, even if the +major and minor versions are both `0`. + +* `^1.2.x` := `>=1.2.0 <2.0.0-0` +* `^0.0.x` := `>=0.0.0 <0.1.0-0` +* `^0.0` := `>=0.0.0 <0.1.0-0` + +A missing `minor` and `patch` values will desugar to zero, but also +allow flexibility within those values, even if the major version is +zero. + +* `^1.x` := `>=1.0.0 <2.0.0-0` +* `^0.x` := `>=0.0.0 <1.0.0-0` + +### Range Grammar + +Putting all this together, here is a Backus-Naur grammar for ranges, +for the benefit of parser authors: + +```bnf +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ +``` + +## Functions + +All methods and classes take a final `options` object argument. All +options in this object are `false` by default. The options supported +are: + +- `loose` Be more forgiving about not-quite-valid semver strings. 
+ (Any resulting output will always be 100% strict compliant, of + course.) For backwards compatibility reasons, if the `options` + argument is a boolean value instead of an object, it is interpreted + to be the `loose` param. +- `includePrerelease` Set to suppress the [default + behavior](https://github.com/npm/node-semver#prerelease-tags) of + excluding prerelease tagged versions from ranges unless they are + explicitly opted into. + +Strict-mode Comparators and Ranges will be strict about the SemVer +strings that they parse. + +* `valid(v)`: Return the parsed version, or null if it's not valid. +* `inc(v, release)`: Return the version incremented by the release + type (`major`, `premajor`, `minor`, `preminor`, `patch`, + `prepatch`, or `prerelease`), or null if it's not valid + * `premajor` in one call will bump the version up to the next major + version and down to a prerelease of that major version. + `preminor`, and `prepatch` work the same way. + * If called from a non-prerelease version, the `prerelease` will work the + same as `prepatch`. It increments the patch version, then makes a + prerelease. If the input version is already a prerelease it simply + increments it. +* `prerelease(v)`: Returns an array of prerelease components, or null + if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` +* `major(v)`: Return the major version number. +* `minor(v)`: Return the minor version number. +* `patch(v)`: Return the patch version number. +* `intersects(r1, r2, loose)`: Return true if the two supplied ranges + or comparators intersect. +* `parse(v)`: Attempt to parse a string as a semantic version, returning either + a `SemVer` object or `null`. + +### Comparison + +* `gt(v1, v2)`: `v1 > v2` +* `gte(v1, v2)`: `v1 >= v2` +* `lt(v1, v2)`: `v1 < v2` +* `lte(v1, v2)`: `v1 <= v2` +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, + even if they're not the exact same string. You already know how to + compare strings. +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call + the corresponding function above. `"==="` and `"!=="` do simple + string comparison, but are included for completeness. Throws if an + invalid comparison string is provided. +* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions + in descending order when passed to `Array.sort()`. +* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions + are equal. Sorts in ascending order if passed to `Array.sort()`. + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `diff(v1, v2)`: Returns difference between two versions by the release type + (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), + or null if the versions are the same. + +### Comparators + +* `intersects(comparator)`: Return true if the comparators intersect + +### Ranges + +* `validRange(range)`: Return the valid range or null if it's not valid +* `satisfies(version, range)`: Return true if the version satisfies the + range. +* `maxSatisfying(versions, range)`: Return the highest version in the list + that satisfies the range, or `null` if none of them do. +* `minSatisfying(versions, range)`: Return the lowest version in the list + that satisfies the range, or `null` if none of them do. 
+* `minVersion(range)`: Return the lowest version that can possibly match + the given range. +* `gtr(version, range)`: Return `true` if version is greater than all the + versions possible in the range. +* `ltr(version, range)`: Return `true` if version is less than all the + versions possible in the range. +* `outside(version, range, hilo)`: Return true if the version is outside + the bounds of the range in either the high or low direction. The + `hilo` argument must be either the string `'>'` or `'<'`. (This is + the function called by `gtr` and `ltr`.) +* `intersects(range)`: Return true if any of the ranges comparators intersect +* `simplifyRange(versions, range)`: Return a "simplified" range that + matches the same items in `versions` list as the range specified. Note + that it does *not* guarantee that it would match the same versions in all + cases, only for the set of versions provided. This is useful when + generating ranges by joining together multiple versions with `||` + programmatically, to provide the user with something a bit more + ergonomic. If the provided range is shorter in string-length than the + generated range, then that is returned. +* `subset(subRange, superRange)`: Return `true` if the `subRange` range is + entirely contained by the `superRange` range. + +Note that, since ranges may be non-contiguous, a version might not be +greater than a range, less than a range, *or* satisfy a range! For +example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` +until `2.0.0`, so the version `1.2.10` would not be greater than the +range (because `2.0.1` satisfies, which is higher), nor less than the +range (since `1.2.8` satisfies, which is lower), and it also does not +satisfy the range. + +If you want to know if a version satisfies or does not satisfy a +range, use the `satisfies(version, range)` function. + +### Coercion + +* `coerce(version, options)`: Coerces a string to semver if possible + +This aims to provide a very forgiving translation of a non-semver string to +semver. It looks for the first digit in a string, and consumes all +remaining characters which satisfy at least a partial semver (e.g., `1`, +`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer +versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All +surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes +`3.4.0`). Only text which lacks digits will fail coercion (`version one` +is not valid). The maximum length for any semver component considered for +coercion is 16 characters; longer components will be ignored +(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any +semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value +components are invalid (`9999999999999999.4.7.4` is likely invalid). + +If the `options.rtl` flag is set, then `coerce` will return the right-most +coercible tuple that does not share an ending index with a longer coercible +tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not +`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of +any other overlapping SemVer tuple. + +If the `options.includePrerelease` flag is set, then the `coerce` result will contain +prerelease and build parts of a version. For example, `1.2.3.4-rc.1+rev.2` +will preserve prerelease `rc.1` and build `rev.2` in the result. + +### Clean + +* `clean(version)`: Clean a string to be a valid semver if possible + +This will return a cleaned and trimmed semver version. 
If the provided +version is not valid a null will be returned. This does not work for +ranges. + +ex. +* `s.clean(' = v 2.1.5foo')`: `null` +* `s.clean(' = v 2.1.5foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean(' = v 2.1.5-foo')`: `null` +* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean('=v2.1.5')`: `'2.1.5'` +* `s.clean(' =v2.1.5')`: `'2.1.5'` +* `s.clean(' 2.1.5 ')`: `'2.1.5'` +* `s.clean('~1.0.0')`: `null` + +## Constants + +As a convenience, helper constants are exported to provide information about what `node-semver` supports: + +### `RELEASE_TYPES` + +- major +- premajor +- minor +- preminor +- patch +- prepatch +- prerelease + +``` +const semver = require('semver'); + +if (semver.RELEASE_TYPES.includes(arbitraryUserInput)) { + console.log('This is a valid release type!'); +} else { + console.warn('This is NOT a valid release type!'); +} +``` + +### `SEMVER_SPEC_VERSION` + +2.0.0 + +``` +const semver = require('semver'); + +console.log('We are currently using the semver specification version:', semver.SEMVER_SPEC_VERSION); +``` + +## Exported Modules + + + +You may pull in just the part of this semver utility that you need, if you +are sensitive to packing and tree-shaking concerns. The main +`require('semver')` export uses getter functions to lazily load the parts +of the API that are used. + +The following modules are available: + +* `require('semver')` +* `require('semver/classes')` +* `require('semver/classes/comparator')` +* `require('semver/classes/range')` +* `require('semver/classes/semver')` +* `require('semver/functions/clean')` +* `require('semver/functions/cmp')` +* `require('semver/functions/coerce')` +* `require('semver/functions/compare')` +* `require('semver/functions/compare-build')` +* `require('semver/functions/compare-loose')` +* `require('semver/functions/diff')` +* `require('semver/functions/eq')` +* `require('semver/functions/gt')` +* `require('semver/functions/gte')` +* `require('semver/functions/inc')` +* `require('semver/functions/lt')` +* `require('semver/functions/lte')` +* `require('semver/functions/major')` +* `require('semver/functions/minor')` +* `require('semver/functions/neq')` +* `require('semver/functions/parse')` +* `require('semver/functions/patch')` +* `require('semver/functions/prerelease')` +* `require('semver/functions/rcompare')` +* `require('semver/functions/rsort')` +* `require('semver/functions/satisfies')` +* `require('semver/functions/sort')` +* `require('semver/functions/valid')` +* `require('semver/ranges/gtr')` +* `require('semver/ranges/intersects')` +* `require('semver/ranges/ltr')` +* `require('semver/ranges/max-satisfying')` +* `require('semver/ranges/min-satisfying')` +* `require('semver/ranges/min-version')` +* `require('semver/ranges/outside')` +* `require('semver/ranges/to-comparators')` +* `require('semver/ranges/valid')` + diff --git a/dist/node_modules/semver/bin/semver.js b/dist/node_modules/semver/bin/semver.js new file mode 100644 index 0000000..242b7ad --- /dev/null +++ b/dist/node_modules/semver/bin/semver.js @@ -0,0 +1,197 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. 
+ +const argv = process.argv.slice(2) + +let versions = [] + +const range = [] + +let inc = null + +const version = require('../package.json').version + +let loose = false + +let includePrerelease = false + +let coerce = false + +let rtl = false + +let identifier + +let identifierBase + +const semver = require('../') +const parseOptions = require('../internal/parse-options') + +let reverse = false + +let options = {} + +const main = () => { + if (!argv.length) { + return help() + } + while (argv.length) { + let a = argv.shift() + const indexOfEqualSign = a.indexOf('=') + if (indexOfEqualSign !== -1) { + const value = a.slice(indexOfEqualSign + 1) + a = a.slice(0, indexOfEqualSign) + argv.unshift(value) + } + switch (a) { + case '-rv': case '-rev': case '--rev': case '--reverse': + reverse = true + break + case '-l': case '--loose': + loose = true + break + case '-p': case '--include-prerelease': + includePrerelease = true + break + case '-v': case '--version': + versions.push(argv.shift()) + break + case '-i': case '--inc': case '--increment': + switch (argv[0]) { + case 'major': case 'minor': case 'patch': case 'prerelease': + case 'premajor': case 'preminor': case 'prepatch': + inc = argv.shift() + break + default: + inc = 'patch' + break + } + break + case '--preid': + identifier = argv.shift() + break + case '-r': case '--range': + range.push(argv.shift()) + break + case '-n': + identifierBase = argv.shift() + if (identifierBase === 'false') { + identifierBase = false + } + break + case '-c': case '--coerce': + coerce = true + break + case '--rtl': + rtl = true + break + case '--ltr': + rtl = false + break + case '-h': case '--help': case '-?': + return help() + default: + versions.push(a) + break + } + } + + options = parseOptions({ loose, includePrerelease, rtl }) + + versions = versions.map((v) => { + return coerce ? (semver.coerce(v, options) || { version: v }).version : v + }).filter((v) => { + return semver.valid(v) + }) + if (!versions.length) { + return fail() + } + if (inc && (versions.length !== 1 || range.length)) { + return failInc() + } + + for (let i = 0, l = range.length; i < l; i++) { + versions = versions.filter((v) => { + return semver.satisfies(v, range[i], options) + }) + if (!versions.length) { + return fail() + } + } + return success(versions) +} + +const failInc = () => { + console.error('--inc can only be used on a single version with no range') + fail() +} + +const fail = () => process.exit(1) + +const success = () => { + const compare = reverse ? 'rcompare' : 'compare' + versions.sort((a, b) => { + return semver[compare](a, b, options) + }).map((v) => { + return semver.clean(v, options) + }).map((v) => { + return inc ? semver.inc(v, inc, options, identifier, identifierBase) : v + }).forEach((v, i, _) => { + console.log(v) + }) +} + +const help = () => console.log( +`SemVer ${version} + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] [ [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range + Print versions that match the specified range. + +-i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + +--preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. 
+ +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +-n + Base number to be used for the prerelease identifier. + Can be either 0 or 1, or false to omit the number altogether. + Defaults to 0. + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them.`) + +main() diff --git a/dist/node_modules/semver/classes/comparator.js b/dist/node_modules/semver/classes/comparator.js new file mode 100644 index 0000000..3d39c0e --- /dev/null +++ b/dist/node_modules/semver/classes/comparator.js @@ -0,0 +1,141 @@ +const ANY = Symbol('SemVer ANY') +// hoisted class for cyclic dependency +class Comparator { + static get ANY () { + return ANY + } + + constructor (comp, options) { + options = parseOptions(options) + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) + } + + parse (comp) { + const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + const m = comp.match(r) + + if (!m) { + throw new TypeError(`Invalid comparator: ${comp}`) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } + } + + toString () { + return this.value + } + + test (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) + } + + intersects (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (this.operator === '') { + if (this.value === '') { + return true + } + return new Range(comp.value, options).test(this.value) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + return new Range(this.value, options).test(comp.semver) + } + + options = parseOptions(options) + + // Special cases where nothing can possibly be lower + if (options.includePrerelease && + (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { + return false + } + if (!options.includePrerelease && + (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { + return false + } + + // Same direction increasing (> or >=) + if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { + return true + } + // Same direction decreasing (< or <=) + if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { + return true + } + // same SemVer and both sides are inclusive (<= or >=) + if ( + (this.semver.version === comp.semver.version) && + this.operator.includes('=') && comp.operator.includes('=')) { + return true + } + // opposite directions less than + if (cmp(this.semver, '<', comp.semver, options) && + this.operator.startsWith('>') && comp.operator.startsWith('<')) { + return true + } + // opposite directions greater than + if (cmp(this.semver, '>', comp.semver, options) && + this.operator.startsWith('<') && comp.operator.startsWith('>')) { + return true + } + return false + } +} + +module.exports = Comparator + +const parseOptions = require('../internal/parse-options') +const { safeRe: re, t } = require('../internal/re') +const cmp = require('../functions/cmp') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const Range = require('./range') diff --git a/dist/node_modules/semver/classes/index.js b/dist/node_modules/semver/classes/index.js new file mode 100644 index 0000000..5e3f5c9 --- /dev/null +++ b/dist/node_modules/semver/classes/index.js @@ -0,0 +1,5 @@ +module.exports = { + SemVer: require('./semver.js'), + Range: require('./range.js'), + Comparator: require('./comparator.js'), +} diff --git a/dist/node_modules/semver/classes/range.js b/dist/node_modules/semver/classes/range.js new file mode 100644 index 0000000..7e7c414 --- /dev/null +++ b/dist/node_modules/semver/classes/range.js @@ -0,0 +1,539 @@ +// hoisted class for cyclic dependency +class Range { + constructor (range, options) { + options = parseOptions(options) + + if (range instanceof Range) { + if ( + range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease + ) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + // just put it in the set and return + this.raw = range.value + this.set = [[range]] + this.format() + return this + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = 
!!options.includePrerelease + + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. + this.raw = range + .trim() + .split(/\s+/) + .join(' ') + + // First, split on || + this.set = this.raw + .split('||') + // map the range to a 2d array of comparators + .map(r => this.parseRange(r.trim())) + // throw out any comparator lists that are empty + // this generally means that it was not a valid range, which is allowed + // in loose mode, but will still throw if the WHOLE range is invalid. + .filter(c => c.length) + + if (!this.set.length) { + throw new TypeError(`Invalid SemVer Range: ${this.raw}`) + } + + // if we have any that are not the null set, throw out null sets. + if (this.set.length > 1) { + // keep the first one, in case they're all null sets + const first = this.set[0] + this.set = this.set.filter(c => !isNullSet(c[0])) + if (this.set.length === 0) { + this.set = [first] + } else if (this.set.length > 1) { + // if we have any that are *, then the range is just * + for (const c of this.set) { + if (c.length === 1 && isAny(c[0])) { + this.set = [c] + break + } + } + } + } + + this.format() + } + + format () { + this.range = this.set + .map((comps) => comps.join(' ').trim()) + .join('||') + .trim() + return this.range + } + + toString () { + return this.range + } + + parseRange (range) { + // memoize range parsing for performance. + // this is a very hot path, and fully deterministic. + const memoOpts = + (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | + (this.options.loose && FLAG_LOOSE) + const memoKey = memoOpts + ':' + range + const cached = cache.get(memoKey) + if (cached) { + return cached + } + + const loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) + debug('hyphen replace', range) + + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + debug('tilde trim', range) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + debug('caret trim', range) + + // At this point, the range is completely trimmed and + // ready to be split into comparators. 
+ + let rangeList = range + .split(' ') + .map(comp => parseComparator(comp, this.options)) + .join(' ') + .split(/\s+/) + // >=0.0.0 is equivalent to * + .map(comp => replaceGTE0(comp, this.options)) + + if (loose) { + // in loose mode, throw out any that are not valid comparators + rangeList = rangeList.filter(comp => { + debug('loose invalid filter', comp, this.options) + return !!comp.match(re[t.COMPARATORLOOSE]) + }) + } + debug('range list', rangeList) + + // if any comparators are the null set, then replace with JUST null set + // if more than one comparator, remove any * comparators + // also, don't include the same comparator more than once + const rangeMap = new Map() + const comparators = rangeList.map(comp => new Comparator(comp, this.options)) + for (const comp of comparators) { + if (isNullSet(comp)) { + return [comp] + } + rangeMap.set(comp.value, comp) + } + if (rangeMap.size > 1 && rangeMap.has('')) { + rangeMap.delete('') + } + + const result = [...rangeMap.values()] + cache.set(memoKey, result) + return result + } + + intersects (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some((thisComparators) => { + return ( + isSatisfiable(thisComparators, options) && + range.set.some((rangeComparators) => { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every((thisComparator) => { + return rangeComparators.every((rangeComparator) => { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) + } + + // if ANY of the sets match ALL of its comparators, then pass + test (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (let i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false + } +} + +module.exports = Range + +const LRU = require('lru-cache') +const cache = new LRU({ max: 1000 }) + +const parseOptions = require('../internal/parse-options') +const Comparator = require('./comparator') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const { + safeRe: re, + t, + comparatorTrimReplace, + tildeTrimReplace, + caretTrimReplace, +} = require('../internal/re') +const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require('../internal/constants') + +const isNullSet = c => c.value === '<0.0.0-0' +const isAny = c => c.value === '' + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +const isSatisfiable = (comparators, options) => { + let result = true + const remainingComparators = comparators.slice() + let testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every((otherComparator) => { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+const parseComparator = (comp, options) => { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +const isX = id => !id || id.toLowerCase() === 'x' || id === '*' + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 +// ~0.0.1 --> >=0.0.1 <0.1.0-0 +const replaceTildes = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceTilde(c, options)) + .join(' ') +} + +const replaceTilde = (comp, options) => { + const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, (_, M, m, p, pr) => { + debug('tilde', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0 <${+M + 1}.0.0-0` + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0-0 + ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` + } else if (pr) { + debug('replaceTilde pr', pr) + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } else { + // ~1.2.3 == >=1.2.3 <1.3.0-0 + ret = `>=${M}.${m}.${p + } <${M}.${+m + 1}.0-0` + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 +// ^1.2.3 --> >=1.2.3 <2.0.0-0 +// ^1.2.0 --> >=1.2.0 <2.0.0-0 +// ^0.0.1 --> >=0.0.1 <0.0.2-0 +// ^0.1.0 --> >=0.1.0 <0.2.0-0 +const replaceCarets = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceCaret(c, options)) + .join(' ') +} + +const replaceCaret = (comp, options) => { + debug('caret', comp, options) + const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + const z = options.includePrerelease ? '-0' : '' + return comp.replace(r, (_, M, m, p, pr) => { + debug('caret', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` + } else if (isX(p)) { + if (M === '0') { + ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` + } else { + ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${+M + 1}.0.0-0` + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p + }${z} <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p + }${z} <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p + } <${+M + 1}.0.0-0` + } + } + + debug('caret return', ret) + return ret + }) +} + +const replaceXRanges = (comp, options) => { + debug('replaceXRanges', comp, options) + return comp + .split(/\s+/) + .map((c) => replaceXRange(c, options)) + .join(' ') +} + +const replaceXRange = (comp, options) => { + comp = comp.trim() + const r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, (ret, gtlt, M, m, p, pr) => { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + const xM = isX(M) + const xm = xM || isX(m) + const xp = xm || isX(p) + const anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + if (gtlt === '<') { + pr = '-0' + } + + ret = `${gtlt + M}.${m}.${p}${pr}` + } else if (xm) { + ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` + } else if (xp) { + ret = `>=${M}.${m}.0${pr + } <${M}.${+m + 1}.0-0` + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +const replaceStars = (comp, options) => { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp + .trim() + .replace(re[t.STAR], '') +} + +const replaceGTE0 = (comp, options) => { + debug('replaceGTE0', comp, options) + return comp + .trim() + .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 +const hyphenReplace = incPr => ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) => { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = `>=${fM}.0.0${incPr ? '-0' : ''}` + } else if (isX(fp)) { + from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}` + } else if (fpr) { + from = `>=${from}` + } else { + from = `>=${from}${incPr ? '-0' : ''}` + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = `<${+tM + 1}.0.0-0` + } else if (isX(tp)) { + to = `<${tM}.${+tm + 1}.0-0` + } else if (tpr) { + to = `<=${tM}.${tm}.${tp}-${tpr}` + } else if (incPr) { + to = `<${tM}.${tm}.${+tp + 1}-0` + } else { + to = `<=${to}` + } + + return `${from} ${to}`.trim() +} + +const testSet = (set, version, options) => { + for (let i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
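The comment just above describes the prerelease gate applied by `testSet` below: a version carrying a prerelease tag only matches when some comparator in the set has a prerelease on the same `[major, minor, patch]` tuple, unless `includePrerelease` is set. A hedged sketch of that behaviour via `satisfies` (vendored package assumed on the path):

```javascript
const semver = require('semver')

// '^1.2.3-pr.1' desugars to '>=1.2.3-pr.1 <2.0.0-0'
semver.satisfies('1.2.3-pr.2', '^1.2.3-pr.1')    // true  -- prerelease on the same 1.2.3 tuple
semver.satisfies('1.2.4-alpha.1', '^1.2.3-pr.1') // false -- prerelease of a different patch level
semver.satisfies('1.2.4-alpha.1', '^1.2.3-pr.1', { includePrerelease: true }) // true
semver.satisfies('1.2.4', '^1.2.3-pr.1')         // true  -- plain releases are not gated
```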
+ for (let i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === Comparator.ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + const allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} diff --git a/dist/node_modules/semver/classes/semver.js b/dist/node_modules/semver/classes/semver.js new file mode 100644 index 0000000..84e8459 --- /dev/null +++ b/dist/node_modules/semver/classes/semver.js @@ -0,0 +1,302 @@ +const debug = require('../internal/debug') +const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants') +const { safeRe: re, t } = require('../internal/re') + +const parseOptions = require('../internal/parse-options') +const { compareIdentifiers } = require('../internal/identifiers') +class SemVer { + constructor (version, options) { + options = parseOptions(options) + + if (version instanceof SemVer) { + if (version.loose === !!options.loose && + version.includePrerelease === !!options.includePrerelease) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError( + `version is longer than ${MAX_LENGTH} characters` + ) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + // this isn't actually relevant for versions, but keep it so that we + // don't run into trouble passing this.options around. + this.includePrerelease = !!options.includePrerelease + + const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError(`Invalid Version: ${version}`) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map((id) => { + if (/^[0-9]+$/.test(id)) { + const num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? 
m[5].split('.') : [] + this.format() + } + + format () { + this.version = `${this.major}.${this.minor}.${this.patch}` + if (this.prerelease.length) { + this.version += `-${this.prerelease.join('.')}` + } + return this.version + } + + toString () { + return this.version + } + + compare (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + if (typeof other === 'string' && other === this.version) { + return 0 + } + other = new SemVer(other, this.options) + } + + if (other.version === this.version) { + return 0 + } + + return this.compareMain(other) || this.comparePre(other) + } + + compareMain (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return ( + compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) + ) + } + + comparePre (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + let i = 0 + do { + const a = this.prerelease[i] + const b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + compareBuild (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + let i = 0 + do { + const a = this.build[i] + const b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + // preminor will bump the version up to the next minor release, and immediately + // down to pre-release. premajor and prepatch work the same way. + inc (release, identifier, identifierBase) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier, identifierBase) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier, identifierBase) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier, identifierBase) + this.inc('pre', identifier, identifierBase) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier, identifierBase) + } + this.inc('pre', identifier, identifierBase) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. 
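`comparePre` above sorts a prerelease below its corresponding release, and `inc` implements the bump rules spelled out in the surrounding comments. A small sketch of the resulting behaviour (vendored package assumed on the path; the `beta` identifier is illustrative):

```javascript
const semver = require('semver')

semver.compare('1.0.0-rc.1', '1.0.0')            // -1 -- a prerelease sorts before its release
semver.inc('1.2.3', 'patch')                     // '1.2.4'
semver.inc('1.2.3', 'premajor', 'beta')          // '2.0.0-beta.0'
semver.inc('1.2.4-beta.0', 'prerelease', 'beta') // '1.2.4-beta.1'
semver.inc('1.0.0-5', 'major')                   // '1.0.0' -- a pre-major bumps to the same major
semver.inc('1.1.0', 'major')                     // '2.0.0'
```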
+ // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if ( + this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0 + ) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. + case 'pre': { + const base = Number(identifierBase) ? 1 : 0 + + if (!identifier && identifierBase === false) { + throw new Error('invalid increment argument: identifier is empty') + } + + if (this.prerelease.length === 0) { + this.prerelease = [base] + } else { + let i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + if (identifier === this.prerelease.join('.') && identifierBase === false) { + throw new Error('invalid increment argument: identifier already exists') + } + this.prerelease.push(base) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + let prerelease = [identifier, base] + if (identifierBase === false) { + prerelease = [identifier] + } + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { + if (isNaN(this.prerelease[1])) { + this.prerelease = prerelease + } + } else { + this.prerelease = prerelease + } + } + break + } + default: + throw new Error(`invalid increment argument: ${release}`) + } + this.raw = this.format() + if (this.build.length) { + this.raw += `+${this.build.join('.')}` + } + return this + } +} + +module.exports = SemVer diff --git a/dist/node_modules/semver/functions/clean.js b/dist/node_modules/semver/functions/clean.js new file mode 100644 index 0000000..811fe6b --- /dev/null +++ b/dist/node_modules/semver/functions/clean.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const clean = (version, options) => { + const s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} +module.exports = clean diff --git a/dist/node_modules/semver/functions/cmp.js b/dist/node_modules/semver/functions/cmp.js new file mode 100644 index 0000000..4011909 --- /dev/null +++ b/dist/node_modules/semver/functions/cmp.js @@ -0,0 +1,52 @@ +const eq = require('./eq') +const neq = require('./neq') +const gt = require('./gt') +const gte = require('./gte') +const lt = require('./lt') +const lte = require('./lte') + +const cmp = (a, op, b, loose) => { + switch (op) { + case '===': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a === b + + case '!==': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError(`Invalid operator: ${op}`) + } +} +module.exports = cmp diff --git a/dist/node_modules/semver/functions/coerce.js b/dist/node_modules/semver/functions/coerce.js new file mode 100644 index 0000000..b378dce --- /dev/null +++ b/dist/node_modules/semver/functions/coerce.js @@ -0,0 +1,60 @@ +const SemVer = require('../classes/semver') +const parse = require('./parse') +const { safeRe: re, t } = require('../internal/re') + +const coerce = (version, options) => { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + let match = null + if (!options.rtl) { + match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL] + let next + while ((next = coerceRtlRegex.exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + coerceRtlRegex.lastIndex = -1 + } + + if (match === null) { + return null + } + + const major = match[2] + const minor = match[3] || '0' + const patch = match[4] || '0' + const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : '' + const build = options.includePrerelease && match[6] ? 
`+${match[6]}` : '' + + return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options) +} +module.exports = coerce diff --git a/dist/node_modules/semver/functions/compare-build.js b/dist/node_modules/semver/functions/compare-build.js new file mode 100644 index 0000000..9eb881b --- /dev/null +++ b/dist/node_modules/semver/functions/compare-build.js @@ -0,0 +1,7 @@ +const SemVer = require('../classes/semver') +const compareBuild = (a, b, loose) => { + const versionA = new SemVer(a, loose) + const versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} +module.exports = compareBuild diff --git a/dist/node_modules/semver/functions/compare-loose.js b/dist/node_modules/semver/functions/compare-loose.js new file mode 100644 index 0000000..4881fbe --- /dev/null +++ b/dist/node_modules/semver/functions/compare-loose.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const compareLoose = (a, b) => compare(a, b, true) +module.exports = compareLoose diff --git a/dist/node_modules/semver/functions/compare.js b/dist/node_modules/semver/functions/compare.js new file mode 100644 index 0000000..748b7af --- /dev/null +++ b/dist/node_modules/semver/functions/compare.js @@ -0,0 +1,5 @@ +const SemVer = require('../classes/semver') +const compare = (a, b, loose) => + new SemVer(a, loose).compare(new SemVer(b, loose)) + +module.exports = compare diff --git a/dist/node_modules/semver/functions/diff.js b/dist/node_modules/semver/functions/diff.js new file mode 100644 index 0000000..fc224e3 --- /dev/null +++ b/dist/node_modules/semver/functions/diff.js @@ -0,0 +1,65 @@ +const parse = require('./parse.js') + +const diff = (version1, version2) => { + const v1 = parse(version1, null, true) + const v2 = parse(version2, null, true) + const comparison = v1.compare(v2) + + if (comparison === 0) { + return null + } + + const v1Higher = comparison > 0 + const highVersion = v1Higher ? v1 : v2 + const lowVersion = v1Higher ? v2 : v1 + const highHasPre = !!highVersion.prerelease.length + const lowHasPre = !!lowVersion.prerelease.length + + if (lowHasPre && !highHasPre) { + // Going from prerelease -> no prerelease requires some special casing + + // If the low version has only a major, then it will always be a major + // Some examples: + // 1.0.0-1 -> 1.0.0 + // 1.0.0-1 -> 1.1.1 + // 1.0.0-1 -> 2.0.0 + if (!lowVersion.patch && !lowVersion.minor) { + return 'major' + } + + // Otherwise it can be determined by checking the high version + + if (highVersion.patch) { + // anything higher than a patch bump would result in the wrong version + return 'patch' + } + + if (highVersion.minor) { + // anything higher than a minor bump would result in the wrong version + return 'minor' + } + + // bumping major/minor/patch all have same result + return 'major' + } + + // add the `pre` prefix if we are going to a prerelease version + const prefix = highHasPre ? 
'pre' : '' + + if (v1.major !== v2.major) { + return prefix + 'major' + } + + if (v1.minor !== v2.minor) { + return prefix + 'minor' + } + + if (v1.patch !== v2.patch) { + return prefix + 'patch' + } + + // high and low are preleases + return 'prerelease' +} + +module.exports = diff diff --git a/dist/node_modules/semver/functions/eq.js b/dist/node_modules/semver/functions/eq.js new file mode 100644 index 0000000..271fed9 --- /dev/null +++ b/dist/node_modules/semver/functions/eq.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const eq = (a, b, loose) => compare(a, b, loose) === 0 +module.exports = eq diff --git a/dist/node_modules/semver/functions/gt.js b/dist/node_modules/semver/functions/gt.js new file mode 100644 index 0000000..d9b2156 --- /dev/null +++ b/dist/node_modules/semver/functions/gt.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const gt = (a, b, loose) => compare(a, b, loose) > 0 +module.exports = gt diff --git a/dist/node_modules/semver/functions/gte.js b/dist/node_modules/semver/functions/gte.js new file mode 100644 index 0000000..5aeaa63 --- /dev/null +++ b/dist/node_modules/semver/functions/gte.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const gte = (a, b, loose) => compare(a, b, loose) >= 0 +module.exports = gte diff --git a/dist/node_modules/semver/functions/inc.js b/dist/node_modules/semver/functions/inc.js new file mode 100644 index 0000000..7670b1b --- /dev/null +++ b/dist/node_modules/semver/functions/inc.js @@ -0,0 +1,19 @@ +const SemVer = require('../classes/semver') + +const inc = (version, release, options, identifier, identifierBase) => { + if (typeof (options) === 'string') { + identifierBase = identifier + identifier = options + options = undefined + } + + try { + return new SemVer( + version instanceof SemVer ? 
version.version : version, + options + ).inc(release, identifier, identifierBase).version + } catch (er) { + return null + } +} +module.exports = inc diff --git a/dist/node_modules/semver/functions/lt.js b/dist/node_modules/semver/functions/lt.js new file mode 100644 index 0000000..b440ab7 --- /dev/null +++ b/dist/node_modules/semver/functions/lt.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const lt = (a, b, loose) => compare(a, b, loose) < 0 +module.exports = lt diff --git a/dist/node_modules/semver/functions/lte.js b/dist/node_modules/semver/functions/lte.js new file mode 100644 index 0000000..6dcc956 --- /dev/null +++ b/dist/node_modules/semver/functions/lte.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const lte = (a, b, loose) => compare(a, b, loose) <= 0 +module.exports = lte diff --git a/dist/node_modules/semver/functions/major.js b/dist/node_modules/semver/functions/major.js new file mode 100644 index 0000000..4283165 --- /dev/null +++ b/dist/node_modules/semver/functions/major.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const major = (a, loose) => new SemVer(a, loose).major +module.exports = major diff --git a/dist/node_modules/semver/functions/minor.js b/dist/node_modules/semver/functions/minor.js new file mode 100644 index 0000000..57b3455 --- /dev/null +++ b/dist/node_modules/semver/functions/minor.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const minor = (a, loose) => new SemVer(a, loose).minor +module.exports = minor diff --git a/dist/node_modules/semver/functions/neq.js b/dist/node_modules/semver/functions/neq.js new file mode 100644 index 0000000..f944c01 --- /dev/null +++ b/dist/node_modules/semver/functions/neq.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const neq = (a, b, loose) => compare(a, b, loose) !== 0 +module.exports = neq diff --git a/dist/node_modules/semver/functions/parse.js b/dist/node_modules/semver/functions/parse.js new file mode 100644 index 0000000..459b3b1 --- /dev/null +++ b/dist/node_modules/semver/functions/parse.js @@ -0,0 +1,16 @@ +const SemVer = require('../classes/semver') +const parse = (version, options, throwErrors = false) => { + if (version instanceof SemVer) { + return version + } + try { + return new SemVer(version, options) + } catch (er) { + if (!throwErrors) { + return null + } + throw er + } +} + +module.exports = parse diff --git a/dist/node_modules/semver/functions/patch.js b/dist/node_modules/semver/functions/patch.js new file mode 100644 index 0000000..63afca2 --- /dev/null +++ b/dist/node_modules/semver/functions/patch.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const patch = (a, loose) => new SemVer(a, loose).patch +module.exports = patch diff --git a/dist/node_modules/semver/functions/prerelease.js b/dist/node_modules/semver/functions/prerelease.js new file mode 100644 index 0000000..06aa132 --- /dev/null +++ b/dist/node_modules/semver/functions/prerelease.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const prerelease = (version, options) => { + const parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null +} +module.exports = prerelease diff --git a/dist/node_modules/semver/functions/rcompare.js b/dist/node_modules/semver/functions/rcompare.js new file mode 100644 index 0000000..0ac509e --- /dev/null +++ b/dist/node_modules/semver/functions/rcompare.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const rcompare = (a, b, loose) => compare(b, a, loose) +module.exports = rcompare diff --git a/dist/node_modules/semver/functions/rsort.js b/dist/node_modules/semver/functions/rsort.js new file mode 100644 index 0000000..82404c5 --- /dev/null +++ b/dist/node_modules/semver/functions/rsort.js @@ -0,0 +1,3 @@ +const compareBuild = require('./compare-build') +const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) +module.exports = rsort diff --git a/dist/node_modules/semver/functions/satisfies.js b/dist/node_modules/semver/functions/satisfies.js new file mode 100644 index 0000000..50af1c1 --- /dev/null +++ b/dist/node_modules/semver/functions/satisfies.js @@ -0,0 +1,10 @@ +const Range = require('../classes/range') +const satisfies = (version, range, options) => { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} +module.exports = satisfies diff --git a/dist/node_modules/semver/functions/sort.js b/dist/node_modules/semver/functions/sort.js new file mode 100644 index 0000000..4d10917 --- /dev/null +++ b/dist/node_modules/semver/functions/sort.js @@ -0,0 +1,3 @@ +const compareBuild = require('./compare-build') +const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) +module.exports = sort diff --git a/dist/node_modules/semver/functions/valid.js b/dist/node_modules/semver/functions/valid.js new file mode 100644 index 0000000..f27bae1 --- /dev/null +++ b/dist/node_modules/semver/functions/valid.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const valid = (version, options) => { + const v = parse(version, options) + return v ? 
v.version : null +} +module.exports = valid diff --git a/dist/node_modules/semver/index.js b/dist/node_modules/semver/index.js new file mode 100644 index 0000000..86d42ac --- /dev/null +++ b/dist/node_modules/semver/index.js @@ -0,0 +1,89 @@ +// just pre-load all the stuff that index.js lazily exports +const internalRe = require('./internal/re') +const constants = require('./internal/constants') +const SemVer = require('./classes/semver') +const identifiers = require('./internal/identifiers') +const parse = require('./functions/parse') +const valid = require('./functions/valid') +const clean = require('./functions/clean') +const inc = require('./functions/inc') +const diff = require('./functions/diff') +const major = require('./functions/major') +const minor = require('./functions/minor') +const patch = require('./functions/patch') +const prerelease = require('./functions/prerelease') +const compare = require('./functions/compare') +const rcompare = require('./functions/rcompare') +const compareLoose = require('./functions/compare-loose') +const compareBuild = require('./functions/compare-build') +const sort = require('./functions/sort') +const rsort = require('./functions/rsort') +const gt = require('./functions/gt') +const lt = require('./functions/lt') +const eq = require('./functions/eq') +const neq = require('./functions/neq') +const gte = require('./functions/gte') +const lte = require('./functions/lte') +const cmp = require('./functions/cmp') +const coerce = require('./functions/coerce') +const Comparator = require('./classes/comparator') +const Range = require('./classes/range') +const satisfies = require('./functions/satisfies') +const toComparators = require('./ranges/to-comparators') +const maxSatisfying = require('./ranges/max-satisfying') +const minSatisfying = require('./ranges/min-satisfying') +const minVersion = require('./ranges/min-version') +const validRange = require('./ranges/valid') +const outside = require('./ranges/outside') +const gtr = require('./ranges/gtr') +const ltr = require('./ranges/ltr') +const intersects = require('./ranges/intersects') +const simplifyRange = require('./ranges/simplify') +const subset = require('./ranges/subset') +module.exports = { + parse, + valid, + clean, + inc, + diff, + major, + minor, + patch, + prerelease, + compare, + rcompare, + compareLoose, + compareBuild, + sort, + rsort, + gt, + lt, + eq, + neq, + gte, + lte, + cmp, + coerce, + Comparator, + Range, + satisfies, + toComparators, + maxSatisfying, + minSatisfying, + minVersion, + validRange, + outside, + gtr, + ltr, + intersects, + simplifyRange, + subset, + SemVer, + re: internalRe.re, + src: internalRe.src, + tokens: internalRe.t, + SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, + RELEASE_TYPES: constants.RELEASE_TYPES, + compareIdentifiers: identifiers.compareIdentifiers, + rcompareIdentifiers: identifiers.rcompareIdentifiers, +} diff --git a/dist/node_modules/semver/internal/constants.js b/dist/node_modules/semver/internal/constants.js new file mode 100644 index 0000000..94be1c5 --- /dev/null +++ b/dist/node_modules/semver/internal/constants.js @@ -0,0 +1,35 @@ +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +const SEMVER_SPEC_VERSION = '2.0.0' + +const MAX_LENGTH = 256 +const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || +/* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +const MAX_SAFE_COMPONENT_LENGTH = 16 + +// Max safe length for a build identifier. 
The max length minus 6 characters for +// the shortest version with a build 0.0.0+BUILD. +const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + +const RELEASE_TYPES = [ + 'major', + 'premajor', + 'minor', + 'preminor', + 'patch', + 'prepatch', + 'prerelease', +] + +module.exports = { + MAX_LENGTH, + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_SAFE_INTEGER, + RELEASE_TYPES, + SEMVER_SPEC_VERSION, + FLAG_INCLUDE_PRERELEASE: 0b001, + FLAG_LOOSE: 0b010, +} diff --git a/dist/node_modules/semver/internal/debug.js b/dist/node_modules/semver/internal/debug.js new file mode 100644 index 0000000..1c00e13 --- /dev/null +++ b/dist/node_modules/semver/internal/debug.js @@ -0,0 +1,9 @@ +const debug = ( + typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG) +) ? (...args) => console.error('SEMVER', ...args) + : () => {} + +module.exports = debug diff --git a/dist/node_modules/semver/internal/identifiers.js b/dist/node_modules/semver/internal/identifiers.js new file mode 100644 index 0000000..e612d0a --- /dev/null +++ b/dist/node_modules/semver/internal/identifiers.js @@ -0,0 +1,23 @@ +const numeric = /^[0-9]+$/ +const compareIdentifiers = (a, b) => { + const anum = numeric.test(a) + const bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? -1 + : 1 +} + +const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) + +module.exports = { + compareIdentifiers, + rcompareIdentifiers, +} diff --git a/dist/node_modules/semver/internal/parse-options.js b/dist/node_modules/semver/internal/parse-options.js new file mode 100644 index 0000000..10d64ce --- /dev/null +++ b/dist/node_modules/semver/internal/parse-options.js @@ -0,0 +1,15 @@ +// parse out just the options we care about +const looseOption = Object.freeze({ loose: true }) +const emptyOpts = Object.freeze({ }) +const parseOptions = options => { + if (!options) { + return emptyOpts + } + + if (typeof options !== 'object') { + return looseOption + } + + return options +} +module.exports = parseOptions diff --git a/dist/node_modules/semver/internal/re.js b/dist/node_modules/semver/internal/re.js new file mode 100644 index 0000000..fd8920e --- /dev/null +++ b/dist/node_modules/semver/internal/re.js @@ -0,0 +1,217 @@ +const { + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_LENGTH, +} = require('./constants') +const debug = require('./debug') +exports = module.exports = {} + +// The actual regexps go on exports.re +const re = exports.re = [] +const safeRe = exports.safeRe = [] +const src = exports.src = [] +const t = exports.t = {} +let R = 0 + +const LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. 
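The comment above explains why a second, bounded copy of every pattern is kept on `safeRe`. A minimal sketch of the quantifier-capping idea it describes (the `cap` helper below is illustrative only; it mirrors `makeSafeRegex`, defined next, which applies the same rewrite to whitespace, digit, and `[a-zA-Z0-9-]` runs):

```javascript
// Rewrite unbounded '*' / '+' repetitions of a token into bounded '{0,N}' / '{1,N}'
// repetitions so hostile input cannot trigger catastrophic backtracking.
const cap = (source, token, max) =>
  source
    .split(`${token}*`).join(`${token}{0,${max}}`)
    .split(`${token}+`).join(`${token}{1,${max}}`)

console.log(cap('0|[1-9]\\d*', '\\d', 256))
// prints: 0|[1-9]\d{0,256} -- same matches, up to the length cap
```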
+const safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +const makeSafeRegex = (value) => { + for (const [token, max] of safeRegexReplacements) { + value = value + .split(`${token}*`).join(`${token}{0,${max}}`) + .split(`${token}+`).join(`${token}{1,${max}}`) + } + return value +} + +const createToken = (name, value, isGlobal) => { + const safe = makeSafeRegex(value) + const index = R++ + debug(name, index, value) + t[name] = index + src[index] = value + re[index] = new RegExp(value, isGlobal ? 'g' : undefined) + safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') +createToken('NUMERICIDENTIFIERLOOSE', '\\d+') + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`) + +// ## Main Version +// Three dot-separated numeric identifiers. + +createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})`) + +createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})`) + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] +}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) + +createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] +}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`) + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] +}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +createToken('FULLPLAIN', `v?${src[t.MAINVERSION] +}${src[t.PRERELEASE]}?${ + src[t.BUILD]}?`) + +createToken('FULL', `^${src[t.FULLPLAIN]}$`) + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] +}${src[t.PRERELEASELOOSE]}?${ + src[t.BUILD]}?`) + +createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) + +createToken('GTLT', '((?:<|>)?=?)') + +// Something like "2.*" or "1.2.x". 
+// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) +createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) + +createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:${src[t.PRERELEASE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:${src[t.PRERELEASELOOSE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) +createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +createToken('COERCEPLAIN', `${'(^|[^\\d])' + + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`) +createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`) +createToken('COERCEFULL', src[t.COERCEPLAIN] + + `(?:${src[t.PRERELEASE]})?` + + `(?:${src[t.BUILD]})?` + + `(?:$|[^\\d])`) +createToken('COERCERTL', src[t.COERCE], true) +createToken('COERCERTLFULL', src[t.COERCEFULL], true) + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +createToken('LONETILDE', '(?:~>?)') + +createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) +exports.tildeTrimReplace = '$1~' + +createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) +createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +createToken('LONECARET', '(?:\\^)') + +createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) +exports.caretTrimReplace = '$1^' + +createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) +createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) +createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] +}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) +exports.comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAIN]})` + + `\\s*$`) + +createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAINLOOSE]})` + + `\\s*$`) + +// Star ranges basically just allow anything at all. 
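The `COERCE*` tokens above feed `coerce()`, which extracts the first (or, with `rtl`, the right-most) version-shaped run from arbitrary text. A short sketch of that documented behaviour (vendored package assumed on the resolution path):

```javascript
const semver = require('semver')

semver.coerce('v2').version                     // '2.0.0'
semver.coerce('42.6.7.9.3-alpha').version       // '42.6.7' -- left-most match wins by default
semver.coerce('1.2.3.4', { rtl: true }).version // '2.3.4'  -- right-most coercible run
semver.coerce('not a version')                  // null
```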
+createToken('STAR', '(<|>)?=?\\s*\\*') +// >=0.0.0 is like a star +createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') +createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') diff --git a/dist/node_modules/semver/node_modules/lru-cache/LICENSE b/dist/node_modules/semver/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/dist/node_modules/semver/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/dist/node_modules/semver/node_modules/lru-cache/README.md b/dist/node_modules/semver/node_modules/lru-cache/README.md new file mode 100644 index 0000000..435dfeb --- /dev/null +++ b/dist/node_modules/semver/node_modules/lru-cache/README.md @@ -0,0 +1,166 @@ +# lru cache + +A cache object that deletes the least-recently-used items. + +[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache) + +## Installation: + +```javascript +npm install lru-cache --save +``` + +## Usage: + +```javascript +var LRU = require("lru-cache") + , options = { max: 500 + , length: function (n, key) { return n * 2 + key.length } + , dispose: function (key, n) { n.close() } + , maxAge: 1000 * 60 * 60 } + , cache = new LRU(options) + , otherCache = new LRU(50) // sets just the max size + +cache.set("key", "value") +cache.get("key") // "value" + +// non-string keys ARE fully supported +// but note that it must be THE SAME object, not +// just a JSON-equivalent object. +var someObject = { a: 1 } +cache.set(someObject, 'a value') +// Object keys are not toString()-ed +cache.set('[object Object]', 'a different value') +assert.equal(cache.get(someObject), 'a value') +// A similar object with same keys/values won't work, +// because it's a different object identity +assert.equal(cache.get({ a: 1 }), undefined) + +cache.reset() // empty the cache +``` + +If you put more stuff in it, then items will fall out. + +If you try to put an oversized thing in it, then it'll fall out right +away. + +## Options + +* `max` The maximum size of the cache, checked by applying the length + function to all values in the cache. Not setting this is kind of + silly, since that's the whole purpose of this lib, but it defaults + to `Infinity`. Setting it to a non-number or negative number will + throw a `TypeError`. Setting it to 0 makes it be `Infinity`. +* `maxAge` Maximum age in ms. Items are not pro-actively pruned out + as they age, but if you try to get an item that is too old, it'll + drop it and return undefined instead of giving it to you. + Setting this to a negative value will make everything seem old! 
+ Setting it to a non-number will throw a `TypeError`. +* `length` Function that is used to calculate the length of stored + items. If you're storing strings or buffers, then you probably want + to do something like `function(n, key){return n.length}`. The default is + `function(){return 1}`, which is fine if you want to store `max` + like-sized things. The item is passed as the first argument, and + the key is passed as the second argumnet. +* `dispose` Function that is called on items when they are dropped + from the cache. This can be handy if you want to close file + descriptors or do other cleanup tasks when items are no longer + accessible. Called with `key, value`. It's called *before* + actually removing the item from the internal cache, so if you want + to immediately put it back in, you'll have to do that in a + `nextTick` or `setTimeout` callback or it won't do anything. +* `stale` By default, if you set a `maxAge`, it'll only actually pull + stale items out of the cache when you `get(key)`. (That is, it's + not pre-emptively doing a `setTimeout` or anything.) If you set + `stale:true`, it'll return the stale value before deleting it. If + you don't set this, then it'll return `undefined` when you try to + get a stale entry, as if it had already been deleted. +* `noDisposeOnSet` By default, if you set a `dispose()` method, then + it'll be called whenever a `set()` operation overwrites an existing + key. If you set this option, `dispose()` will only be called when a + key falls out of the cache, not when it is overwritten. +* `updateAgeOnGet` When using time-expiring entries with `maxAge`, + setting this to `true` will make each item's effective time update + to the current time whenever it is retrieved from cache, causing it + to not expire. (It can still fall out of cache based on recency of + use, of course.) + +## API + +* `set(key, value, maxAge)` +* `get(key) => value` + + Both of these will update the "recently used"-ness of the key. + They do what you think. `maxAge` is optional and overrides the + cache `maxAge` option if provided. + + If the key is not found, `get()` will return `undefined`. + + The key and val can be any value. + +* `peek(key)` + + Returns the key value (or `undefined` if not found) without + updating the "recently used"-ness of the key. + + (If you find yourself using this a lot, you *might* be using the + wrong sort of data structure, but there are some use cases where + it's handy.) + +* `del(key)` + + Deletes a key out of the cache. + +* `reset()` + + Clear the cache entirely, throwing away all values. + +* `has(key)` + + Check if a key is in the cache, without updating the recent-ness + or deleting it for being stale. + +* `forEach(function(value,key,cache), [thisp])` + + Just like `Array.prototype.forEach`. Iterates over all the keys + in the cache, in order of recent-ness. (Ie, more recently used + items are iterated over first.) + +* `rforEach(function(value,key,cache), [thisp])` + + The same as `cache.forEach(...)` but items are iterated over in + reverse order. (ie, less recently used items are iterated over + first.) + +* `keys()` + + Return an array of the keys in the cache. + +* `values()` + + Return an array of the values in the cache. + +* `length` + + Return total length of objects in cache taking into account + `length` options function. + +* `itemCount` + + Return total quantity of objects currently in cache. Note, that + `stale` (see options) items are returned as part of this item + count. 
+ +* `dump()` + + Return an array of the cache entries ready for serialization and usage + with 'destinationCache.load(arr)`. + +* `load(cacheEntriesArray)` + + Loads another cache entries array, obtained with `sourceCache.dump()`, + into the cache. The destination cache is reset before loading new entries + +* `prune()` + + Manually iterates over the entire cache proactively pruning old entries diff --git a/dist/node_modules/semver/node_modules/lru-cache/index.js b/dist/node_modules/semver/node_modules/lru-cache/index.js new file mode 100644 index 0000000..573b6b8 --- /dev/null +++ b/dist/node_modules/semver/node_modules/lru-cache/index.js @@ -0,0 +1,334 @@ +'use strict' + +// A linked list to keep track of recently-used-ness +const Yallist = require('yallist') + +const MAX = Symbol('max') +const LENGTH = Symbol('length') +const LENGTH_CALCULATOR = Symbol('lengthCalculator') +const ALLOW_STALE = Symbol('allowStale') +const MAX_AGE = Symbol('maxAge') +const DISPOSE = Symbol('dispose') +const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') +const LRU_LIST = Symbol('lruList') +const CACHE = Symbol('cache') +const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') + +const naiveLength = () => 1 + +// lruList is a yallist where the head is the youngest +// item, and the tail is the oldest. the list contains the Hit +// objects as the entries. +// Each Hit object has a reference to its Yallist.Node. This +// never changes. +// +// cache is a Map (or PseudoMap) that matches the keys to +// the Yallist.Node object. +class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options } + + if (!options) + options = {} + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. + const max = this[MAX] = options.max || Infinity + + const lc = options.length || naiveLength + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc + this[ALLOW_STALE] = options.stale || false + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false + this.reset() + } + + // resize the cache when the max changes. + set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') + + this[MAX] = mL || Infinity + trim(this) + } + get max () { + return this[MAX] + } + + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale + } + get allowStale () { + return this[ALLOW_STALE] + } + + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') + + this[MAX_AGE] = mA + trim(this) + } + get maxAge () { + return this[MAX_AGE] + } + + // resize the cache when the lengthCalculator changes. 
+ set lengthCalculator (lC) { + if (typeof lC !== 'function') + lC = naiveLength + + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(hit => { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }) + } + trim(this) + } + get lengthCalculator () { return this[LENGTH_CALCULATOR] } + + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } + } + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } + } + + keys () { + return this[LRU_LIST].toArray().map(k => k.key) + } + + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } + + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list + } + + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }).toArray().filter(h => h) + } + + dumpLru () { + return this[LRU_LIST] + } + + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') + + const now = maxAge ? Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + const node = this[CACHE].get(key) + const item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + const hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. 
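`set()` above updates an existing key in place; new keys are prepended to the LRU list below, and `trim()` evicts from the tail once the accumulated length exceeds `max`. A small usage sketch of that eviction and `dispose` behaviour (keys and values here are illustrative; the semver range code above constructs this class with just `{ max: 1000 }`):

```javascript
const LRU = require('lru-cache')

const cache = new LRU({
  max: 2, // default naiveLength counts each entry as 1
  dispose: (key, value) => console.log('evicted', key),
})

cache.set('a', 1)
cache.set('b', 2)
cache.get('a')              // 1 -- moves 'a' to the head of the recency list
cache.set('c', 3)           // over max: trim() drops the tail -> logs "evicted b"
console.log(cache.has('b')) // false
console.log(cache.keys())   // [ 'c', 'a' ] -- most recently used first
```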
+ if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) + + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true + } + + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) + } + + get (key) { + return get(this, key, true) + } + + peek (key) { + return get(this, key, false) + } + + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null + + del(this, node) + return node.value + } + + del (key) { + del(this, this[CACHE].get(key)) + } + + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } + } + + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } +} + +const get = (self, key, doUse) => { + const node = self[CACHE].get(key) + if (node) { + const hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + return undefined + } else { + if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() + self[LRU_LIST].unshiftNode(node) + } + } + return hit.value + } +} + +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) + return false + + const diff = Date.now() - hit.now + return hit.maxAge ? diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) +} + +const trim = self => { + if (self[LENGTH] > self[MAX]) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. + const prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +const del = (self, node) => { + if (node) { + const hit = node.value + if (self[DISPOSE]) + self[DISPOSE](hit.key, hit.value) + + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) +} + +module.exports = LRUCache diff --git a/dist/node_modules/semver/node_modules/lru-cache/package.json b/dist/node_modules/semver/node_modules/lru-cache/package.json new file mode 100644 index 0000000..43b7502 --- /dev/null +++ b/dist/node_modules/semver/node_modules/lru-cache/package.json @@ -0,0 +1,34 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "6.0.0", + "author": "Isaac Z. 
Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "main": "index.js", + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "benchmark": "^2.1.4", + "tap": "^14.10.7" + }, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "files": [ + "index.js" + ], + "engines": { + "node": ">=10" + } +} diff --git a/dist/node_modules/semver/package.json b/dist/node_modules/semver/package.json new file mode 100644 index 0000000..f00c6bd --- /dev/null +++ b/dist/node_modules/semver/package.json @@ -0,0 +1,78 @@ +{ + "name": "semver", + "version": "7.6.0", + "description": "The semantic version parser used by npm.", + "main": "index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "tap": "^16.0.0" + }, + "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/npm/node-semver.git" + }, + "bin": { + "semver": "bin/semver.js" + }, + "files": [ + "bin/", + "lib/", + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "tap": { + "timeout": 30, + "coverage-map": "map.js", + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "engines": { + "node": ">=10" + }, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.3", + "engines": ">=10", + "distPaths": [ + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "allowPaths": [ + "/classes/", + "/functions/", + "/internal/", + "/ranges/", + "/index.js", + "/preload.js", + "/range.bnf" + ], + "publish": "true" + } +} diff --git a/dist/node_modules/semver/preload.js b/dist/node_modules/semver/preload.js new file mode 100644 index 0000000..947cd4f --- /dev/null +++ b/dist/node_modules/semver/preload.js @@ -0,0 +1,2 @@ +// XXX remove in v8 or beyond +module.exports = require('./index.js') diff --git a/dist/node_modules/semver/range.bnf b/dist/node_modules/semver/range.bnf new file mode 100644 index 0000000..d4c6ae0 --- /dev/null +++ b/dist/node_modules/semver/range.bnf @@ -0,0 +1,16 @@ +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | [1-9] ( [0-9] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' 
part ) * +part ::= nr | [-0-9A-Za-z]+ diff --git a/dist/node_modules/semver/ranges/gtr.js b/dist/node_modules/semver/ranges/gtr.js new file mode 100644 index 0000000..db7e355 --- /dev/null +++ b/dist/node_modules/semver/ranges/gtr.js @@ -0,0 +1,4 @@ +// Determine if version is greater than all the versions possible in the range. +const outside = require('./outside') +const gtr = (version, range, options) => outside(version, range, '>', options) +module.exports = gtr diff --git a/dist/node_modules/semver/ranges/intersects.js b/dist/node_modules/semver/ranges/intersects.js new file mode 100644 index 0000000..e0e9b7c --- /dev/null +++ b/dist/node_modules/semver/ranges/intersects.js @@ -0,0 +1,7 @@ +const Range = require('../classes/range') +const intersects = (r1, r2, options) => { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2, options) +} +module.exports = intersects diff --git a/dist/node_modules/semver/ranges/ltr.js b/dist/node_modules/semver/ranges/ltr.js new file mode 100644 index 0000000..528a885 --- /dev/null +++ b/dist/node_modules/semver/ranges/ltr.js @@ -0,0 +1,4 @@ +const outside = require('./outside') +// Determine if version is less than all the versions possible in the range +const ltr = (version, range, options) => outside(version, range, '<', options) +module.exports = ltr diff --git a/dist/node_modules/semver/ranges/max-satisfying.js b/dist/node_modules/semver/ranges/max-satisfying.js new file mode 100644 index 0000000..6e3d993 --- /dev/null +++ b/dist/node_modules/semver/ranges/max-satisfying.js @@ -0,0 +1,25 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') + +const maxSatisfying = (versions, range, options) => { + let max = null + let maxSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} +module.exports = maxSatisfying diff --git a/dist/node_modules/semver/ranges/min-satisfying.js b/dist/node_modules/semver/ranges/min-satisfying.js new file mode 100644 index 0000000..9b60974 --- /dev/null +++ b/dist/node_modules/semver/ranges/min-satisfying.js @@ -0,0 +1,24 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const minSatisfying = (versions, range, options) => { + let min = null + let minSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} +module.exports = minSatisfying diff --git a/dist/node_modules/semver/ranges/min-version.js b/dist/node_modules/semver/ranges/min-version.js new file mode 100644 index 0000000..350e1f7 --- /dev/null +++ b/dist/node_modules/semver/ranges/min-version.js @@ -0,0 +1,61 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const gt = require('../functions/gt') + +const minVersion = (range, loose) => { + range = new Range(range, loose) + + let minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = 
null + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let setMin = null + comparators.forEach((comparator) => { + // Clone to avoid manipulating the comparator's semver object. + const compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!setMin || gt(compver, setMin)) { + setMin = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error(`Unexpected operation: ${comparator.operator}`) + } + }) + if (setMin && (!minver || gt(minver, setMin))) { + minver = setMin + } + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} +module.exports = minVersion diff --git a/dist/node_modules/semver/ranges/outside.js b/dist/node_modules/semver/ranges/outside.js new file mode 100644 index 0000000..ae99b10 --- /dev/null +++ b/dist/node_modules/semver/ranges/outside.js @@ -0,0 +1,80 @@ +const SemVer = require('../classes/semver') +const Comparator = require('../classes/comparator') +const { ANY } = Comparator +const Range = require('../classes/range') +const satisfies = require('../functions/satisfies') +const gt = require('../functions/gt') +const lt = require('../functions/lt') +const lte = require('../functions/lte') +const gte = require('../functions/gte') + +const outside = (version, range, hilo, options) => { + version = new SemVer(version, options) + range = new Range(range, options) + + let gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisfies the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. 
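+  // For illustration only (assumed examples, using the gtr/ltr wrappers above):
+  //   gtr('2.0.0', '^1.2.3')          // true:  2.0.0 is above everything in >=1.2.3 <2.0.0
+  //   ltr('1.0.0', '^1.2.3')          // true:  1.0.0 is below everything in the range
+  //   outside('1.5.0', '^1.2.3', '>') // false: 1.5.0 satisfies the range itself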
+ + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let high = null + let low = null + + comparators.forEach((comparator) => { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +module.exports = outside diff --git a/dist/node_modules/semver/ranges/simplify.js b/dist/node_modules/semver/ranges/simplify.js new file mode 100644 index 0000000..618d5b6 --- /dev/null +++ b/dist/node_modules/semver/ranges/simplify.js @@ -0,0 +1,47 @@ +// given a set of versions and a range, create a "simplified" range +// that includes the same versions that the original range does +// If the original range is shorter than the simplified one, return that. +const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') +module.exports = (versions, range, options) => { + const set = [] + let first = null + let prev = null + const v = versions.sort((a, b) => compare(a, b, options)) + for (const version of v) { + const included = satisfies(version, range, options) + if (included) { + prev = version + if (!first) { + first = version + } + } else { + if (prev) { + set.push([first, prev]) + } + prev = null + first = null + } + } + if (first) { + set.push([first, null]) + } + + const ranges = [] + for (const [min, max] of set) { + if (min === max) { + ranges.push(min) + } else if (!max && min === v[0]) { + ranges.push('*') + } else if (!max) { + ranges.push(`>=${min}`) + } else if (min === v[0]) { + ranges.push(`<=${max}`) + } else { + ranges.push(`${min} - ${max}`) + } + } + const simplified = ranges.join(' || ') + const original = typeof range.raw === 'string' ? range.raw : String(range) + return simplified.length < original.length ? 
simplified : range +} diff --git a/dist/node_modules/semver/ranges/subset.js b/dist/node_modules/semver/ranges/subset.js new file mode 100644 index 0000000..1e5c268 --- /dev/null +++ b/dist/node_modules/semver/ranges/subset.js @@ -0,0 +1,247 @@ +const Range = require('../classes/range.js') +const Comparator = require('../classes/comparator.js') +const { ANY } = Comparator +const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') + +// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` +// +// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: +// - If c is only the ANY comparator +// - If C is only the ANY comparator, return true +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` +// - Let EQ be the set of = comparators in c +// - If EQ is more than one, return true (null set) +// - Let GT be the highest > or >= comparator in c +// - Let LT be the lowest < or <= comparator in c +// - If GT and LT, and GT.semver > LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false +// - If EQ +// - If GT, and EQ does not satisfy GT, return true (null set) +// - If LT, and EQ does not satisfy LT, return true (null set) +// - If EQ satisfies every C, return true +// - Else return false +// - If GT +// - If GT.semver is lower than any > or >= comp in C, return false +// - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return false +// - If LT +// - If LT.semver is greater than any < or <= comp in C, return false +// - If LT is <=, and LT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the LT.semver tuple, return false +// - Else return true + +const subset = (sub, dom, options = {}) => { + if (sub === dom) { + return true + } + + sub = new Range(sub, options) + dom = new Range(dom, options) + let sawNonNull = false + + OUTER: for (const simpleSub of sub.set) { + for (const simpleDom of dom.set) { + const isSub = simpleSubset(simpleSub, simpleDom, options) + sawNonNull = sawNonNull || isSub !== null + if (isSub) { + continue OUTER + } + } + // the null set is a subset of everything, but null simple ranges in + // a complex range should be ignored. so if we saw a non-null range, + // then we know this isn't a subset, but if EVERY simple range was null, + // then it is a subset. 
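+    // For illustration only (assumed examples of the rules described above):
+    //   subset('>1.2.3 <1.3.0', '>=1.2.0 <2.0.0') // true:  every candidate version is covered
+    //   subset('>=1.0.0', '>=2.0.0')              // false: 1.0.0 is in sub but not in dom
+    //   subset('>2.0.0 <1.0.0', '*')              // true:  sub contains only a null simple range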
+ if (sawNonNull) { + return false + } + } + return true +} + +const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')] +const minimumVersion = [new Comparator('>=0.0.0')] + +const simpleSubset = (sub, dom, options) => { + if (sub === dom) { + return true + } + + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) { + return true + } else if (options.includePrerelease) { + sub = minimumVersionWithPreRelease + } else { + sub = minimumVersion + } + } + + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) { + return true + } else { + dom = minimumVersion + } + } + + const eqSet = new Set() + let gt, lt + for (const c of sub) { + if (c.operator === '>' || c.operator === '>=') { + gt = higherGT(gt, c, options) + } else if (c.operator === '<' || c.operator === '<=') { + lt = lowerLT(lt, c, options) + } else { + eqSet.add(c.semver) + } + } + + if (eqSet.size > 1) { + return null + } + + let gtltComp + if (gt && lt) { + gtltComp = compare(gt.semver, lt.semver, options) + if (gtltComp > 0) { + return null + } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { + return null + } + } + + // will iterate one or zero times + for (const eq of eqSet) { + if (gt && !satisfies(eq, String(gt), options)) { + return null + } + + if (lt && !satisfies(eq, String(lt), options)) { + return null + } + + for (const c of dom) { + if (!satisfies(eq, String(c), options)) { + return false + } + } + + return true + } + + let higher, lower + let hasDomLT, hasDomGT + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? gt.semver : false + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false + } + + for (const c of dom) { + hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' + hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' + if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false + } + } + if (c.operator === '>' || c.operator === '>=') { + higher = higherGT(gt, c, options) + if (higher === c && higher !== gt) { + return false + } + } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { + return false + } + } + if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre = false + } + } + if (c.operator === '<' || c.operator === '<=') { + lower = lowerLT(lt, c, options) + if (lower === c && lower !== lt) { + return false + } + } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { + return false + } + } + if (!c.operator && (lt || gt) && gtltComp !== 0) { + return false + } + } + + // if there was a < or >, and nothing in the dom, then must be false + // UNLESS it was limited by another range in the other direction. 
+ // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 + if (gt && hasDomLT && !lt && gtltComp !== 0) { + return false + } + + if (lt && hasDomGT && !gt && gtltComp !== 0) { + return false + } + + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) { + return false + } + + return true +} + +// >=1.2.3 is lower than >1.2.3 +const higherGT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp > 0 ? a + : comp < 0 ? b + : b.operator === '>' && a.operator === '>=' ? b + : a +} + +// <=1.2.3 is higher than <1.2.3 +const lowerLT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp < 0 ? a + : comp > 0 ? b + : b.operator === '<' && a.operator === '<=' ? b + : a +} + +module.exports = subset diff --git a/dist/node_modules/semver/ranges/to-comparators.js b/dist/node_modules/semver/ranges/to-comparators.js new file mode 100644 index 0000000..6c8bc7e --- /dev/null +++ b/dist/node_modules/semver/ranges/to-comparators.js @@ -0,0 +1,8 @@ +const Range = require('../classes/range') + +// Mostly just for testing and legacy API reasons +const toComparators = (range, options) => + new Range(range, options).set + .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) + +module.exports = toComparators diff --git a/dist/node_modules/semver/ranges/valid.js b/dist/node_modules/semver/ranges/valid.js new file mode 100644 index 0000000..365f356 --- /dev/null +++ b/dist/node_modules/semver/ranges/valid.js @@ -0,0 +1,11 @@ +const Range = require('../classes/range') +const validRange = (range, options) => { + try { + // Return '*' instead of '' so that truthiness works. 
+ // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} +module.exports = validRange diff --git a/dist/node_modules/tr46/.npmignore b/dist/node_modules/tr46/.npmignore new file mode 100644 index 0000000..96e9161 --- /dev/null +++ b/dist/node_modules/tr46/.npmignore @@ -0,0 +1,4 @@ +scripts/ +test/ + +!lib/mapping_table.json diff --git a/dist/node_modules/tr46/index.js b/dist/node_modules/tr46/index.js new file mode 100644 index 0000000..9ce12ca --- /dev/null +++ b/dist/node_modules/tr46/index.js @@ -0,0 +1,193 @@ +"use strict"; + +var punycode = require("punycode"); +var mappingTable = require("./lib/mappingTable.json"); + +var PROCESSING_OPTIONS = { + TRANSITIONAL: 0, + NONTRANSITIONAL: 1 +}; + +function normalize(str) { // fix bug in v8 + return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); +} + +function findStatus(val) { + var start = 0; + var end = mappingTable.length - 1; + + while (start <= end) { + var mid = Math.floor((start + end) / 2); + + var target = mappingTable[mid]; + if (target[0][0] <= val && target[0][1] >= val) { + return target; + } else if (target[0][0] > val) { + end = mid - 1; + } else { + start = mid + 1; + } + } + + return null; +} + +var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; + +function countSymbols(string) { + return string + // replace every surrogate pair with a BMP symbol + .replace(regexAstralSymbols, '_') + // then get the length + .length; +} + +function mapChars(domain_name, useSTD3, processing_option) { + var hasError = false; + var processed = ""; + + var len = countSymbols(domain_name); + for (var i = 0; i < len; ++i) { + var codePoint = domain_name.codePointAt(i); + var status = findStatus(codePoint); + + switch (status[1]) { + case "disallowed": + hasError = true; + processed += String.fromCodePoint(codePoint); + break; + case "ignored": + break; + case "mapped": + processed += String.fromCodePoint.apply(String, status[2]); + break; + case "deviation": + if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { + processed += String.fromCodePoint.apply(String, status[2]); + } else { + processed += String.fromCodePoint(codePoint); + } + break; + case "valid": + processed += String.fromCodePoint(codePoint); + break; + case "disallowed_STD3_mapped": + if (useSTD3) { + hasError = true; + processed += String.fromCodePoint(codePoint); + } else { + processed += String.fromCodePoint.apply(String, status[2]); + } + break; + case "disallowed_STD3_valid": + if (useSTD3) { + hasError = true; + } + + processed += String.fromCodePoint(codePoint); + break; + } + } + + return { + string: processed, + error: hasError + }; +} + +var combiningMarksRegex = 
/[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; + +function validateLabel(label, processing_option) { + if (label.substr(0, 4) === "xn--") { + label = punycode.toUnicode(label); + processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; + } + + var error = false; + + if (normalize(label) !== label || + (label[3] === "-" && label[4] === "-") || + label[0] === "-" || label[label.length - 1] === "-" || + label.indexOf(".") !== -1 || + label.search(combiningMarksRegex) === 0) { + error = true; + } + + var len = countSymbols(label); + for (var i = 0; i < len; ++i) { + var status = findStatus(label.codePointAt(i)); + if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || + (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && + status[1] !== "valid" && status[1] !== "deviation")) { + error = true; + break; + } + } + + return { + label: label, + 
error: error + }; +} + +function processing(domain_name, useSTD3, processing_option) { + var result = mapChars(domain_name, useSTD3, processing_option); + result.string = normalize(result.string); + + var labels = result.string.split("."); + for (var i = 0; i < labels.length; ++i) { + try { + var validation = validateLabel(labels[i]); + labels[i] = validation.label; + result.error = result.error || validation.error; + } catch(e) { + result.error = true; + } + } + + return { + string: labels.join("."), + error: result.error + }; +} + +module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { + var result = processing(domain_name, useSTD3, processing_option); + var labels = result.string.split("."); + labels = labels.map(function(l) { + try { + return punycode.toASCII(l); + } catch(e) { + result.error = true; + return l; + } + }); + + if (verifyDnsLength) { + var total = labels.slice(0, labels.length - 1).join(".").length; + if (total.length > 253 || total.length === 0) { + result.error = true; + } + + for (var i=0; i < labels.length; ++i) { + if (labels.length > 63 || labels.length === 0) { + result.error = true; + break; + } + } + } + + if (result.error) return null; + return labels.join("."); +}; + +module.exports.toUnicode = function(domain_name, useSTD3) { + var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); + + return { + domain: result.string, + error: result.error + }; +}; + +module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; diff --git a/dist/node_modules/tr46/lib/.gitkeep b/dist/node_modules/tr46/lib/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/dist/node_modules/tr46/lib/mappingTable.json b/dist/node_modules/tr46/lib/mappingTable.json new file mode 100644 index 0000000..89cf19a --- /dev/null +++ b/dist/node_modules/tr46/lib/mappingTable.json @@ -0,0 +1 @@ 
+[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"],[[47,47],"disallowed_STD3_valid"],[[48,57],"valid"],[[58,64],"disallowed_STD3_valid"],[[65,65],"mapped",[97]],[[66,66],"mapped",[98]],[[67,67],"mapped",[99]],[[68,68],"mapped",[100]],[[69,69],"mapped",[101]],[[70,70],"mapped",[102]],[[71,71],"mapped",[103]],[[72,72],"mapped",[104]],[[73,73],"mapped",[105]],[[74,74],"mapped",[106]],[[75,75],"mapped",[107]],[[76,76],"mapped",[108]],[[77,77],"mapped",[109]],[[78,78],"mapped",[110]],[[79,79],"mapped",[111]],[[80,80],"mapped",[112]],[[81,81],"mapped",[113]],[[82,82],"mapped",[114]],[[83,83],"mapped",[115]],[[84,84],"mapped",[116]],[[85,85],"mapped",[117]],[[86,86],"mapped",[118]],[[87,87],"mapped",[119]],[[88,88],"mapped",[120]],[[89,89],"mapped",[121]],[[90,90],"mapped",[122]],[[91,96],"disallowed_STD3_valid"],[[97,122],"valid"],[[123,127],"disallowed_STD3_valid"],[[128,159],"disallowed"],[[160,160],"disallowed_STD3_mapped",[32]],[[161,167],"valid",[],"NV8"],[[168,168],"disallowed_STD3_mapped",[32,776]],[[169,169],"valid",[],"NV8"],[[170,170],"mapped",[97]],[[171,172],"valid",[],"NV8"],[[173,173],"ignored"],[[174,174],"valid",[],"NV8"],[[175,175],"disallowed_STD3_mapped",[32,772]],[[176,177],"valid",[],"NV8"],[[178,178],"mapped",[50]],[[179,179],"mapped",[51]],[[180,180],"disallowed_STD3_mapped",[32,769]],[[181,181],"mapped",[956]],[[182,182],"valid",[],"NV8"],[[183,183],"valid"],[[184,184],"disallowed_STD3_mapped",[32,807]],[[185,185],"mapped",[49]],[[186,186],"mapped",[111]],[[187,187],"valid",[],"NV8"],[[188,188],"mapped",[49,8260,52]],[[189,189],"mapped",[49,8260,50]],[[190,190],"mapped",[51,8260,52]],[[191,191],"valid",[],"NV8"],[[192,192],"mapped",[224]],[[193,193],"mapped",[225]],[[194,194],"mapped",[226]],[[195,195],"mapped",[227]],[[196,196],"mapped",[228]],[[197,197],"mapped",[229]],[[198,198],"mapped",[230]],[[199,199],"mapped",[231]],[[200,200],"mapped",[232]],[[201,201],"mapped",[233]],[[202,202],"mapped",[234]],[[203,203],"mapped",[235]],[[204,204],"mapped",[236]],[[205,205],"mapped",[237]],[[206,206],"mapped",[238]],[[207,207],"mapped",[239]],[[208,208],"mapped",[240]],[[209,209],"mapped",[241]],[[210,210],"mapped",[242]],[[211,211],"mapped",[243]],[[212,212],"mapped",[244]],[[213,213],"mapped",[245]],[[214,214],"mapped",[246]],[[215,215],"valid",[],"NV8"],[[216,216],"mapped",[248]],[[217,217],"mapped",[249]],[[218,218],"mapped",[250]],[[219,219],"mapped",[251]],[[220,220],"mapped",[252]],[[221,221],"mapped",[253]],[[222,222],"mapped",[254]],[[223,223],"deviation",[115,115]],[[224,246],"valid"],[[247,247],"valid",[],"NV8"],[[248,255],"valid"],[[256,256],"mapped",[257]],[[257,257],"valid"],[[258,258],"mapped",[259]],[[259,259],"valid"],[[260,260],"mapped",[261]],[[261,261],"valid"],[[262,262],"mapped",[263]],[[263,263],"valid"],[[264,264],"mapped",[265]],[[265,265],"valid"],[[266,266],"mapped",[267]],[[267,267],"valid"],[[268,268],"mapped",[269]],[[269,269],"valid"],[[270,270],"mapped",[271]],[[271,271],"valid"],[[272,272],"mapped",[273]],[[273,273],"valid"],[[274,274],"mapped",[275]],[[275,275],"valid"],[[276,276],"mapped",[277]],[[277,277],"valid"],[[278,278],"mapped",[279]],[[279,279],"valid"],[[280,280],"mapped",[281]],[[281,281],"valid"],[[282,282],"mapped",[283]],[[283,283],"valid"],[[284,284],"mapped",[285]],[[285,285],"valid"],[[286,286],"mapped",[287]],[[287,287],"valid"],[[288,288],"mapped",[289]],[[289,289],"valid"],[[290,290],"mapped",[291]],[[291,291],"valid"],[[292,292],"mapped",[293]],[[293,293],"valid"],[[294,294],"mapped",[295]],[[295,295],"valid"],[[296,296],"m
apped",[297]],[[297,297],"valid"],[[298,298],"mapped",[299]],[[299,299],"valid"],[[300,300],"mapped",[301]],[[301,301],"valid"],[[302,302],"mapped",[303]],[[303,303],"valid"],[[304,304],"mapped",[105,775]],[[305,305],"valid"],[[306,307],"mapped",[105,106]],[[308,308],"mapped",[309]],[[309,309],"valid"],[[310,310],"mapped",[311]],[[311,312],"valid"],[[313,313],"mapped",[314]],[[314,314],"valid"],[[315,315],"mapped",[316]],[[316,316],"valid"],[[317,317],"mapped",[318]],[[318,318],"valid"],[[319,320],"mapped",[108,183]],[[321,321],"mapped",[322]],[[322,322],"valid"],[[323,323],"mapped",[324]],[[324,324],"valid"],[[325,325],"mapped",[326]],[[326,326],"valid"],[[327,327],"mapped",[328]],[[328,328],"valid"],[[329,329],"mapped",[700,110]],[[330,330],"mapped",[331]],[[331,331],"valid"],[[332,332],"mapped",[333]],[[333,333],"valid"],[[334,334],"mapped",[335]],[[335,335],"valid"],[[336,336],"mapped",[337]],[[337,337],"valid"],[[338,338],"mapped",[339]],[[339,339],"valid"],[[340,340],"mapped",[341]],[[341,341],"valid"],[[342,342],"mapped",[343]],[[343,343],"valid"],[[344,344],"mapped",[345]],[[345,345],"valid"],[[346,346],"mapped",[347]],[[347,347],"valid"],[[348,348],"mapped",[349]],[[349,349],"valid"],[[350,350],"mapped",[351]],[[351,351],"valid"],[[352,352],"mapped",[353]],[[353,353],"valid"],[[354,354],"mapped",[355]],[[355,355],"valid"],[[356,356],"mapped",[357]],[[357,357],"valid"],[[358,358],"mapped",[359]],[[359,359],"valid"],[[360,360],"mapped",[361]],[[361,361],"valid"],[[362,362],"mapped",[363]],[[363,363],"valid"],[[364,364],"mapped",[365]],[[365,365],"valid"],[[366,366],"mapped",[367]],[[367,367],"valid"],[[368,368],"mapped",[369]],[[369,369],"valid"],[[370,370],"mapped",[371]],[[371,371],"valid"],[[372,372],"mapped",[373]],[[373,373],"valid"],[[374,374],"mapped",[375]],[[375,375],"valid"],[[376,376],"mapped",[255]],[[377,377],"mapped",[378]],[[378,378],"valid"],[[379,379],"mapped",[380]],[[380,380],"valid"],[[381,381],"mapped",[382]],[[382,382],"valid"],[[383,383],"mapped",[115]],[[384,384],"valid"],[[385,385],"mapped",[595]],[[386,386],"mapped",[387]],[[387,387],"valid"],[[388,388],"mapped",[389]],[[389,389],"valid"],[[390,390],"mapped",[596]],[[391,391],"mapped",[392]],[[392,392],"valid"],[[393,393],"mapped",[598]],[[394,394],"mapped",[599]],[[395,395],"mapped",[396]],[[396,397],"valid"],[[398,398],"mapped",[477]],[[399,399],"mapped",[601]],[[400,400],"mapped",[603]],[[401,401],"mapped",[402]],[[402,402],"valid"],[[403,403],"mapped",[608]],[[404,404],"mapped",[611]],[[405,405],"valid"],[[406,406],"mapped",[617]],[[407,407],"mapped",[616]],[[408,408],"mapped",[409]],[[409,411],"valid"],[[412,412],"mapped",[623]],[[413,413],"mapped",[626]],[[414,414],"valid"],[[415,415],"mapped",[629]],[[416,416],"mapped",[417]],[[417,417],"valid"],[[418,418],"mapped",[419]],[[419,419],"valid"],[[420,420],"mapped",[421]],[[421,421],"valid"],[[422,422],"mapped",[640]],[[423,423],"mapped",[424]],[[424,424],"valid"],[[425,425],"mapped",[643]],[[426,427],"valid"],[[428,428],"mapped",[429]],[[429,429],"valid"],[[430,430],"mapped",[648]],[[431,431],"mapped",[432]],[[432,432],"valid"],[[433,433],"mapped",[650]],[[434,434],"mapped",[651]],[[435,435],"mapped",[436]],[[436,436],"valid"],[[437,437],"mapped",[438]],[[438,438],"valid"],[[439,439],"mapped",[658]],[[440,440],"mapped",[441]],[[441,443],"valid"],[[444,444],"mapped",[445]],[[445,451],"valid"],[[452,454],"mapped",[100,382]],[[455,457],"mapped",[108,106]],[[458,460],"mapped",[110,106]],[[461,461],"mapped",[462]],[[462,462],"valid"],[[463,463],"mapped",[464]
],[[464,464],"valid"],[[465,465],"mapped",[466]],[[466,466],"valid"],[[467,467],"mapped",[468]],[[468,468],"valid"],[[469,469],"mapped",[470]],[[470,470],"valid"],[[471,471],"mapped",[472]],[[472,472],"valid"],[[473,473],"mapped",[474]],[[474,474],"valid"],[[475,475],"mapped",[476]],[[476,477],"valid"],[[478,478],"mapped",[479]],[[479,479],"valid"],[[480,480],"mapped",[481]],[[481,481],"valid"],[[482,482],"mapped",[483]],[[483,483],"valid"],[[484,484],"mapped",[485]],[[485,485],"valid"],[[486,486],"mapped",[487]],[[487,487],"valid"],[[488,488],"mapped",[489]],[[489,489],"valid"],[[490,490],"mapped",[491]],[[491,491],"valid"],[[492,492],"mapped",[493]],[[493,493],"valid"],[[494,494],"mapped",[495]],[[495,496],"valid"],[[497,499],"mapped",[100,122]],[[500,500],"mapped",[501]],[[501,501],"valid"],[[502,502],"mapped",[405]],[[503,503],"mapped",[447]],[[504,504],"mapped",[505]],[[505,505],"valid"],[[506,506],"mapped",[507]],[[507,507],"valid"],[[508,508],"mapped",[509]],[[509,509],"valid"],[[510,510],"mapped",[511]],[[511,511],"valid"],[[512,512],"mapped",[513]],[[513,513],"valid"],[[514,514],"mapped",[515]],[[515,515],"valid"],[[516,516],"mapped",[517]],[[517,517],"valid"],[[518,518],"mapped",[519]],[[519,519],"valid"],[[520,520],"mapped",[521]],[[521,521],"valid"],[[522,522],"mapped",[523]],[[523,523],"valid"],[[524,524],"mapped",[525]],[[525,525],"valid"],[[526,526],"mapped",[527]],[[527,527],"valid"],[[528,528],"mapped",[529]],[[529,529],"valid"],[[530,530],"mapped",[531]],[[531,531],"valid"],[[532,532],"mapped",[533]],[[533,533],"valid"],[[534,534],"mapped",[535]],[[535,535],"valid"],[[536,536],"mapped",[537]],[[537,537],"valid"],[[538,538],"mapped",[539]],[[539,539],"valid"],[[540,540],"mapped",[541]],[[541,541],"valid"],[[542,542],"mapped",[543]],[[543,543],"valid"],[[544,544],"mapped",[414]],[[545,545],"valid"],[[546,546],"mapped",[547]],[[547,547],"valid"],[[548,548],"mapped",[549]],[[549,549],"valid"],[[550,550],"mapped",[551]],[[551,551],"valid"],[[552,552],"mapped",[553]],[[553,553],"valid"],[[554,554],"mapped",[555]],[[555,555],"valid"],[[556,556],"mapped",[557]],[[557,557],"valid"],[[558,558],"mapped",[559]],[[559,559],"valid"],[[560,560],"mapped",[561]],[[561,561],"valid"],[[562,562],"mapped",[563]],[[563,563],"valid"],[[564,566],"valid"],[[567,569],"valid"],[[570,570],"mapped",[11365]],[[571,571],"mapped",[572]],[[572,572],"valid"],[[573,573],"mapped",[410]],[[574,574],"mapped",[11366]],[[575,576],"valid"],[[577,577],"mapped",[578]],[[578,578],"valid"],[[579,579],"mapped",[384]],[[580,580],"mapped",[649]],[[581,581],"mapped",[652]],[[582,582],"mapped",[583]],[[583,583],"valid"],[[584,584],"mapped",[585]],[[585,585],"valid"],[[586,586],"mapped",[587]],[[587,587],"valid"],[[588,588],"mapped",[589]],[[589,589],"valid"],[[590,590],"mapped",[591]],[[591,591],"valid"],[[592,680],"valid"],[[681,685],"valid"],[[686,687],"valid"],[[688,688],"mapped",[104]],[[689,689],"mapped",[614]],[[690,690],"mapped",[106]],[[691,691],"mapped",[114]],[[692,692],"mapped",[633]],[[693,693],"mapped",[635]],[[694,694],"mapped",[641]],[[695,695],"mapped",[119]],[[696,696],"mapped",[121]],[[697,705],"valid"],[[706,709],"valid",[],"NV8"],[[710,721],"valid"],[[722,727],"valid",[],"NV8"],[[728,728],"disallowed_STD3_mapped",[32,774]],[[729,729],"disallowed_STD3_mapped",[32,775]],[[730,730],"disallowed_STD3_mapped",[32,778]],[[731,731],"disallowed_STD3_mapped",[32,808]],[[732,732],"disallowed_STD3_mapped",[32,771]],[[733,733],"disallowed_STD3_mapped",[32,779]],[[734,734],"valid",[],"NV8"],[[735,735],"valid",[],"NV
8"],[[736,736],"mapped",[611]],[[737,737],"mapped",[108]],[[738,738],"mapped",[115]],[[739,739],"mapped",[120]],[[740,740],"mapped",[661]],[[741,745],"valid",[],"NV8"],[[746,747],"valid",[],"NV8"],[[748,748],"valid"],[[749,749],"valid",[],"NV8"],[[750,750],"valid"],[[751,767],"valid",[],"NV8"],[[768,831],"valid"],[[832,832],"mapped",[768]],[[833,833],"mapped",[769]],[[834,834],"valid"],[[835,835],"mapped",[787]],[[836,836],"mapped",[776,769]],[[837,837],"mapped",[953]],[[838,846],"valid"],[[847,847],"ignored"],[[848,855],"valid"],[[856,860],"valid"],[[861,863],"valid"],[[864,865],"valid"],[[866,866],"valid"],[[867,879],"valid"],[[880,880],"mapped",[881]],[[881,881],"valid"],[[882,882],"mapped",[883]],[[883,883],"valid"],[[884,884],"mapped",[697]],[[885,885],"valid"],[[886,886],"mapped",[887]],[[887,887],"valid"],[[888,889],"disallowed"],[[890,890],"disallowed_STD3_mapped",[32,953]],[[891,893],"valid"],[[894,894],"disallowed_STD3_mapped",[59]],[[895,895],"mapped",[1011]],[[896,899],"disallowed"],[[900,900],"disallowed_STD3_mapped",[32,769]],[[901,901],"disallowed_STD3_mapped",[32,776,769]],[[902,902],"mapped",[940]],[[903,903],"mapped",[183]],[[904,904],"mapped",[941]],[[905,905],"mapped",[942]],[[906,906],"mapped",[943]],[[907,907],"disallowed"],[[908,908],"mapped",[972]],[[909,909],"disallowed"],[[910,910],"mapped",[973]],[[911,911],"mapped",[974]],[[912,912],"valid"],[[913,913],"mapped",[945]],[[914,914],"mapped",[946]],[[915,915],"mapped",[947]],[[916,916],"mapped",[948]],[[917,917],"mapped",[949]],[[918,918],"mapped",[950]],[[919,919],"mapped",[951]],[[920,920],"mapped",[952]],[[921,921],"mapped",[953]],[[922,922],"mapped",[954]],[[923,923],"mapped",[955]],[[924,924],"mapped",[956]],[[925,925],"mapped",[957]],[[926,926],"mapped",[958]],[[927,927],"mapped",[959]],[[928,928],"mapped",[960]],[[929,929],"mapped",[961]],[[930,930],"disallowed"],[[931,931],"mapped",[963]],[[932,932],"mapped",[964]],[[933,933],"mapped",[965]],[[934,934],"mapped",[966]],[[935,935],"mapped",[967]],[[936,936],"mapped",[968]],[[937,937],"mapped",[969]],[[938,938],"mapped",[970]],[[939,939],"mapped",[971]],[[940,961],"valid"],[[962,962],"deviation",[963]],[[963,974],"valid"],[[975,975],"mapped",[983]],[[976,976],"mapped",[946]],[[977,977],"mapped",[952]],[[978,978],"mapped",[965]],[[979,979],"mapped",[973]],[[980,980],"mapped",[971]],[[981,981],"mapped",[966]],[[982,982],"mapped",[960]],[[983,983],"valid"],[[984,984],"mapped",[985]],[[985,985],"valid"],[[986,986],"mapped",[987]],[[987,987],"valid"],[[988,988],"mapped",[989]],[[989,989],"valid"],[[990,990],"mapped",[991]],[[991,991],"valid"],[[992,992],"mapped",[993]],[[993,993],"valid"],[[994,994],"mapped",[995]],[[995,995],"valid"],[[996,996],"mapped",[997]],[[997,997],"valid"],[[998,998],"mapped",[999]],[[999,999],"valid"],[[1000,1000],"mapped",[1001]],[[1001,1001],"valid"],[[1002,1002],"mapped",[1003]],[[1003,1003],"valid"],[[1004,1004],"mapped",[1005]],[[1005,1005],"valid"],[[1006,1006],"mapped",[1007]],[[1007,1007],"valid"],[[1008,1008],"mapped",[954]],[[1009,1009],"mapped",[961]],[[1010,1010],"mapped",[963]],[[1011,1011],"valid"],[[1012,1012],"mapped",[952]],[[1013,1013],"mapped",[949]],[[1014,1014],"valid",[],"NV8"],[[1015,1015],"mapped",[1016]],[[1016,1016],"valid"],[[1017,1017],"mapped",[963]],[[1018,1018],"mapped",[1019]],[[1019,1019],"valid"],[[1020,1020],"valid"],[[1021,1021],"mapped",[891]],[[1022,1022],"mapped",[892]],[[1023,1023],"mapped",[893]],[[1024,1024],"mapped",[1104]],[[1025,1025],"mapped",[1105]],[[1026,1026],"mapped",[1106]],[[1027,1027],"ma
pped",[1107]],[[1028,1028],"mapped",[1108]],[[1029,1029],"mapped",[1109]],[[1030,1030],"mapped",[1110]],[[1031,1031],"mapped",[1111]],[[1032,1032],"mapped",[1112]],[[1033,1033],"mapped",[1113]],[[1034,1034],"mapped",[1114]],[[1035,1035],"mapped",[1115]],[[1036,1036],"mapped",[1116]],[[1037,1037],"mapped",[1117]],[[1038,1038],"mapped",[1118]],[[1039,1039],"mapped",[1119]],[[1040,1040],"mapped",[1072]],[[1041,1041],"mapped",[1073]],[[1042,1042],"mapped",[1074]],[[1043,1043],"mapped",[1075]],[[1044,1044],"mapped",[1076]],[[1045,1045],"mapped",[1077]],[[1046,1046],"mapped",[1078]],[[1047,1047],"mapped",[1079]],[[1048,1048],"mapped",[1080]],[[1049,1049],"mapped",[1081]],[[1050,1050],"mapped",[1082]],[[1051,1051],"mapped",[1083]],[[1052,1052],"mapped",[1084]],[[1053,1053],"mapped",[1085]],[[1054,1054],"mapped",[1086]],[[1055,1055],"mapped",[1087]],[[1056,1056],"mapped",[1088]],[[1057,1057],"mapped",[1089]],[[1058,1058],"mapped",[1090]],[[1059,1059],"mapped",[1091]],[[1060,1060],"mapped",[1092]],[[1061,1061],"mapped",[1093]],[[1062,1062],"mapped",[1094]],[[1063,1063],"mapped",[1095]],[[1064,1064],"mapped",[1096]],[[1065,1065],"mapped",[1097]],[[1066,1066],"mapped",[1098]],[[1067,1067],"mapped",[1099]],[[1068,1068],"mapped",[1100]],[[1069,1069],"mapped",[1101]],[[1070,1070],"mapped",[1102]],[[1071,1071],"mapped",[1103]],[[1072,1103],"valid"],[[1104,1104],"valid"],[[1105,1116],"valid"],[[1117,1117],"valid"],[[1118,1119],"valid"],[[1120,1120],"mapped",[1121]],[[1121,1121],"valid"],[[1122,1122],"mapped",[1123]],[[1123,1123],"valid"],[[1124,1124],"mapped",[1125]],[[1125,1125],"valid"],[[1126,1126],"mapped",[1127]],[[1127,1127],"valid"],[[1128,1128],"mapped",[1129]],[[1129,1129],"valid"],[[1130,1130],"mapped",[1131]],[[1131,1131],"valid"],[[1132,1132],"mapped",[1133]],[[1133,1133],"valid"],[[1134,1134],"mapped",[1135]],[[1135,1135],"valid"],[[1136,1136],"mapped",[1137]],[[1137,1137],"valid"],[[1138,1138],"mapped",[1139]],[[1139,1139],"valid"],[[1140,1140],"mapped",[1141]],[[1141,1141],"valid"],[[1142,1142],"mapped",[1143]],[[1143,1143],"valid"],[[1144,1144],"mapped",[1145]],[[1145,1145],"valid"],[[1146,1146],"mapped",[1147]],[[1147,1147],"valid"],[[1148,1148],"mapped",[1149]],[[1149,1149],"valid"],[[1150,1150],"mapped",[1151]],[[1151,1151],"valid"],[[1152,1152],"mapped",[1153]],[[1153,1153],"valid"],[[1154,1154],"valid",[],"NV8"],[[1155,1158],"valid"],[[1159,1159],"valid"],[[1160,1161],"valid",[],"NV8"],[[1162,1162],"mapped",[1163]],[[1163,1163],"valid"],[[1164,1164],"mapped",[1165]],[[1165,1165],"valid"],[[1166,1166],"mapped",[1167]],[[1167,1167],"valid"],[[1168,1168],"mapped",[1169]],[[1169,1169],"valid"],[[1170,1170],"mapped",[1171]],[[1171,1171],"valid"],[[1172,1172],"mapped",[1173]],[[1173,1173],"valid"],[[1174,1174],"mapped",[1175]],[[1175,1175],"valid"],[[1176,1176],"mapped",[1177]],[[1177,1177],"valid"],[[1178,1178],"mapped",[1179]],[[1179,1179],"valid"],[[1180,1180],"mapped",[1181]],[[1181,1181],"valid"],[[1182,1182],"mapped",[1183]],[[1183,1183],"valid"],[[1184,1184],"mapped",[1185]],[[1185,1185],"valid"],[[1186,1186],"mapped",[1187]],[[1187,1187],"valid"],[[1188,1188],"mapped",[1189]],[[1189,1189],"valid"],[[1190,1190],"mapped",[1191]],[[1191,1191],"valid"],[[1192,1192],"mapped",[1193]],[[1193,1193],"valid"],[[1194,1194],"mapped",[1195]],[[1195,1195],"valid"],[[1196,1196],"mapped",[1197]],[[1197,1197],"valid"],[[1198,1198],"mapped",[1199]],[[1199,1199],"valid"],[[1200,1200],"mapped",[1201]],[[1201,1201],"valid"],[[1202,1202],"mapped",[1203]],[[1203,1203],"valid"],[[1204,1204],"mapped",[1205]]
,[[1205,1205],"valid"],[[1206,1206],"mapped",[1207]],[[1207,1207],"valid"],[[1208,1208],"mapped",[1209]],[[1209,1209],"valid"],[[1210,1210],"mapped",[1211]],[[1211,1211],"valid"],[[1212,1212],"mapped",[1213]],[[1213,1213],"valid"],[[1214,1214],"mapped",[1215]],[[1215,1215],"valid"],[[1216,1216],"disallowed"],[[1217,1217],"mapped",[1218]],[[1218,1218],"valid"],[[1219,1219],"mapped",[1220]],[[1220,1220],"valid"],[[1221,1221],"mapped",[1222]],[[1222,1222],"valid"],[[1223,1223],"mapped",[1224]],[[1224,1224],"valid"],[[1225,1225],"mapped",[1226]],[[1226,1226],"valid"],[[1227,1227],"mapped",[1228]],[[1228,1228],"valid"],[[1229,1229],"mapped",[1230]],[[1230,1230],"valid"],[[1231,1231],"valid"],[[1232,1232],"mapped",[1233]],[[1233,1233],"valid"],[[1234,1234],"mapped",[1235]],[[1235,1235],"valid"],[[1236,1236],"mapped",[1237]],[[1237,1237],"valid"],[[1238,1238],"mapped",[1239]],[[1239,1239],"valid"],[[1240,1240],"mapped",[1241]],[[1241,1241],"valid"],[[1242,1242],"mapped",[1243]],[[1243,1243],"valid"],[[1244,1244],"mapped",[1245]],[[1245,1245],"valid"],[[1246,1246],"mapped",[1247]],[[1247,1247],"valid"],[[1248,1248],"mapped",[1249]],[[1249,1249],"valid"],[[1250,1250],"mapped",[1251]],[[1251,1251],"valid"],[[1252,1252],"mapped",[1253]],[[1253,1253],"valid"],[[1254,1254],"mapped",[1255]],[[1255,1255],"valid"],[[1256,1256],"mapped",[1257]],[[1257,1257],"valid"],[[1258,1258],"mapped",[1259]],[[1259,1259],"valid"],[[1260,1260],"mapped",[1261]],[[1261,1261],"valid"],[[1262,1262],"mapped",[1263]],[[1263,1263],"valid"],[[1264,1264],"mapped",[1265]],[[1265,1265],"valid"],[[1266,1266],"mapped",[1267]],[[1267,1267],"valid"],[[1268,1268],"mapped",[1269]],[[1269,1269],"valid"],[[1270,1270],"mapped",[1271]],[[1271,1271],"valid"],[[1272,1272],"mapped",[1273]],[[1273,1273],"valid"],[[1274,1274],"mapped",[1275]],[[1275,1275],"valid"],[[1276,1276],"mapped",[1277]],[[1277,1277],"valid"],[[1278,1278],"mapped",[1279]],[[1279,1279],"valid"],[[1280,1280],"mapped",[1281]],[[1281,1281],"valid"],[[1282,1282],"mapped",[1283]],[[1283,1283],"valid"],[[1284,1284],"mapped",[1285]],[[1285,1285],"valid"],[[1286,1286],"mapped",[1287]],[[1287,1287],"valid"],[[1288,1288],"mapped",[1289]],[[1289,1289],"valid"],[[1290,1290],"mapped",[1291]],[[1291,1291],"valid"],[[1292,1292],"mapped",[1293]],[[1293,1293],"valid"],[[1294,1294],"mapped",[1295]],[[1295,1295],"valid"],[[1296,1296],"mapped",[1297]],[[1297,1297],"valid"],[[1298,1298],"mapped",[1299]],[[1299,1299],"valid"],[[1300,1300],"mapped",[1301]],[[1301,1301],"valid"],[[1302,1302],"mapped",[1303]],[[1303,1303],"valid"],[[1304,1304],"mapped",[1305]],[[1305,1305],"valid"],[[1306,1306],"mapped",[1307]],[[1307,1307],"valid"],[[1308,1308],"mapped",[1309]],[[1309,1309],"valid"],[[1310,1310],"mapped",[1311]],[[1311,1311],"valid"],[[1312,1312],"mapped",[1313]],[[1313,1313],"valid"],[[1314,1314],"mapped",[1315]],[[1315,1315],"valid"],[[1316,1316],"mapped",[1317]],[[1317,1317],"valid"],[[1318,1318],"mapped",[1319]],[[1319,1319],"valid"],[[1320,1320],"mapped",[1321]],[[1321,1321],"valid"],[[1322,1322],"mapped",[1323]],[[1323,1323],"valid"],[[1324,1324],"mapped",[1325]],[[1325,1325],"valid"],[[1326,1326],"mapped",[1327]],[[1327,1327],"valid"],[[1328,1328],"disallowed"],[[1329,1329],"mapped",[1377]],[[1330,1330],"mapped",[1378]],[[1331,1331],"mapped",[1379]],[[1332,1332],"mapped",[1380]],[[1333,1333],"mapped",[1381]],[[1334,1334],"mapped",[1382]],[[1335,1335],"mapped",[1383]],[[1336,1336],"mapped",[1384]],[[1337,1337],"mapped",[1385]],[[1338,1338],"mapped",[1386]],[[1339,1339],"mapped",[1387]],[[1340
,1340],"mapped",[1388]],[[1341,1341],"mapped",[1389]],[[1342,1342],"mapped",[1390]],[[1343,1343],"mapped",[1391]],[[1344,1344],"mapped",[1392]],[[1345,1345],"mapped",[1393]],[[1346,1346],"mapped",[1394]],[[1347,1347],"mapped",[1395]],[[1348,1348],"mapped",[1396]],[[1349,1349],"mapped",[1397]],[[1350,1350],"mapped",[1398]],[[1351,1351],"mapped",[1399]],[[1352,1352],"mapped",[1400]],[[1353,1353],"mapped",[1401]],[[1354,1354],"mapped",[1402]],[[1355,1355],"mapped",[1403]],[[1356,1356],"mapped",[1404]],[[1357,1357],"mapped",[1405]],[[1358,1358],"mapped",[1406]],[[1359,1359],"mapped",[1407]],[[1360,1360],"mapped",[1408]],[[1361,1361],"mapped",[1409]],[[1362,1362],"mapped",[1410]],[[1363,1363],"mapped",[1411]],[[1364,1364],"mapped",[1412]],[[1365,1365],"mapped",[1413]],[[1366,1366],"mapped",[1414]],[[1367,1368],"disallowed"],[[1369,1369],"valid"],[[1370,1375],"valid",[],"NV8"],[[1376,1376],"disallowed"],[[1377,1414],"valid"],[[1415,1415],"mapped",[1381,1410]],[[1416,1416],"disallowed"],[[1417,1417],"valid",[],"NV8"],[[1418,1418],"valid",[],"NV8"],[[1419,1420],"disallowed"],[[1421,1422],"valid",[],"NV8"],[[1423,1423],"valid",[],"NV8"],[[1424,1424],"disallowed"],[[1425,1441],"valid"],[[1442,1442],"valid"],[[1443,1455],"valid"],[[1456,1465],"valid"],[[1466,1466],"valid"],[[1467,1469],"valid"],[[1470,1470],"valid",[],"NV8"],[[1471,1471],"valid"],[[1472,1472],"valid",[],"NV8"],[[1473,1474],"valid"],[[1475,1475],"valid",[],"NV8"],[[1476,1476],"valid"],[[1477,1477],"valid"],[[1478,1478],"valid",[],"NV8"],[[1479,1479],"valid"],[[1480,1487],"disallowed"],[[1488,1514],"valid"],[[1515,1519],"disallowed"],[[1520,1524],"valid"],[[1525,1535],"disallowed"],[[1536,1539],"disallowed"],[[1540,1540],"disallowed"],[[1541,1541],"disallowed"],[[1542,1546],"valid",[],"NV8"],[[1547,1547],"valid",[],"NV8"],[[1548,1548],"valid",[],"NV8"],[[1549,1551],"valid",[],"NV8"],[[1552,1557],"valid"],[[1558,1562],"valid"],[[1563,1563],"valid",[],"NV8"],[[1564,1564],"disallowed"],[[1565,1565],"disallowed"],[[1566,1566],"valid",[],"NV8"],[[1567,1567],"valid",[],"NV8"],[[1568,1568],"valid"],[[1569,1594],"valid"],[[1595,1599],"valid"],[[1600,1600],"valid",[],"NV8"],[[1601,1618],"valid"],[[1619,1621],"valid"],[[1622,1624],"valid"],[[1625,1630],"valid"],[[1631,1631],"valid"],[[1632,1641],"valid"],[[1642,1645],"valid",[],"NV8"],[[1646,1647],"valid"],[[1648,1652],"valid"],[[1653,1653],"mapped",[1575,1652]],[[1654,1654],"mapped",[1608,1652]],[[1655,1655],"mapped",[1735,1652]],[[1656,1656],"mapped",[1610,1652]],[[1657,1719],"valid"],[[1720,1721],"valid"],[[1722,1726],"valid"],[[1727,1727],"valid"],[[1728,1742],"valid"],[[1743,1743],"valid"],[[1744,1747],"valid"],[[1748,1748],"valid",[],"NV8"],[[1749,1756],"valid"],[[1757,1757],"disallowed"],[[1758,1758],"valid",[],"NV8"],[[1759,1768],"valid"],[[1769,1769],"valid",[],"NV8"],[[1770,1773],"valid"],[[1774,1775],"valid"],[[1776,1785],"valid"],[[1786,1790],"valid"],[[1791,1791],"valid"],[[1792,1805],"valid",[],"NV8"],[[1806,1806],"disallowed"],[[1807,1807],"disallowed"],[[1808,1836],"valid"],[[1837,1839],"valid"],[[1840,1866],"valid"],[[1867,1868],"disallowed"],[[1869,1871],"valid"],[[1872,1901],"valid"],[[1902,1919],"valid"],[[1920,1968],"valid"],[[1969,1969],"valid"],[[1970,1983],"disallowed"],[[1984,2037],"valid"],[[2038,2042],"valid",[],"NV8"],[[2043,2047],"disallowed"],[[2048,2093],"valid"],[[2094,2095],"disallowed"],[[2096,2110],"valid",[],"NV8"],[[2111,2111],"disallowed"],[[2112,2139],"valid"],[[2140,2141],"disallowed"],[[2142,2142],"valid",[],"NV8"],[[2143,2207],"disallowed"],[[2208,2208],"
valid"],[[2209,2209],"valid"],[[2210,2220],"valid"],[[2221,2226],"valid"],[[2227,2228],"valid"],[[2229,2274],"disallowed"],[[2275,2275],"valid"],[[2276,2302],"valid"],[[2303,2303],"valid"],[[2304,2304],"valid"],[[2305,2307],"valid"],[[2308,2308],"valid"],[[2309,2361],"valid"],[[2362,2363],"valid"],[[2364,2381],"valid"],[[2382,2382],"valid"],[[2383,2383],"valid"],[[2384,2388],"valid"],[[2389,2389],"valid"],[[2390,2391],"valid"],[[2392,2392],"mapped",[2325,2364]],[[2393,2393],"mapped",[2326,2364]],[[2394,2394],"mapped",[2327,2364]],[[2395,2395],"mapped",[2332,2364]],[[2396,2396],"mapped",[2337,2364]],[[2397,2397],"mapped",[2338,2364]],[[2398,2398],"mapped",[2347,2364]],[[2399,2399],"mapped",[2351,2364]],[[2400,2403],"valid"],[[2404,2405],"valid",[],"NV8"],[[2406,2415],"valid"],[[2416,2416],"valid",[],"NV8"],[[2417,2418],"valid"],[[2419,2423],"valid"],[[2424,2424],"valid"],[[2425,2426],"valid"],[[2427,2428],"valid"],[[2429,2429],"valid"],[[2430,2431],"valid"],[[2432,2432],"valid"],[[2433,2435],"valid"],[[2436,2436],"disallowed"],[[2437,2444],"valid"],[[2445,2446],"disallowed"],[[2447,2448],"valid"],[[2449,2450],"disallowed"],[[2451,2472],"valid"],[[2473,2473],"disallowed"],[[2474,2480],"valid"],[[2481,2481],"disallowed"],[[2482,2482],"valid"],[[2483,2485],"disallowed"],[[2486,2489],"valid"],[[2490,2491],"disallowed"],[[2492,2492],"valid"],[[2493,2493],"valid"],[[2494,2500],"valid"],[[2501,2502],"disallowed"],[[2503,2504],"valid"],[[2505,2506],"disallowed"],[[2507,2509],"valid"],[[2510,2510],"valid"],[[2511,2518],"disallowed"],[[2519,2519],"valid"],[[2520,2523],"disallowed"],[[2524,2524],"mapped",[2465,2492]],[[2525,2525],"mapped",[2466,2492]],[[2526,2526],"disallowed"],[[2527,2527],"mapped",[2479,2492]],[[2528,2531],"valid"],[[2532,2533],"disallowed"],[[2534,2545],"valid"],[[2546,2554],"valid",[],"NV8"],[[2555,2555],"valid",[],"NV8"],[[2556,2560],"disallowed"],[[2561,2561],"valid"],[[2562,2562],"valid"],[[2563,2563],"valid"],[[2564,2564],"disallowed"],[[2565,2570],"valid"],[[2571,2574],"disallowed"],[[2575,2576],"valid"],[[2577,2578],"disallowed"],[[2579,2600],"valid"],[[2601,2601],"disallowed"],[[2602,2608],"valid"],[[2609,2609],"disallowed"],[[2610,2610],"valid"],[[2611,2611],"mapped",[2610,2620]],[[2612,2612],"disallowed"],[[2613,2613],"valid"],[[2614,2614],"mapped",[2616,2620]],[[2615,2615],"disallowed"],[[2616,2617],"valid"],[[2618,2619],"disallowed"],[[2620,2620],"valid"],[[2621,2621],"disallowed"],[[2622,2626],"valid"],[[2627,2630],"disallowed"],[[2631,2632],"valid"],[[2633,2634],"disallowed"],[[2635,2637],"valid"],[[2638,2640],"disallowed"],[[2641,2641],"valid"],[[2642,2648],"disallowed"],[[2649,2649],"mapped",[2582,2620]],[[2650,2650],"mapped",[2583,2620]],[[2651,2651],"mapped",[2588,2620]],[[2652,2652],"valid"],[[2653,2653],"disallowed"],[[2654,2654],"mapped",[2603,2620]],[[2655,2661],"disallowed"],[[2662,2676],"valid"],[[2677,2677],"valid"],[[2678,2688],"disallowed"],[[2689,2691],"valid"],[[2692,2692],"disallowed"],[[2693,2699],"valid"],[[2700,2700],"valid"],[[2701,2701],"valid"],[[2702,2702],"disallowed"],[[2703,2705],"valid"],[[2706,2706],"disallowed"],[[2707,2728],"valid"],[[2729,2729],"disallowed"],[[2730,2736],"valid"],[[2737,2737],"disallowed"],[[2738,2739],"valid"],[[2740,2740],"disallowed"],[[2741,2745],"valid"],[[2746,2747],"disallowed"],[[2748,2757],"valid"],[[2758,2758],"disallowed"],[[2759,2761],"valid"],[[2762,2762],"disallowed"],[[2763,2765],"valid"],[[2766,2767],"disallowed"],[[2768,2768],"valid"],[[2769,2783],"disallowed"],[[2784,2784],"valid"],[[2785,2787],"valid"],
[[2788,2789],"disallowed"],[[2790,2799],"valid"],[[2800,2800],"valid",[],"NV8"],[[2801,2801],"valid",[],"NV8"],[[2802,2808],"disallowed"],[[2809,2809],"valid"],[[2810,2816],"disallowed"],[[2817,2819],"valid"],[[2820,2820],"disallowed"],[[2821,2828],"valid"],[[2829,2830],"disallowed"],[[2831,2832],"valid"],[[2833,2834],"disallowed"],[[2835,2856],"valid"],[[2857,2857],"disallowed"],[[2858,2864],"valid"],[[2865,2865],"disallowed"],[[2866,2867],"valid"],[[2868,2868],"disallowed"],[[2869,2869],"valid"],[[2870,2873],"valid"],[[2874,2875],"disallowed"],[[2876,2883],"valid"],[[2884,2884],"valid"],[[2885,2886],"disallowed"],[[2887,2888],"valid"],[[2889,2890],"disallowed"],[[2891,2893],"valid"],[[2894,2901],"disallowed"],[[2902,2903],"valid"],[[2904,2907],"disallowed"],[[2908,2908],"mapped",[2849,2876]],[[2909,2909],"mapped",[2850,2876]],[[2910,2910],"disallowed"],[[2911,2913],"valid"],[[2914,2915],"valid"],[[2916,2917],"disallowed"],[[2918,2927],"valid"],[[2928,2928],"valid",[],"NV8"],[[2929,2929],"valid"],[[2930,2935],"valid",[],"NV8"],[[2936,2945],"disallowed"],[[2946,2947],"valid"],[[2948,2948],"disallowed"],[[2949,2954],"valid"],[[2955,2957],"disallowed"],[[2958,2960],"valid"],[[2961,2961],"disallowed"],[[2962,2965],"valid"],[[2966,2968],"disallowed"],[[2969,2970],"valid"],[[2971,2971],"disallowed"],[[2972,2972],"valid"],[[2973,2973],"disallowed"],[[2974,2975],"valid"],[[2976,2978],"disallowed"],[[2979,2980],"valid"],[[2981,2983],"disallowed"],[[2984,2986],"valid"],[[2987,2989],"disallowed"],[[2990,2997],"valid"],[[2998,2998],"valid"],[[2999,3001],"valid"],[[3002,3005],"disallowed"],[[3006,3010],"valid"],[[3011,3013],"disallowed"],[[3014,3016],"valid"],[[3017,3017],"disallowed"],[[3018,3021],"valid"],[[3022,3023],"disallowed"],[[3024,3024],"valid"],[[3025,3030],"disallowed"],[[3031,3031],"valid"],[[3032,3045],"disallowed"],[[3046,3046],"valid"],[[3047,3055],"valid"],[[3056,3058],"valid",[],"NV8"],[[3059,3066],"valid",[],"NV8"],[[3067,3071],"disallowed"],[[3072,3072],"valid"],[[3073,3075],"valid"],[[3076,3076],"disallowed"],[[3077,3084],"valid"],[[3085,3085],"disallowed"],[[3086,3088],"valid"],[[3089,3089],"disallowed"],[[3090,3112],"valid"],[[3113,3113],"disallowed"],[[3114,3123],"valid"],[[3124,3124],"valid"],[[3125,3129],"valid"],[[3130,3132],"disallowed"],[[3133,3133],"valid"],[[3134,3140],"valid"],[[3141,3141],"disallowed"],[[3142,3144],"valid"],[[3145,3145],"disallowed"],[[3146,3149],"valid"],[[3150,3156],"disallowed"],[[3157,3158],"valid"],[[3159,3159],"disallowed"],[[3160,3161],"valid"],[[3162,3162],"valid"],[[3163,3167],"disallowed"],[[3168,3169],"valid"],[[3170,3171],"valid"],[[3172,3173],"disallowed"],[[3174,3183],"valid"],[[3184,3191],"disallowed"],[[3192,3199],"valid",[],"NV8"],[[3200,3200],"disallowed"],[[3201,3201],"valid"],[[3202,3203],"valid"],[[3204,3204],"disallowed"],[[3205,3212],"valid"],[[3213,3213],"disallowed"],[[3214,3216],"valid"],[[3217,3217],"disallowed"],[[3218,3240],"valid"],[[3241,3241],"disallowed"],[[3242,3251],"valid"],[[3252,3252],"disallowed"],[[3253,3257],"valid"],[[3258,3259],"disallowed"],[[3260,3261],"valid"],[[3262,3268],"valid"],[[3269,3269],"disallowed"],[[3270,3272],"valid"],[[3273,3273],"disallowed"],[[3274,3277],"valid"],[[3278,3284],"disallowed"],[[3285,3286],"valid"],[[3287,3293],"disallowed"],[[3294,3294],"valid"],[[3295,3295],"disallowed"],[[3296,3297],"valid"],[[3298,3299],"valid"],[[3300,3301],"disallowed"],[[3302,3311],"valid"],[[3312,3312],"disallowed"],[[3313,3314],"valid"],[[3315,3328],"disallowed"],[[3329,3329],"valid"],[[3330,3331],"valid"
],[[3332,3332],"disallowed"],[[3333,3340],"valid"],[[3341,3341],"disallowed"],[[3342,3344],"valid"],[[3345,3345],"disallowed"],[[3346,3368],"valid"],[[3369,3369],"valid"],[[3370,3385],"valid"],[[3386,3386],"valid"],[[3387,3388],"disallowed"],[[3389,3389],"valid"],[[3390,3395],"valid"],[[3396,3396],"valid"],[[3397,3397],"disallowed"],[[3398,3400],"valid"],[[3401,3401],"disallowed"],[[3402,3405],"valid"],[[3406,3406],"valid"],[[3407,3414],"disallowed"],[[3415,3415],"valid"],[[3416,3422],"disallowed"],[[3423,3423],"valid"],[[3424,3425],"valid"],[[3426,3427],"valid"],[[3428,3429],"disallowed"],[[3430,3439],"valid"],[[3440,3445],"valid",[],"NV8"],[[3446,3448],"disallowed"],[[3449,3449],"valid",[],"NV8"],[[3450,3455],"valid"],[[3456,3457],"disallowed"],[[3458,3459],"valid"],[[3460,3460],"disallowed"],[[3461,3478],"valid"],[[3479,3481],"disallowed"],[[3482,3505],"valid"],[[3506,3506],"disallowed"],[[3507,3515],"valid"],[[3516,3516],"disallowed"],[[3517,3517],"valid"],[[3518,3519],"disallowed"],[[3520,3526],"valid"],[[3527,3529],"disallowed"],[[3530,3530],"valid"],[[3531,3534],"disallowed"],[[3535,3540],"valid"],[[3541,3541],"disallowed"],[[3542,3542],"valid"],[[3543,3543],"disallowed"],[[3544,3551],"valid"],[[3552,3557],"disallowed"],[[3558,3567],"valid"],[[3568,3569],"disallowed"],[[3570,3571],"valid"],[[3572,3572],"valid",[],"NV8"],[[3573,3584],"disallowed"],[[3585,3634],"valid"],[[3635,3635],"mapped",[3661,3634]],[[3636,3642],"valid"],[[3643,3646],"disallowed"],[[3647,3647],"valid",[],"NV8"],[[3648,3662],"valid"],[[3663,3663],"valid",[],"NV8"],[[3664,3673],"valid"],[[3674,3675],"valid",[],"NV8"],[[3676,3712],"disallowed"],[[3713,3714],"valid"],[[3715,3715],"disallowed"],[[3716,3716],"valid"],[[3717,3718],"disallowed"],[[3719,3720],"valid"],[[3721,3721],"disallowed"],[[3722,3722],"valid"],[[3723,3724],"disallowed"],[[3725,3725],"valid"],[[3726,3731],"disallowed"],[[3732,3735],"valid"],[[3736,3736],"disallowed"],[[3737,3743],"valid"],[[3744,3744],"disallowed"],[[3745,3747],"valid"],[[3748,3748],"disallowed"],[[3749,3749],"valid"],[[3750,3750],"disallowed"],[[3751,3751],"valid"],[[3752,3753],"disallowed"],[[3754,3755],"valid"],[[3756,3756],"disallowed"],[[3757,3762],"valid"],[[3763,3763],"mapped",[3789,3762]],[[3764,3769],"valid"],[[3770,3770],"disallowed"],[[3771,3773],"valid"],[[3774,3775],"disallowed"],[[3776,3780],"valid"],[[3781,3781],"disallowed"],[[3782,3782],"valid"],[[3783,3783],"disallowed"],[[3784,3789],"valid"],[[3790,3791],"disallowed"],[[3792,3801],"valid"],[[3802,3803],"disallowed"],[[3804,3804],"mapped",[3755,3737]],[[3805,3805],"mapped",[3755,3745]],[[3806,3807],"valid"],[[3808,3839],"disallowed"],[[3840,3840],"valid"],[[3841,3850],"valid",[],"NV8"],[[3851,3851],"valid"],[[3852,3852],"mapped",[3851]],[[3853,3863],"valid",[],"NV8"],[[3864,3865],"valid"],[[3866,3871],"valid",[],"NV8"],[[3872,3881],"valid"],[[3882,3892],"valid",[],"NV8"],[[3893,3893],"valid"],[[3894,3894],"valid",[],"NV8"],[[3895,3895],"valid"],[[3896,3896],"valid",[],"NV8"],[[3897,3897],"valid"],[[3898,3901],"valid",[],"NV8"],[[3902,3906],"valid"],[[3907,3907],"mapped",[3906,4023]],[[3908,3911],"valid"],[[3912,3912],"disallowed"],[[3913,3916],"valid"],[[3917,3917],"mapped",[3916,4023]],[[3918,3921],"valid"],[[3922,3922],"mapped",[3921,4023]],[[3923,3926],"valid"],[[3927,3927],"mapped",[3926,4023]],[[3928,3931],"valid"],[[3932,3932],"mapped",[3931,4023]],[[3933,3944],"valid"],[[3945,3945],"mapped",[3904,4021]],[[3946,3946],"valid"],[[3947,3948],"valid"],[[3949,3952],"disallowed"],[[3953,3954],"valid"],[[3955,3955],"m
apped",[3953,3954]],[[3956,3956],"valid"],[[3957,3957],"mapped",[3953,3956]],[[3958,3958],"mapped",[4018,3968]],[[3959,3959],"mapped",[4018,3953,3968]],[[3960,3960],"mapped",[4019,3968]],[[3961,3961],"mapped",[4019,3953,3968]],[[3962,3968],"valid"],[[3969,3969],"mapped",[3953,3968]],[[3970,3972],"valid"],[[3973,3973],"valid",[],"NV8"],[[3974,3979],"valid"],[[3980,3983],"valid"],[[3984,3986],"valid"],[[3987,3987],"mapped",[3986,4023]],[[3988,3989],"valid"],[[3990,3990],"valid"],[[3991,3991],"valid"],[[3992,3992],"disallowed"],[[3993,3996],"valid"],[[3997,3997],"mapped",[3996,4023]],[[3998,4001],"valid"],[[4002,4002],"mapped",[4001,4023]],[[4003,4006],"valid"],[[4007,4007],"mapped",[4006,4023]],[[4008,4011],"valid"],[[4012,4012],"mapped",[4011,4023]],[[4013,4013],"valid"],[[4014,4016],"valid"],[[4017,4023],"valid"],[[4024,4024],"valid"],[[4025,4025],"mapped",[3984,4021]],[[4026,4028],"valid"],[[4029,4029],"disallowed"],[[4030,4037],"valid",[],"NV8"],[[4038,4038],"valid"],[[4039,4044],"valid",[],"NV8"],[[4045,4045],"disallowed"],[[4046,4046],"valid",[],"NV8"],[[4047,4047],"valid",[],"NV8"],[[4048,4049],"valid",[],"NV8"],[[4050,4052],"valid",[],"NV8"],[[4053,4056],"valid",[],"NV8"],[[4057,4058],"valid",[],"NV8"],[[4059,4095],"disallowed"],[[4096,4129],"valid"],[[4130,4130],"valid"],[[4131,4135],"valid"],[[4136,4136],"valid"],[[4137,4138],"valid"],[[4139,4139],"valid"],[[4140,4146],"valid"],[[4147,4149],"valid"],[[4150,4153],"valid"],[[4154,4159],"valid"],[[4160,4169],"valid"],[[4170,4175],"valid",[],"NV8"],[[4176,4185],"valid"],[[4186,4249],"valid"],[[4250,4253],"valid"],[[4254,4255],"valid",[],"NV8"],[[4256,4293],"disallowed"],[[4294,4294],"disallowed"],[[4295,4295],"mapped",[11559]],[[4296,4300],"disallowed"],[[4301,4301],"mapped",[11565]],[[4302,4303],"disallowed"],[[4304,4342],"valid"],[[4343,4344],"valid"],[[4345,4346],"valid"],[[4347,4347],"valid",[],"NV8"],[[4348,4348],"mapped",[4316]],[[4349,4351],"valid"],[[4352,4441],"valid",[],"NV8"],[[4442,4446],"valid",[],"NV8"],[[4447,4448],"disallowed"],[[4449,4514],"valid",[],"NV8"],[[4515,4519],"valid",[],"NV8"],[[4520,4601],"valid",[],"NV8"],[[4602,4607],"valid",[],"NV8"],[[4608,4614],"valid"],[[4615,4615],"valid"],[[4616,4678],"valid"],[[4679,4679],"valid"],[[4680,4680],"valid"],[[4681,4681],"disallowed"],[[4682,4685],"valid"],[[4686,4687],"disallowed"],[[4688,4694],"valid"],[[4695,4695],"disallowed"],[[4696,4696],"valid"],[[4697,4697],"disallowed"],[[4698,4701],"valid"],[[4702,4703],"disallowed"],[[4704,4742],"valid"],[[4743,4743],"valid"],[[4744,4744],"valid"],[[4745,4745],"disallowed"],[[4746,4749],"valid"],[[4750,4751],"disallowed"],[[4752,4782],"valid"],[[4783,4783],"valid"],[[4784,4784],"valid"],[[4785,4785],"disallowed"],[[4786,4789],"valid"],[[4790,4791],"disallowed"],[[4792,4798],"valid"],[[4799,4799],"disallowed"],[[4800,4800],"valid"],[[4801,4801],"disallowed"],[[4802,4805],"valid"],[[4806,4807],"disallowed"],[[4808,4814],"valid"],[[4815,4815],"valid"],[[4816,4822],"valid"],[[4823,4823],"disallowed"],[[4824,4846],"valid"],[[4847,4847],"valid"],[[4848,4878],"valid"],[[4879,4879],"valid"],[[4880,4880],"valid"],[[4881,4881],"disallowed"],[[4882,4885],"valid"],[[4886,4887],"disallowed"],[[4888,4894],"valid"],[[4895,4895],"valid"],[[4896,4934],"valid"],[[4935,4935],"valid"],[[4936,4954],"valid"],[[4955,4956],"disallowed"],[[4957,4958],"valid"],[[4959,4959],"valid"],[[4960,4960],"valid",[],"NV8"],[[4961,4988],"valid",[],"NV8"],[[4989,4991],"disallowed"],[[4992,5007],"valid"],[[5008,5017],"valid",[],"NV8"],[[5018,5023],"disallowed"],[[502
4,5108],"valid"],[[5109,5109],"valid"],[[5110,5111],"disallowed"],[[5112,5112],"mapped",[5104]],[[5113,5113],"mapped",[5105]],[[5114,5114],"mapped",[5106]],[[5115,5115],"mapped",[5107]],[[5116,5116],"mapped",[5108]],[[5117,5117],"mapped",[5109]],[[5118,5119],"disallowed"],[[5120,5120],"valid",[],"NV8"],[[5121,5740],"valid"],[[5741,5742],"valid",[],"NV8"],[[5743,5750],"valid"],[[5751,5759],"valid"],[[5760,5760],"disallowed"],[[5761,5786],"valid"],[[5787,5788],"valid",[],"NV8"],[[5789,5791],"disallowed"],[[5792,5866],"valid"],[[5867,5872],"valid",[],"NV8"],[[5873,5880],"valid"],[[5881,5887],"disallowed"],[[5888,5900],"valid"],[[5901,5901],"disallowed"],[[5902,5908],"valid"],[[5909,5919],"disallowed"],[[5920,5940],"valid"],[[5941,5942],"valid",[],"NV8"],[[5943,5951],"disallowed"],[[5952,5971],"valid"],[[5972,5983],"disallowed"],[[5984,5996],"valid"],[[5997,5997],"disallowed"],[[5998,6000],"valid"],[[6001,6001],"disallowed"],[[6002,6003],"valid"],[[6004,6015],"disallowed"],[[6016,6067],"valid"],[[6068,6069],"disallowed"],[[6070,6099],"valid"],[[6100,6102],"valid",[],"NV8"],[[6103,6103],"valid"],[[6104,6107],"valid",[],"NV8"],[[6108,6108],"valid"],[[6109,6109],"valid"],[[6110,6111],"disallowed"],[[6112,6121],"valid"],[[6122,6127],"disallowed"],[[6128,6137],"valid",[],"NV8"],[[6138,6143],"disallowed"],[[6144,6149],"valid",[],"NV8"],[[6150,6150],"disallowed"],[[6151,6154],"valid",[],"NV8"],[[6155,6157],"ignored"],[[6158,6158],"disallowed"],[[6159,6159],"disallowed"],[[6160,6169],"valid"],[[6170,6175],"disallowed"],[[6176,6263],"valid"],[[6264,6271],"disallowed"],[[6272,6313],"valid"],[[6314,6314],"valid"],[[6315,6319],"disallowed"],[[6320,6389],"valid"],[[6390,6399],"disallowed"],[[6400,6428],"valid"],[[6429,6430],"valid"],[[6431,6431],"disallowed"],[[6432,6443],"valid"],[[6444,6447],"disallowed"],[[6448,6459],"valid"],[[6460,6463],"disallowed"],[[6464,6464],"valid",[],"NV8"],[[6465,6467],"disallowed"],[[6468,6469],"valid",[],"NV8"],[[6470,6509],"valid"],[[6510,6511],"disallowed"],[[6512,6516],"valid"],[[6517,6527],"disallowed"],[[6528,6569],"valid"],[[6570,6571],"valid"],[[6572,6575],"disallowed"],[[6576,6601],"valid"],[[6602,6607],"disallowed"],[[6608,6617],"valid"],[[6618,6618],"valid",[],"XV8"],[[6619,6621],"disallowed"],[[6622,6623],"valid",[],"NV8"],[[6624,6655],"valid",[],"NV8"],[[6656,6683],"valid"],[[6684,6685],"disallowed"],[[6686,6687],"valid",[],"NV8"],[[6688,6750],"valid"],[[6751,6751],"disallowed"],[[6752,6780],"valid"],[[6781,6782],"disallowed"],[[6783,6793],"valid"],[[6794,6799],"disallowed"],[[6800,6809],"valid"],[[6810,6815],"disallowed"],[[6816,6822],"valid",[],"NV8"],[[6823,6823],"valid"],[[6824,6829],"valid",[],"NV8"],[[6830,6831],"disallowed"],[[6832,6845],"valid"],[[6846,6846],"valid",[],"NV8"],[[6847,6911],"disallowed"],[[6912,6987],"valid"],[[6988,6991],"disallowed"],[[6992,7001],"valid"],[[7002,7018],"valid",[],"NV8"],[[7019,7027],"valid"],[[7028,7036],"valid",[],"NV8"],[[7037,7039],"disallowed"],[[7040,7082],"valid"],[[7083,7085],"valid"],[[7086,7097],"valid"],[[7098,7103],"valid"],[[7104,7155],"valid"],[[7156,7163],"disallowed"],[[7164,7167],"valid",[],"NV8"],[[7168,7223],"valid"],[[7224,7226],"disallowed"],[[7227,7231],"valid",[],"NV8"],[[7232,7241],"valid"],[[7242,7244],"disallowed"],[[7245,7293],"valid"],[[7294,7295],"valid",[],"NV8"],[[7296,7359],"disallowed"],[[7360,7367],"valid",[],"NV8"],[[7368,7375],"disallowed"],[[7376,7378],"valid"],[[7379,7379],"valid",[],"NV8"],[[7380,7410],"valid"],[[7411,7414],"valid"],[[7415,7415],"disallowed"],[[7416,7417],"valid"],[[741
8,7423],"disallowed"],[[7424,7467],"valid"],[[7468,7468],"mapped",[97]],[[7469,7469],"mapped",[230]],[[7470,7470],"mapped",[98]],[[7471,7471],"valid"],[[7472,7472],"mapped",[100]],[[7473,7473],"mapped",[101]],[[7474,7474],"mapped",[477]],[[7475,7475],"mapped",[103]],[[7476,7476],"mapped",[104]],[[7477,7477],"mapped",[105]],[[7478,7478],"mapped",[106]],[[7479,7479],"mapped",[107]],[[7480,7480],"mapped",[108]],[[7481,7481],"mapped",[109]],[[7482,7482],"mapped",[110]],[[7483,7483],"valid"],[[7484,7484],"mapped",[111]],[[7485,7485],"mapped",[547]],[[7486,7486],"mapped",[112]],[[7487,7487],"mapped",[114]],[[7488,7488],"mapped",[116]],[[7489,7489],"mapped",[117]],[[7490,7490],"mapped",[119]],[[7491,7491],"mapped",[97]],[[7492,7492],"mapped",[592]],[[7493,7493],"mapped",[593]],[[7494,7494],"mapped",[7426]],[[7495,7495],"mapped",[98]],[[7496,7496],"mapped",[100]],[[7497,7497],"mapped",[101]],[[7498,7498],"mapped",[601]],[[7499,7499],"mapped",[603]],[[7500,7500],"mapped",[604]],[[7501,7501],"mapped",[103]],[[7502,7502],"valid"],[[7503,7503],"mapped",[107]],[[7504,7504],"mapped",[109]],[[7505,7505],"mapped",[331]],[[7506,7506],"mapped",[111]],[[7507,7507],"mapped",[596]],[[7508,7508],"mapped",[7446]],[[7509,7509],"mapped",[7447]],[[7510,7510],"mapped",[112]],[[7511,7511],"mapped",[116]],[[7512,7512],"mapped",[117]],[[7513,7513],"mapped",[7453]],[[7514,7514],"mapped",[623]],[[7515,7515],"mapped",[118]],[[7516,7516],"mapped",[7461]],[[7517,7517],"mapped",[946]],[[7518,7518],"mapped",[947]],[[7519,7519],"mapped",[948]],[[7520,7520],"mapped",[966]],[[7521,7521],"mapped",[967]],[[7522,7522],"mapped",[105]],[[7523,7523],"mapped",[114]],[[7524,7524],"mapped",[117]],[[7525,7525],"mapped",[118]],[[7526,7526],"mapped",[946]],[[7527,7527],"mapped",[947]],[[7528,7528],"mapped",[961]],[[7529,7529],"mapped",[966]],[[7530,7530],"mapped",[967]],[[7531,7531],"valid"],[[7532,7543],"valid"],[[7544,7544],"mapped",[1085]],[[7545,7578],"valid"],[[7579,7579],"mapped",[594]],[[7580,7580],"mapped",[99]],[[7581,7581],"mapped",[597]],[[7582,7582],"mapped",[240]],[[7583,7583],"mapped",[604]],[[7584,7584],"mapped",[102]],[[7585,7585],"mapped",[607]],[[7586,7586],"mapped",[609]],[[7587,7587],"mapped",[613]],[[7588,7588],"mapped",[616]],[[7589,7589],"mapped",[617]],[[7590,7590],"mapped",[618]],[[7591,7591],"mapped",[7547]],[[7592,7592],"mapped",[669]],[[7593,7593],"mapped",[621]],[[7594,7594],"mapped",[7557]],[[7595,7595],"mapped",[671]],[[7596,7596],"mapped",[625]],[[7597,7597],"mapped",[624]],[[7598,7598],"mapped",[626]],[[7599,7599],"mapped",[627]],[[7600,7600],"mapped",[628]],[[7601,7601],"mapped",[629]],[[7602,7602],"mapped",[632]],[[7603,7603],"mapped",[642]],[[7604,7604],"mapped",[643]],[[7605,7605],"mapped",[427]],[[7606,7606],"mapped",[649]],[[7607,7607],"mapped",[650]],[[7608,7608],"mapped",[7452]],[[7609,7609],"mapped",[651]],[[7610,7610],"mapped",[652]],[[7611,7611],"mapped",[122]],[[7612,7612],"mapped",[656]],[[7613,7613],"mapped",[657]],[[7614,7614],"mapped",[658]],[[7615,7615],"mapped",[952]],[[7616,7619],"valid"],[[7620,7626],"valid"],[[7627,7654],"valid"],[[7655,7669],"valid"],[[7670,7675],"disallowed"],[[7676,7676],"valid"],[[7677,7677],"valid"],[[7678,7679],"valid"],[[7680,7680],"mapped",[7681]],[[7681,7681],"valid"],[[7682,7682],"mapped",[7683]],[[7683,7683],"valid"],[[7684,7684],"mapped",[7685]],[[7685,7685],"valid"],[[7686,7686],"mapped",[7687]],[[7687,7687],"valid"],[[7688,7688],"mapped",[7689]],[[7689,7689],"valid"],[[7690,7690],"mapped",[7691]],[[7691,7691],"valid"],[[7692,7692],"mapped",[7693]],[[7693,769
3],"valid"],[[7694,7694],"mapped",[7695]],[[7695,7695],"valid"],[[7696,7696],"mapped",[7697]],[[7697,7697],"valid"],[[7698,7698],"mapped",[7699]],[[7699,7699],"valid"],[[7700,7700],"mapped",[7701]],[[7701,7701],"valid"],[[7702,7702],"mapped",[7703]],[[7703,7703],"valid"],[[7704,7704],"mapped",[7705]],[[7705,7705],"valid"],[[7706,7706],"mapped",[7707]],[[7707,7707],"valid"],[[7708,7708],"mapped",[7709]],[[7709,7709],"valid"],[[7710,7710],"mapped",[7711]],[[7711,7711],"valid"],[[7712,7712],"mapped",[7713]],[[7713,7713],"valid"],[[7714,7714],"mapped",[7715]],[[7715,7715],"valid"],[[7716,7716],"mapped",[7717]],[[7717,7717],"valid"],[[7718,7718],"mapped",[7719]],[[7719,7719],"valid"],[[7720,7720],"mapped",[7721]],[[7721,7721],"valid"],[[7722,7722],"mapped",[7723]],[[7723,7723],"valid"],[[7724,7724],"mapped",[7725]],[[7725,7725],"valid"],[[7726,7726],"mapped",[7727]],[[7727,7727],"valid"],[[7728,7728],"mapped",[7729]],[[7729,7729],"valid"],[[7730,7730],"mapped",[7731]],[[7731,7731],"valid"],[[7732,7732],"mapped",[7733]],[[7733,7733],"valid"],[[7734,7734],"mapped",[7735]],[[7735,7735],"valid"],[[7736,7736],"mapped",[7737]],[[7737,7737],"valid"],[[7738,7738],"mapped",[7739]],[[7739,7739],"valid"],[[7740,7740],"mapped",[7741]],[[7741,7741],"valid"],[[7742,7742],"mapped",[7743]],[[7743,7743],"valid"],[[7744,7744],"mapped",[7745]],[[7745,7745],"valid"],[[7746,7746],"mapped",[7747]],[[7747,7747],"valid"],[[7748,7748],"mapped",[7749]],[[7749,7749],"valid"],[[7750,7750],"mapped",[7751]],[[7751,7751],"valid"],[[7752,7752],"mapped",[7753]],[[7753,7753],"valid"],[[7754,7754],"mapped",[7755]],[[7755,7755],"valid"],[[7756,7756],"mapped",[7757]],[[7757,7757],"valid"],[[7758,7758],"mapped",[7759]],[[7759,7759],"valid"],[[7760,7760],"mapped",[7761]],[[7761,7761],"valid"],[[7762,7762],"mapped",[7763]],[[7763,7763],"valid"],[[7764,7764],"mapped",[7765]],[[7765,7765],"valid"],[[7766,7766],"mapped",[7767]],[[7767,7767],"valid"],[[7768,7768],"mapped",[7769]],[[7769,7769],"valid"],[[7770,7770],"mapped",[7771]],[[7771,7771],"valid"],[[7772,7772],"mapped",[7773]],[[7773,7773],"valid"],[[7774,7774],"mapped",[7775]],[[7775,7775],"valid"],[[7776,7776],"mapped",[7777]],[[7777,7777],"valid"],[[7778,7778],"mapped",[7779]],[[7779,7779],"valid"],[[7780,7780],"mapped",[7781]],[[7781,7781],"valid"],[[7782,7782],"mapped",[7783]],[[7783,7783],"valid"],[[7784,7784],"mapped",[7785]],[[7785,7785],"valid"],[[7786,7786],"mapped",[7787]],[[7787,7787],"valid"],[[7788,7788],"mapped",[7789]],[[7789,7789],"valid"],[[7790,7790],"mapped",[7791]],[[7791,7791],"valid"],[[7792,7792],"mapped",[7793]],[[7793,7793],"valid"],[[7794,7794],"mapped",[7795]],[[7795,7795],"valid"],[[7796,7796],"mapped",[7797]],[[7797,7797],"valid"],[[7798,7798],"mapped",[7799]],[[7799,7799],"valid"],[[7800,7800],"mapped",[7801]],[[7801,7801],"valid"],[[7802,7802],"mapped",[7803]],[[7803,7803],"valid"],[[7804,7804],"mapped",[7805]],[[7805,7805],"valid"],[[7806,7806],"mapped",[7807]],[[7807,7807],"valid"],[[7808,7808],"mapped",[7809]],[[7809,7809],"valid"],[[7810,7810],"mapped",[7811]],[[7811,7811],"valid"],[[7812,7812],"mapped",[7813]],[[7813,7813],"valid"],[[7814,7814],"mapped",[7815]],[[7815,7815],"valid"],[[7816,7816],"mapped",[7817]],[[7817,7817],"valid"],[[7818,7818],"mapped",[7819]],[[7819,7819],"valid"],[[7820,7820],"mapped",[7821]],[[7821,7821],"valid"],[[7822,7822],"mapped",[7823]],[[7823,7823],"valid"],[[7824,7824],"mapped",[7825]],[[7825,7825],"valid"],[[7826,7826],"mapped",[7827]],[[7827,7827],"valid"],[[7828,7828],"mapped",[7829]],[[7829,7833],"valid"],[[7834,
7834],"mapped",[97,702]],[[7835,7835],"mapped",[7777]],[[7836,7837],"valid"],[[7838,7838],"mapped",[115,115]],[[7839,7839],"valid"],[[7840,7840],"mapped",[7841]],[[7841,7841],"valid"],[[7842,7842],"mapped",[7843]],[[7843,7843],"valid"],[[7844,7844],"mapped",[7845]],[[7845,7845],"valid"],[[7846,7846],"mapped",[7847]],[[7847,7847],"valid"],[[7848,7848],"mapped",[7849]],[[7849,7849],"valid"],[[7850,7850],"mapped",[7851]],[[7851,7851],"valid"],[[7852,7852],"mapped",[7853]],[[7853,7853],"valid"],[[7854,7854],"mapped",[7855]],[[7855,7855],"valid"],[[7856,7856],"mapped",[7857]],[[7857,7857],"valid"],[[7858,7858],"mapped",[7859]],[[7859,7859],"valid"],[[7860,7860],"mapped",[7861]],[[7861,7861],"valid"],[[7862,7862],"mapped",[7863]],[[7863,7863],"valid"],[[7864,7864],"mapped",[7865]],[[7865,7865],"valid"],[[7866,7866],"mapped",[7867]],[[7867,7867],"valid"],[[7868,7868],"mapped",[7869]],[[7869,7869],"valid"],[[7870,7870],"mapped",[7871]],[[7871,7871],"valid"],[[7872,7872],"mapped",[7873]],[[7873,7873],"valid"],[[7874,7874],"mapped",[7875]],[[7875,7875],"valid"],[[7876,7876],"mapped",[7877]],[[7877,7877],"valid"],[[7878,7878],"mapped",[7879]],[[7879,7879],"valid"],[[7880,7880],"mapped",[7881]],[[7881,7881],"valid"],[[7882,7882],"mapped",[7883]],[[7883,7883],"valid"],[[7884,7884],"mapped",[7885]],[[7885,7885],"valid"],[[7886,7886],"mapped",[7887]],[[7887,7887],"valid"],[[7888,7888],"mapped",[7889]],[[7889,7889],"valid"],[[7890,7890],"mapped",[7891]],[[7891,7891],"valid"],[[7892,7892],"mapped",[7893]],[[7893,7893],"valid"],[[7894,7894],"mapped",[7895]],[[7895,7895],"valid"],[[7896,7896],"mapped",[7897]],[[7897,7897],"valid"],[[7898,7898],"mapped",[7899]],[[7899,7899],"valid"],[[7900,7900],"mapped",[7901]],[[7901,7901],"valid"],[[7902,7902],"mapped",[7903]],[[7903,7903],"valid"],[[7904,7904],"mapped",[7905]],[[7905,7905],"valid"],[[7906,7906],"mapped",[7907]],[[7907,7907],"valid"],[[7908,7908],"mapped",[7909]],[[7909,7909],"valid"],[[7910,7910],"mapped",[7911]],[[7911,7911],"valid"],[[7912,7912],"mapped",[7913]],[[7913,7913],"valid"],[[7914,7914],"mapped",[7915]],[[7915,7915],"valid"],[[7916,7916],"mapped",[7917]],[[7917,7917],"valid"],[[7918,7918],"mapped",[7919]],[[7919,7919],"valid"],[[7920,7920],"mapped",[7921]],[[7921,7921],"valid"],[[7922,7922],"mapped",[7923]],[[7923,7923],"valid"],[[7924,7924],"mapped",[7925]],[[7925,7925],"valid"],[[7926,7926],"mapped",[7927]],[[7927,7927],"valid"],[[7928,7928],"mapped",[7929]],[[7929,7929],"valid"],[[7930,7930],"mapped",[7931]],[[7931,7931],"valid"],[[7932,7932],"mapped",[7933]],[[7933,7933],"valid"],[[7934,7934],"mapped",[7935]],[[7935,7935],"valid"],[[7936,7943],"valid"],[[7944,7944],"mapped",[7936]],[[7945,7945],"mapped",[7937]],[[7946,7946],"mapped",[7938]],[[7947,7947],"mapped",[7939]],[[7948,7948],"mapped",[7940]],[[7949,7949],"mapped",[7941]],[[7950,7950],"mapped",[7942]],[[7951,7951],"mapped",[7943]],[[7952,7957],"valid"],[[7958,7959],"disallowed"],[[7960,7960],"mapped",[7952]],[[7961,7961],"mapped",[7953]],[[7962,7962],"mapped",[7954]],[[7963,7963],"mapped",[7955]],[[7964,7964],"mapped",[7956]],[[7965,7965],"mapped",[7957]],[[7966,7967],"disallowed"],[[7968,7975],"valid"],[[7976,7976],"mapped",[7968]],[[7977,7977],"mapped",[7969]],[[7978,7978],"mapped",[7970]],[[7979,7979],"mapped",[7971]],[[7980,7980],"mapped",[7972]],[[7981,7981],"mapped",[7973]],[[7982,7982],"mapped",[7974]],[[7983,7983],"mapped",[7975]],[[7984,7991],"valid"],[[7992,7992],"mapped",[7984]],[[7993,7993],"mapped",[7985]],[[7994,7994],"mapped",[7986]],[[7995,7995],"mapped",[7987]],[[799
6,7996],"mapped",[7988]],[[7997,7997],"mapped",[7989]],[[7998,7998],"mapped",[7990]],[[7999,7999],"mapped",[7991]],[[8000,8005],"valid"],[[8006,8007],"disallowed"],[[8008,8008],"mapped",[8000]],[[8009,8009],"mapped",[8001]],[[8010,8010],"mapped",[8002]],[[8011,8011],"mapped",[8003]],[[8012,8012],"mapped",[8004]],[[8013,8013],"mapped",[8005]],[[8014,8015],"disallowed"],[[8016,8023],"valid"],[[8024,8024],"disallowed"],[[8025,8025],"mapped",[8017]],[[8026,8026],"disallowed"],[[8027,8027],"mapped",[8019]],[[8028,8028],"disallowed"],[[8029,8029],"mapped",[8021]],[[8030,8030],"disallowed"],[[8031,8031],"mapped",[8023]],[[8032,8039],"valid"],[[8040,8040],"mapped",[8032]],[[8041,8041],"mapped",[8033]],[[8042,8042],"mapped",[8034]],[[8043,8043],"mapped",[8035]],[[8044,8044],"mapped",[8036]],[[8045,8045],"mapped",[8037]],[[8046,8046],"mapped",[8038]],[[8047,8047],"mapped",[8039]],[[8048,8048],"valid"],[[8049,8049],"mapped",[940]],[[8050,8050],"valid"],[[8051,8051],"mapped",[941]],[[8052,8052],"valid"],[[8053,8053],"mapped",[942]],[[8054,8054],"valid"],[[8055,8055],"mapped",[943]],[[8056,8056],"valid"],[[8057,8057],"mapped",[972]],[[8058,8058],"valid"],[[8059,8059],"mapped",[973]],[[8060,8060],"valid"],[[8061,8061],"mapped",[974]],[[8062,8063],"disallowed"],[[8064,8064],"mapped",[7936,953]],[[8065,8065],"mapped",[7937,953]],[[8066,8066],"mapped",[7938,953]],[[8067,8067],"mapped",[7939,953]],[[8068,8068],"mapped",[7940,953]],[[8069,8069],"mapped",[7941,953]],[[8070,8070],"mapped",[7942,953]],[[8071,8071],"mapped",[7943,953]],[[8072,8072],"mapped",[7936,953]],[[8073,8073],"mapped",[7937,953]],[[8074,8074],"mapped",[7938,953]],[[8075,8075],"mapped",[7939,953]],[[8076,8076],"mapped",[7940,953]],[[8077,8077],"mapped",[7941,953]],[[8078,8078],"mapped",[7942,953]],[[8079,8079],"mapped",[7943,953]],[[8080,8080],"mapped",[7968,953]],[[8081,8081],"mapped",[7969,953]],[[8082,8082],"mapped",[7970,953]],[[8083,8083],"mapped",[7971,953]],[[8084,8084],"mapped",[7972,953]],[[8085,8085],"mapped",[7973,953]],[[8086,8086],"mapped",[7974,953]],[[8087,8087],"mapped",[7975,953]],[[8088,8088],"mapped",[7968,953]],[[8089,8089],"mapped",[7969,953]],[[8090,8090],"mapped",[7970,953]],[[8091,8091],"mapped",[7971,953]],[[8092,8092],"mapped",[7972,953]],[[8093,8093],"mapped",[7973,953]],[[8094,8094],"mapped",[7974,953]],[[8095,8095],"mapped",[7975,953]],[[8096,8096],"mapped",[8032,953]],[[8097,8097],"mapped",[8033,953]],[[8098,8098],"mapped",[8034,953]],[[8099,8099],"mapped",[8035,953]],[[8100,8100],"mapped",[8036,953]],[[8101,8101],"mapped",[8037,953]],[[8102,8102],"mapped",[8038,953]],[[8103,8103],"mapped",[8039,953]],[[8104,8104],"mapped",[8032,953]],[[8105,8105],"mapped",[8033,953]],[[8106,8106],"mapped",[8034,953]],[[8107,8107],"mapped",[8035,953]],[[8108,8108],"mapped",[8036,953]],[[8109,8109],"mapped",[8037,953]],[[8110,8110],"mapped",[8038,953]],[[8111,8111],"mapped",[8039,953]],[[8112,8113],"valid"],[[8114,8114],"mapped",[8048,953]],[[8115,8115],"mapped",[945,953]],[[8116,8116],"mapped",[940,953]],[[8117,8117],"disallowed"],[[8118,8118],"valid"],[[8119,8119],"mapped",[8118,953]],[[8120,8120],"mapped",[8112]],[[8121,8121],"mapped",[8113]],[[8122,8122],"mapped",[8048]],[[8123,8123],"mapped",[940]],[[8124,8124],"mapped",[945,953]],[[8125,8125],"disallowed_STD3_mapped",[32,787]],[[8126,8126],"mapped",[953]],[[8127,8127],"disallowed_STD3_mapped",[32,787]],[[8128,8128],"disallowed_STD3_mapped",[32,834]],[[8129,8129],"disallowed_STD3_mapped",[32,776,834]],[[8130,8130],"mapped",[8052,953]],[[8131,8131],"mapped",[951,953]],[[8132,
8132],"mapped",[942,953]],[[8133,8133],"disallowed"],[[8134,8134],"valid"],[[8135,8135],"mapped",[8134,953]],[[8136,8136],"mapped",[8050]],[[8137,8137],"mapped",[941]],[[8138,8138],"mapped",[8052]],[[8139,8139],"mapped",[942]],[[8140,8140],"mapped",[951,953]],[[8141,8141],"disallowed_STD3_mapped",[32,787,768]],[[8142,8142],"disallowed_STD3_mapped",[32,787,769]],[[8143,8143],"disallowed_STD3_mapped",[32,787,834]],[[8144,8146],"valid"],[[8147,8147],"mapped",[912]],[[8148,8149],"disallowed"],[[8150,8151],"valid"],[[8152,8152],"mapped",[8144]],[[8153,8153],"mapped",[8145]],[[8154,8154],"mapped",[8054]],[[8155,8155],"mapped",[943]],[[8156,8156],"disallowed"],[[8157,8157],"disallowed_STD3_mapped",[32,788,768]],[[8158,8158],"disallowed_STD3_mapped",[32,788,769]],[[8159,8159],"disallowed_STD3_mapped",[32,788,834]],[[8160,8162],"valid"],[[8163,8163],"mapped",[944]],[[8164,8167],"valid"],[[8168,8168],"mapped",[8160]],[[8169,8169],"mapped",[8161]],[[8170,8170],"mapped",[8058]],[[8171,8171],"mapped",[973]],[[8172,8172],"mapped",[8165]],[[8173,8173],"disallowed_STD3_mapped",[32,776,768]],[[8174,8174],"disallowed_STD3_mapped",[32,776,769]],[[8175,8175],"disallowed_STD3_mapped",[96]],[[8176,8177],"disallowed"],[[8178,8178],"mapped",[8060,953]],[[8179,8179],"mapped",[969,953]],[[8180,8180],"mapped",[974,953]],[[8181,8181],"disallowed"],[[8182,8182],"valid"],[[8183,8183],"mapped",[8182,953]],[[8184,8184],"mapped",[8056]],[[8185,8185],"mapped",[972]],[[8186,8186],"mapped",[8060]],[[8187,8187],"mapped",[974]],[[8188,8188],"mapped",[969,953]],[[8189,8189],"disallowed_STD3_mapped",[32,769]],[[8190,8190],"disallowed_STD3_mapped",[32,788]],[[8191,8191],"disallowed"],[[8192,8202],"disallowed_STD3_mapped",[32]],[[8203,8203],"ignored"],[[8204,8205],"deviation",[]],[[8206,8207],"disallowed"],[[8208,8208],"valid",[],"NV8"],[[8209,8209],"mapped",[8208]],[[8210,8214],"valid",[],"NV8"],[[8215,8215],"disallowed_STD3_mapped",[32,819]],[[8216,8227],"valid",[],"NV8"],[[8228,8230],"disallowed"],[[8231,8231],"valid",[],"NV8"],[[8232,8238],"disallowed"],[[8239,8239],"disallowed_STD3_mapped",[32]],[[8240,8242],"valid",[],"NV8"],[[8243,8243],"mapped",[8242,8242]],[[8244,8244],"mapped",[8242,8242,8242]],[[8245,8245],"valid",[],"NV8"],[[8246,8246],"mapped",[8245,8245]],[[8247,8247],"mapped",[8245,8245,8245]],[[8248,8251],"valid",[],"NV8"],[[8252,8252],"disallowed_STD3_mapped",[33,33]],[[8253,8253],"valid",[],"NV8"],[[8254,8254],"disallowed_STD3_mapped",[32,773]],[[8255,8262],"valid",[],"NV8"],[[8263,8263],"disallowed_STD3_mapped",[63,63]],[[8264,8264],"disallowed_STD3_mapped",[63,33]],[[8265,8265],"disallowed_STD3_mapped",[33,63]],[[8266,8269],"valid",[],"NV8"],[[8270,8274],"valid",[],"NV8"],[[8275,8276],"valid",[],"NV8"],[[8277,8278],"valid",[],"NV8"],[[8279,8279],"mapped",[8242,8242,8242,8242]],[[8280,8286],"valid",[],"NV8"],[[8287,8287],"disallowed_STD3_mapped",[32]],[[8288,8288],"ignored"],[[8289,8291],"disallowed"],[[8292,8292],"ignored"],[[8293,8293],"disallowed"],[[8294,8297],"disallowed"],[[8298,8303],"disallowed"],[[8304,8304],"mapped",[48]],[[8305,8305],"mapped",[105]],[[8306,8307],"disallowed"],[[8308,8308],"mapped",[52]],[[8309,8309],"mapped",[53]],[[8310,8310],"mapped",[54]],[[8311,8311],"mapped",[55]],[[8312,8312],"mapped",[56]],[[8313,8313],"mapped",[57]],[[8314,8314],"disallowed_STD3_mapped",[43]],[[8315,8315],"mapped",[8722]],[[8316,8316],"disallowed_STD3_mapped",[61]],[[8317,8317],"disallowed_STD3_mapped",[40]],[[8318,8318],"disallowed_STD3_mapped",[41]],[[8319,8319],"mapped",[110]],[[8320,8320],"mapped",[48]],[[8
321,8321],"mapped",[49]],[[8322,8322],"mapped",[50]],[[8323,8323],"mapped",[51]],[[8324,8324],"mapped",[52]],[[8325,8325],"mapped",[53]],[[8326,8326],"mapped",[54]],[[8327,8327],"mapped",[55]],[[8328,8328],"mapped",[56]],[[8329,8329],"mapped",[57]],[[8330,8330],"disallowed_STD3_mapped",[43]],[[8331,8331],"mapped",[8722]],[[8332,8332],"disallowed_STD3_mapped",[61]],[[8333,8333],"disallowed_STD3_mapped",[40]],[[8334,8334],"disallowed_STD3_mapped",[41]],[[8335,8335],"disallowed"],[[8336,8336],"mapped",[97]],[[8337,8337],"mapped",[101]],[[8338,8338],"mapped",[111]],[[8339,8339],"mapped",[120]],[[8340,8340],"mapped",[601]],[[8341,8341],"mapped",[104]],[[8342,8342],"mapped",[107]],[[8343,8343],"mapped",[108]],[[8344,8344],"mapped",[109]],[[8345,8345],"mapped",[110]],[[8346,8346],"mapped",[112]],[[8347,8347],"mapped",[115]],[[8348,8348],"mapped",[116]],[[8349,8351],"disallowed"],[[8352,8359],"valid",[],"NV8"],[[8360,8360],"mapped",[114,115]],[[8361,8362],"valid",[],"NV8"],[[8363,8363],"valid",[],"NV8"],[[8364,8364],"valid",[],"NV8"],[[8365,8367],"valid",[],"NV8"],[[8368,8369],"valid",[],"NV8"],[[8370,8373],"valid",[],"NV8"],[[8374,8376],"valid",[],"NV8"],[[8377,8377],"valid",[],"NV8"],[[8378,8378],"valid",[],"NV8"],[[8379,8381],"valid",[],"NV8"],[[8382,8382],"valid",[],"NV8"],[[8383,8399],"disallowed"],[[8400,8417],"valid",[],"NV8"],[[8418,8419],"valid",[],"NV8"],[[8420,8426],"valid",[],"NV8"],[[8427,8427],"valid",[],"NV8"],[[8428,8431],"valid",[],"NV8"],[[8432,8432],"valid",[],"NV8"],[[8433,8447],"disallowed"],[[8448,8448],"disallowed_STD3_mapped",[97,47,99]],[[8449,8449],"disallowed_STD3_mapped",[97,47,115]],[[8450,8450],"mapped",[99]],[[8451,8451],"mapped",[176,99]],[[8452,8452],"valid",[],"NV8"],[[8453,8453],"disallowed_STD3_mapped",[99,47,111]],[[8454,8454],"disallowed_STD3_mapped",[99,47,117]],[[8455,8455],"mapped",[603]],[[8456,8456],"valid",[],"NV8"],[[8457,8457],"mapped",[176,102]],[[8458,8458],"mapped",[103]],[[8459,8462],"mapped",[104]],[[8463,8463],"mapped",[295]],[[8464,8465],"mapped",[105]],[[8466,8467],"mapped",[108]],[[8468,8468],"valid",[],"NV8"],[[8469,8469],"mapped",[110]],[[8470,8470],"mapped",[110,111]],[[8471,8472],"valid",[],"NV8"],[[8473,8473],"mapped",[112]],[[8474,8474],"mapped",[113]],[[8475,8477],"mapped",[114]],[[8478,8479],"valid",[],"NV8"],[[8480,8480],"mapped",[115,109]],[[8481,8481],"mapped",[116,101,108]],[[8482,8482],"mapped",[116,109]],[[8483,8483],"valid",[],"NV8"],[[8484,8484],"mapped",[122]],[[8485,8485],"valid",[],"NV8"],[[8486,8486],"mapped",[969]],[[8487,8487],"valid",[],"NV8"],[[8488,8488],"mapped",[122]],[[8489,8489],"valid",[],"NV8"],[[8490,8490],"mapped",[107]],[[8491,8491],"mapped",[229]],[[8492,8492],"mapped",[98]],[[8493,8493],"mapped",[99]],[[8494,8494],"valid",[],"NV8"],[[8495,8496],"mapped",[101]],[[8497,8497],"mapped",[102]],[[8498,8498],"disallowed"],[[8499,8499],"mapped",[109]],[[8500,8500],"mapped",[111]],[[8501,8501],"mapped",[1488]],[[8502,8502],"mapped",[1489]],[[8503,8503],"mapped",[1490]],[[8504,8504],"mapped",[1491]],[[8505,8505],"mapped",[105]],[[8506,8506],"valid",[],"NV8"],[[8507,8507],"mapped",[102,97,120]],[[8508,8508],"mapped",[960]],[[8509,8510],"mapped",[947]],[[8511,8511],"mapped",[960]],[[8512,8512],"mapped",[8721]],[[8513,8516],"valid",[],"NV8"],[[8517,8518],"mapped",[100]],[[8519,8519],"mapped",[101]],[[8520,8520],"mapped",[105]],[[8521,8521],"mapped",[106]],[[8522,8523],"valid",[],"NV8"],[[8524,8524],"valid",[],"NV8"],[[8525,8525],"valid",[],"NV8"],[[8526,8526],"valid"],[[8527,8527],"valid",[],"NV8"],[[8528,8528],"mapped",[
49,8260,55]],[[8529,8529],"mapped",[49,8260,57]],[[8530,8530],"mapped",[49,8260,49,48]],[[8531,8531],"mapped",[49,8260,51]],[[8532,8532],"mapped",[50,8260,51]],[[8533,8533],"mapped",[49,8260,53]],[[8534,8534],"mapped",[50,8260,53]],[[8535,8535],"mapped",[51,8260,53]],[[8536,8536],"mapped",[52,8260,53]],[[8537,8537],"mapped",[49,8260,54]],[[8538,8538],"mapped",[53,8260,54]],[[8539,8539],"mapped",[49,8260,56]],[[8540,8540],"mapped",[51,8260,56]],[[8541,8541],"mapped",[53,8260,56]],[[8542,8542],"mapped",[55,8260,56]],[[8543,8543],"mapped",[49,8260]],[[8544,8544],"mapped",[105]],[[8545,8545],"mapped",[105,105]],[[8546,8546],"mapped",[105,105,105]],[[8547,8547],"mapped",[105,118]],[[8548,8548],"mapped",[118]],[[8549,8549],"mapped",[118,105]],[[8550,8550],"mapped",[118,105,105]],[[8551,8551],"mapped",[118,105,105,105]],[[8552,8552],"mapped",[105,120]],[[8553,8553],"mapped",[120]],[[8554,8554],"mapped",[120,105]],[[8555,8555],"mapped",[120,105,105]],[[8556,8556],"mapped",[108]],[[8557,8557],"mapped",[99]],[[8558,8558],"mapped",[100]],[[8559,8559],"mapped",[109]],[[8560,8560],"mapped",[105]],[[8561,8561],"mapped",[105,105]],[[8562,8562],"mapped",[105,105,105]],[[8563,8563],"mapped",[105,118]],[[8564,8564],"mapped",[118]],[[8565,8565],"mapped",[118,105]],[[8566,8566],"mapped",[118,105,105]],[[8567,8567],"mapped",[118,105,105,105]],[[8568,8568],"mapped",[105,120]],[[8569,8569],"mapped",[120]],[[8570,8570],"mapped",[120,105]],[[8571,8571],"mapped",[120,105,105]],[[8572,8572],"mapped",[108]],[[8573,8573],"mapped",[99]],[[8574,8574],"mapped",[100]],[[8575,8575],"mapped",[109]],[[8576,8578],"valid",[],"NV8"],[[8579,8579],"disallowed"],[[8580,8580],"valid"],[[8581,8584],"valid",[],"NV8"],[[8585,8585],"mapped",[48,8260,51]],[[8586,8587],"valid",[],"NV8"],[[8588,8591],"disallowed"],[[8592,8682],"valid",[],"NV8"],[[8683,8691],"valid",[],"NV8"],[[8692,8703],"valid",[],"NV8"],[[8704,8747],"valid",[],"NV8"],[[8748,8748],"mapped",[8747,8747]],[[8749,8749],"mapped",[8747,8747,8747]],[[8750,8750],"valid",[],"NV8"],[[8751,8751],"mapped",[8750,8750]],[[8752,8752],"mapped",[8750,8750,8750]],[[8753,8799],"valid",[],"NV8"],[[8800,8800],"disallowed_STD3_valid"],[[8801,8813],"valid",[],"NV8"],[[8814,8815],"disallowed_STD3_valid"],[[8816,8945],"valid",[],"NV8"],[[8946,8959],"valid",[],"NV8"],[[8960,8960],"valid",[],"NV8"],[[8961,8961],"valid",[],"NV8"],[[8962,9000],"valid",[],"NV8"],[[9001,9001],"mapped",[12296]],[[9002,9002],"mapped",[12297]],[[9003,9082],"valid",[],"NV8"],[[9083,9083],"valid",[],"NV8"],[[9084,9084],"valid",[],"NV8"],[[9085,9114],"valid",[],"NV8"],[[9115,9166],"valid",[],"NV8"],[[9167,9168],"valid",[],"NV8"],[[9169,9179],"valid",[],"NV8"],[[9180,9191],"valid",[],"NV8"],[[9192,9192],"valid",[],"NV8"],[[9193,9203],"valid",[],"NV8"],[[9204,9210],"valid",[],"NV8"],[[9211,9215],"disallowed"],[[9216,9252],"valid",[],"NV8"],[[9253,9254],"valid",[],"NV8"],[[9255,9279],"disallowed"],[[9280,9290],"valid",[],"NV8"],[[9291,9311],"disallowed"],[[9312,9312],"mapped",[49]],[[9313,9313],"mapped",[50]],[[9314,9314],"mapped",[51]],[[9315,9315],"mapped",[52]],[[9316,9316],"mapped",[53]],[[9317,9317],"mapped",[54]],[[9318,9318],"mapped",[55]],[[9319,9319],"mapped",[56]],[[9320,9320],"mapped",[57]],[[9321,9321],"mapped",[49,48]],[[9322,9322],"mapped",[49,49]],[[9323,9323],"mapped",[49,50]],[[9324,9324],"mapped",[49,51]],[[9325,9325],"mapped",[49,52]],[[9326,9326],"mapped",[49,53]],[[9327,9327],"mapped",[49,54]],[[9328,9328],"mapped",[49,55]],[[9329,9329],"mapped",[49,56]],[[9330,9330],"mapped",[49,57]],[[9331,9331],"mapped",
[50,48]],[[9332,9332],"disallowed_STD3_mapped",[40,49,41]],[[9333,9333],"disallowed_STD3_mapped",[40,50,41]],[[9334,9334],"disallowed_STD3_mapped",[40,51,41]],[[9335,9335],"disallowed_STD3_mapped",[40,52,41]],[[9336,9336],"disallowed_STD3_mapped",[40,53,41]],[[9337,9337],"disallowed_STD3_mapped",[40,54,41]],[[9338,9338],"disallowed_STD3_mapped",[40,55,41]],[[9339,9339],"disallowed_STD3_mapped",[40,56,41]],[[9340,9340],"disallowed_STD3_mapped",[40,57,41]],[[9341,9341],"disallowed_STD3_mapped",[40,49,48,41]],[[9342,9342],"disallowed_STD3_mapped",[40,49,49,41]],[[9343,9343],"disallowed_STD3_mapped",[40,49,50,41]],[[9344,9344],"disallowed_STD3_mapped",[40,49,51,41]],[[9345,9345],"disallowed_STD3_mapped",[40,49,52,41]],[[9346,9346],"disallowed_STD3_mapped",[40,49,53,41]],[[9347,9347],"disallowed_STD3_mapped",[40,49,54,41]],[[9348,9348],"disallowed_STD3_mapped",[40,49,55,41]],[[9349,9349],"disallowed_STD3_mapped",[40,49,56,41]],[[9350,9350],"disallowed_STD3_mapped",[40,49,57,41]],[[9351,9351],"disallowed_STD3_mapped",[40,50,48,41]],[[9352,9371],"disallowed"],[[9372,9372],"disallowed_STD3_mapped",[40,97,41]],[[9373,9373],"disallowed_STD3_mapped",[40,98,41]],[[9374,9374],"disallowed_STD3_mapped",[40,99,41]],[[9375,9375],"disallowed_STD3_mapped",[40,100,41]],[[9376,9376],"disallowed_STD3_mapped",[40,101,41]],[[9377,9377],"disallowed_STD3_mapped",[40,102,41]],[[9378,9378],"disallowed_STD3_mapped",[40,103,41]],[[9379,9379],"disallowed_STD3_mapped",[40,104,41]],[[9380,9380],"disallowed_STD3_mapped",[40,105,41]],[[9381,9381],"disallowed_STD3_mapped",[40,106,41]],[[9382,9382],"disallowed_STD3_mapped",[40,107,41]],[[9383,9383],"disallowed_STD3_mapped",[40,108,41]],[[9384,9384],"disallowed_STD3_mapped",[40,109,41]],[[9385,9385],"disallowed_STD3_mapped",[40,110,41]],[[9386,9386],"disallowed_STD3_mapped",[40,111,41]],[[9387,9387],"disallowed_STD3_mapped",[40,112,41]],[[9388,9388],"disallowed_STD3_mapped",[40,113,41]],[[9389,9389],"disallowed_STD3_mapped",[40,114,41]],[[9390,9390],"disallowed_STD3_mapped",[40,115,41]],[[9391,9391],"disallowed_STD3_mapped",[40,116,41]],[[9392,9392],"disallowed_STD3_mapped",[40,117,41]],[[9393,9393],"disallowed_STD3_mapped",[40,118,41]],[[9394,9394],"disallowed_STD3_mapped",[40,119,41]],[[9395,9395],"disallowed_STD3_mapped",[40,120,41]],[[9396,9396],"disallowed_STD3_mapped",[40,121,41]],[[9397,9397],"disallowed_STD3_mapped",[40,122,41]],[[9398,9398],"mapped",[97]],[[9399,9399],"mapped",[98]],[[9400,9400],"mapped",[99]],[[9401,9401],"mapped",[100]],[[9402,9402],"mapped",[101]],[[9403,9403],"mapped",[102]],[[9404,9404],"mapped",[103]],[[9405,9405],"mapped",[104]],[[9406,9406],"mapped",[105]],[[9407,9407],"mapped",[106]],[[9408,9408],"mapped",[107]],[[9409,9409],"mapped",[108]],[[9410,9410],"mapped",[109]],[[9411,9411],"mapped",[110]],[[9412,9412],"mapped",[111]],[[9413,9413],"mapped",[112]],[[9414,9414],"mapped",[113]],[[9415,9415],"mapped",[114]],[[9416,9416],"mapped",[115]],[[9417,9417],"mapped",[116]],[[9418,9418],"mapped",[117]],[[9419,9419],"mapped",[118]],[[9420,9420],"mapped",[119]],[[9421,9421],"mapped",[120]],[[9422,9422],"mapped",[121]],[[9423,9423],"mapped",[122]],[[9424,9424],"mapped",[97]],[[9425,9425],"mapped",[98]],[[9426,9426],"mapped",[99]],[[9427,9427],"mapped",[100]],[[9428,9428],"mapped",[101]],[[9429,9429],"mapped",[102]],[[9430,9430],"mapped",[103]],[[9431,9431],"mapped",[104]],[[9432,9432],"mapped",[105]],[[9433,9433],"mapped",[106]],[[9434,9434],"mapped",[107]],[[9435,9435],"mapped",[108]],[[9436,9436],"mapped",[109]],[[9437,9437],"mapped",[110]],[[9438,94
38],"mapped",[111]],[[9439,9439],"mapped",[112]],[[9440,9440],"mapped",[113]],[[9441,9441],"mapped",[114]],[[9442,9442],"mapped",[115]],[[9443,9443],"mapped",[116]],[[9444,9444],"mapped",[117]],[[9445,9445],"mapped",[118]],[[9446,9446],"mapped",[119]],[[9447,9447],"mapped",[120]],[[9448,9448],"mapped",[121]],[[9449,9449],"mapped",[122]],[[9450,9450],"mapped",[48]],[[9451,9470],"valid",[],"NV8"],[[9471,9471],"valid",[],"NV8"],[[9472,9621],"valid",[],"NV8"],[[9622,9631],"valid",[],"NV8"],[[9632,9711],"valid",[],"NV8"],[[9712,9719],"valid",[],"NV8"],[[9720,9727],"valid",[],"NV8"],[[9728,9747],"valid",[],"NV8"],[[9748,9749],"valid",[],"NV8"],[[9750,9751],"valid",[],"NV8"],[[9752,9752],"valid",[],"NV8"],[[9753,9753],"valid",[],"NV8"],[[9754,9839],"valid",[],"NV8"],[[9840,9841],"valid",[],"NV8"],[[9842,9853],"valid",[],"NV8"],[[9854,9855],"valid",[],"NV8"],[[9856,9865],"valid",[],"NV8"],[[9866,9873],"valid",[],"NV8"],[[9874,9884],"valid",[],"NV8"],[[9885,9885],"valid",[],"NV8"],[[9886,9887],"valid",[],"NV8"],[[9888,9889],"valid",[],"NV8"],[[9890,9905],"valid",[],"NV8"],[[9906,9906],"valid",[],"NV8"],[[9907,9916],"valid",[],"NV8"],[[9917,9919],"valid",[],"NV8"],[[9920,9923],"valid",[],"NV8"],[[9924,9933],"valid",[],"NV8"],[[9934,9934],"valid",[],"NV8"],[[9935,9953],"valid",[],"NV8"],[[9954,9954],"valid",[],"NV8"],[[9955,9955],"valid",[],"NV8"],[[9956,9959],"valid",[],"NV8"],[[9960,9983],"valid",[],"NV8"],[[9984,9984],"valid",[],"NV8"],[[9985,9988],"valid",[],"NV8"],[[9989,9989],"valid",[],"NV8"],[[9990,9993],"valid",[],"NV8"],[[9994,9995],"valid",[],"NV8"],[[9996,10023],"valid",[],"NV8"],[[10024,10024],"valid",[],"NV8"],[[10025,10059],"valid",[],"NV8"],[[10060,10060],"valid",[],"NV8"],[[10061,10061],"valid",[],"NV8"],[[10062,10062],"valid",[],"NV8"],[[10063,10066],"valid",[],"NV8"],[[10067,10069],"valid",[],"NV8"],[[10070,10070],"valid",[],"NV8"],[[10071,10071],"valid",[],"NV8"],[[10072,10078],"valid",[],"NV8"],[[10079,10080],"valid",[],"NV8"],[[10081,10087],"valid",[],"NV8"],[[10088,10101],"valid",[],"NV8"],[[10102,10132],"valid",[],"NV8"],[[10133,10135],"valid",[],"NV8"],[[10136,10159],"valid",[],"NV8"],[[10160,10160],"valid",[],"NV8"],[[10161,10174],"valid",[],"NV8"],[[10175,10175],"valid",[],"NV8"],[[10176,10182],"valid",[],"NV8"],[[10183,10186],"valid",[],"NV8"],[[10187,10187],"valid",[],"NV8"],[[10188,10188],"valid",[],"NV8"],[[10189,10189],"valid",[],"NV8"],[[10190,10191],"valid",[],"NV8"],[[10192,10219],"valid",[],"NV8"],[[10220,10223],"valid",[],"NV8"],[[10224,10239],"valid",[],"NV8"],[[10240,10495],"valid",[],"NV8"],[[10496,10763],"valid",[],"NV8"],[[10764,10764],"mapped",[8747,8747,8747,8747]],[[10765,10867],"valid",[],"NV8"],[[10868,10868],"disallowed_STD3_mapped",[58,58,61]],[[10869,10869],"disallowed_STD3_mapped",[61,61]],[[10870,10870],"disallowed_STD3_mapped",[61,61,61]],[[10871,10971],"valid",[],"NV8"],[[10972,10972],"mapped",[10973,824]],[[10973,11007],"valid",[],"NV8"],[[11008,11021],"valid",[],"NV8"],[[11022,11027],"valid",[],"NV8"],[[11028,11034],"valid",[],"NV8"],[[11035,11039],"valid",[],"NV8"],[[11040,11043],"valid",[],"NV8"],[[11044,11084],"valid",[],"NV8"],[[11085,11087],"valid",[],"NV8"],[[11088,11092],"valid",[],"NV8"],[[11093,11097],"valid",[],"NV8"],[[11098,11123],"valid",[],"NV8"],[[11124,11125],"disallowed"],[[11126,11157],"valid",[],"NV8"],[[11158,11159],"disallowed"],[[11160,11193],"valid",[],"NV8"],[[11194,11196],"disallowed"],[[11197,11208],"valid",[],"NV8"],[[11209,11209],"disallowed"],[[11210,11217],"valid",[],"NV8"],[[11218,11243],"disallowed"],[[11244,11247]
,"valid",[],"NV8"],[[11248,11263],"disallowed"],[[11264,11264],"mapped",[11312]],[[11265,11265],"mapped",[11313]],[[11266,11266],"mapped",[11314]],[[11267,11267],"mapped",[11315]],[[11268,11268],"mapped",[11316]],[[11269,11269],"mapped",[11317]],[[11270,11270],"mapped",[11318]],[[11271,11271],"mapped",[11319]],[[11272,11272],"mapped",[11320]],[[11273,11273],"mapped",[11321]],[[11274,11274],"mapped",[11322]],[[11275,11275],"mapped",[11323]],[[11276,11276],"mapped",[11324]],[[11277,11277],"mapped",[11325]],[[11278,11278],"mapped",[11326]],[[11279,11279],"mapped",[11327]],[[11280,11280],"mapped",[11328]],[[11281,11281],"mapped",[11329]],[[11282,11282],"mapped",[11330]],[[11283,11283],"mapped",[11331]],[[11284,11284],"mapped",[11332]],[[11285,11285],"mapped",[11333]],[[11286,11286],"mapped",[11334]],[[11287,11287],"mapped",[11335]],[[11288,11288],"mapped",[11336]],[[11289,11289],"mapped",[11337]],[[11290,11290],"mapped",[11338]],[[11291,11291],"mapped",[11339]],[[11292,11292],"mapped",[11340]],[[11293,11293],"mapped",[11341]],[[11294,11294],"mapped",[11342]],[[11295,11295],"mapped",[11343]],[[11296,11296],"mapped",[11344]],[[11297,11297],"mapped",[11345]],[[11298,11298],"mapped",[11346]],[[11299,11299],"mapped",[11347]],[[11300,11300],"mapped",[11348]],[[11301,11301],"mapped",[11349]],[[11302,11302],"mapped",[11350]],[[11303,11303],"mapped",[11351]],[[11304,11304],"mapped",[11352]],[[11305,11305],"mapped",[11353]],[[11306,11306],"mapped",[11354]],[[11307,11307],"mapped",[11355]],[[11308,11308],"mapped",[11356]],[[11309,11309],"mapped",[11357]],[[11310,11310],"mapped",[11358]],[[11311,11311],"disallowed"],[[11312,11358],"valid"],[[11359,11359],"disallowed"],[[11360,11360],"mapped",[11361]],[[11361,11361],"valid"],[[11362,11362],"mapped",[619]],[[11363,11363],"mapped",[7549]],[[11364,11364],"mapped",[637]],[[11365,11366],"valid"],[[11367,11367],"mapped",[11368]],[[11368,11368],"valid"],[[11369,11369],"mapped",[11370]],[[11370,11370],"valid"],[[11371,11371],"mapped",[11372]],[[11372,11372],"valid"],[[11373,11373],"mapped",[593]],[[11374,11374],"mapped",[625]],[[11375,11375],"mapped",[592]],[[11376,11376],"mapped",[594]],[[11377,11377],"valid"],[[11378,11378],"mapped",[11379]],[[11379,11379],"valid"],[[11380,11380],"valid"],[[11381,11381],"mapped",[11382]],[[11382,11383],"valid"],[[11384,11387],"valid"],[[11388,11388],"mapped",[106]],[[11389,11389],"mapped",[118]],[[11390,11390],"mapped",[575]],[[11391,11391],"mapped",[576]],[[11392,11392],"mapped",[11393]],[[11393,11393],"valid"],[[11394,11394],"mapped",[11395]],[[11395,11395],"valid"],[[11396,11396],"mapped",[11397]],[[11397,11397],"valid"],[[11398,11398],"mapped",[11399]],[[11399,11399],"valid"],[[11400,11400],"mapped",[11401]],[[11401,11401],"valid"],[[11402,11402],"mapped",[11403]],[[11403,11403],"valid"],[[11404,11404],"mapped",[11405]],[[11405,11405],"valid"],[[11406,11406],"mapped",[11407]],[[11407,11407],"valid"],[[11408,11408],"mapped",[11409]],[[11409,11409],"valid"],[[11410,11410],"mapped",[11411]],[[11411,11411],"valid"],[[11412,11412],"mapped",[11413]],[[11413,11413],"valid"],[[11414,11414],"mapped",[11415]],[[11415,11415],"valid"],[[11416,11416],"mapped",[11417]],[[11417,11417],"valid"],[[11418,11418],"mapped",[11419]],[[11419,11419],"valid"],[[11420,11420],"mapped",[11421]],[[11421,11421],"valid"],[[11422,11422],"mapped",[11423]],[[11423,11423],"valid"],[[11424,11424],"mapped",[11425]],[[11425,11425],"valid"],[[11426,11426],"mapped",[11427]],[[11427,11427],"valid"],[[11428,11428],"mapped",[11429]],[[11429,11429],"valid"],[[11430,114
30],"mapped",[11431]],[[11431,11431],"valid"],[[11432,11432],"mapped",[11433]],[[11433,11433],"valid"],[[11434,11434],"mapped",[11435]],[[11435,11435],"valid"],[[11436,11436],"mapped",[11437]],[[11437,11437],"valid"],[[11438,11438],"mapped",[11439]],[[11439,11439],"valid"],[[11440,11440],"mapped",[11441]],[[11441,11441],"valid"],[[11442,11442],"mapped",[11443]],[[11443,11443],"valid"],[[11444,11444],"mapped",[11445]],[[11445,11445],"valid"],[[11446,11446],"mapped",[11447]],[[11447,11447],"valid"],[[11448,11448],"mapped",[11449]],[[11449,11449],"valid"],[[11450,11450],"mapped",[11451]],[[11451,11451],"valid"],[[11452,11452],"mapped",[11453]],[[11453,11453],"valid"],[[11454,11454],"mapped",[11455]],[[11455,11455],"valid"],[[11456,11456],"mapped",[11457]],[[11457,11457],"valid"],[[11458,11458],"mapped",[11459]],[[11459,11459],"valid"],[[11460,11460],"mapped",[11461]],[[11461,11461],"valid"],[[11462,11462],"mapped",[11463]],[[11463,11463],"valid"],[[11464,11464],"mapped",[11465]],[[11465,11465],"valid"],[[11466,11466],"mapped",[11467]],[[11467,11467],"valid"],[[11468,11468],"mapped",[11469]],[[11469,11469],"valid"],[[11470,11470],"mapped",[11471]],[[11471,11471],"valid"],[[11472,11472],"mapped",[11473]],[[11473,11473],"valid"],[[11474,11474],"mapped",[11475]],[[11475,11475],"valid"],[[11476,11476],"mapped",[11477]],[[11477,11477],"valid"],[[11478,11478],"mapped",[11479]],[[11479,11479],"valid"],[[11480,11480],"mapped",[11481]],[[11481,11481],"valid"],[[11482,11482],"mapped",[11483]],[[11483,11483],"valid"],[[11484,11484],"mapped",[11485]],[[11485,11485],"valid"],[[11486,11486],"mapped",[11487]],[[11487,11487],"valid"],[[11488,11488],"mapped",[11489]],[[11489,11489],"valid"],[[11490,11490],"mapped",[11491]],[[11491,11492],"valid"],[[11493,11498],"valid",[],"NV8"],[[11499,11499],"mapped",[11500]],[[11500,11500],"valid"],[[11501,11501],"mapped",[11502]],[[11502,11505],"valid"],[[11506,11506],"mapped",[11507]],[[11507,11507],"valid"],[[11508,11512],"disallowed"],[[11513,11519],"valid",[],"NV8"],[[11520,11557],"valid"],[[11558,11558],"disallowed"],[[11559,11559],"valid"],[[11560,11564],"disallowed"],[[11565,11565],"valid"],[[11566,11567],"disallowed"],[[11568,11621],"valid"],[[11622,11623],"valid"],[[11624,11630],"disallowed"],[[11631,11631],"mapped",[11617]],[[11632,11632],"valid",[],"NV8"],[[11633,11646],"disallowed"],[[11647,11647],"valid"],[[11648,11670],"valid"],[[11671,11679],"disallowed"],[[11680,11686],"valid"],[[11687,11687],"disallowed"],[[11688,11694],"valid"],[[11695,11695],"disallowed"],[[11696,11702],"valid"],[[11703,11703],"disallowed"],[[11704,11710],"valid"],[[11711,11711],"disallowed"],[[11712,11718],"valid"],[[11719,11719],"disallowed"],[[11720,11726],"valid"],[[11727,11727],"disallowed"],[[11728,11734],"valid"],[[11735,11735],"disallowed"],[[11736,11742],"valid"],[[11743,11743],"disallowed"],[[11744,11775],"valid"],[[11776,11799],"valid",[],"NV8"],[[11800,11803],"valid",[],"NV8"],[[11804,11805],"valid",[],"NV8"],[[11806,11822],"valid",[],"NV8"],[[11823,11823],"valid"],[[11824,11824],"valid",[],"NV8"],[[11825,11825],"valid",[],"NV8"],[[11826,11835],"valid",[],"NV8"],[[11836,11842],"valid",[],"NV8"],[[11843,11903],"disallowed"],[[11904,11929],"valid",[],"NV8"],[[11930,11930],"disallowed"],[[11931,11934],"valid",[],"NV8"],[[11935,11935],"mapped",[27597]],[[11936,12018],"valid",[],"NV8"],[[12019,12019],"mapped",[40863]],[[12020,12031],"disallowed"],[[12032,12032],"mapped",[19968]],[[12033,12033],"mapped",[20008]],[[12034,12034],"mapped",[20022]],[[12035,12035],"mapped",[20031]],[[120
36,12036],"mapped",[20057]],[[12037,12037],"mapped",[20101]],[[12038,12038],"mapped",[20108]],[[12039,12039],"mapped",[20128]],[[12040,12040],"mapped",[20154]],[[12041,12041],"mapped",[20799]],[[12042,12042],"mapped",[20837]],[[12043,12043],"mapped",[20843]],[[12044,12044],"mapped",[20866]],[[12045,12045],"mapped",[20886]],[[12046,12046],"mapped",[20907]],[[12047,12047],"mapped",[20960]],[[12048,12048],"mapped",[20981]],[[12049,12049],"mapped",[20992]],[[12050,12050],"mapped",[21147]],[[12051,12051],"mapped",[21241]],[[12052,12052],"mapped",[21269]],[[12053,12053],"mapped",[21274]],[[12054,12054],"mapped",[21304]],[[12055,12055],"mapped",[21313]],[[12056,12056],"mapped",[21340]],[[12057,12057],"mapped",[21353]],[[12058,12058],"mapped",[21378]],[[12059,12059],"mapped",[21430]],[[12060,12060],"mapped",[21448]],[[12061,12061],"mapped",[21475]],[[12062,12062],"mapped",[22231]],[[12063,12063],"mapped",[22303]],[[12064,12064],"mapped",[22763]],[[12065,12065],"mapped",[22786]],[[12066,12066],"mapped",[22794]],[[12067,12067],"mapped",[22805]],[[12068,12068],"mapped",[22823]],[[12069,12069],"mapped",[22899]],[[12070,12070],"mapped",[23376]],[[12071,12071],"mapped",[23424]],[[12072,12072],"mapped",[23544]],[[12073,12073],"mapped",[23567]],[[12074,12074],"mapped",[23586]],[[12075,12075],"mapped",[23608]],[[12076,12076],"mapped",[23662]],[[12077,12077],"mapped",[23665]],[[12078,12078],"mapped",[24027]],[[12079,12079],"mapped",[24037]],[[12080,12080],"mapped",[24049]],[[12081,12081],"mapped",[24062]],[[12082,12082],"mapped",[24178]],[[12083,12083],"mapped",[24186]],[[12084,12084],"mapped",[24191]],[[12085,12085],"mapped",[24308]],[[12086,12086],"mapped",[24318]],[[12087,12087],"mapped",[24331]],[[12088,12088],"mapped",[24339]],[[12089,12089],"mapped",[24400]],[[12090,12090],"mapped",[24417]],[[12091,12091],"mapped",[24435]],[[12092,12092],"mapped",[24515]],[[12093,12093],"mapped",[25096]],[[12094,12094],"mapped",[25142]],[[12095,12095],"mapped",[25163]],[[12096,12096],"mapped",[25903]],[[12097,12097],"mapped",[25908]],[[12098,12098],"mapped",[25991]],[[12099,12099],"mapped",[26007]],[[12100,12100],"mapped",[26020]],[[12101,12101],"mapped",[26041]],[[12102,12102],"mapped",[26080]],[[12103,12103],"mapped",[26085]],[[12104,12104],"mapped",[26352]],[[12105,12105],"mapped",[26376]],[[12106,12106],"mapped",[26408]],[[12107,12107],"mapped",[27424]],[[12108,12108],"mapped",[27490]],[[12109,12109],"mapped",[27513]],[[12110,12110],"mapped",[27571]],[[12111,12111],"mapped",[27595]],[[12112,12112],"mapped",[27604]],[[12113,12113],"mapped",[27611]],[[12114,12114],"mapped",[27663]],[[12115,12115],"mapped",[27668]],[[12116,12116],"mapped",[27700]],[[12117,12117],"mapped",[28779]],[[12118,12118],"mapped",[29226]],[[12119,12119],"mapped",[29238]],[[12120,12120],"mapped",[29243]],[[12121,12121],"mapped",[29247]],[[12122,12122],"mapped",[29255]],[[12123,12123],"mapped",[29273]],[[12124,12124],"mapped",[29275]],[[12125,12125],"mapped",[29356]],[[12126,12126],"mapped",[29572]],[[12127,12127],"mapped",[29577]],[[12128,12128],"mapped",[29916]],[[12129,12129],"mapped",[29926]],[[12130,12130],"mapped",[29976]],[[12131,12131],"mapped",[29983]],[[12132,12132],"mapped",[29992]],[[12133,12133],"mapped",[30000]],[[12134,12134],"mapped",[30091]],[[12135,12135],"mapped",[30098]],[[12136,12136],"mapped",[30326]],[[12137,12137],"mapped",[30333]],[[12138,12138],"mapped",[30382]],[[12139,12139],"mapped",[30399]],[[12140,12140],"mapped",[30446]],[[12141,12141],"mapped",[30683]],[[12142,12142],"mapped",[30690]],[[12143,12143],"mapped",[3070
7]],[[12144,12144],"mapped",[31034]],[[12145,12145],"mapped",[31160]],[[12146,12146],"mapped",[31166]],[[12147,12147],"mapped",[31348]],[[12148,12148],"mapped",[31435]],[[12149,12149],"mapped",[31481]],[[12150,12150],"mapped",[31859]],[[12151,12151],"mapped",[31992]],[[12152,12152],"mapped",[32566]],[[12153,12153],"mapped",[32593]],[[12154,12154],"mapped",[32650]],[[12155,12155],"mapped",[32701]],[[12156,12156],"mapped",[32769]],[[12157,12157],"mapped",[32780]],[[12158,12158],"mapped",[32786]],[[12159,12159],"mapped",[32819]],[[12160,12160],"mapped",[32895]],[[12161,12161],"mapped",[32905]],[[12162,12162],"mapped",[33251]],[[12163,12163],"mapped",[33258]],[[12164,12164],"mapped",[33267]],[[12165,12165],"mapped",[33276]],[[12166,12166],"mapped",[33292]],[[12167,12167],"mapped",[33307]],[[12168,12168],"mapped",[33311]],[[12169,12169],"mapped",[33390]],[[12170,12170],"mapped",[33394]],[[12171,12171],"mapped",[33400]],[[12172,12172],"mapped",[34381]],[[12173,12173],"mapped",[34411]],[[12174,12174],"mapped",[34880]],[[12175,12175],"mapped",[34892]],[[12176,12176],"mapped",[34915]],[[12177,12177],"mapped",[35198]],[[12178,12178],"mapped",[35211]],[[12179,12179],"mapped",[35282]],[[12180,12180],"mapped",[35328]],[[12181,12181],"mapped",[35895]],[[12182,12182],"mapped",[35910]],[[12183,12183],"mapped",[35925]],[[12184,12184],"mapped",[35960]],[[12185,12185],"mapped",[35997]],[[12186,12186],"mapped",[36196]],[[12187,12187],"mapped",[36208]],[[12188,12188],"mapped",[36275]],[[12189,12189],"mapped",[36523]],[[12190,12190],"mapped",[36554]],[[12191,12191],"mapped",[36763]],[[12192,12192],"mapped",[36784]],[[12193,12193],"mapped",[36789]],[[12194,12194],"mapped",[37009]],[[12195,12195],"mapped",[37193]],[[12196,12196],"mapped",[37318]],[[12197,12197],"mapped",[37324]],[[12198,12198],"mapped",[37329]],[[12199,12199],"mapped",[38263]],[[12200,12200],"mapped",[38272]],[[12201,12201],"mapped",[38428]],[[12202,12202],"mapped",[38582]],[[12203,12203],"mapped",[38585]],[[12204,12204],"mapped",[38632]],[[12205,12205],"mapped",[38737]],[[12206,12206],"mapped",[38750]],[[12207,12207],"mapped",[38754]],[[12208,12208],"mapped",[38761]],[[12209,12209],"mapped",[38859]],[[12210,12210],"mapped",[38893]],[[12211,12211],"mapped",[38899]],[[12212,12212],"mapped",[38913]],[[12213,12213],"mapped",[39080]],[[12214,12214],"mapped",[39131]],[[12215,12215],"mapped",[39135]],[[12216,12216],"mapped",[39318]],[[12217,12217],"mapped",[39321]],[[12218,12218],"mapped",[39340]],[[12219,12219],"mapped",[39592]],[[12220,12220],"mapped",[39640]],[[12221,12221],"mapped",[39647]],[[12222,12222],"mapped",[39717]],[[12223,12223],"mapped",[39727]],[[12224,12224],"mapped",[39730]],[[12225,12225],"mapped",[39740]],[[12226,12226],"mapped",[39770]],[[12227,12227],"mapped",[40165]],[[12228,12228],"mapped",[40565]],[[12229,12229],"mapped",[40575]],[[12230,12230],"mapped",[40613]],[[12231,12231],"mapped",[40635]],[[12232,12232],"mapped",[40643]],[[12233,12233],"mapped",[40653]],[[12234,12234],"mapped",[40657]],[[12235,12235],"mapped",[40697]],[[12236,12236],"mapped",[40701]],[[12237,12237],"mapped",[40718]],[[12238,12238],"mapped",[40723]],[[12239,12239],"mapped",[40736]],[[12240,12240],"mapped",[40763]],[[12241,12241],"mapped",[40778]],[[12242,12242],"mapped",[40786]],[[12243,12243],"mapped",[40845]],[[12244,12244],"mapped",[40860]],[[12245,12245],"mapped",[40864]],[[12246,12271],"disallowed"],[[12272,12283],"disallowed"],[[12284,12287],"disallowed"],[[12288,12288],"disallowed_STD3_mapped",[32]],[[12289,12289],"valid",[],"NV8"],[[12290,12290],"map
ped",[46]],[[12291,12292],"valid",[],"NV8"],[[12293,12295],"valid"],[[12296,12329],"valid",[],"NV8"],[[12330,12333],"valid"],[[12334,12341],"valid",[],"NV8"],[[12342,12342],"mapped",[12306]],[[12343,12343],"valid",[],"NV8"],[[12344,12344],"mapped",[21313]],[[12345,12345],"mapped",[21316]],[[12346,12346],"mapped",[21317]],[[12347,12347],"valid",[],"NV8"],[[12348,12348],"valid"],[[12349,12349],"valid",[],"NV8"],[[12350,12350],"valid",[],"NV8"],[[12351,12351],"valid",[],"NV8"],[[12352,12352],"disallowed"],[[12353,12436],"valid"],[[12437,12438],"valid"],[[12439,12440],"disallowed"],[[12441,12442],"valid"],[[12443,12443],"disallowed_STD3_mapped",[32,12441]],[[12444,12444],"disallowed_STD3_mapped",[32,12442]],[[12445,12446],"valid"],[[12447,12447],"mapped",[12424,12426]],[[12448,12448],"valid",[],"NV8"],[[12449,12542],"valid"],[[12543,12543],"mapped",[12467,12488]],[[12544,12548],"disallowed"],[[12549,12588],"valid"],[[12589,12589],"valid"],[[12590,12592],"disallowed"],[[12593,12593],"mapped",[4352]],[[12594,12594],"mapped",[4353]],[[12595,12595],"mapped",[4522]],[[12596,12596],"mapped",[4354]],[[12597,12597],"mapped",[4524]],[[12598,12598],"mapped",[4525]],[[12599,12599],"mapped",[4355]],[[12600,12600],"mapped",[4356]],[[12601,12601],"mapped",[4357]],[[12602,12602],"mapped",[4528]],[[12603,12603],"mapped",[4529]],[[12604,12604],"mapped",[4530]],[[12605,12605],"mapped",[4531]],[[12606,12606],"mapped",[4532]],[[12607,12607],"mapped",[4533]],[[12608,12608],"mapped",[4378]],[[12609,12609],"mapped",[4358]],[[12610,12610],"mapped",[4359]],[[12611,12611],"mapped",[4360]],[[12612,12612],"mapped",[4385]],[[12613,12613],"mapped",[4361]],[[12614,12614],"mapped",[4362]],[[12615,12615],"mapped",[4363]],[[12616,12616],"mapped",[4364]],[[12617,12617],"mapped",[4365]],[[12618,12618],"mapped",[4366]],[[12619,12619],"mapped",[4367]],[[12620,12620],"mapped",[4368]],[[12621,12621],"mapped",[4369]],[[12622,12622],"mapped",[4370]],[[12623,12623],"mapped",[4449]],[[12624,12624],"mapped",[4450]],[[12625,12625],"mapped",[4451]],[[12626,12626],"mapped",[4452]],[[12627,12627],"mapped",[4453]],[[12628,12628],"mapped",[4454]],[[12629,12629],"mapped",[4455]],[[12630,12630],"mapped",[4456]],[[12631,12631],"mapped",[4457]],[[12632,12632],"mapped",[4458]],[[12633,12633],"mapped",[4459]],[[12634,12634],"mapped",[4460]],[[12635,12635],"mapped",[4461]],[[12636,12636],"mapped",[4462]],[[12637,12637],"mapped",[4463]],[[12638,12638],"mapped",[4464]],[[12639,12639],"mapped",[4465]],[[12640,12640],"mapped",[4466]],[[12641,12641],"mapped",[4467]],[[12642,12642],"mapped",[4468]],[[12643,12643],"mapped",[4469]],[[12644,12644],"disallowed"],[[12645,12645],"mapped",[4372]],[[12646,12646],"mapped",[4373]],[[12647,12647],"mapped",[4551]],[[12648,12648],"mapped",[4552]],[[12649,12649],"mapped",[4556]],[[12650,12650],"mapped",[4558]],[[12651,12651],"mapped",[4563]],[[12652,12652],"mapped",[4567]],[[12653,12653],"mapped",[4569]],[[12654,12654],"mapped",[4380]],[[12655,12655],"mapped",[4573]],[[12656,12656],"mapped",[4575]],[[12657,12657],"mapped",[4381]],[[12658,12658],"mapped",[4382]],[[12659,12659],"mapped",[4384]],[[12660,12660],"mapped",[4386]],[[12661,12661],"mapped",[4387]],[[12662,12662],"mapped",[4391]],[[12663,12663],"mapped",[4393]],[[12664,12664],"mapped",[4395]],[[12665,12665],"mapped",[4396]],[[12666,12666],"mapped",[4397]],[[12667,12667],"mapped",[4398]],[[12668,12668],"mapped",[4399]],[[12669,12669],"mapped",[4402]],[[12670,12670],"mapped",[4406]],[[12671,12671],"mapped",[4416]],[[12672,12672],"mapped",[4423]],[[12673,12673],"mapp
ed",[4428]],[[12674,12674],"mapped",[4593]],[[12675,12675],"mapped",[4594]],[[12676,12676],"mapped",[4439]],[[12677,12677],"mapped",[4440]],[[12678,12678],"mapped",[4441]],[[12679,12679],"mapped",[4484]],[[12680,12680],"mapped",[4485]],[[12681,12681],"mapped",[4488]],[[12682,12682],"mapped",[4497]],[[12683,12683],"mapped",[4498]],[[12684,12684],"mapped",[4500]],[[12685,12685],"mapped",[4510]],[[12686,12686],"mapped",[4513]],[[12687,12687],"disallowed"],[[12688,12689],"valid",[],"NV8"],[[12690,12690],"mapped",[19968]],[[12691,12691],"mapped",[20108]],[[12692,12692],"mapped",[19977]],[[12693,12693],"mapped",[22235]],[[12694,12694],"mapped",[19978]],[[12695,12695],"mapped",[20013]],[[12696,12696],"mapped",[19979]],[[12697,12697],"mapped",[30002]],[[12698,12698],"mapped",[20057]],[[12699,12699],"mapped",[19993]],[[12700,12700],"mapped",[19969]],[[12701,12701],"mapped",[22825]],[[12702,12702],"mapped",[22320]],[[12703,12703],"mapped",[20154]],[[12704,12727],"valid"],[[12728,12730],"valid"],[[12731,12735],"disallowed"],[[12736,12751],"valid",[],"NV8"],[[12752,12771],"valid",[],"NV8"],[[12772,12783],"disallowed"],[[12784,12799],"valid"],[[12800,12800],"disallowed_STD3_mapped",[40,4352,41]],[[12801,12801],"disallowed_STD3_mapped",[40,4354,41]],[[12802,12802],"disallowed_STD3_mapped",[40,4355,41]],[[12803,12803],"disallowed_STD3_mapped",[40,4357,41]],[[12804,12804],"disallowed_STD3_mapped",[40,4358,41]],[[12805,12805],"disallowed_STD3_mapped",[40,4359,41]],[[12806,12806],"disallowed_STD3_mapped",[40,4361,41]],[[12807,12807],"disallowed_STD3_mapped",[40,4363,41]],[[12808,12808],"disallowed_STD3_mapped",[40,4364,41]],[[12809,12809],"disallowed_STD3_mapped",[40,4366,41]],[[12810,12810],"disallowed_STD3_mapped",[40,4367,41]],[[12811,12811],"disallowed_STD3_mapped",[40,4368,41]],[[12812,12812],"disallowed_STD3_mapped",[40,4369,41]],[[12813,12813],"disallowed_STD3_mapped",[40,4370,41]],[[12814,12814],"disallowed_STD3_mapped",[40,44032,41]],[[12815,12815],"disallowed_STD3_mapped",[40,45208,41]],[[12816,12816],"disallowed_STD3_mapped",[40,45796,41]],[[12817,12817],"disallowed_STD3_mapped",[40,46972,41]],[[12818,12818],"disallowed_STD3_mapped",[40,47560,41]],[[12819,12819],"disallowed_STD3_mapped",[40,48148,41]],[[12820,12820],"disallowed_STD3_mapped",[40,49324,41]],[[12821,12821],"disallowed_STD3_mapped",[40,50500,41]],[[12822,12822],"disallowed_STD3_mapped",[40,51088,41]],[[12823,12823],"disallowed_STD3_mapped",[40,52264,41]],[[12824,12824],"disallowed_STD3_mapped",[40,52852,41]],[[12825,12825],"disallowed_STD3_mapped",[40,53440,41]],[[12826,12826],"disallowed_STD3_mapped",[40,54028,41]],[[12827,12827],"disallowed_STD3_mapped",[40,54616,41]],[[12828,12828],"disallowed_STD3_mapped",[40,51452,41]],[[12829,12829],"disallowed_STD3_mapped",[40,50724,51204,41]],[[12830,12830],"disallowed_STD3_mapped",[40,50724,54980,41]],[[12831,12831],"disallowed"],[[12832,12832],"disallowed_STD3_mapped",[40,19968,41]],[[12833,12833],"disallowed_STD3_mapped",[40,20108,41]],[[12834,12834],"disallowed_STD3_mapped",[40,19977,41]],[[12835,12835],"disallowed_STD3_mapped",[40,22235,41]],[[12836,12836],"disallowed_STD3_mapped",[40,20116,41]],[[12837,12837],"disallowed_STD3_mapped",[40,20845,41]],[[12838,12838],"disallowed_STD3_mapped",[40,19971,41]],[[12839,12839],"disallowed_STD3_mapped",[40,20843,41]],[[12840,12840],"disallowed_STD3_mapped",[40,20061,41]],[[12841,12841],"disallowed_STD3_mapped",[40,21313,41]],[[12842,12842],"disallowed_STD3_mapped",[40,26376,41]],[[12843,12843],"disallowed_STD3_mapped",[40,28779,41]],[[12844,12844],
"disallowed_STD3_mapped",[40,27700,41]],[[12845,12845],"disallowed_STD3_mapped",[40,26408,41]],[[12846,12846],"disallowed_STD3_mapped",[40,37329,41]],[[12847,12847],"disallowed_STD3_mapped",[40,22303,41]],[[12848,12848],"disallowed_STD3_mapped",[40,26085,41]],[[12849,12849],"disallowed_STD3_mapped",[40,26666,41]],[[12850,12850],"disallowed_STD3_mapped",[40,26377,41]],[[12851,12851],"disallowed_STD3_mapped",[40,31038,41]],[[12852,12852],"disallowed_STD3_mapped",[40,21517,41]],[[12853,12853],"disallowed_STD3_mapped",[40,29305,41]],[[12854,12854],"disallowed_STD3_mapped",[40,36001,41]],[[12855,12855],"disallowed_STD3_mapped",[40,31069,41]],[[12856,12856],"disallowed_STD3_mapped",[40,21172,41]],[[12857,12857],"disallowed_STD3_mapped",[40,20195,41]],[[12858,12858],"disallowed_STD3_mapped",[40,21628,41]],[[12859,12859],"disallowed_STD3_mapped",[40,23398,41]],[[12860,12860],"disallowed_STD3_mapped",[40,30435,41]],[[12861,12861],"disallowed_STD3_mapped",[40,20225,41]],[[12862,12862],"disallowed_STD3_mapped",[40,36039,41]],[[12863,12863],"disallowed_STD3_mapped",[40,21332,41]],[[12864,12864],"disallowed_STD3_mapped",[40,31085,41]],[[12865,12865],"disallowed_STD3_mapped",[40,20241,41]],[[12866,12866],"disallowed_STD3_mapped",[40,33258,41]],[[12867,12867],"disallowed_STD3_mapped",[40,33267,41]],[[12868,12868],"mapped",[21839]],[[12869,12869],"mapped",[24188]],[[12870,12870],"mapped",[25991]],[[12871,12871],"mapped",[31631]],[[12872,12879],"valid",[],"NV8"],[[12880,12880],"mapped",[112,116,101]],[[12881,12881],"mapped",[50,49]],[[12882,12882],"mapped",[50,50]],[[12883,12883],"mapped",[50,51]],[[12884,12884],"mapped",[50,52]],[[12885,12885],"mapped",[50,53]],[[12886,12886],"mapped",[50,54]],[[12887,12887],"mapped",[50,55]],[[12888,12888],"mapped",[50,56]],[[12889,12889],"mapped",[50,57]],[[12890,12890],"mapped",[51,48]],[[12891,12891],"mapped",[51,49]],[[12892,12892],"mapped",[51,50]],[[12893,12893],"mapped",[51,51]],[[12894,12894],"mapped",[51,52]],[[12895,12895],"mapped",[51,53]],[[12896,12896],"mapped",[4352]],[[12897,12897],"mapped",[4354]],[[12898,12898],"mapped",[4355]],[[12899,12899],"mapped",[4357]],[[12900,12900],"mapped",[4358]],[[12901,12901],"mapped",[4359]],[[12902,12902],"mapped",[4361]],[[12903,12903],"mapped",[4363]],[[12904,12904],"mapped",[4364]],[[12905,12905],"mapped",[4366]],[[12906,12906],"mapped",[4367]],[[12907,12907],"mapped",[4368]],[[12908,12908],"mapped",[4369]],[[12909,12909],"mapped",[4370]],[[12910,12910],"mapped",[44032]],[[12911,12911],"mapped",[45208]],[[12912,12912],"mapped",[45796]],[[12913,12913],"mapped",[46972]],[[12914,12914],"mapped",[47560]],[[12915,12915],"mapped",[48148]],[[12916,12916],"mapped",[49324]],[[12917,12917],"mapped",[50500]],[[12918,12918],"mapped",[51088]],[[12919,12919],"mapped",[52264]],[[12920,12920],"mapped",[52852]],[[12921,12921],"mapped",[53440]],[[12922,12922],"mapped",[54028]],[[12923,12923],"mapped",[54616]],[[12924,12924],"mapped",[52280,44256]],[[12925,12925],"mapped",[51452,51032]],[[12926,12926],"mapped",[50864]],[[12927,12927],"valid",[],"NV8"],[[12928,12928],"mapped",[19968]],[[12929,12929],"mapped",[20108]],[[12930,12930],"mapped",[19977]],[[12931,12931],"mapped",[22235]],[[12932,12932],"mapped",[20116]],[[12933,12933],"mapped",[20845]],[[12934,12934],"mapped",[19971]],[[12935,12935],"mapped",[20843]],[[12936,12936],"mapped",[20061]],[[12937,12937],"mapped",[21313]],[[12938,12938],"mapped",[26376]],[[12939,12939],"mapped",[28779]],[[12940,12940],"mapped",[27700]],[[12941,12941],"mapped",[26408]],[[12942,12942],"mapped",[37329]],[[
12943,12943],"mapped",[22303]],[[12944,12944],"mapped",[26085]],[[12945,12945],"mapped",[26666]],[[12946,12946],"mapped",[26377]],[[12947,12947],"mapped",[31038]],[[12948,12948],"mapped",[21517]],[[12949,12949],"mapped",[29305]],[[12950,12950],"mapped",[36001]],[[12951,12951],"mapped",[31069]],[[12952,12952],"mapped",[21172]],[[12953,12953],"mapped",[31192]],[[12954,12954],"mapped",[30007]],[[12955,12955],"mapped",[22899]],[[12956,12956],"mapped",[36969]],[[12957,12957],"mapped",[20778]],[[12958,12958],"mapped",[21360]],[[12959,12959],"mapped",[27880]],[[12960,12960],"mapped",[38917]],[[12961,12961],"mapped",[20241]],[[12962,12962],"mapped",[20889]],[[12963,12963],"mapped",[27491]],[[12964,12964],"mapped",[19978]],[[12965,12965],"mapped",[20013]],[[12966,12966],"mapped",[19979]],[[12967,12967],"mapped",[24038]],[[12968,12968],"mapped",[21491]],[[12969,12969],"mapped",[21307]],[[12970,12970],"mapped",[23447]],[[12971,12971],"mapped",[23398]],[[12972,12972],"mapped",[30435]],[[12973,12973],"mapped",[20225]],[[12974,12974],"mapped",[36039]],[[12975,12975],"mapped",[21332]],[[12976,12976],"mapped",[22812]],[[12977,12977],"mapped",[51,54]],[[12978,12978],"mapped",[51,55]],[[12979,12979],"mapped",[51,56]],[[12980,12980],"mapped",[51,57]],[[12981,12981],"mapped",[52,48]],[[12982,12982],"mapped",[52,49]],[[12983,12983],"mapped",[52,50]],[[12984,12984],"mapped",[52,51]],[[12985,12985],"mapped",[52,52]],[[12986,12986],"mapped",[52,53]],[[12987,12987],"mapped",[52,54]],[[12988,12988],"mapped",[52,55]],[[12989,12989],"mapped",[52,56]],[[12990,12990],"mapped",[52,57]],[[12991,12991],"mapped",[53,48]],[[12992,12992],"mapped",[49,26376]],[[12993,12993],"mapped",[50,26376]],[[12994,12994],"mapped",[51,26376]],[[12995,12995],"mapped",[52,26376]],[[12996,12996],"mapped",[53,26376]],[[12997,12997],"mapped",[54,26376]],[[12998,12998],"mapped",[55,26376]],[[12999,12999],"mapped",[56,26376]],[[13000,13000],"mapped",[57,26376]],[[13001,13001],"mapped",[49,48,26376]],[[13002,13002],"mapped",[49,49,26376]],[[13003,13003],"mapped",[49,50,26376]],[[13004,13004],"mapped",[104,103]],[[13005,13005],"mapped",[101,114,103]],[[13006,13006],"mapped",[101,118]],[[13007,13007],"mapped",[108,116,100]],[[13008,13008],"mapped",[12450]],[[13009,13009],"mapped",[12452]],[[13010,13010],"mapped",[12454]],[[13011,13011],"mapped",[12456]],[[13012,13012],"mapped",[12458]],[[13013,13013],"mapped",[12459]],[[13014,13014],"mapped",[12461]],[[13015,13015],"mapped",[12463]],[[13016,13016],"mapped",[12465]],[[13017,13017],"mapped",[12467]],[[13018,13018],"mapped",[12469]],[[13019,13019],"mapped",[12471]],[[13020,13020],"mapped",[12473]],[[13021,13021],"mapped",[12475]],[[13022,13022],"mapped",[12477]],[[13023,13023],"mapped",[12479]],[[13024,13024],"mapped",[12481]],[[13025,13025],"mapped",[12484]],[[13026,13026],"mapped",[12486]],[[13027,13027],"mapped",[12488]],[[13028,13028],"mapped",[12490]],[[13029,13029],"mapped",[12491]],[[13030,13030],"mapped",[12492]],[[13031,13031],"mapped",[12493]],[[13032,13032],"mapped",[12494]],[[13033,13033],"mapped",[12495]],[[13034,13034],"mapped",[12498]],[[13035,13035],"mapped",[12501]],[[13036,13036],"mapped",[12504]],[[13037,13037],"mapped",[12507]],[[13038,13038],"mapped",[12510]],[[13039,13039],"mapped",[12511]],[[13040,13040],"mapped",[12512]],[[13041,13041],"mapped",[12513]],[[13042,13042],"mapped",[12514]],[[13043,13043],"mapped",[12516]],[[13044,13044],"mapped",[12518]],[[13045,13045],"mapped",[12520]],[[13046,13046],"mapped",[12521]],[[13047,13047],"mapped",[12522]],[[13048,13048],"mapped",[12523]
],[[13049,13049],"mapped",[12524]],[[13050,13050],"mapped",[12525]],[[13051,13051],"mapped",[12527]],[[13052,13052],"mapped",[12528]],[[13053,13053],"mapped",[12529]],[[13054,13054],"mapped",[12530]],[[13055,13055],"disallowed"],[[13056,13056],"mapped",[12450,12497,12540,12488]],[[13057,13057],"mapped",[12450,12523,12501,12449]],[[13058,13058],"mapped",[12450,12531,12506,12450]],[[13059,13059],"mapped",[12450,12540,12523]],[[13060,13060],"mapped",[12452,12491,12531,12464]],[[13061,13061],"mapped",[12452,12531,12481]],[[13062,13062],"mapped",[12454,12457,12531]],[[13063,13063],"mapped",[12456,12473,12463,12540,12489]],[[13064,13064],"mapped",[12456,12540,12459,12540]],[[13065,13065],"mapped",[12458,12531,12473]],[[13066,13066],"mapped",[12458,12540,12512]],[[13067,13067],"mapped",[12459,12452,12522]],[[13068,13068],"mapped",[12459,12521,12483,12488]],[[13069,13069],"mapped",[12459,12525,12522,12540]],[[13070,13070],"mapped",[12460,12525,12531]],[[13071,13071],"mapped",[12460,12531,12510]],[[13072,13072],"mapped",[12462,12460]],[[13073,13073],"mapped",[12462,12491,12540]],[[13074,13074],"mapped",[12461,12517,12522,12540]],[[13075,13075],"mapped",[12462,12523,12480,12540]],[[13076,13076],"mapped",[12461,12525]],[[13077,13077],"mapped",[12461,12525,12464,12521,12512]],[[13078,13078],"mapped",[12461,12525,12513,12540,12488,12523]],[[13079,13079],"mapped",[12461,12525,12527,12483,12488]],[[13080,13080],"mapped",[12464,12521,12512]],[[13081,13081],"mapped",[12464,12521,12512,12488,12531]],[[13082,13082],"mapped",[12463,12523,12476,12452,12525]],[[13083,13083],"mapped",[12463,12525,12540,12493]],[[13084,13084],"mapped",[12465,12540,12473]],[[13085,13085],"mapped",[12467,12523,12490]],[[13086,13086],"mapped",[12467,12540,12509]],[[13087,13087],"mapped",[12469,12452,12463,12523]],[[13088,13088],"mapped",[12469,12531,12481,12540,12512]],[[13089,13089],"mapped",[12471,12522,12531,12464]],[[13090,13090],"mapped",[12475,12531,12481]],[[13091,13091],"mapped",[12475,12531,12488]],[[13092,13092],"mapped",[12480,12540,12473]],[[13093,13093],"mapped",[12487,12471]],[[13094,13094],"mapped",[12489,12523]],[[13095,13095],"mapped",[12488,12531]],[[13096,13096],"mapped",[12490,12494]],[[13097,13097],"mapped",[12494,12483,12488]],[[13098,13098],"mapped",[12495,12452,12484]],[[13099,13099],"mapped",[12497,12540,12475,12531,12488]],[[13100,13100],"mapped",[12497,12540,12484]],[[13101,13101],"mapped",[12496,12540,12524,12523]],[[13102,13102],"mapped",[12500,12450,12473,12488,12523]],[[13103,13103],"mapped",[12500,12463,12523]],[[13104,13104],"mapped",[12500,12467]],[[13105,13105],"mapped",[12499,12523]],[[13106,13106],"mapped",[12501,12449,12521,12483,12489]],[[13107,13107],"mapped",[12501,12451,12540,12488]],[[13108,13108],"mapped",[12502,12483,12471,12455,12523]],[[13109,13109],"mapped",[12501,12521,12531]],[[13110,13110],"mapped",[12504,12463,12479,12540,12523]],[[13111,13111],"mapped",[12506,12477]],[[13112,13112],"mapped",[12506,12491,12498]],[[13113,13113],"mapped",[12504,12523,12484]],[[13114,13114],"mapped",[12506,12531,12473]],[[13115,13115],"mapped",[12506,12540,12472]],[[13116,13116],"mapped",[12505,12540,12479]],[[13117,13117],"mapped",[12509,12452,12531,12488]],[[13118,13118],"mapped",[12508,12523,12488]],[[13119,13119],"mapped",[12507,12531]],[[13120,13120],"mapped",[12509,12531,12489]],[[13121,13121],"mapped",[12507,12540,12523]],[[13122,13122],"mapped",[12507,12540,12531]],[[13123,13123],"mapped",[12510,12452,12463,12525]],[[13124,13124],"mapped",[12510,12452,12523]],[[13125,13125],"mapped",[12510,1248
3,12495]],[[13126,13126],"mapped",[12510,12523,12463]],[[13127,13127],"mapped",[12510,12531,12471,12519,12531]],[[13128,13128],"mapped",[12511,12463,12525,12531]],[[13129,13129],"mapped",[12511,12522]],[[13130,13130],"mapped",[12511,12522,12496,12540,12523]],[[13131,13131],"mapped",[12513,12460]],[[13132,13132],"mapped",[12513,12460,12488,12531]],[[13133,13133],"mapped",[12513,12540,12488,12523]],[[13134,13134],"mapped",[12516,12540,12489]],[[13135,13135],"mapped",[12516,12540,12523]],[[13136,13136],"mapped",[12518,12450,12531]],[[13137,13137],"mapped",[12522,12483,12488,12523]],[[13138,13138],"mapped",[12522,12521]],[[13139,13139],"mapped",[12523,12500,12540]],[[13140,13140],"mapped",[12523,12540,12502,12523]],[[13141,13141],"mapped",[12524,12512]],[[13142,13142],"mapped",[12524,12531,12488,12466,12531]],[[13143,13143],"mapped",[12527,12483,12488]],[[13144,13144],"mapped",[48,28857]],[[13145,13145],"mapped",[49,28857]],[[13146,13146],"mapped",[50,28857]],[[13147,13147],"mapped",[51,28857]],[[13148,13148],"mapped",[52,28857]],[[13149,13149],"mapped",[53,28857]],[[13150,13150],"mapped",[54,28857]],[[13151,13151],"mapped",[55,28857]],[[13152,13152],"mapped",[56,28857]],[[13153,13153],"mapped",[57,28857]],[[13154,13154],"mapped",[49,48,28857]],[[13155,13155],"mapped",[49,49,28857]],[[13156,13156],"mapped",[49,50,28857]],[[13157,13157],"mapped",[49,51,28857]],[[13158,13158],"mapped",[49,52,28857]],[[13159,13159],"mapped",[49,53,28857]],[[13160,13160],"mapped",[49,54,28857]],[[13161,13161],"mapped",[49,55,28857]],[[13162,13162],"mapped",[49,56,28857]],[[13163,13163],"mapped",[49,57,28857]],[[13164,13164],"mapped",[50,48,28857]],[[13165,13165],"mapped",[50,49,28857]],[[13166,13166],"mapped",[50,50,28857]],[[13167,13167],"mapped",[50,51,28857]],[[13168,13168],"mapped",[50,52,28857]],[[13169,13169],"mapped",[104,112,97]],[[13170,13170],"mapped",[100,97]],[[13171,13171],"mapped",[97,117]],[[13172,13172],"mapped",[98,97,114]],[[13173,13173],"mapped",[111,118]],[[13174,13174],"mapped",[112,99]],[[13175,13175],"mapped",[100,109]],[[13176,13176],"mapped",[100,109,50]],[[13177,13177],"mapped",[100,109,51]],[[13178,13178],"mapped",[105,117]],[[13179,13179],"mapped",[24179,25104]],[[13180,13180],"mapped",[26157,21644]],[[13181,13181],"mapped",[22823,27491]],[[13182,13182],"mapped",[26126,27835]],[[13183,13183],"mapped",[26666,24335,20250,31038]],[[13184,13184],"mapped",[112,97]],[[13185,13185],"mapped",[110,97]],[[13186,13186],"mapped",[956,97]],[[13187,13187],"mapped",[109,97]],[[13188,13188],"mapped",[107,97]],[[13189,13189],"mapped",[107,98]],[[13190,13190],"mapped",[109,98]],[[13191,13191],"mapped",[103,98]],[[13192,13192],"mapped",[99,97,108]],[[13193,13193],"mapped",[107,99,97,108]],[[13194,13194],"mapped",[112,102]],[[13195,13195],"mapped",[110,102]],[[13196,13196],"mapped",[956,102]],[[13197,13197],"mapped",[956,103]],[[13198,13198],"mapped",[109,103]],[[13199,13199],"mapped",[107,103]],[[13200,13200],"mapped",[104,122]],[[13201,13201],"mapped",[107,104,122]],[[13202,13202],"mapped",[109,104,122]],[[13203,13203],"mapped",[103,104,122]],[[13204,13204],"mapped",[116,104,122]],[[13205,13205],"mapped",[956,108]],[[13206,13206],"mapped",[109,108]],[[13207,13207],"mapped",[100,108]],[[13208,13208],"mapped",[107,108]],[[13209,13209],"mapped",[102,109]],[[13210,13210],"mapped",[110,109]],[[13211,13211],"mapped",[956,109]],[[13212,13212],"mapped",[109,109]],[[13213,13213],"mapped",[99,109]],[[13214,13214],"mapped",[107,109]],[[13215,13215],"mapped",[109,109,50]],[[13216,13216],"mapped",[99,109,50]],[[13217,
13217],"mapped",[109,50]],[[13218,13218],"mapped",[107,109,50]],[[13219,13219],"mapped",[109,109,51]],[[13220,13220],"mapped",[99,109,51]],[[13221,13221],"mapped",[109,51]],[[13222,13222],"mapped",[107,109,51]],[[13223,13223],"mapped",[109,8725,115]],[[13224,13224],"mapped",[109,8725,115,50]],[[13225,13225],"mapped",[112,97]],[[13226,13226],"mapped",[107,112,97]],[[13227,13227],"mapped",[109,112,97]],[[13228,13228],"mapped",[103,112,97]],[[13229,13229],"mapped",[114,97,100]],[[13230,13230],"mapped",[114,97,100,8725,115]],[[13231,13231],"mapped",[114,97,100,8725,115,50]],[[13232,13232],"mapped",[112,115]],[[13233,13233],"mapped",[110,115]],[[13234,13234],"mapped",[956,115]],[[13235,13235],"mapped",[109,115]],[[13236,13236],"mapped",[112,118]],[[13237,13237],"mapped",[110,118]],[[13238,13238],"mapped",[956,118]],[[13239,13239],"mapped",[109,118]],[[13240,13240],"mapped",[107,118]],[[13241,13241],"mapped",[109,118]],[[13242,13242],"mapped",[112,119]],[[13243,13243],"mapped",[110,119]],[[13244,13244],"mapped",[956,119]],[[13245,13245],"mapped",[109,119]],[[13246,13246],"mapped",[107,119]],[[13247,13247],"mapped",[109,119]],[[13248,13248],"mapped",[107,969]],[[13249,13249],"mapped",[109,969]],[[13250,13250],"disallowed"],[[13251,13251],"mapped",[98,113]],[[13252,13252],"mapped",[99,99]],[[13253,13253],"mapped",[99,100]],[[13254,13254],"mapped",[99,8725,107,103]],[[13255,13255],"disallowed"],[[13256,13256],"mapped",[100,98]],[[13257,13257],"mapped",[103,121]],[[13258,13258],"mapped",[104,97]],[[13259,13259],"mapped",[104,112]],[[13260,13260],"mapped",[105,110]],[[13261,13261],"mapped",[107,107]],[[13262,13262],"mapped",[107,109]],[[13263,13263],"mapped",[107,116]],[[13264,13264],"mapped",[108,109]],[[13265,13265],"mapped",[108,110]],[[13266,13266],"mapped",[108,111,103]],[[13267,13267],"mapped",[108,120]],[[13268,13268],"mapped",[109,98]],[[13269,13269],"mapped",[109,105,108]],[[13270,13270],"mapped",[109,111,108]],[[13271,13271],"mapped",[112,104]],[[13272,13272],"disallowed"],[[13273,13273],"mapped",[112,112,109]],[[13274,13274],"mapped",[112,114]],[[13275,13275],"mapped",[115,114]],[[13276,13276],"mapped",[115,118]],[[13277,13277],"mapped",[119,98]],[[13278,13278],"mapped",[118,8725,109]],[[13279,13279],"mapped",[97,8725,109]],[[13280,13280],"mapped",[49,26085]],[[13281,13281],"mapped",[50,26085]],[[13282,13282],"mapped",[51,26085]],[[13283,13283],"mapped",[52,26085]],[[13284,13284],"mapped",[53,26085]],[[13285,13285],"mapped",[54,26085]],[[13286,13286],"mapped",[55,26085]],[[13287,13287],"mapped",[56,26085]],[[13288,13288],"mapped",[57,26085]],[[13289,13289],"mapped",[49,48,26085]],[[13290,13290],"mapped",[49,49,26085]],[[13291,13291],"mapped",[49,50,26085]],[[13292,13292],"mapped",[49,51,26085]],[[13293,13293],"mapped",[49,52,26085]],[[13294,13294],"mapped",[49,53,26085]],[[13295,13295],"mapped",[49,54,26085]],[[13296,13296],"mapped",[49,55,26085]],[[13297,13297],"mapped",[49,56,26085]],[[13298,13298],"mapped",[49,57,26085]],[[13299,13299],"mapped",[50,48,26085]],[[13300,13300],"mapped",[50,49,26085]],[[13301,13301],"mapped",[50,50,26085]],[[13302,13302],"mapped",[50,51,26085]],[[13303,13303],"mapped",[50,52,26085]],[[13304,13304],"mapped",[50,53,26085]],[[13305,13305],"mapped",[50,54,26085]],[[13306,13306],"mapped",[50,55,26085]],[[13307,13307],"mapped",[50,56,26085]],[[13308,13308],"mapped",[50,57,26085]],[[13309,13309],"mapped",[51,48,26085]],[[13310,13310],"mapped",[51,49,26085]],[[13311,13311],"mapped",[103,97,108]],[[13312,19893],"valid"],[[19894,19903],"disallowed"],[[19904,19967],"va
lid",[],"NV8"],[[19968,40869],"valid"],[[40870,40891],"valid"],[[40892,40899],"valid"],[[40900,40907],"valid"],[[40908,40908],"valid"],[[40909,40917],"valid"],[[40918,40959],"disallowed"],[[40960,42124],"valid"],[[42125,42127],"disallowed"],[[42128,42145],"valid",[],"NV8"],[[42146,42147],"valid",[],"NV8"],[[42148,42163],"valid",[],"NV8"],[[42164,42164],"valid",[],"NV8"],[[42165,42176],"valid",[],"NV8"],[[42177,42177],"valid",[],"NV8"],[[42178,42180],"valid",[],"NV8"],[[42181,42181],"valid",[],"NV8"],[[42182,42182],"valid",[],"NV8"],[[42183,42191],"disallowed"],[[42192,42237],"valid"],[[42238,42239],"valid",[],"NV8"],[[42240,42508],"valid"],[[42509,42511],"valid",[],"NV8"],[[42512,42539],"valid"],[[42540,42559],"disallowed"],[[42560,42560],"mapped",[42561]],[[42561,42561],"valid"],[[42562,42562],"mapped",[42563]],[[42563,42563],"valid"],[[42564,42564],"mapped",[42565]],[[42565,42565],"valid"],[[42566,42566],"mapped",[42567]],[[42567,42567],"valid"],[[42568,42568],"mapped",[42569]],[[42569,42569],"valid"],[[42570,42570],"mapped",[42571]],[[42571,42571],"valid"],[[42572,42572],"mapped",[42573]],[[42573,42573],"valid"],[[42574,42574],"mapped",[42575]],[[42575,42575],"valid"],[[42576,42576],"mapped",[42577]],[[42577,42577],"valid"],[[42578,42578],"mapped",[42579]],[[42579,42579],"valid"],[[42580,42580],"mapped",[42581]],[[42581,42581],"valid"],[[42582,42582],"mapped",[42583]],[[42583,42583],"valid"],[[42584,42584],"mapped",[42585]],[[42585,42585],"valid"],[[42586,42586],"mapped",[42587]],[[42587,42587],"valid"],[[42588,42588],"mapped",[42589]],[[42589,42589],"valid"],[[42590,42590],"mapped",[42591]],[[42591,42591],"valid"],[[42592,42592],"mapped",[42593]],[[42593,42593],"valid"],[[42594,42594],"mapped",[42595]],[[42595,42595],"valid"],[[42596,42596],"mapped",[42597]],[[42597,42597],"valid"],[[42598,42598],"mapped",[42599]],[[42599,42599],"valid"],[[42600,42600],"mapped",[42601]],[[42601,42601],"valid"],[[42602,42602],"mapped",[42603]],[[42603,42603],"valid"],[[42604,42604],"mapped",[42605]],[[42605,42607],"valid"],[[42608,42611],"valid",[],"NV8"],[[42612,42619],"valid"],[[42620,42621],"valid"],[[42622,42622],"valid",[],"NV8"],[[42623,42623],"valid"],[[42624,42624],"mapped",[42625]],[[42625,42625],"valid"],[[42626,42626],"mapped",[42627]],[[42627,42627],"valid"],[[42628,42628],"mapped",[42629]],[[42629,42629],"valid"],[[42630,42630],"mapped",[42631]],[[42631,42631],"valid"],[[42632,42632],"mapped",[42633]],[[42633,42633],"valid"],[[42634,42634],"mapped",[42635]],[[42635,42635],"valid"],[[42636,42636],"mapped",[42637]],[[42637,42637],"valid"],[[42638,42638],"mapped",[42639]],[[42639,42639],"valid"],[[42640,42640],"mapped",[42641]],[[42641,42641],"valid"],[[42642,42642],"mapped",[42643]],[[42643,42643],"valid"],[[42644,42644],"mapped",[42645]],[[42645,42645],"valid"],[[42646,42646],"mapped",[42647]],[[42647,42647],"valid"],[[42648,42648],"mapped",[42649]],[[42649,42649],"valid"],[[42650,42650],"mapped",[42651]],[[42651,42651],"valid"],[[42652,42652],"mapped",[1098]],[[42653,42653],"mapped",[1100]],[[42654,42654],"valid"],[[42655,42655],"valid"],[[42656,42725],"valid"],[[42726,42735],"valid",[],"NV8"],[[42736,42737],"valid"],[[42738,42743],"valid",[],"NV8"],[[42744,42751],"disallowed"],[[42752,42774],"valid",[],"NV8"],[[42775,42778],"valid"],[[42779,42783],"valid"],[[42784,42785],"valid",[],"NV8"],[[42786,42786],"mapped",[42787]],[[42787,42787],"valid"],[[42788,42788],"mapped",[42789]],[[42789,42789],"valid"],[[42790,42790],"mapped",[42791]],[[42791,42791],"valid"],[[42792,42792],"mapped",[42793]],[
[42793,42793],"valid"],[[42794,42794],"mapped",[42795]],[[42795,42795],"valid"],[[42796,42796],"mapped",[42797]],[[42797,42797],"valid"],[[42798,42798],"mapped",[42799]],[[42799,42801],"valid"],[[42802,42802],"mapped",[42803]],[[42803,42803],"valid"],[[42804,42804],"mapped",[42805]],[[42805,42805],"valid"],[[42806,42806],"mapped",[42807]],[[42807,42807],"valid"],[[42808,42808],"mapped",[42809]],[[42809,42809],"valid"],[[42810,42810],"mapped",[42811]],[[42811,42811],"valid"],[[42812,42812],"mapped",[42813]],[[42813,42813],"valid"],[[42814,42814],"mapped",[42815]],[[42815,42815],"valid"],[[42816,42816],"mapped",[42817]],[[42817,42817],"valid"],[[42818,42818],"mapped",[42819]],[[42819,42819],"valid"],[[42820,42820],"mapped",[42821]],[[42821,42821],"valid"],[[42822,42822],"mapped",[42823]],[[42823,42823],"valid"],[[42824,42824],"mapped",[42825]],[[42825,42825],"valid"],[[42826,42826],"mapped",[42827]],[[42827,42827],"valid"],[[42828,42828],"mapped",[42829]],[[42829,42829],"valid"],[[42830,42830],"mapped",[42831]],[[42831,42831],"valid"],[[42832,42832],"mapped",[42833]],[[42833,42833],"valid"],[[42834,42834],"mapped",[42835]],[[42835,42835],"valid"],[[42836,42836],"mapped",[42837]],[[42837,42837],"valid"],[[42838,42838],"mapped",[42839]],[[42839,42839],"valid"],[[42840,42840],"mapped",[42841]],[[42841,42841],"valid"],[[42842,42842],"mapped",[42843]],[[42843,42843],"valid"],[[42844,42844],"mapped",[42845]],[[42845,42845],"valid"],[[42846,42846],"mapped",[42847]],[[42847,42847],"valid"],[[42848,42848],"mapped",[42849]],[[42849,42849],"valid"],[[42850,42850],"mapped",[42851]],[[42851,42851],"valid"],[[42852,42852],"mapped",[42853]],[[42853,42853],"valid"],[[42854,42854],"mapped",[42855]],[[42855,42855],"valid"],[[42856,42856],"mapped",[42857]],[[42857,42857],"valid"],[[42858,42858],"mapped",[42859]],[[42859,42859],"valid"],[[42860,42860],"mapped",[42861]],[[42861,42861],"valid"],[[42862,42862],"mapped",[42863]],[[42863,42863],"valid"],[[42864,42864],"mapped",[42863]],[[42865,42872],"valid"],[[42873,42873],"mapped",[42874]],[[42874,42874],"valid"],[[42875,42875],"mapped",[42876]],[[42876,42876],"valid"],[[42877,42877],"mapped",[7545]],[[42878,42878],"mapped",[42879]],[[42879,42879],"valid"],[[42880,42880],"mapped",[42881]],[[42881,42881],"valid"],[[42882,42882],"mapped",[42883]],[[42883,42883],"valid"],[[42884,42884],"mapped",[42885]],[[42885,42885],"valid"],[[42886,42886],"mapped",[42887]],[[42887,42888],"valid"],[[42889,42890],"valid",[],"NV8"],[[42891,42891],"mapped",[42892]],[[42892,42892],"valid"],[[42893,42893],"mapped",[613]],[[42894,42894],"valid"],[[42895,42895],"valid"],[[42896,42896],"mapped",[42897]],[[42897,42897],"valid"],[[42898,42898],"mapped",[42899]],[[42899,42899],"valid"],[[42900,42901],"valid"],[[42902,42902],"mapped",[42903]],[[42903,42903],"valid"],[[42904,42904],"mapped",[42905]],[[42905,42905],"valid"],[[42906,42906],"mapped",[42907]],[[42907,42907],"valid"],[[42908,42908],"mapped",[42909]],[[42909,42909],"valid"],[[42910,42910],"mapped",[42911]],[[42911,42911],"valid"],[[42912,42912],"mapped",[42913]],[[42913,42913],"valid"],[[42914,42914],"mapped",[42915]],[[42915,42915],"valid"],[[42916,42916],"mapped",[42917]],[[42917,42917],"valid"],[[42918,42918],"mapped",[42919]],[[42919,42919],"valid"],[[42920,42920],"mapped",[42921]],[[42921,42921],"valid"],[[42922,42922],"mapped",[614]],[[42923,42923],"mapped",[604]],[[42924,42924],"mapped",[609]],[[42925,42925],"mapped",[620]],[[42926,42927],"disallowed"],[[42928,42928],"mapped",[670]],[[42929,42929],"mapped",[647]],[[42930,42930]
,"mapped",[669]],[[42931,42931],"mapped",[43859]],[[42932,42932],"mapped",[42933]],[[42933,42933],"valid"],[[42934,42934],"mapped",[42935]],[[42935,42935],"valid"],[[42936,42998],"disallowed"],[[42999,42999],"valid"],[[43000,43000],"mapped",[295]],[[43001,43001],"mapped",[339]],[[43002,43002],"valid"],[[43003,43007],"valid"],[[43008,43047],"valid"],[[43048,43051],"valid",[],"NV8"],[[43052,43055],"disallowed"],[[43056,43065],"valid",[],"NV8"],[[43066,43071],"disallowed"],[[43072,43123],"valid"],[[43124,43127],"valid",[],"NV8"],[[43128,43135],"disallowed"],[[43136,43204],"valid"],[[43205,43213],"disallowed"],[[43214,43215],"valid",[],"NV8"],[[43216,43225],"valid"],[[43226,43231],"disallowed"],[[43232,43255],"valid"],[[43256,43258],"valid",[],"NV8"],[[43259,43259],"valid"],[[43260,43260],"valid",[],"NV8"],[[43261,43261],"valid"],[[43262,43263],"disallowed"],[[43264,43309],"valid"],[[43310,43311],"valid",[],"NV8"],[[43312,43347],"valid"],[[43348,43358],"disallowed"],[[43359,43359],"valid",[],"NV8"],[[43360,43388],"valid",[],"NV8"],[[43389,43391],"disallowed"],[[43392,43456],"valid"],[[43457,43469],"valid",[],"NV8"],[[43470,43470],"disallowed"],[[43471,43481],"valid"],[[43482,43485],"disallowed"],[[43486,43487],"valid",[],"NV8"],[[43488,43518],"valid"],[[43519,43519],"disallowed"],[[43520,43574],"valid"],[[43575,43583],"disallowed"],[[43584,43597],"valid"],[[43598,43599],"disallowed"],[[43600,43609],"valid"],[[43610,43611],"disallowed"],[[43612,43615],"valid",[],"NV8"],[[43616,43638],"valid"],[[43639,43641],"valid",[],"NV8"],[[43642,43643],"valid"],[[43644,43647],"valid"],[[43648,43714],"valid"],[[43715,43738],"disallowed"],[[43739,43741],"valid"],[[43742,43743],"valid",[],"NV8"],[[43744,43759],"valid"],[[43760,43761],"valid",[],"NV8"],[[43762,43766],"valid"],[[43767,43776],"disallowed"],[[43777,43782],"valid"],[[43783,43784],"disallowed"],[[43785,43790],"valid"],[[43791,43792],"disallowed"],[[43793,43798],"valid"],[[43799,43807],"disallowed"],[[43808,43814],"valid"],[[43815,43815],"disallowed"],[[43816,43822],"valid"],[[43823,43823],"disallowed"],[[43824,43866],"valid"],[[43867,43867],"valid",[],"NV8"],[[43868,43868],"mapped",[42791]],[[43869,43869],"mapped",[43831]],[[43870,43870],"mapped",[619]],[[43871,43871],"mapped",[43858]],[[43872,43875],"valid"],[[43876,43877],"valid"],[[43878,43887],"disallowed"],[[43888,43888],"mapped",[5024]],[[43889,43889],"mapped",[5025]],[[43890,43890],"mapped",[5026]],[[43891,43891],"mapped",[5027]],[[43892,43892],"mapped",[5028]],[[43893,43893],"mapped",[5029]],[[43894,43894],"mapped",[5030]],[[43895,43895],"mapped",[5031]],[[43896,43896],"mapped",[5032]],[[43897,43897],"mapped",[5033]],[[43898,43898],"mapped",[5034]],[[43899,43899],"mapped",[5035]],[[43900,43900],"mapped",[5036]],[[43901,43901],"mapped",[5037]],[[43902,43902],"mapped",[5038]],[[43903,43903],"mapped",[5039]],[[43904,43904],"mapped",[5040]],[[43905,43905],"mapped",[5041]],[[43906,43906],"mapped",[5042]],[[43907,43907],"mapped",[5043]],[[43908,43908],"mapped",[5044]],[[43909,43909],"mapped",[5045]],[[43910,43910],"mapped",[5046]],[[43911,43911],"mapped",[5047]],[[43912,43912],"mapped",[5048]],[[43913,43913],"mapped",[5049]],[[43914,43914],"mapped",[5050]],[[43915,43915],"mapped",[5051]],[[43916,43916],"mapped",[5052]],[[43917,43917],"mapped",[5053]],[[43918,43918],"mapped",[5054]],[[43919,43919],"mapped",[5055]],[[43920,43920],"mapped",[5056]],[[43921,43921],"mapped",[5057]],[[43922,43922],"mapped",[5058]],[[43923,43923],"mapped",[5059]],[[43924,43924],"mapped",[5060]],[[43925,43925],"mapped",[506
1]],[[43926,43926],"mapped",[5062]],[[43927,43927],"mapped",[5063]],[[43928,43928],"mapped",[5064]],[[43929,43929],"mapped",[5065]],[[43930,43930],"mapped",[5066]],[[43931,43931],"mapped",[5067]],[[43932,43932],"mapped",[5068]],[[43933,43933],"mapped",[5069]],[[43934,43934],"mapped",[5070]],[[43935,43935],"mapped",[5071]],[[43936,43936],"mapped",[5072]],[[43937,43937],"mapped",[5073]],[[43938,43938],"mapped",[5074]],[[43939,43939],"mapped",[5075]],[[43940,43940],"mapped",[5076]],[[43941,43941],"mapped",[5077]],[[43942,43942],"mapped",[5078]],[[43943,43943],"mapped",[5079]],[[43944,43944],"mapped",[5080]],[[43945,43945],"mapped",[5081]],[[43946,43946],"mapped",[5082]],[[43947,43947],"mapped",[5083]],[[43948,43948],"mapped",[5084]],[[43949,43949],"mapped",[5085]],[[43950,43950],"mapped",[5086]],[[43951,43951],"mapped",[5087]],[[43952,43952],"mapped",[5088]],[[43953,43953],"mapped",[5089]],[[43954,43954],"mapped",[5090]],[[43955,43955],"mapped",[5091]],[[43956,43956],"mapped",[5092]],[[43957,43957],"mapped",[5093]],[[43958,43958],"mapped",[5094]],[[43959,43959],"mapped",[5095]],[[43960,43960],"mapped",[5096]],[[43961,43961],"mapped",[5097]],[[43962,43962],"mapped",[5098]],[[43963,43963],"mapped",[5099]],[[43964,43964],"mapped",[5100]],[[43965,43965],"mapped",[5101]],[[43966,43966],"mapped",[5102]],[[43967,43967],"mapped",[5103]],[[43968,44010],"valid"],[[44011,44011],"valid",[],"NV8"],[[44012,44013],"valid"],[[44014,44015],"disallowed"],[[44016,44025],"valid"],[[44026,44031],"disallowed"],[[44032,55203],"valid"],[[55204,55215],"disallowed"],[[55216,55238],"valid",[],"NV8"],[[55239,55242],"disallowed"],[[55243,55291],"valid",[],"NV8"],[[55292,55295],"disallowed"],[[55296,57343],"disallowed"],[[57344,63743],"disallowed"],[[63744,63744],"mapped",[35912]],[[63745,63745],"mapped",[26356]],[[63746,63746],"mapped",[36554]],[[63747,63747],"mapped",[36040]],[[63748,63748],"mapped",[28369]],[[63749,63749],"mapped",[20018]],[[63750,63750],"mapped",[21477]],[[63751,63752],"mapped",[40860]],[[63753,63753],"mapped",[22865]],[[63754,63754],"mapped",[37329]],[[63755,63755],"mapped",[21895]],[[63756,63756],"mapped",[22856]],[[63757,63757],"mapped",[25078]],[[63758,63758],"mapped",[30313]],[[63759,63759],"mapped",[32645]],[[63760,63760],"mapped",[34367]],[[63761,63761],"mapped",[34746]],[[63762,63762],"mapped",[35064]],[[63763,63763],"mapped",[37007]],[[63764,63764],"mapped",[27138]],[[63765,63765],"mapped",[27931]],[[63766,63766],"mapped",[28889]],[[63767,63767],"mapped",[29662]],[[63768,63768],"mapped",[33853]],[[63769,63769],"mapped",[37226]],[[63770,63770],"mapped",[39409]],[[63771,63771],"mapped",[20098]],[[63772,63772],"mapped",[21365]],[[63773,63773],"mapped",[27396]],[[63774,63774],"mapped",[29211]],[[63775,63775],"mapped",[34349]],[[63776,63776],"mapped",[40478]],[[63777,63777],"mapped",[23888]],[[63778,63778],"mapped",[28651]],[[63779,63779],"mapped",[34253]],[[63780,63780],"mapped",[35172]],[[63781,63781],"mapped",[25289]],[[63782,63782],"mapped",[33240]],[[63783,63783],"mapped",[34847]],[[63784,63784],"mapped",[24266]],[[63785,63785],"mapped",[26391]],[[63786,63786],"mapped",[28010]],[[63787,63787],"mapped",[29436]],[[63788,63788],"mapped",[37070]],[[63789,63789],"mapped",[20358]],[[63790,63790],"mapped",[20919]],[[63791,63791],"mapped",[21214]],[[63792,63792],"mapped",[25796]],[[63793,63793],"mapped",[27347]],[[63794,63794],"mapped",[29200]],[[63795,63795],"mapped",[30439]],[[63796,63796],"mapped",[32769]],[[63797,63797],"mapped",[34310]],[[63798,63798],"mapped",[34396]],[[63799,63799],"mapped",[36
335]],[[63800,63800],"mapped",[38706]],[[63801,63801],"mapped",[39791]],[[63802,63802],"mapped",[40442]],[[63803,63803],"mapped",[30860]],[[63804,63804],"mapped",[31103]],[[63805,63805],"mapped",[32160]],[[63806,63806],"mapped",[33737]],[[63807,63807],"mapped",[37636]],[[63808,63808],"mapped",[40575]],[[63809,63809],"mapped",[35542]],[[63810,63810],"mapped",[22751]],[[63811,63811],"mapped",[24324]],[[63812,63812],"mapped",[31840]],[[63813,63813],"mapped",[32894]],[[63814,63814],"mapped",[29282]],[[63815,63815],"mapped",[30922]],[[63816,63816],"mapped",[36034]],[[63817,63817],"mapped",[38647]],[[63818,63818],"mapped",[22744]],[[63819,63819],"mapped",[23650]],[[63820,63820],"mapped",[27155]],[[63821,63821],"mapped",[28122]],[[63822,63822],"mapped",[28431]],[[63823,63823],"mapped",[32047]],[[63824,63824],"mapped",[32311]],[[63825,63825],"mapped",[38475]],[[63826,63826],"mapped",[21202]],[[63827,63827],"mapped",[32907]],[[63828,63828],"mapped",[20956]],[[63829,63829],"mapped",[20940]],[[63830,63830],"mapped",[31260]],[[63831,63831],"mapped",[32190]],[[63832,63832],"mapped",[33777]],[[63833,63833],"mapped",[38517]],[[63834,63834],"mapped",[35712]],[[63835,63835],"mapped",[25295]],[[63836,63836],"mapped",[27138]],[[63837,63837],"mapped",[35582]],[[63838,63838],"mapped",[20025]],[[63839,63839],"mapped",[23527]],[[63840,63840],"mapped",[24594]],[[63841,63841],"mapped",[29575]],[[63842,63842],"mapped",[30064]],[[63843,63843],"mapped",[21271]],[[63844,63844],"mapped",[30971]],[[63845,63845],"mapped",[20415]],[[63846,63846],"mapped",[24489]],[[63847,63847],"mapped",[19981]],[[63848,63848],"mapped",[27852]],[[63849,63849],"mapped",[25976]],[[63850,63850],"mapped",[32034]],[[63851,63851],"mapped",[21443]],[[63852,63852],"mapped",[22622]],[[63853,63853],"mapped",[30465]],[[63854,63854],"mapped",[33865]],[[63855,63855],"mapped",[35498]],[[63856,63856],"mapped",[27578]],[[63857,63857],"mapped",[36784]],[[63858,63858],"mapped",[27784]],[[63859,63859],"mapped",[25342]],[[63860,63860],"mapped",[33509]],[[63861,63861],"mapped",[25504]],[[63862,63862],"mapped",[30053]],[[63863,63863],"mapped",[20142]],[[63864,63864],"mapped",[20841]],[[63865,63865],"mapped",[20937]],[[63866,63866],"mapped",[26753]],[[63867,63867],"mapped",[31975]],[[63868,63868],"mapped",[33391]],[[63869,63869],"mapped",[35538]],[[63870,63870],"mapped",[37327]],[[63871,63871],"mapped",[21237]],[[63872,63872],"mapped",[21570]],[[63873,63873],"mapped",[22899]],[[63874,63874],"mapped",[24300]],[[63875,63875],"mapped",[26053]],[[63876,63876],"mapped",[28670]],[[63877,63877],"mapped",[31018]],[[63878,63878],"mapped",[38317]],[[63879,63879],"mapped",[39530]],[[63880,63880],"mapped",[40599]],[[63881,63881],"mapped",[40654]],[[63882,63882],"mapped",[21147]],[[63883,63883],"mapped",[26310]],[[63884,63884],"mapped",[27511]],[[63885,63885],"mapped",[36706]],[[63886,63886],"mapped",[24180]],[[63887,63887],"mapped",[24976]],[[63888,63888],"mapped",[25088]],[[63889,63889],"mapped",[25754]],[[63890,63890],"mapped",[28451]],[[63891,63891],"mapped",[29001]],[[63892,63892],"mapped",[29833]],[[63893,63893],"mapped",[31178]],[[63894,63894],"mapped",[32244]],[[63895,63895],"mapped",[32879]],[[63896,63896],"mapped",[36646]],[[63897,63897],"mapped",[34030]],[[63898,63898],"mapped",[36899]],[[63899,63899],"mapped",[37706]],[[63900,63900],"mapped",[21015]],[[63901,63901],"mapped",[21155]],[[63902,63902],"mapped",[21693]],[[63903,63903],"mapped",[28872]],[[63904,63904],"mapped",[35010]],[[63905,63905],"mapped",[35498]],[[63906,63906],"mapped",[24265]],[[63907,63907],"ma
pped",[24565]],[[63908,63908],"mapped",[25467]],[[63909,63909],"mapped",[27566]],[[63910,63910],"mapped",[31806]],[[63911,63911],"mapped",[29557]],[[63912,63912],"mapped",[20196]],[[63913,63913],"mapped",[22265]],[[63914,63914],"mapped",[23527]],[[63915,63915],"mapped",[23994]],[[63916,63916],"mapped",[24604]],[[63917,63917],"mapped",[29618]],[[63918,63918],"mapped",[29801]],[[63919,63919],"mapped",[32666]],[[63920,63920],"mapped",[32838]],[[63921,63921],"mapped",[37428]],[[63922,63922],"mapped",[38646]],[[63923,63923],"mapped",[38728]],[[63924,63924],"mapped",[38936]],[[63925,63925],"mapped",[20363]],[[63926,63926],"mapped",[31150]],[[63927,63927],"mapped",[37300]],[[63928,63928],"mapped",[38584]],[[63929,63929],"mapped",[24801]],[[63930,63930],"mapped",[20102]],[[63931,63931],"mapped",[20698]],[[63932,63932],"mapped",[23534]],[[63933,63933],"mapped",[23615]],[[63934,63934],"mapped",[26009]],[[63935,63935],"mapped",[27138]],[[63936,63936],"mapped",[29134]],[[63937,63937],"mapped",[30274]],[[63938,63938],"mapped",[34044]],[[63939,63939],"mapped",[36988]],[[63940,63940],"mapped",[40845]],[[63941,63941],"mapped",[26248]],[[63942,63942],"mapped",[38446]],[[63943,63943],"mapped",[21129]],[[63944,63944],"mapped",[26491]],[[63945,63945],"mapped",[26611]],[[63946,63946],"mapped",[27969]],[[63947,63947],"mapped",[28316]],[[63948,63948],"mapped",[29705]],[[63949,63949],"mapped",[30041]],[[63950,63950],"mapped",[30827]],[[63951,63951],"mapped",[32016]],[[63952,63952],"mapped",[39006]],[[63953,63953],"mapped",[20845]],[[63954,63954],"mapped",[25134]],[[63955,63955],"mapped",[38520]],[[63956,63956],"mapped",[20523]],[[63957,63957],"mapped",[23833]],[[63958,63958],"mapped",[28138]],[[63959,63959],"mapped",[36650]],[[63960,63960],"mapped",[24459]],[[63961,63961],"mapped",[24900]],[[63962,63962],"mapped",[26647]],[[63963,63963],"mapped",[29575]],[[63964,63964],"mapped",[38534]],[[63965,63965],"mapped",[21033]],[[63966,63966],"mapped",[21519]],[[63967,63967],"mapped",[23653]],[[63968,63968],"mapped",[26131]],[[63969,63969],"mapped",[26446]],[[63970,63970],"mapped",[26792]],[[63971,63971],"mapped",[27877]],[[63972,63972],"mapped",[29702]],[[63973,63973],"mapped",[30178]],[[63974,63974],"mapped",[32633]],[[63975,63975],"mapped",[35023]],[[63976,63976],"mapped",[35041]],[[63977,63977],"mapped",[37324]],[[63978,63978],"mapped",[38626]],[[63979,63979],"mapped",[21311]],[[63980,63980],"mapped",[28346]],[[63981,63981],"mapped",[21533]],[[63982,63982],"mapped",[29136]],[[63983,63983],"mapped",[29848]],[[63984,63984],"mapped",[34298]],[[63985,63985],"mapped",[38563]],[[63986,63986],"mapped",[40023]],[[63987,63987],"mapped",[40607]],[[63988,63988],"mapped",[26519]],[[63989,63989],"mapped",[28107]],[[63990,63990],"mapped",[33256]],[[63991,63991],"mapped",[31435]],[[63992,63992],"mapped",[31520]],[[63993,63993],"mapped",[31890]],[[63994,63994],"mapped",[29376]],[[63995,63995],"mapped",[28825]],[[63996,63996],"mapped",[35672]],[[63997,63997],"mapped",[20160]],[[63998,63998],"mapped",[33590]],[[63999,63999],"mapped",[21050]],[[64000,64000],"mapped",[20999]],[[64001,64001],"mapped",[24230]],[[64002,64002],"mapped",[25299]],[[64003,64003],"mapped",[31958]],[[64004,64004],"mapped",[23429]],[[64005,64005],"mapped",[27934]],[[64006,64006],"mapped",[26292]],[[64007,64007],"mapped",[36667]],[[64008,64008],"mapped",[34892]],[[64009,64009],"mapped",[38477]],[[64010,64010],"mapped",[35211]],[[64011,64011],"mapped",[24275]],[[64012,64012],"mapped",[20800]],[[64013,64013],"mapped",[21952]],[[64014,64015],"valid"],[[64016,64016],"ma
pped",[22618]],[[64017,64017],"valid"],[[64018,64018],"mapped",[26228]],[[64019,64020],"valid"],[[64021,64021],"mapped",[20958]],[[64022,64022],"mapped",[29482]],[[64023,64023],"mapped",[30410]],[[64024,64024],"mapped",[31036]],[[64025,64025],"mapped",[31070]],[[64026,64026],"mapped",[31077]],[[64027,64027],"mapped",[31119]],[[64028,64028],"mapped",[38742]],[[64029,64029],"mapped",[31934]],[[64030,64030],"mapped",[32701]],[[64031,64031],"valid"],[[64032,64032],"mapped",[34322]],[[64033,64033],"valid"],[[64034,64034],"mapped",[35576]],[[64035,64036],"valid"],[[64037,64037],"mapped",[36920]],[[64038,64038],"mapped",[37117]],[[64039,64041],"valid"],[[64042,64042],"mapped",[39151]],[[64043,64043],"mapped",[39164]],[[64044,64044],"mapped",[39208]],[[64045,64045],"mapped",[40372]],[[64046,64046],"mapped",[37086]],[[64047,64047],"mapped",[38583]],[[64048,64048],"mapped",[20398]],[[64049,64049],"mapped",[20711]],[[64050,64050],"mapped",[20813]],[[64051,64051],"mapped",[21193]],[[64052,64052],"mapped",[21220]],[[64053,64053],"mapped",[21329]],[[64054,64054],"mapped",[21917]],[[64055,64055],"mapped",[22022]],[[64056,64056],"mapped",[22120]],[[64057,64057],"mapped",[22592]],[[64058,64058],"mapped",[22696]],[[64059,64059],"mapped",[23652]],[[64060,64060],"mapped",[23662]],[[64061,64061],"mapped",[24724]],[[64062,64062],"mapped",[24936]],[[64063,64063],"mapped",[24974]],[[64064,64064],"mapped",[25074]],[[64065,64065],"mapped",[25935]],[[64066,64066],"mapped",[26082]],[[64067,64067],"mapped",[26257]],[[64068,64068],"mapped",[26757]],[[64069,64069],"mapped",[28023]],[[64070,64070],"mapped",[28186]],[[64071,64071],"mapped",[28450]],[[64072,64072],"mapped",[29038]],[[64073,64073],"mapped",[29227]],[[64074,64074],"mapped",[29730]],[[64075,64075],"mapped",[30865]],[[64076,64076],"mapped",[31038]],[[64077,64077],"mapped",[31049]],[[64078,64078],"mapped",[31048]],[[64079,64079],"mapped",[31056]],[[64080,64080],"mapped",[31062]],[[64081,64081],"mapped",[31069]],[[64082,64082],"mapped",[31117]],[[64083,64083],"mapped",[31118]],[[64084,64084],"mapped",[31296]],[[64085,64085],"mapped",[31361]],[[64086,64086],"mapped",[31680]],[[64087,64087],"mapped",[32244]],[[64088,64088],"mapped",[32265]],[[64089,64089],"mapped",[32321]],[[64090,64090],"mapped",[32626]],[[64091,64091],"mapped",[32773]],[[64092,64092],"mapped",[33261]],[[64093,64094],"mapped",[33401]],[[64095,64095],"mapped",[33879]],[[64096,64096],"mapped",[35088]],[[64097,64097],"mapped",[35222]],[[64098,64098],"mapped",[35585]],[[64099,64099],"mapped",[35641]],[[64100,64100],"mapped",[36051]],[[64101,64101],"mapped",[36104]],[[64102,64102],"mapped",[36790]],[[64103,64103],"mapped",[36920]],[[64104,64104],"mapped",[38627]],[[64105,64105],"mapped",[38911]],[[64106,64106],"mapped",[38971]],[[64107,64107],"mapped",[24693]],[[64108,64108],"mapped",[148206]],[[64109,64109],"mapped",[33304]],[[64110,64111],"disallowed"],[[64112,64112],"mapped",[20006]],[[64113,64113],"mapped",[20917]],[[64114,64114],"mapped",[20840]],[[64115,64115],"mapped",[20352]],[[64116,64116],"mapped",[20805]],[[64117,64117],"mapped",[20864]],[[64118,64118],"mapped",[21191]],[[64119,64119],"mapped",[21242]],[[64120,64120],"mapped",[21917]],[[64121,64121],"mapped",[21845]],[[64122,64122],"mapped",[21913]],[[64123,64123],"mapped",[21986]],[[64124,64124],"mapped",[22618]],[[64125,64125],"mapped",[22707]],[[64126,64126],"mapped",[22852]],[[64127,64127],"mapped",[22868]],[[64128,64128],"mapped",[23138]],[[64129,64129],"mapped",[23336]],[[64130,64130],"mapped",[24274]],[[64131,64131],"mapped",[24281]],
[[64132,64132],"mapped",[24425]],[[64133,64133],"mapped",[24493]],[[64134,64134],"mapped",[24792]],[[64135,64135],"mapped",[24910]],[[64136,64136],"mapped",[24840]],[[64137,64137],"mapped",[24974]],[[64138,64138],"mapped",[24928]],[[64139,64139],"mapped",[25074]],[[64140,64140],"mapped",[25140]],[[64141,64141],"mapped",[25540]],[[64142,64142],"mapped",[25628]],[[64143,64143],"mapped",[25682]],[[64144,64144],"mapped",[25942]],[[64145,64145],"mapped",[26228]],[[64146,64146],"mapped",[26391]],[[64147,64147],"mapped",[26395]],[[64148,64148],"mapped",[26454]],[[64149,64149],"mapped",[27513]],[[64150,64150],"mapped",[27578]],[[64151,64151],"mapped",[27969]],[[64152,64152],"mapped",[28379]],[[64153,64153],"mapped",[28363]],[[64154,64154],"mapped",[28450]],[[64155,64155],"mapped",[28702]],[[64156,64156],"mapped",[29038]],[[64157,64157],"mapped",[30631]],[[64158,64158],"mapped",[29237]],[[64159,64159],"mapped",[29359]],[[64160,64160],"mapped",[29482]],[[64161,64161],"mapped",[29809]],[[64162,64162],"mapped",[29958]],[[64163,64163],"mapped",[30011]],[[64164,64164],"mapped",[30237]],[[64165,64165],"mapped",[30239]],[[64166,64166],"mapped",[30410]],[[64167,64167],"mapped",[30427]],[[64168,64168],"mapped",[30452]],[[64169,64169],"mapped",[30538]],[[64170,64170],"mapped",[30528]],[[64171,64171],"mapped",[30924]],[[64172,64172],"mapped",[31409]],[[64173,64173],"mapped",[31680]],[[64174,64174],"mapped",[31867]],[[64175,64175],"mapped",[32091]],[[64176,64176],"mapped",[32244]],[[64177,64177],"mapped",[32574]],[[64178,64178],"mapped",[32773]],[[64179,64179],"mapped",[33618]],[[64180,64180],"mapped",[33775]],[[64181,64181],"mapped",[34681]],[[64182,64182],"mapped",[35137]],[[64183,64183],"mapped",[35206]],[[64184,64184],"mapped",[35222]],[[64185,64185],"mapped",[35519]],[[64186,64186],"mapped",[35576]],[[64187,64187],"mapped",[35531]],[[64188,64188],"mapped",[35585]],[[64189,64189],"mapped",[35582]],[[64190,64190],"mapped",[35565]],[[64191,64191],"mapped",[35641]],[[64192,64192],"mapped",[35722]],[[64193,64193],"mapped",[36104]],[[64194,64194],"mapped",[36664]],[[64195,64195],"mapped",[36978]],[[64196,64196],"mapped",[37273]],[[64197,64197],"mapped",[37494]],[[64198,64198],"mapped",[38524]],[[64199,64199],"mapped",[38627]],[[64200,64200],"mapped",[38742]],[[64201,64201],"mapped",[38875]],[[64202,64202],"mapped",[38911]],[[64203,64203],"mapped",[38923]],[[64204,64204],"mapped",[38971]],[[64205,64205],"mapped",[39698]],[[64206,64206],"mapped",[40860]],[[64207,64207],"mapped",[141386]],[[64208,64208],"mapped",[141380]],[[64209,64209],"mapped",[144341]],[[64210,64210],"mapped",[15261]],[[64211,64211],"mapped",[16408]],[[64212,64212],"mapped",[16441]],[[64213,64213],"mapped",[152137]],[[64214,64214],"mapped",[154832]],[[64215,64215],"mapped",[163539]],[[64216,64216],"mapped",[40771]],[[64217,64217],"mapped",[40846]],[[64218,64255],"disallowed"],[[64256,64256],"mapped",[102,102]],[[64257,64257],"mapped",[102,105]],[[64258,64258],"mapped",[102,108]],[[64259,64259],"mapped",[102,102,105]],[[64260,64260],"mapped",[102,102,108]],[[64261,64262],"mapped",[115,116]],[[64263,64274],"disallowed"],[[64275,64275],"mapped",[1396,1398]],[[64276,64276],"mapped",[1396,1381]],[[64277,64277],"mapped",[1396,1387]],[[64278,64278],"mapped",[1406,1398]],[[64279,64279],"mapped",[1396,1389]],[[64280,64284],"disallowed"],[[64285,64285],"mapped",[1497,1460]],[[64286,64286],"valid"],[[64287,64287],"mapped",[1522,1463]],[[64288,64288],"mapped",[1506]],[[64289,64289],"mapped",[1488]],[[64290,64290],"mapped",[1491]],[[64291,64291],"mapped",[14
92]],[[64292,64292],"mapped",[1499]],[[64293,64293],"mapped",[1500]],[[64294,64294],"mapped",[1501]],[[64295,64295],"mapped",[1512]],[[64296,64296],"mapped",[1514]],[[64297,64297],"disallowed_STD3_mapped",[43]],[[64298,64298],"mapped",[1513,1473]],[[64299,64299],"mapped",[1513,1474]],[[64300,64300],"mapped",[1513,1468,1473]],[[64301,64301],"mapped",[1513,1468,1474]],[[64302,64302],"mapped",[1488,1463]],[[64303,64303],"mapped",[1488,1464]],[[64304,64304],"mapped",[1488,1468]],[[64305,64305],"mapped",[1489,1468]],[[64306,64306],"mapped",[1490,1468]],[[64307,64307],"mapped",[1491,1468]],[[64308,64308],"mapped",[1492,1468]],[[64309,64309],"mapped",[1493,1468]],[[64310,64310],"mapped",[1494,1468]],[[64311,64311],"disallowed"],[[64312,64312],"mapped",[1496,1468]],[[64313,64313],"mapped",[1497,1468]],[[64314,64314],"mapped",[1498,1468]],[[64315,64315],"mapped",[1499,1468]],[[64316,64316],"mapped",[1500,1468]],[[64317,64317],"disallowed"],[[64318,64318],"mapped",[1502,1468]],[[64319,64319],"disallowed"],[[64320,64320],"mapped",[1504,1468]],[[64321,64321],"mapped",[1505,1468]],[[64322,64322],"disallowed"],[[64323,64323],"mapped",[1507,1468]],[[64324,64324],"mapped",[1508,1468]],[[64325,64325],"disallowed"],[[64326,64326],"mapped",[1510,1468]],[[64327,64327],"mapped",[1511,1468]],[[64328,64328],"mapped",[1512,1468]],[[64329,64329],"mapped",[1513,1468]],[[64330,64330],"mapped",[1514,1468]],[[64331,64331],"mapped",[1493,1465]],[[64332,64332],"mapped",[1489,1471]],[[64333,64333],"mapped",[1499,1471]],[[64334,64334],"mapped",[1508,1471]],[[64335,64335],"mapped",[1488,1500]],[[64336,64337],"mapped",[1649]],[[64338,64341],"mapped",[1659]],[[64342,64345],"mapped",[1662]],[[64346,64349],"mapped",[1664]],[[64350,64353],"mapped",[1658]],[[64354,64357],"mapped",[1663]],[[64358,64361],"mapped",[1657]],[[64362,64365],"mapped",[1700]],[[64366,64369],"mapped",[1702]],[[64370,64373],"mapped",[1668]],[[64374,64377],"mapped",[1667]],[[64378,64381],"mapped",[1670]],[[64382,64385],"mapped",[1671]],[[64386,64387],"mapped",[1677]],[[64388,64389],"mapped",[1676]],[[64390,64391],"mapped",[1678]],[[64392,64393],"mapped",[1672]],[[64394,64395],"mapped",[1688]],[[64396,64397],"mapped",[1681]],[[64398,64401],"mapped",[1705]],[[64402,64405],"mapped",[1711]],[[64406,64409],"mapped",[1715]],[[64410,64413],"mapped",[1713]],[[64414,64415],"mapped",[1722]],[[64416,64419],"mapped",[1723]],[[64420,64421],"mapped",[1728]],[[64422,64425],"mapped",[1729]],[[64426,64429],"mapped",[1726]],[[64430,64431],"mapped",[1746]],[[64432,64433],"mapped",[1747]],[[64434,64449],"valid",[],"NV8"],[[64450,64466],"disallowed"],[[64467,64470],"mapped",[1709]],[[64471,64472],"mapped",[1735]],[[64473,64474],"mapped",[1734]],[[64475,64476],"mapped",[1736]],[[64477,64477],"mapped",[1735,1652]],[[64478,64479],"mapped",[1739]],[[64480,64481],"mapped",[1733]],[[64482,64483],"mapped",[1737]],[[64484,64487],"mapped",[1744]],[[64488,64489],"mapped",[1609]],[[64490,64491],"mapped",[1574,1575]],[[64492,64493],"mapped",[1574,1749]],[[64494,64495],"mapped",[1574,1608]],[[64496,64497],"mapped",[1574,1735]],[[64498,64499],"mapped",[1574,1734]],[[64500,64501],"mapped",[1574,1736]],[[64502,64504],"mapped",[1574,1744]],[[64505,64507],"mapped",[1574,1609]],[[64508,64511],"mapped",[1740]],[[64512,64512],"mapped",[1574,1580]],[[64513,64513],"mapped",[1574,1581]],[[64514,64514],"mapped",[1574,1605]],[[64515,64515],"mapped",[1574,1609]],[[64516,64516],"mapped",[1574,1610]],[[64517,64517],"mapped",[1576,1580]],[[64518,64518],"mapped",[1576,1581]],[[64519,64519],"mapped",[1576,1582
]],[[64520,64520],"mapped",[1576,1605]],[[64521,64521],"mapped",[1576,1609]],[[64522,64522],"mapped",[1576,1610]],[[64523,64523],"mapped",[1578,1580]],[[64524,64524],"mapped",[1578,1581]],[[64525,64525],"mapped",[1578,1582]],[[64526,64526],"mapped",[1578,1605]],[[64527,64527],"mapped",[1578,1609]],[[64528,64528],"mapped",[1578,1610]],[[64529,64529],"mapped",[1579,1580]],[[64530,64530],"mapped",[1579,1605]],[[64531,64531],"mapped",[1579,1609]],[[64532,64532],"mapped",[1579,1610]],[[64533,64533],"mapped",[1580,1581]],[[64534,64534],"mapped",[1580,1605]],[[64535,64535],"mapped",[1581,1580]],[[64536,64536],"mapped",[1581,1605]],[[64537,64537],"mapped",[1582,1580]],[[64538,64538],"mapped",[1582,1581]],[[64539,64539],"mapped",[1582,1605]],[[64540,64540],"mapped",[1587,1580]],[[64541,64541],"mapped",[1587,1581]],[[64542,64542],"mapped",[1587,1582]],[[64543,64543],"mapped",[1587,1605]],[[64544,64544],"mapped",[1589,1581]],[[64545,64545],"mapped",[1589,1605]],[[64546,64546],"mapped",[1590,1580]],[[64547,64547],"mapped",[1590,1581]],[[64548,64548],"mapped",[1590,1582]],[[64549,64549],"mapped",[1590,1605]],[[64550,64550],"mapped",[1591,1581]],[[64551,64551],"mapped",[1591,1605]],[[64552,64552],"mapped",[1592,1605]],[[64553,64553],"mapped",[1593,1580]],[[64554,64554],"mapped",[1593,1605]],[[64555,64555],"mapped",[1594,1580]],[[64556,64556],"mapped",[1594,1605]],[[64557,64557],"mapped",[1601,1580]],[[64558,64558],"mapped",[1601,1581]],[[64559,64559],"mapped",[1601,1582]],[[64560,64560],"mapped",[1601,1605]],[[64561,64561],"mapped",[1601,1609]],[[64562,64562],"mapped",[1601,1610]],[[64563,64563],"mapped",[1602,1581]],[[64564,64564],"mapped",[1602,1605]],[[64565,64565],"mapped",[1602,1609]],[[64566,64566],"mapped",[1602,1610]],[[64567,64567],"mapped",[1603,1575]],[[64568,64568],"mapped",[1603,1580]],[[64569,64569],"mapped",[1603,1581]],[[64570,64570],"mapped",[1603,1582]],[[64571,64571],"mapped",[1603,1604]],[[64572,64572],"mapped",[1603,1605]],[[64573,64573],"mapped",[1603,1609]],[[64574,64574],"mapped",[1603,1610]],[[64575,64575],"mapped",[1604,1580]],[[64576,64576],"mapped",[1604,1581]],[[64577,64577],"mapped",[1604,1582]],[[64578,64578],"mapped",[1604,1605]],[[64579,64579],"mapped",[1604,1609]],[[64580,64580],"mapped",[1604,1610]],[[64581,64581],"mapped",[1605,1580]],[[64582,64582],"mapped",[1605,1581]],[[64583,64583],"mapped",[1605,1582]],[[64584,64584],"mapped",[1605,1605]],[[64585,64585],"mapped",[1605,1609]],[[64586,64586],"mapped",[1605,1610]],[[64587,64587],"mapped",[1606,1580]],[[64588,64588],"mapped",[1606,1581]],[[64589,64589],"mapped",[1606,1582]],[[64590,64590],"mapped",[1606,1605]],[[64591,64591],"mapped",[1606,1609]],[[64592,64592],"mapped",[1606,1610]],[[64593,64593],"mapped",[1607,1580]],[[64594,64594],"mapped",[1607,1605]],[[64595,64595],"mapped",[1607,1609]],[[64596,64596],"mapped",[1607,1610]],[[64597,64597],"mapped",[1610,1580]],[[64598,64598],"mapped",[1610,1581]],[[64599,64599],"mapped",[1610,1582]],[[64600,64600],"mapped",[1610,1605]],[[64601,64601],"mapped",[1610,1609]],[[64602,64602],"mapped",[1610,1610]],[[64603,64603],"mapped",[1584,1648]],[[64604,64604],"mapped",[1585,1648]],[[64605,64605],"mapped",[1609,1648]],[[64606,64606],"disallowed_STD3_mapped",[32,1612,1617]],[[64607,64607],"disallowed_STD3_mapped",[32,1613,1617]],[[64608,64608],"disallowed_STD3_mapped",[32,1614,1617]],[[64609,64609],"disallowed_STD3_mapped",[32,1615,1617]],[[64610,64610],"disallowed_STD3_mapped",[32,1616,1617]],[[64611,64611],"disallowed_STD3_mapped",[32,1617,1648]],[[64612,64612],"mapped",[1574,1585
]],[[64613,64613],"mapped",[1574,1586]],[[64614,64614],"mapped",[1574,1605]],[[64615,64615],"mapped",[1574,1606]],[[64616,64616],"mapped",[1574,1609]],[[64617,64617],"mapped",[1574,1610]],[[64618,64618],"mapped",[1576,1585]],[[64619,64619],"mapped",[1576,1586]],[[64620,64620],"mapped",[1576,1605]],[[64621,64621],"mapped",[1576,1606]],[[64622,64622],"mapped",[1576,1609]],[[64623,64623],"mapped",[1576,1610]],[[64624,64624],"mapped",[1578,1585]],[[64625,64625],"mapped",[1578,1586]],[[64626,64626],"mapped",[1578,1605]],[[64627,64627],"mapped",[1578,1606]],[[64628,64628],"mapped",[1578,1609]],[[64629,64629],"mapped",[1578,1610]],[[64630,64630],"mapped",[1579,1585]],[[64631,64631],"mapped",[1579,1586]],[[64632,64632],"mapped",[1579,1605]],[[64633,64633],"mapped",[1579,1606]],[[64634,64634],"mapped",[1579,1609]],[[64635,64635],"mapped",[1579,1610]],[[64636,64636],"mapped",[1601,1609]],[[64637,64637],"mapped",[1601,1610]],[[64638,64638],"mapped",[1602,1609]],[[64639,64639],"mapped",[1602,1610]],[[64640,64640],"mapped",[1603,1575]],[[64641,64641],"mapped",[1603,1604]],[[64642,64642],"mapped",[1603,1605]],[[64643,64643],"mapped",[1603,1609]],[[64644,64644],"mapped",[1603,1610]],[[64645,64645],"mapped",[1604,1605]],[[64646,64646],"mapped",[1604,1609]],[[64647,64647],"mapped",[1604,1610]],[[64648,64648],"mapped",[1605,1575]],[[64649,64649],"mapped",[1605,1605]],[[64650,64650],"mapped",[1606,1585]],[[64651,64651],"mapped",[1606,1586]],[[64652,64652],"mapped",[1606,1605]],[[64653,64653],"mapped",[1606,1606]],[[64654,64654],"mapped",[1606,1609]],[[64655,64655],"mapped",[1606,1610]],[[64656,64656],"mapped",[1609,1648]],[[64657,64657],"mapped",[1610,1585]],[[64658,64658],"mapped",[1610,1586]],[[64659,64659],"mapped",[1610,1605]],[[64660,64660],"mapped",[1610,1606]],[[64661,64661],"mapped",[1610,1609]],[[64662,64662],"mapped",[1610,1610]],[[64663,64663],"mapped",[1574,1580]],[[64664,64664],"mapped",[1574,1581]],[[64665,64665],"mapped",[1574,1582]],[[64666,64666],"mapped",[1574,1605]],[[64667,64667],"mapped",[1574,1607]],[[64668,64668],"mapped",[1576,1580]],[[64669,64669],"mapped",[1576,1581]],[[64670,64670],"mapped",[1576,1582]],[[64671,64671],"mapped",[1576,1605]],[[64672,64672],"mapped",[1576,1607]],[[64673,64673],"mapped",[1578,1580]],[[64674,64674],"mapped",[1578,1581]],[[64675,64675],"mapped",[1578,1582]],[[64676,64676],"mapped",[1578,1605]],[[64677,64677],"mapped",[1578,1607]],[[64678,64678],"mapped",[1579,1605]],[[64679,64679],"mapped",[1580,1581]],[[64680,64680],"mapped",[1580,1605]],[[64681,64681],"mapped",[1581,1580]],[[64682,64682],"mapped",[1581,1605]],[[64683,64683],"mapped",[1582,1580]],[[64684,64684],"mapped",[1582,1605]],[[64685,64685],"mapped",[1587,1580]],[[64686,64686],"mapped",[1587,1581]],[[64687,64687],"mapped",[1587,1582]],[[64688,64688],"mapped",[1587,1605]],[[64689,64689],"mapped",[1589,1581]],[[64690,64690],"mapped",[1589,1582]],[[64691,64691],"mapped",[1589,1605]],[[64692,64692],"mapped",[1590,1580]],[[64693,64693],"mapped",[1590,1581]],[[64694,64694],"mapped",[1590,1582]],[[64695,64695],"mapped",[1590,1605]],[[64696,64696],"mapped",[1591,1581]],[[64697,64697],"mapped",[1592,1605]],[[64698,64698],"mapped",[1593,1580]],[[64699,64699],"mapped",[1593,1605]],[[64700,64700],"mapped",[1594,1580]],[[64701,64701],"mapped",[1594,1605]],[[64702,64702],"mapped",[1601,1580]],[[64703,64703],"mapped",[1601,1581]],[[64704,64704],"mapped",[1601,1582]],[[64705,64705],"mapped",[1601,1605]],[[64706,64706],"mapped",[1602,1581]],[[64707,64707],"mapped",[1602,1605]],[[64708,64708],"mapped",[1603,1580]],
[[64709,64709],"mapped",[1603,1581]],[[64710,64710],"mapped",[1603,1582]],[[64711,64711],"mapped",[1603,1604]],[[64712,64712],"mapped",[1603,1605]],[[64713,64713],"mapped",[1604,1580]],[[64714,64714],"mapped",[1604,1581]],[[64715,64715],"mapped",[1604,1582]],[[64716,64716],"mapped",[1604,1605]],[[64717,64717],"mapped",[1604,1607]],[[64718,64718],"mapped",[1605,1580]],[[64719,64719],"mapped",[1605,1581]],[[64720,64720],"mapped",[1605,1582]],[[64721,64721],"mapped",[1605,1605]],[[64722,64722],"mapped",[1606,1580]],[[64723,64723],"mapped",[1606,1581]],[[64724,64724],"mapped",[1606,1582]],[[64725,64725],"mapped",[1606,1605]],[[64726,64726],"mapped",[1606,1607]],[[64727,64727],"mapped",[1607,1580]],[[64728,64728],"mapped",[1607,1605]],[[64729,64729],"mapped",[1607,1648]],[[64730,64730],"mapped",[1610,1580]],[[64731,64731],"mapped",[1610,1581]],[[64732,64732],"mapped",[1610,1582]],[[64733,64733],"mapped",[1610,1605]],[[64734,64734],"mapped",[1610,1607]],[[64735,64735],"mapped",[1574,1605]],[[64736,64736],"mapped",[1574,1607]],[[64737,64737],"mapped",[1576,1605]],[[64738,64738],"mapped",[1576,1607]],[[64739,64739],"mapped",[1578,1605]],[[64740,64740],"mapped",[1578,1607]],[[64741,64741],"mapped",[1579,1605]],[[64742,64742],"mapped",[1579,1607]],[[64743,64743],"mapped",[1587,1605]],[[64744,64744],"mapped",[1587,1607]],[[64745,64745],"mapped",[1588,1605]],[[64746,64746],"mapped",[1588,1607]],[[64747,64747],"mapped",[1603,1604]],[[64748,64748],"mapped",[1603,1605]],[[64749,64749],"mapped",[1604,1605]],[[64750,64750],"mapped",[1606,1605]],[[64751,64751],"mapped",[1606,1607]],[[64752,64752],"mapped",[1610,1605]],[[64753,64753],"mapped",[1610,1607]],[[64754,64754],"mapped",[1600,1614,1617]],[[64755,64755],"mapped",[1600,1615,1617]],[[64756,64756],"mapped",[1600,1616,1617]],[[64757,64757],"mapped",[1591,1609]],[[64758,64758],"mapped",[1591,1610]],[[64759,64759],"mapped",[1593,1609]],[[64760,64760],"mapped",[1593,1610]],[[64761,64761],"mapped",[1594,1609]],[[64762,64762],"mapped",[1594,1610]],[[64763,64763],"mapped",[1587,1609]],[[64764,64764],"mapped",[1587,1610]],[[64765,64765],"mapped",[1588,1609]],[[64766,64766],"mapped",[1588,1610]],[[64767,64767],"mapped",[1581,1609]],[[64768,64768],"mapped",[1581,1610]],[[64769,64769],"mapped",[1580,1609]],[[64770,64770],"mapped",[1580,1610]],[[64771,64771],"mapped",[1582,1609]],[[64772,64772],"mapped",[1582,1610]],[[64773,64773],"mapped",[1589,1609]],[[64774,64774],"mapped",[1589,1610]],[[64775,64775],"mapped",[1590,1609]],[[64776,64776],"mapped",[1590,1610]],[[64777,64777],"mapped",[1588,1580]],[[64778,64778],"mapped",[1588,1581]],[[64779,64779],"mapped",[1588,1582]],[[64780,64780],"mapped",[1588,1605]],[[64781,64781],"mapped",[1588,1585]],[[64782,64782],"mapped",[1587,1585]],[[64783,64783],"mapped",[1589,1585]],[[64784,64784],"mapped",[1590,1585]],[[64785,64785],"mapped",[1591,1609]],[[64786,64786],"mapped",[1591,1610]],[[64787,64787],"mapped",[1593,1609]],[[64788,64788],"mapped",[1593,1610]],[[64789,64789],"mapped",[1594,1609]],[[64790,64790],"mapped",[1594,1610]],[[64791,64791],"mapped",[1587,1609]],[[64792,64792],"mapped",[1587,1610]],[[64793,64793],"mapped",[1588,1609]],[[64794,64794],"mapped",[1588,1610]],[[64795,64795],"mapped",[1581,1609]],[[64796,64796],"mapped",[1581,1610]],[[64797,64797],"mapped",[1580,1609]],[[64798,64798],"mapped",[1580,1610]],[[64799,64799],"mapped",[1582,1609]],[[64800,64800],"mapped",[1582,1610]],[[64801,64801],"mapped",[1589,1609]],[[64802,64802],"mapped",[1589,1610]],[[64803,64803],"mapped",[1590,1609]],[[64804,64804],"mapped",[
1590,1610]],[[64805,64805],"mapped",[1588,1580]],[[64806,64806],"mapped",[1588,1581]],[[64807,64807],"mapped",[1588,1582]],[[64808,64808],"mapped",[1588,1605]],[[64809,64809],"mapped",[1588,1585]],[[64810,64810],"mapped",[1587,1585]],[[64811,64811],"mapped",[1589,1585]],[[64812,64812],"mapped",[1590,1585]],[[64813,64813],"mapped",[1588,1580]],[[64814,64814],"mapped",[1588,1581]],[[64815,64815],"mapped",[1588,1582]],[[64816,64816],"mapped",[1588,1605]],[[64817,64817],"mapped",[1587,1607]],[[64818,64818],"mapped",[1588,1607]],[[64819,64819],"mapped",[1591,1605]],[[64820,64820],"mapped",[1587,1580]],[[64821,64821],"mapped",[1587,1581]],[[64822,64822],"mapped",[1587,1582]],[[64823,64823],"mapped",[1588,1580]],[[64824,64824],"mapped",[1588,1581]],[[64825,64825],"mapped",[1588,1582]],[[64826,64826],"mapped",[1591,1605]],[[64827,64827],"mapped",[1592,1605]],[[64828,64829],"mapped",[1575,1611]],[[64830,64831],"valid",[],"NV8"],[[64832,64847],"disallowed"],[[64848,64848],"mapped",[1578,1580,1605]],[[64849,64850],"mapped",[1578,1581,1580]],[[64851,64851],"mapped",[1578,1581,1605]],[[64852,64852],"mapped",[1578,1582,1605]],[[64853,64853],"mapped",[1578,1605,1580]],[[64854,64854],"mapped",[1578,1605,1581]],[[64855,64855],"mapped",[1578,1605,1582]],[[64856,64857],"mapped",[1580,1605,1581]],[[64858,64858],"mapped",[1581,1605,1610]],[[64859,64859],"mapped",[1581,1605,1609]],[[64860,64860],"mapped",[1587,1581,1580]],[[64861,64861],"mapped",[1587,1580,1581]],[[64862,64862],"mapped",[1587,1580,1609]],[[64863,64864],"mapped",[1587,1605,1581]],[[64865,64865],"mapped",[1587,1605,1580]],[[64866,64867],"mapped",[1587,1605,1605]],[[64868,64869],"mapped",[1589,1581,1581]],[[64870,64870],"mapped",[1589,1605,1605]],[[64871,64872],"mapped",[1588,1581,1605]],[[64873,64873],"mapped",[1588,1580,1610]],[[64874,64875],"mapped",[1588,1605,1582]],[[64876,64877],"mapped",[1588,1605,1605]],[[64878,64878],"mapped",[1590,1581,1609]],[[64879,64880],"mapped",[1590,1582,1605]],[[64881,64882],"mapped",[1591,1605,1581]],[[64883,64883],"mapped",[1591,1605,1605]],[[64884,64884],"mapped",[1591,1605,1610]],[[64885,64885],"mapped",[1593,1580,1605]],[[64886,64887],"mapped",[1593,1605,1605]],[[64888,64888],"mapped",[1593,1605,1609]],[[64889,64889],"mapped",[1594,1605,1605]],[[64890,64890],"mapped",[1594,1605,1610]],[[64891,64891],"mapped",[1594,1605,1609]],[[64892,64893],"mapped",[1601,1582,1605]],[[64894,64894],"mapped",[1602,1605,1581]],[[64895,64895],"mapped",[1602,1605,1605]],[[64896,64896],"mapped",[1604,1581,1605]],[[64897,64897],"mapped",[1604,1581,1610]],[[64898,64898],"mapped",[1604,1581,1609]],[[64899,64900],"mapped",[1604,1580,1580]],[[64901,64902],"mapped",[1604,1582,1605]],[[64903,64904],"mapped",[1604,1605,1581]],[[64905,64905],"mapped",[1605,1581,1580]],[[64906,64906],"mapped",[1605,1581,1605]],[[64907,64907],"mapped",[1605,1581,1610]],[[64908,64908],"mapped",[1605,1580,1581]],[[64909,64909],"mapped",[1605,1580,1605]],[[64910,64910],"mapped",[1605,1582,1580]],[[64911,64911],"mapped",[1605,1582,1605]],[[64912,64913],"disallowed"],[[64914,64914],"mapped",[1605,1580,1582]],[[64915,64915],"mapped",[1607,1605,1580]],[[64916,64916],"mapped",[1607,1605,1605]],[[64917,64917],"mapped",[1606,1581,1605]],[[64918,64918],"mapped",[1606,1581,1609]],[[64919,64920],"mapped",[1606,1580,1605]],[[64921,64921],"mapped",[1606,1580,1609]],[[64922,64922],"mapped",[1606,1605,1610]],[[64923,64923],"mapped",[1606,1605,1609]],[[64924,64925],"mapped",[1610,1605,1605]],[[64926,64926],"mapped",[1576,1582,1610]],[[64927,64927],"mapped",[1578,1580,1610]],[[
64928,64928],"mapped",[1578,1580,1609]],[[64929,64929],"mapped",[1578,1582,1610]],[[64930,64930],"mapped",[1578,1582,1609]],[[64931,64931],"mapped",[1578,1605,1610]],[[64932,64932],"mapped",[1578,1605,1609]],[[64933,64933],"mapped",[1580,1605,1610]],[[64934,64934],"mapped",[1580,1581,1609]],[[64935,64935],"mapped",[1580,1605,1609]],[[64936,64936],"mapped",[1587,1582,1609]],[[64937,64937],"mapped",[1589,1581,1610]],[[64938,64938],"mapped",[1588,1581,1610]],[[64939,64939],"mapped",[1590,1581,1610]],[[64940,64940],"mapped",[1604,1580,1610]],[[64941,64941],"mapped",[1604,1605,1610]],[[64942,64942],"mapped",[1610,1581,1610]],[[64943,64943],"mapped",[1610,1580,1610]],[[64944,64944],"mapped",[1610,1605,1610]],[[64945,64945],"mapped",[1605,1605,1610]],[[64946,64946],"mapped",[1602,1605,1610]],[[64947,64947],"mapped",[1606,1581,1610]],[[64948,64948],"mapped",[1602,1605,1581]],[[64949,64949],"mapped",[1604,1581,1605]],[[64950,64950],"mapped",[1593,1605,1610]],[[64951,64951],"mapped",[1603,1605,1610]],[[64952,64952],"mapped",[1606,1580,1581]],[[64953,64953],"mapped",[1605,1582,1610]],[[64954,64954],"mapped",[1604,1580,1605]],[[64955,64955],"mapped",[1603,1605,1605]],[[64956,64956],"mapped",[1604,1580,1605]],[[64957,64957],"mapped",[1606,1580,1581]],[[64958,64958],"mapped",[1580,1581,1610]],[[64959,64959],"mapped",[1581,1580,1610]],[[64960,64960],"mapped",[1605,1580,1610]],[[64961,64961],"mapped",[1601,1605,1610]],[[64962,64962],"mapped",[1576,1581,1610]],[[64963,64963],"mapped",[1603,1605,1605]],[[64964,64964],"mapped",[1593,1580,1605]],[[64965,64965],"mapped",[1589,1605,1605]],[[64966,64966],"mapped",[1587,1582,1610]],[[64967,64967],"mapped",[1606,1580,1610]],[[64968,64975],"disallowed"],[[64976,65007],"disallowed"],[[65008,65008],"mapped",[1589,1604,1746]],[[65009,65009],"mapped",[1602,1604,1746]],[[65010,65010],"mapped",[1575,1604,1604,1607]],[[65011,65011],"mapped",[1575,1603,1576,1585]],[[65012,65012],"mapped",[1605,1581,1605,1583]],[[65013,65013],"mapped",[1589,1604,1593,1605]],[[65014,65014],"mapped",[1585,1587,1608,1604]],[[65015,65015],"mapped",[1593,1604,1610,1607]],[[65016,65016],"mapped",[1608,1587,1604,1605]],[[65017,65017],"mapped",[1589,1604,1609]],[[65018,65018],"disallowed_STD3_mapped",[1589,1604,1609,32,1575,1604,1604,1607,32,1593,1604,1610,1607,32,1608,1587,1604,1605]],[[65019,65019],"disallowed_STD3_mapped",[1580,1604,32,1580,1604,1575,1604,1607]],[[65020,65020],"mapped",[1585,1740,1575,1604]],[[65021,65021],"valid",[],"NV8"],[[65022,65023],"disallowed"],[[65024,65039],"ignored"],[[65040,65040],"disallowed_STD3_mapped",[44]],[[65041,65041],"mapped",[12289]],[[65042,65042],"disallowed"],[[65043,65043],"disallowed_STD3_mapped",[58]],[[65044,65044],"disallowed_STD3_mapped",[59]],[[65045,65045],"disallowed_STD3_mapped",[33]],[[65046,65046],"disallowed_STD3_mapped",[63]],[[65047,65047],"mapped",[12310]],[[65048,65048],"mapped",[12311]],[[65049,65049],"disallowed"],[[65050,65055],"disallowed"],[[65056,65059],"valid"],[[65060,65062],"valid"],[[65063,65069],"valid"],[[65070,65071],"valid"],[[65072,65072],"disallowed"],[[65073,65073],"mapped",[8212]],[[65074,65074],"mapped",[8211]],[[65075,65076],"disallowed_STD3_mapped",[95]],[[65077,65077],"disallowed_STD3_mapped",[40]],[[65078,65078],"disallowed_STD3_mapped",[41]],[[65079,65079],"disallowed_STD3_mapped",[123]],[[65080,65080],"disallowed_STD3_mapped",[125]],[[65081,65081],"mapped",[12308]],[[65082,65082],"mapped",[12309]],[[65083,65083],"mapped",[12304]],[[65084,65084],"mapped",[12305]],[[65085,65085],"mapped",[12298]],[[65086,65086],"map
ped",[12299]],[[65087,65087],"mapped",[12296]],[[65088,65088],"mapped",[12297]],[[65089,65089],"mapped",[12300]],[[65090,65090],"mapped",[12301]],[[65091,65091],"mapped",[12302]],[[65092,65092],"mapped",[12303]],[[65093,65094],"valid",[],"NV8"],[[65095,65095],"disallowed_STD3_mapped",[91]],[[65096,65096],"disallowed_STD3_mapped",[93]],[[65097,65100],"disallowed_STD3_mapped",[32,773]],[[65101,65103],"disallowed_STD3_mapped",[95]],[[65104,65104],"disallowed_STD3_mapped",[44]],[[65105,65105],"mapped",[12289]],[[65106,65106],"disallowed"],[[65107,65107],"disallowed"],[[65108,65108],"disallowed_STD3_mapped",[59]],[[65109,65109],"disallowed_STD3_mapped",[58]],[[65110,65110],"disallowed_STD3_mapped",[63]],[[65111,65111],"disallowed_STD3_mapped",[33]],[[65112,65112],"mapped",[8212]],[[65113,65113],"disallowed_STD3_mapped",[40]],[[65114,65114],"disallowed_STD3_mapped",[41]],[[65115,65115],"disallowed_STD3_mapped",[123]],[[65116,65116],"disallowed_STD3_mapped",[125]],[[65117,65117],"mapped",[12308]],[[65118,65118],"mapped",[12309]],[[65119,65119],"disallowed_STD3_mapped",[35]],[[65120,65120],"disallowed_STD3_mapped",[38]],[[65121,65121],"disallowed_STD3_mapped",[42]],[[65122,65122],"disallowed_STD3_mapped",[43]],[[65123,65123],"mapped",[45]],[[65124,65124],"disallowed_STD3_mapped",[60]],[[65125,65125],"disallowed_STD3_mapped",[62]],[[65126,65126],"disallowed_STD3_mapped",[61]],[[65127,65127],"disallowed"],[[65128,65128],"disallowed_STD3_mapped",[92]],[[65129,65129],"disallowed_STD3_mapped",[36]],[[65130,65130],"disallowed_STD3_mapped",[37]],[[65131,65131],"disallowed_STD3_mapped",[64]],[[65132,65135],"disallowed"],[[65136,65136],"disallowed_STD3_mapped",[32,1611]],[[65137,65137],"mapped",[1600,1611]],[[65138,65138],"disallowed_STD3_mapped",[32,1612]],[[65139,65139],"valid"],[[65140,65140],"disallowed_STD3_mapped",[32,1613]],[[65141,65141],"disallowed"],[[65142,65142],"disallowed_STD3_mapped",[32,1614]],[[65143,65143],"mapped",[1600,1614]],[[65144,65144],"disallowed_STD3_mapped",[32,1615]],[[65145,65145],"mapped",[1600,1615]],[[65146,65146],"disallowed_STD3_mapped",[32,1616]],[[65147,65147],"mapped",[1600,1616]],[[65148,65148],"disallowed_STD3_mapped",[32,1617]],[[65149,65149],"mapped",[1600,1617]],[[65150,65150],"disallowed_STD3_mapped",[32,1618]],[[65151,65151],"mapped",[1600,1618]],[[65152,65152],"mapped",[1569]],[[65153,65154],"mapped",[1570]],[[65155,65156],"mapped",[1571]],[[65157,65158],"mapped",[1572]],[[65159,65160],"mapped",[1573]],[[65161,65164],"mapped",[1574]],[[65165,65166],"mapped",[1575]],[[65167,65170],"mapped",[1576]],[[65171,65172],"mapped",[1577]],[[65173,65176],"mapped",[1578]],[[65177,65180],"mapped",[1579]],[[65181,65184],"mapped",[1580]],[[65185,65188],"mapped",[1581]],[[65189,65192],"mapped",[1582]],[[65193,65194],"mapped",[1583]],[[65195,65196],"mapped",[1584]],[[65197,65198],"mapped",[1585]],[[65199,65200],"mapped",[1586]],[[65201,65204],"mapped",[1587]],[[65205,65208],"mapped",[1588]],[[65209,65212],"mapped",[1589]],[[65213,65216],"mapped",[1590]],[[65217,65220],"mapped",[1591]],[[65221,65224],"mapped",[1592]],[[65225,65228],"mapped",[1593]],[[65229,65232],"mapped",[1594]],[[65233,65236],"mapped",[1601]],[[65237,65240],"mapped",[1602]],[[65241,65244],"mapped",[1603]],[[65245,65248],"mapped",[1604]],[[65249,65252],"mapped",[1605]],[[65253,65256],"mapped",[1606]],[[65257,65260],"mapped",[1607]],[[65261,65262],"mapped",[1608]],[[65263,65264],"mapped",[1609]],[[65265,65268],"mapped",[1610]],[[65269,65270],"mapped",[1604,1570]],[[65271,65272],"mapped",[1604,1571]],[[65273,65274]
,"mapped",[1604,1573]],[[65275,65276],"mapped",[1604,1575]],[[65277,65278],"disallowed"],[[65279,65279],"ignored"],[[65280,65280],"disallowed"],[[65281,65281],"disallowed_STD3_mapped",[33]],[[65282,65282],"disallowed_STD3_mapped",[34]],[[65283,65283],"disallowed_STD3_mapped",[35]],[[65284,65284],"disallowed_STD3_mapped",[36]],[[65285,65285],"disallowed_STD3_mapped",[37]],[[65286,65286],"disallowed_STD3_mapped",[38]],[[65287,65287],"disallowed_STD3_mapped",[39]],[[65288,65288],"disallowed_STD3_mapped",[40]],[[65289,65289],"disallowed_STD3_mapped",[41]],[[65290,65290],"disallowed_STD3_mapped",[42]],[[65291,65291],"disallowed_STD3_mapped",[43]],[[65292,65292],"disallowed_STD3_mapped",[44]],[[65293,65293],"mapped",[45]],[[65294,65294],"mapped",[46]],[[65295,65295],"disallowed_STD3_mapped",[47]],[[65296,65296],"mapped",[48]],[[65297,65297],"mapped",[49]],[[65298,65298],"mapped",[50]],[[65299,65299],"mapped",[51]],[[65300,65300],"mapped",[52]],[[65301,65301],"mapped",[53]],[[65302,65302],"mapped",[54]],[[65303,65303],"mapped",[55]],[[65304,65304],"mapped",[56]],[[65305,65305],"mapped",[57]],[[65306,65306],"disallowed_STD3_mapped",[58]],[[65307,65307],"disallowed_STD3_mapped",[59]],[[65308,65308],"disallowed_STD3_mapped",[60]],[[65309,65309],"disallowed_STD3_mapped",[61]],[[65310,65310],"disallowed_STD3_mapped",[62]],[[65311,65311],"disallowed_STD3_mapped",[63]],[[65312,65312],"disallowed_STD3_mapped",[64]],[[65313,65313],"mapped",[97]],[[65314,65314],"mapped",[98]],[[65315,65315],"mapped",[99]],[[65316,65316],"mapped",[100]],[[65317,65317],"mapped",[101]],[[65318,65318],"mapped",[102]],[[65319,65319],"mapped",[103]],[[65320,65320],"mapped",[104]],[[65321,65321],"mapped",[105]],[[65322,65322],"mapped",[106]],[[65323,65323],"mapped",[107]],[[65324,65324],"mapped",[108]],[[65325,65325],"mapped",[109]],[[65326,65326],"mapped",[110]],[[65327,65327],"mapped",[111]],[[65328,65328],"mapped",[112]],[[65329,65329],"mapped",[113]],[[65330,65330],"mapped",[114]],[[65331,65331],"mapped",[115]],[[65332,65332],"mapped",[116]],[[65333,65333],"mapped",[117]],[[65334,65334],"mapped",[118]],[[65335,65335],"mapped",[119]],[[65336,65336],"mapped",[120]],[[65337,65337],"mapped",[121]],[[65338,65338],"mapped",[122]],[[65339,65339],"disallowed_STD3_mapped",[91]],[[65340,65340],"disallowed_STD3_mapped",[92]],[[65341,65341],"disallowed_STD3_mapped",[93]],[[65342,65342],"disallowed_STD3_mapped",[94]],[[65343,65343],"disallowed_STD3_mapped",[95]],[[65344,65344],"disallowed_STD3_mapped",[96]],[[65345,65345],"mapped",[97]],[[65346,65346],"mapped",[98]],[[65347,65347],"mapped",[99]],[[65348,65348],"mapped",[100]],[[65349,65349],"mapped",[101]],[[65350,65350],"mapped",[102]],[[65351,65351],"mapped",[103]],[[65352,65352],"mapped",[104]],[[65353,65353],"mapped",[105]],[[65354,65354],"mapped",[106]],[[65355,65355],"mapped",[107]],[[65356,65356],"mapped",[108]],[[65357,65357],"mapped",[109]],[[65358,65358],"mapped",[110]],[[65359,65359],"mapped",[111]],[[65360,65360],"mapped",[112]],[[65361,65361],"mapped",[113]],[[65362,65362],"mapped",[114]],[[65363,65363],"mapped",[115]],[[65364,65364],"mapped",[116]],[[65365,65365],"mapped",[117]],[[65366,65366],"mapped",[118]],[[65367,65367],"mapped",[119]],[[65368,65368],"mapped",[120]],[[65369,65369],"mapped",[121]],[[65370,65370],"mapped",[122]],[[65371,65371],"disallowed_STD3_mapped",[123]],[[65372,65372],"disallowed_STD3_mapped",[124]],[[65373,65373],"disallowed_STD3_mapped",[125]],[[65374,65374],"disallowed_STD3_mapped",[126]],[[65375,65375],"mapped",[10629]],[[65376,65376],"mapped",[106
30]],[[65377,65377],"mapped",[46]],[[65378,65378],"mapped",[12300]],[[65379,65379],"mapped",[12301]],[[65380,65380],"mapped",[12289]],[[65381,65381],"mapped",[12539]],[[65382,65382],"mapped",[12530]],[[65383,65383],"mapped",[12449]],[[65384,65384],"mapped",[12451]],[[65385,65385],"mapped",[12453]],[[65386,65386],"mapped",[12455]],[[65387,65387],"mapped",[12457]],[[65388,65388],"mapped",[12515]],[[65389,65389],"mapped",[12517]],[[65390,65390],"mapped",[12519]],[[65391,65391],"mapped",[12483]],[[65392,65392],"mapped",[12540]],[[65393,65393],"mapped",[12450]],[[65394,65394],"mapped",[12452]],[[65395,65395],"mapped",[12454]],[[65396,65396],"mapped",[12456]],[[65397,65397],"mapped",[12458]],[[65398,65398],"mapped",[12459]],[[65399,65399],"mapped",[12461]],[[65400,65400],"mapped",[12463]],[[65401,65401],"mapped",[12465]],[[65402,65402],"mapped",[12467]],[[65403,65403],"mapped",[12469]],[[65404,65404],"mapped",[12471]],[[65405,65405],"mapped",[12473]],[[65406,65406],"mapped",[12475]],[[65407,65407],"mapped",[12477]],[[65408,65408],"mapped",[12479]],[[65409,65409],"mapped",[12481]],[[65410,65410],"mapped",[12484]],[[65411,65411],"mapped",[12486]],[[65412,65412],"mapped",[12488]],[[65413,65413],"mapped",[12490]],[[65414,65414],"mapped",[12491]],[[65415,65415],"mapped",[12492]],[[65416,65416],"mapped",[12493]],[[65417,65417],"mapped",[12494]],[[65418,65418],"mapped",[12495]],[[65419,65419],"mapped",[12498]],[[65420,65420],"mapped",[12501]],[[65421,65421],"mapped",[12504]],[[65422,65422],"mapped",[12507]],[[65423,65423],"mapped",[12510]],[[65424,65424],"mapped",[12511]],[[65425,65425],"mapped",[12512]],[[65426,65426],"mapped",[12513]],[[65427,65427],"mapped",[12514]],[[65428,65428],"mapped",[12516]],[[65429,65429],"mapped",[12518]],[[65430,65430],"mapped",[12520]],[[65431,65431],"mapped",[12521]],[[65432,65432],"mapped",[12522]],[[65433,65433],"mapped",[12523]],[[65434,65434],"mapped",[12524]],[[65435,65435],"mapped",[12525]],[[65436,65436],"mapped",[12527]],[[65437,65437],"mapped",[12531]],[[65438,65438],"mapped",[12441]],[[65439,65439],"mapped",[12442]],[[65440,65440],"disallowed"],[[65441,65441],"mapped",[4352]],[[65442,65442],"mapped",[4353]],[[65443,65443],"mapped",[4522]],[[65444,65444],"mapped",[4354]],[[65445,65445],"mapped",[4524]],[[65446,65446],"mapped",[4525]],[[65447,65447],"mapped",[4355]],[[65448,65448],"mapped",[4356]],[[65449,65449],"mapped",[4357]],[[65450,65450],"mapped",[4528]],[[65451,65451],"mapped",[4529]],[[65452,65452],"mapped",[4530]],[[65453,65453],"mapped",[4531]],[[65454,65454],"mapped",[4532]],[[65455,65455],"mapped",[4533]],[[65456,65456],"mapped",[4378]],[[65457,65457],"mapped",[4358]],[[65458,65458],"mapped",[4359]],[[65459,65459],"mapped",[4360]],[[65460,65460],"mapped",[4385]],[[65461,65461],"mapped",[4361]],[[65462,65462],"mapped",[4362]],[[65463,65463],"mapped",[4363]],[[65464,65464],"mapped",[4364]],[[65465,65465],"mapped",[4365]],[[65466,65466],"mapped",[4366]],[[65467,65467],"mapped",[4367]],[[65468,65468],"mapped",[4368]],[[65469,65469],"mapped",[4369]],[[65470,65470],"mapped",[4370]],[[65471,65473],"disallowed"],[[65474,65474],"mapped",[4449]],[[65475,65475],"mapped",[4450]],[[65476,65476],"mapped",[4451]],[[65477,65477],"mapped",[4452]],[[65478,65478],"mapped",[4453]],[[65479,65479],"mapped",[4454]],[[65480,65481],"disallowed"],[[65482,65482],"mapped",[4455]],[[65483,65483],"mapped",[4456]],[[65484,65484],"mapped",[4457]],[[65485,65485],"mapped",[4458]],[[65486,65486],"mapped",[4459]],[[65487,65487],"mapped",[4460]],[[65488,65489],"disallowed"],[[65490,65490]
,"mapped",[4461]],[[65491,65491],"mapped",[4462]],[[65492,65492],"mapped",[4463]],[[65493,65493],"mapped",[4464]],[[65494,65494],"mapped",[4465]],[[65495,65495],"mapped",[4466]],[[65496,65497],"disallowed"],[[65498,65498],"mapped",[4467]],[[65499,65499],"mapped",[4468]],[[65500,65500],"mapped",[4469]],[[65501,65503],"disallowed"],[[65504,65504],"mapped",[162]],[[65505,65505],"mapped",[163]],[[65506,65506],"mapped",[172]],[[65507,65507],"disallowed_STD3_mapped",[32,772]],[[65508,65508],"mapped",[166]],[[65509,65509],"mapped",[165]],[[65510,65510],"mapped",[8361]],[[65511,65511],"disallowed"],[[65512,65512],"mapped",[9474]],[[65513,65513],"mapped",[8592]],[[65514,65514],"mapped",[8593]],[[65515,65515],"mapped",[8594]],[[65516,65516],"mapped",[8595]],[[65517,65517],"mapped",[9632]],[[65518,65518],"mapped",[9675]],[[65519,65528],"disallowed"],[[65529,65531],"disallowed"],[[65532,65532],"disallowed"],[[65533,65533],"disallowed"],[[65534,65535],"disallowed"],[[65536,65547],"valid"],[[65548,65548],"disallowed"],[[65549,65574],"valid"],[[65575,65575],"disallowed"],[[65576,65594],"valid"],[[65595,65595],"disallowed"],[[65596,65597],"valid"],[[65598,65598],"disallowed"],[[65599,65613],"valid"],[[65614,65615],"disallowed"],[[65616,65629],"valid"],[[65630,65663],"disallowed"],[[65664,65786],"valid"],[[65787,65791],"disallowed"],[[65792,65794],"valid",[],"NV8"],[[65795,65798],"disallowed"],[[65799,65843],"valid",[],"NV8"],[[65844,65846],"disallowed"],[[65847,65855],"valid",[],"NV8"],[[65856,65930],"valid",[],"NV8"],[[65931,65932],"valid",[],"NV8"],[[65933,65935],"disallowed"],[[65936,65947],"valid",[],"NV8"],[[65948,65951],"disallowed"],[[65952,65952],"valid",[],"NV8"],[[65953,65999],"disallowed"],[[66000,66044],"valid",[],"NV8"],[[66045,66045],"valid"],[[66046,66175],"disallowed"],[[66176,66204],"valid"],[[66205,66207],"disallowed"],[[66208,66256],"valid"],[[66257,66271],"disallowed"],[[66272,66272],"valid"],[[66273,66299],"valid",[],"NV8"],[[66300,66303],"disallowed"],[[66304,66334],"valid"],[[66335,66335],"valid"],[[66336,66339],"valid",[],"NV8"],[[66340,66351],"disallowed"],[[66352,66368],"valid"],[[66369,66369],"valid",[],"NV8"],[[66370,66377],"valid"],[[66378,66378],"valid",[],"NV8"],[[66379,66383],"disallowed"],[[66384,66426],"valid"],[[66427,66431],"disallowed"],[[66432,66461],"valid"],[[66462,66462],"disallowed"],[[66463,66463],"valid",[],"NV8"],[[66464,66499],"valid"],[[66500,66503],"disallowed"],[[66504,66511],"valid"],[[66512,66517],"valid",[],"NV8"],[[66518,66559],"disallowed"],[[66560,66560],"mapped",[66600]],[[66561,66561],"mapped",[66601]],[[66562,66562],"mapped",[66602]],[[66563,66563],"mapped",[66603]],[[66564,66564],"mapped",[66604]],[[66565,66565],"mapped",[66605]],[[66566,66566],"mapped",[66606]],[[66567,66567],"mapped",[66607]],[[66568,66568],"mapped",[66608]],[[66569,66569],"mapped",[66609]],[[66570,66570],"mapped",[66610]],[[66571,66571],"mapped",[66611]],[[66572,66572],"mapped",[66612]],[[66573,66573],"mapped",[66613]],[[66574,66574],"mapped",[66614]],[[66575,66575],"mapped",[66615]],[[66576,66576],"mapped",[66616]],[[66577,66577],"mapped",[66617]],[[66578,66578],"mapped",[66618]],[[66579,66579],"mapped",[66619]],[[66580,66580],"mapped",[66620]],[[66581,66581],"mapped",[66621]],[[66582,66582],"mapped",[66622]],[[66583,66583],"mapped",[66623]],[[66584,66584],"mapped",[66624]],[[66585,66585],"mapped",[66625]],[[66586,66586],"mapped",[66626]],[[66587,66587],"mapped",[66627]],[[66588,66588],"mapped",[66628]],[[66589,66589],"mapped",[66629]],[[66590,66590],"mapped",[66630]],[[66591,6
6591],"mapped",[66631]],[[66592,66592],"mapped",[66632]],[[66593,66593],"mapped",[66633]],[[66594,66594],"mapped",[66634]],[[66595,66595],"mapped",[66635]],[[66596,66596],"mapped",[66636]],[[66597,66597],"mapped",[66637]],[[66598,66598],"mapped",[66638]],[[66599,66599],"mapped",[66639]],[[66600,66637],"valid"],[[66638,66717],"valid"],[[66718,66719],"disallowed"],[[66720,66729],"valid"],[[66730,66815],"disallowed"],[[66816,66855],"valid"],[[66856,66863],"disallowed"],[[66864,66915],"valid"],[[66916,66926],"disallowed"],[[66927,66927],"valid",[],"NV8"],[[66928,67071],"disallowed"],[[67072,67382],"valid"],[[67383,67391],"disallowed"],[[67392,67413],"valid"],[[67414,67423],"disallowed"],[[67424,67431],"valid"],[[67432,67583],"disallowed"],[[67584,67589],"valid"],[[67590,67591],"disallowed"],[[67592,67592],"valid"],[[67593,67593],"disallowed"],[[67594,67637],"valid"],[[67638,67638],"disallowed"],[[67639,67640],"valid"],[[67641,67643],"disallowed"],[[67644,67644],"valid"],[[67645,67646],"disallowed"],[[67647,67647],"valid"],[[67648,67669],"valid"],[[67670,67670],"disallowed"],[[67671,67679],"valid",[],"NV8"],[[67680,67702],"valid"],[[67703,67711],"valid",[],"NV8"],[[67712,67742],"valid"],[[67743,67750],"disallowed"],[[67751,67759],"valid",[],"NV8"],[[67760,67807],"disallowed"],[[67808,67826],"valid"],[[67827,67827],"disallowed"],[[67828,67829],"valid"],[[67830,67834],"disallowed"],[[67835,67839],"valid",[],"NV8"],[[67840,67861],"valid"],[[67862,67865],"valid",[],"NV8"],[[67866,67867],"valid",[],"NV8"],[[67868,67870],"disallowed"],[[67871,67871],"valid",[],"NV8"],[[67872,67897],"valid"],[[67898,67902],"disallowed"],[[67903,67903],"valid",[],"NV8"],[[67904,67967],"disallowed"],[[67968,68023],"valid"],[[68024,68027],"disallowed"],[[68028,68029],"valid",[],"NV8"],[[68030,68031],"valid"],[[68032,68047],"valid",[],"NV8"],[[68048,68049],"disallowed"],[[68050,68095],"valid",[],"NV8"],[[68096,68099],"valid"],[[68100,68100],"disallowed"],[[68101,68102],"valid"],[[68103,68107],"disallowed"],[[68108,68115],"valid"],[[68116,68116],"disallowed"],[[68117,68119],"valid"],[[68120,68120],"disallowed"],[[68121,68147],"valid"],[[68148,68151],"disallowed"],[[68152,68154],"valid"],[[68155,68158],"disallowed"],[[68159,68159],"valid"],[[68160,68167],"valid",[],"NV8"],[[68168,68175],"disallowed"],[[68176,68184],"valid",[],"NV8"],[[68185,68191],"disallowed"],[[68192,68220],"valid"],[[68221,68223],"valid",[],"NV8"],[[68224,68252],"valid"],[[68253,68255],"valid",[],"NV8"],[[68256,68287],"disallowed"],[[68288,68295],"valid"],[[68296,68296],"valid",[],"NV8"],[[68297,68326],"valid"],[[68327,68330],"disallowed"],[[68331,68342],"valid",[],"NV8"],[[68343,68351],"disallowed"],[[68352,68405],"valid"],[[68406,68408],"disallowed"],[[68409,68415],"valid",[],"NV8"],[[68416,68437],"valid"],[[68438,68439],"disallowed"],[[68440,68447],"valid",[],"NV8"],[[68448,68466],"valid"],[[68467,68471],"disallowed"],[[68472,68479],"valid",[],"NV8"],[[68480,68497],"valid"],[[68498,68504],"disallowed"],[[68505,68508],"valid",[],"NV8"],[[68509,68520],"disallowed"],[[68521,68527],"valid",[],"NV8"],[[68528,68607],"disallowed"],[[68608,68680],"valid"],[[68681,68735],"disallowed"],[[68736,68736],"mapped",[68800]],[[68737,68737],"mapped",[68801]],[[68738,68738],"mapped",[68802]],[[68739,68739],"mapped",[68803]],[[68740,68740],"mapped",[68804]],[[68741,68741],"mapped",[68805]],[[68742,68742],"mapped",[68806]],[[68743,68743],"mapped",[68807]],[[68744,68744],"mapped",[68808]],[[68745,68745],"mapped",[68809]],[[68746,68746],"mapped",[68810]],[[68747,68747],"mapp
ed",[68811]],[[68748,68748],"mapped",[68812]],[[68749,68749],"mapped",[68813]],[[68750,68750],"mapped",[68814]],[[68751,68751],"mapped",[68815]],[[68752,68752],"mapped",[68816]],[[68753,68753],"mapped",[68817]],[[68754,68754],"mapped",[68818]],[[68755,68755],"mapped",[68819]],[[68756,68756],"mapped",[68820]],[[68757,68757],"mapped",[68821]],[[68758,68758],"mapped",[68822]],[[68759,68759],"mapped",[68823]],[[68760,68760],"mapped",[68824]],[[68761,68761],"mapped",[68825]],[[68762,68762],"mapped",[68826]],[[68763,68763],"mapped",[68827]],[[68764,68764],"mapped",[68828]],[[68765,68765],"mapped",[68829]],[[68766,68766],"mapped",[68830]],[[68767,68767],"mapped",[68831]],[[68768,68768],"mapped",[68832]],[[68769,68769],"mapped",[68833]],[[68770,68770],"mapped",[68834]],[[68771,68771],"mapped",[68835]],[[68772,68772],"mapped",[68836]],[[68773,68773],"mapped",[68837]],[[68774,68774],"mapped",[68838]],[[68775,68775],"mapped",[68839]],[[68776,68776],"mapped",[68840]],[[68777,68777],"mapped",[68841]],[[68778,68778],"mapped",[68842]],[[68779,68779],"mapped",[68843]],[[68780,68780],"mapped",[68844]],[[68781,68781],"mapped",[68845]],[[68782,68782],"mapped",[68846]],[[68783,68783],"mapped",[68847]],[[68784,68784],"mapped",[68848]],[[68785,68785],"mapped",[68849]],[[68786,68786],"mapped",[68850]],[[68787,68799],"disallowed"],[[68800,68850],"valid"],[[68851,68857],"disallowed"],[[68858,68863],"valid",[],"NV8"],[[68864,69215],"disallowed"],[[69216,69246],"valid",[],"NV8"],[[69247,69631],"disallowed"],[[69632,69702],"valid"],[[69703,69709],"valid",[],"NV8"],[[69710,69713],"disallowed"],[[69714,69733],"valid",[],"NV8"],[[69734,69743],"valid"],[[69744,69758],"disallowed"],[[69759,69759],"valid"],[[69760,69818],"valid"],[[69819,69820],"valid",[],"NV8"],[[69821,69821],"disallowed"],[[69822,69825],"valid",[],"NV8"],[[69826,69839],"disallowed"],[[69840,69864],"valid"],[[69865,69871],"disallowed"],[[69872,69881],"valid"],[[69882,69887],"disallowed"],[[69888,69940],"valid"],[[69941,69941],"disallowed"],[[69942,69951],"valid"],[[69952,69955],"valid",[],"NV8"],[[69956,69967],"disallowed"],[[69968,70003],"valid"],[[70004,70005],"valid",[],"NV8"],[[70006,70006],"valid"],[[70007,70015],"disallowed"],[[70016,70084],"valid"],[[70085,70088],"valid",[],"NV8"],[[70089,70089],"valid",[],"NV8"],[[70090,70092],"valid"],[[70093,70093],"valid",[],"NV8"],[[70094,70095],"disallowed"],[[70096,70105],"valid"],[[70106,70106],"valid"],[[70107,70107],"valid",[],"NV8"],[[70108,70108],"valid"],[[70109,70111],"valid",[],"NV8"],[[70112,70112],"disallowed"],[[70113,70132],"valid",[],"NV8"],[[70133,70143],"disallowed"],[[70144,70161],"valid"],[[70162,70162],"disallowed"],[[70163,70199],"valid"],[[70200,70205],"valid",[],"NV8"],[[70206,70271],"disallowed"],[[70272,70278],"valid"],[[70279,70279],"disallowed"],[[70280,70280],"valid"],[[70281,70281],"disallowed"],[[70282,70285],"valid"],[[70286,70286],"disallowed"],[[70287,70301],"valid"],[[70302,70302],"disallowed"],[[70303,70312],"valid"],[[70313,70313],"valid",[],"NV8"],[[70314,70319],"disallowed"],[[70320,70378],"valid"],[[70379,70383],"disallowed"],[[70384,70393],"valid"],[[70394,70399],"disallowed"],[[70400,70400],"valid"],[[70401,70403],"valid"],[[70404,70404],"disallowed"],[[70405,70412],"valid"],[[70413,70414],"disallowed"],[[70415,70416],"valid"],[[70417,70418],"disallowed"],[[70419,70440],"valid"],[[70441,70441],"disallowed"],[[70442,70448],"valid"],[[70449,70449],"disallowed"],[[70450,70451],"valid"],[[70452,70452],"disallowed"],[[70453,70457],"valid"],[[70458,70459],"disallowed"],[[70460
,70468],"valid"],[[70469,70470],"disallowed"],[[70471,70472],"valid"],[[70473,70474],"disallowed"],[[70475,70477],"valid"],[[70478,70479],"disallowed"],[[70480,70480],"valid"],[[70481,70486],"disallowed"],[[70487,70487],"valid"],[[70488,70492],"disallowed"],[[70493,70499],"valid"],[[70500,70501],"disallowed"],[[70502,70508],"valid"],[[70509,70511],"disallowed"],[[70512,70516],"valid"],[[70517,70783],"disallowed"],[[70784,70853],"valid"],[[70854,70854],"valid",[],"NV8"],[[70855,70855],"valid"],[[70856,70863],"disallowed"],[[70864,70873],"valid"],[[70874,71039],"disallowed"],[[71040,71093],"valid"],[[71094,71095],"disallowed"],[[71096,71104],"valid"],[[71105,71113],"valid",[],"NV8"],[[71114,71127],"valid",[],"NV8"],[[71128,71133],"valid"],[[71134,71167],"disallowed"],[[71168,71232],"valid"],[[71233,71235],"valid",[],"NV8"],[[71236,71236],"valid"],[[71237,71247],"disallowed"],[[71248,71257],"valid"],[[71258,71295],"disallowed"],[[71296,71351],"valid"],[[71352,71359],"disallowed"],[[71360,71369],"valid"],[[71370,71423],"disallowed"],[[71424,71449],"valid"],[[71450,71452],"disallowed"],[[71453,71467],"valid"],[[71468,71471],"disallowed"],[[71472,71481],"valid"],[[71482,71487],"valid",[],"NV8"],[[71488,71839],"disallowed"],[[71840,71840],"mapped",[71872]],[[71841,71841],"mapped",[71873]],[[71842,71842],"mapped",[71874]],[[71843,71843],"mapped",[71875]],[[71844,71844],"mapped",[71876]],[[71845,71845],"mapped",[71877]],[[71846,71846],"mapped",[71878]],[[71847,71847],"mapped",[71879]],[[71848,71848],"mapped",[71880]],[[71849,71849],"mapped",[71881]],[[71850,71850],"mapped",[71882]],[[71851,71851],"mapped",[71883]],[[71852,71852],"mapped",[71884]],[[71853,71853],"mapped",[71885]],[[71854,71854],"mapped",[71886]],[[71855,71855],"mapped",[71887]],[[71856,71856],"mapped",[71888]],[[71857,71857],"mapped",[71889]],[[71858,71858],"mapped",[71890]],[[71859,71859],"mapped",[71891]],[[71860,71860],"mapped",[71892]],[[71861,71861],"mapped",[71893]],[[71862,71862],"mapped",[71894]],[[71863,71863],"mapped",[71895]],[[71864,71864],"mapped",[71896]],[[71865,71865],"mapped",[71897]],[[71866,71866],"mapped",[71898]],[[71867,71867],"mapped",[71899]],[[71868,71868],"mapped",[71900]],[[71869,71869],"mapped",[71901]],[[71870,71870],"mapped",[71902]],[[71871,71871],"mapped",[71903]],[[71872,71913],"valid"],[[71914,71922],"valid",[],"NV8"],[[71923,71934],"disallowed"],[[71935,71935],"valid"],[[71936,72383],"disallowed"],[[72384,72440],"valid"],[[72441,73727],"disallowed"],[[73728,74606],"valid"],[[74607,74648],"valid"],[[74649,74649],"valid"],[[74650,74751],"disallowed"],[[74752,74850],"valid",[],"NV8"],[[74851,74862],"valid",[],"NV8"],[[74863,74863],"disallowed"],[[74864,74867],"valid",[],"NV8"],[[74868,74868],"valid",[],"NV8"],[[74869,74879],"disallowed"],[[74880,75075],"valid"],[[75076,77823],"disallowed"],[[77824,78894],"valid"],[[78895,82943],"disallowed"],[[82944,83526],"valid"],[[83527,92159],"disallowed"],[[92160,92728],"valid"],[[92729,92735],"disallowed"],[[92736,92766],"valid"],[[92767,92767],"disallowed"],[[92768,92777],"valid"],[[92778,92781],"disallowed"],[[92782,92783],"valid",[],"NV8"],[[92784,92879],"disallowed"],[[92880,92909],"valid"],[[92910,92911],"disallowed"],[[92912,92916],"valid"],[[92917,92917],"valid",[],"NV8"],[[92918,92927],"disallowed"],[[92928,92982],"valid"],[[92983,92991],"valid",[],"NV8"],[[92992,92995],"valid"],[[92996,92997],"valid",[],"NV8"],[[92998,93007],"disallowed"],[[93008,93017],"valid"],[[93018,93018],"disallowed"],[[93019,93025],"valid",[],"NV8"],[[93026,93026],"disallowed"],[[
93027,93047],"valid"],[[93048,93052],"disallowed"],[[93053,93071],"valid"],[[93072,93951],"disallowed"],[[93952,94020],"valid"],[[94021,94031],"disallowed"],[[94032,94078],"valid"],[[94079,94094],"disallowed"],[[94095,94111],"valid"],[[94112,110591],"disallowed"],[[110592,110593],"valid"],[[110594,113663],"disallowed"],[[113664,113770],"valid"],[[113771,113775],"disallowed"],[[113776,113788],"valid"],[[113789,113791],"disallowed"],[[113792,113800],"valid"],[[113801,113807],"disallowed"],[[113808,113817],"valid"],[[113818,113819],"disallowed"],[[113820,113820],"valid",[],"NV8"],[[113821,113822],"valid"],[[113823,113823],"valid",[],"NV8"],[[113824,113827],"ignored"],[[113828,118783],"disallowed"],[[118784,119029],"valid",[],"NV8"],[[119030,119039],"disallowed"],[[119040,119078],"valid",[],"NV8"],[[119079,119080],"disallowed"],[[119081,119081],"valid",[],"NV8"],[[119082,119133],"valid",[],"NV8"],[[119134,119134],"mapped",[119127,119141]],[[119135,119135],"mapped",[119128,119141]],[[119136,119136],"mapped",[119128,119141,119150]],[[119137,119137],"mapped",[119128,119141,119151]],[[119138,119138],"mapped",[119128,119141,119152]],[[119139,119139],"mapped",[119128,119141,119153]],[[119140,119140],"mapped",[119128,119141,119154]],[[119141,119154],"valid",[],"NV8"],[[119155,119162],"disallowed"],[[119163,119226],"valid",[],"NV8"],[[119227,119227],"mapped",[119225,119141]],[[119228,119228],"mapped",[119226,119141]],[[119229,119229],"mapped",[119225,119141,119150]],[[119230,119230],"mapped",[119226,119141,119150]],[[119231,119231],"mapped",[119225,119141,119151]],[[119232,119232],"mapped",[119226,119141,119151]],[[119233,119261],"valid",[],"NV8"],[[119262,119272],"valid",[],"NV8"],[[119273,119295],"disallowed"],[[119296,119365],"valid",[],"NV8"],[[119366,119551],"disallowed"],[[119552,119638],"valid",[],"NV8"],[[119639,119647],"disallowed"],[[119648,119665],"valid",[],"NV8"],[[119666,119807],"disallowed"],[[119808,119808],"mapped",[97]],[[119809,119809],"mapped",[98]],[[119810,119810],"mapped",[99]],[[119811,119811],"mapped",[100]],[[119812,119812],"mapped",[101]],[[119813,119813],"mapped",[102]],[[119814,119814],"mapped",[103]],[[119815,119815],"mapped",[104]],[[119816,119816],"mapped",[105]],[[119817,119817],"mapped",[106]],[[119818,119818],"mapped",[107]],[[119819,119819],"mapped",[108]],[[119820,119820],"mapped",[109]],[[119821,119821],"mapped",[110]],[[119822,119822],"mapped",[111]],[[119823,119823],"mapped",[112]],[[119824,119824],"mapped",[113]],[[119825,119825],"mapped",[114]],[[119826,119826],"mapped",[115]],[[119827,119827],"mapped",[116]],[[119828,119828],"mapped",[117]],[[119829,119829],"mapped",[118]],[[119830,119830],"mapped",[119]],[[119831,119831],"mapped",[120]],[[119832,119832],"mapped",[121]],[[119833,119833],"mapped",[122]],[[119834,119834],"mapped",[97]],[[119835,119835],"mapped",[98]],[[119836,119836],"mapped",[99]],[[119837,119837],"mapped",[100]],[[119838,119838],"mapped",[101]],[[119839,119839],"mapped",[102]],[[119840,119840],"mapped",[103]],[[119841,119841],"mapped",[104]],[[119842,119842],"mapped",[105]],[[119843,119843],"mapped",[106]],[[119844,119844],"mapped",[107]],[[119845,119845],"mapped",[108]],[[119846,119846],"mapped",[109]],[[119847,119847],"mapped",[110]],[[119848,119848],"mapped",[111]],[[119849,119849],"mapped",[112]],[[119850,119850],"mapped",[113]],[[119851,119851],"mapped",[114]],[[119852,119852],"mapped",[115]],[[119853,119853],"mapped",[116]],[[119854,119854],"mapped",[117]],[[119855,119855],"mapped",[118]],[[119856,119856],"mapped",[119]],[[119857,119857]
,"mapped",[120]],[[119858,119858],"mapped",[121]],[[119859,119859],"mapped",[122]],[[119860,119860],"mapped",[97]],[[119861,119861],"mapped",[98]],[[119862,119862],"mapped",[99]],[[119863,119863],"mapped",[100]],[[119864,119864],"mapped",[101]],[[119865,119865],"mapped",[102]],[[119866,119866],"mapped",[103]],[[119867,119867],"mapped",[104]],[[119868,119868],"mapped",[105]],[[119869,119869],"mapped",[106]],[[119870,119870],"mapped",[107]],[[119871,119871],"mapped",[108]],[[119872,119872],"mapped",[109]],[[119873,119873],"mapped",[110]],[[119874,119874],"mapped",[111]],[[119875,119875],"mapped",[112]],[[119876,119876],"mapped",[113]],[[119877,119877],"mapped",[114]],[[119878,119878],"mapped",[115]],[[119879,119879],"mapped",[116]],[[119880,119880],"mapped",[117]],[[119881,119881],"mapped",[118]],[[119882,119882],"mapped",[119]],[[119883,119883],"mapped",[120]],[[119884,119884],"mapped",[121]],[[119885,119885],"mapped",[122]],[[119886,119886],"mapped",[97]],[[119887,119887],"mapped",[98]],[[119888,119888],"mapped",[99]],[[119889,119889],"mapped",[100]],[[119890,119890],"mapped",[101]],[[119891,119891],"mapped",[102]],[[119892,119892],"mapped",[103]],[[119893,119893],"disallowed"],[[119894,119894],"mapped",[105]],[[119895,119895],"mapped",[106]],[[119896,119896],"mapped",[107]],[[119897,119897],"mapped",[108]],[[119898,119898],"mapped",[109]],[[119899,119899],"mapped",[110]],[[119900,119900],"mapped",[111]],[[119901,119901],"mapped",[112]],[[119902,119902],"mapped",[113]],[[119903,119903],"mapped",[114]],[[119904,119904],"mapped",[115]],[[119905,119905],"mapped",[116]],[[119906,119906],"mapped",[117]],[[119907,119907],"mapped",[118]],[[119908,119908],"mapped",[119]],[[119909,119909],"mapped",[120]],[[119910,119910],"mapped",[121]],[[119911,119911],"mapped",[122]],[[119912,119912],"mapped",[97]],[[119913,119913],"mapped",[98]],[[119914,119914],"mapped",[99]],[[119915,119915],"mapped",[100]],[[119916,119916],"mapped",[101]],[[119917,119917],"mapped",[102]],[[119918,119918],"mapped",[103]],[[119919,119919],"mapped",[104]],[[119920,119920],"mapped",[105]],[[119921,119921],"mapped",[106]],[[119922,119922],"mapped",[107]],[[119923,119923],"mapped",[108]],[[119924,119924],"mapped",[109]],[[119925,119925],"mapped",[110]],[[119926,119926],"mapped",[111]],[[119927,119927],"mapped",[112]],[[119928,119928],"mapped",[113]],[[119929,119929],"mapped",[114]],[[119930,119930],"mapped",[115]],[[119931,119931],"mapped",[116]],[[119932,119932],"mapped",[117]],[[119933,119933],"mapped",[118]],[[119934,119934],"mapped",[119]],[[119935,119935],"mapped",[120]],[[119936,119936],"mapped",[121]],[[119937,119937],"mapped",[122]],[[119938,119938],"mapped",[97]],[[119939,119939],"mapped",[98]],[[119940,119940],"mapped",[99]],[[119941,119941],"mapped",[100]],[[119942,119942],"mapped",[101]],[[119943,119943],"mapped",[102]],[[119944,119944],"mapped",[103]],[[119945,119945],"mapped",[104]],[[119946,119946],"mapped",[105]],[[119947,119947],"mapped",[106]],[[119948,119948],"mapped",[107]],[[119949,119949],"mapped",[108]],[[119950,119950],"mapped",[109]],[[119951,119951],"mapped",[110]],[[119952,119952],"mapped",[111]],[[119953,119953],"mapped",[112]],[[119954,119954],"mapped",[113]],[[119955,119955],"mapped",[114]],[[119956,119956],"mapped",[115]],[[119957,119957],"mapped",[116]],[[119958,119958],"mapped",[117]],[[119959,119959],"mapped",[118]],[[119960,119960],"mapped",[119]],[[119961,119961],"mapped",[120]],[[119962,119962],"mapped",[121]],[[119963,119963],"mapped",[122]],[[119964,119964],"mapped",[97]],[[119965,119965],"disa
llowed"],[[119966,119966],"mapped",[99]],[[119967,119967],"mapped",[100]],[[119968,119969],"disallowed"],[[119970,119970],"mapped",[103]],[[119971,119972],"disallowed"],[[119973,119973],"mapped",[106]],[[119974,119974],"mapped",[107]],[[119975,119976],"disallowed"],[[119977,119977],"mapped",[110]],[[119978,119978],"mapped",[111]],[[119979,119979],"mapped",[112]],[[119980,119980],"mapped",[113]],[[119981,119981],"disallowed"],[[119982,119982],"mapped",[115]],[[119983,119983],"mapped",[116]],[[119984,119984],"mapped",[117]],[[119985,119985],"mapped",[118]],[[119986,119986],"mapped",[119]],[[119987,119987],"mapped",[120]],[[119988,119988],"mapped",[121]],[[119989,119989],"mapped",[122]],[[119990,119990],"mapped",[97]],[[119991,119991],"mapped",[98]],[[119992,119992],"mapped",[99]],[[119993,119993],"mapped",[100]],[[119994,119994],"disallowed"],[[119995,119995],"mapped",[102]],[[119996,119996],"disallowed"],[[119997,119997],"mapped",[104]],[[119998,119998],"mapped",[105]],[[119999,119999],"mapped",[106]],[[120000,120000],"mapped",[107]],[[120001,120001],"mapped",[108]],[[120002,120002],"mapped",[109]],[[120003,120003],"mapped",[110]],[[120004,120004],"disallowed"],[[120005,120005],"mapped",[112]],[[120006,120006],"mapped",[113]],[[120007,120007],"mapped",[114]],[[120008,120008],"mapped",[115]],[[120009,120009],"mapped",[116]],[[120010,120010],"mapped",[117]],[[120011,120011],"mapped",[118]],[[120012,120012],"mapped",[119]],[[120013,120013],"mapped",[120]],[[120014,120014],"mapped",[121]],[[120015,120015],"mapped",[122]],[[120016,120016],"mapped",[97]],[[120017,120017],"mapped",[98]],[[120018,120018],"mapped",[99]],[[120019,120019],"mapped",[100]],[[120020,120020],"mapped",[101]],[[120021,120021],"mapped",[102]],[[120022,120022],"mapped",[103]],[[120023,120023],"mapped",[104]],[[120024,120024],"mapped",[105]],[[120025,120025],"mapped",[106]],[[120026,120026],"mapped",[107]],[[120027,120027],"mapped",[108]],[[120028,120028],"mapped",[109]],[[120029,120029],"mapped",[110]],[[120030,120030],"mapped",[111]],[[120031,120031],"mapped",[112]],[[120032,120032],"mapped",[113]],[[120033,120033],"mapped",[114]],[[120034,120034],"mapped",[115]],[[120035,120035],"mapped",[116]],[[120036,120036],"mapped",[117]],[[120037,120037],"mapped",[118]],[[120038,120038],"mapped",[119]],[[120039,120039],"mapped",[120]],[[120040,120040],"mapped",[121]],[[120041,120041],"mapped",[122]],[[120042,120042],"mapped",[97]],[[120043,120043],"mapped",[98]],[[120044,120044],"mapped",[99]],[[120045,120045],"mapped",[100]],[[120046,120046],"mapped",[101]],[[120047,120047],"mapped",[102]],[[120048,120048],"mapped",[103]],[[120049,120049],"mapped",[104]],[[120050,120050],"mapped",[105]],[[120051,120051],"mapped",[106]],[[120052,120052],"mapped",[107]],[[120053,120053],"mapped",[108]],[[120054,120054],"mapped",[109]],[[120055,120055],"mapped",[110]],[[120056,120056],"mapped",[111]],[[120057,120057],"mapped",[112]],[[120058,120058],"mapped",[113]],[[120059,120059],"mapped",[114]],[[120060,120060],"mapped",[115]],[[120061,120061],"mapped",[116]],[[120062,120062],"mapped",[117]],[[120063,120063],"mapped",[118]],[[120064,120064],"mapped",[119]],[[120065,120065],"mapped",[120]],[[120066,120066],"mapped",[121]],[[120067,120067],"mapped",[122]],[[120068,120068],"mapped",[97]],[[120069,120069],"mapped",[98]],[[120070,120070],"disallowed"],[[120071,120071],"mapped",[100]],[[120072,120072],"mapped",[101]],[[120073,120073],"mapped",[102]],[[120074,120074],"mapped",[103]],[[120075,120076],"disallowed"],[[120077,120077],"mapped",[106]],[[120078,120
078],"mapped",[107]],[[120079,120079],"mapped",[108]],[[120080,120080],"mapped",[109]],[[120081,120081],"mapped",[110]],[[120082,120082],"mapped",[111]],[[120083,120083],"mapped",[112]],[[120084,120084],"mapped",[113]],[[120085,120085],"disallowed"],[[120086,120086],"mapped",[115]],[[120087,120087],"mapped",[116]],[[120088,120088],"mapped",[117]],[[120089,120089],"mapped",[118]],[[120090,120090],"mapped",[119]],[[120091,120091],"mapped",[120]],[[120092,120092],"mapped",[121]],[[120093,120093],"disallowed"],[[120094,120094],"mapped",[97]],[[120095,120095],"mapped",[98]],[[120096,120096],"mapped",[99]],[[120097,120097],"mapped",[100]],[[120098,120098],"mapped",[101]],[[120099,120099],"mapped",[102]],[[120100,120100],"mapped",[103]],[[120101,120101],"mapped",[104]],[[120102,120102],"mapped",[105]],[[120103,120103],"mapped",[106]],[[120104,120104],"mapped",[107]],[[120105,120105],"mapped",[108]],[[120106,120106],"mapped",[109]],[[120107,120107],"mapped",[110]],[[120108,120108],"mapped",[111]],[[120109,120109],"mapped",[112]],[[120110,120110],"mapped",[113]],[[120111,120111],"mapped",[114]],[[120112,120112],"mapped",[115]],[[120113,120113],"mapped",[116]],[[120114,120114],"mapped",[117]],[[120115,120115],"mapped",[118]],[[120116,120116],"mapped",[119]],[[120117,120117],"mapped",[120]],[[120118,120118],"mapped",[121]],[[120119,120119],"mapped",[122]],[[120120,120120],"mapped",[97]],[[120121,120121],"mapped",[98]],[[120122,120122],"disallowed"],[[120123,120123],"mapped",[100]],[[120124,120124],"mapped",[101]],[[120125,120125],"mapped",[102]],[[120126,120126],"mapped",[103]],[[120127,120127],"disallowed"],[[120128,120128],"mapped",[105]],[[120129,120129],"mapped",[106]],[[120130,120130],"mapped",[107]],[[120131,120131],"mapped",[108]],[[120132,120132],"mapped",[109]],[[120133,120133],"disallowed"],[[120134,120134],"mapped",[111]],[[120135,120137],"disallowed"],[[120138,120138],"mapped",[115]],[[120139,120139],"mapped",[116]],[[120140,120140],"mapped",[117]],[[120141,120141],"mapped",[118]],[[120142,120142],"mapped",[119]],[[120143,120143],"mapped",[120]],[[120144,120144],"mapped",[121]],[[120145,120145],"disallowed"],[[120146,120146],"mapped",[97]],[[120147,120147],"mapped",[98]],[[120148,120148],"mapped",[99]],[[120149,120149],"mapped",[100]],[[120150,120150],"mapped",[101]],[[120151,120151],"mapped",[102]],[[120152,120152],"mapped",[103]],[[120153,120153],"mapped",[104]],[[120154,120154],"mapped",[105]],[[120155,120155],"mapped",[106]],[[120156,120156],"mapped",[107]],[[120157,120157],"mapped",[108]],[[120158,120158],"mapped",[109]],[[120159,120159],"mapped",[110]],[[120160,120160],"mapped",[111]],[[120161,120161],"mapped",[112]],[[120162,120162],"mapped",[113]],[[120163,120163],"mapped",[114]],[[120164,120164],"mapped",[115]],[[120165,120165],"mapped",[116]],[[120166,120166],"mapped",[117]],[[120167,120167],"mapped",[118]],[[120168,120168],"mapped",[119]],[[120169,120169],"mapped",[120]],[[120170,120170],"mapped",[121]],[[120171,120171],"mapped",[122]],[[120172,120172],"mapped",[97]],[[120173,120173],"mapped",[98]],[[120174,120174],"mapped",[99]],[[120175,120175],"mapped",[100]],[[120176,120176],"mapped",[101]],[[120177,120177],"mapped",[102]],[[120178,120178],"mapped",[103]],[[120179,120179],"mapped",[104]],[[120180,120180],"mapped",[105]],[[120181,120181],"mapped",[106]],[[120182,120182],"mapped",[107]],[[120183,120183],"mapped",[108]],[[120184,120184],"mapped",[109]],[[120185,120185],"mapped",[110]],[[120186,120186],"mapped",[111]],[[120187,120187],"mapped",[112]],[[120188,120188],"mapped",[1
13]],[[120189,120189],"mapped",[114]],[[120190,120190],"mapped",[115]],[[120191,120191],"mapped",[116]],[[120192,120192],"mapped",[117]],[[120193,120193],"mapped",[118]],[[120194,120194],"mapped",[119]],[[120195,120195],"mapped",[120]],[[120196,120196],"mapped",[121]],[[120197,120197],"mapped",[122]],[[120198,120198],"mapped",[97]],[[120199,120199],"mapped",[98]],[[120200,120200],"mapped",[99]],[[120201,120201],"mapped",[100]],[[120202,120202],"mapped",[101]],[[120203,120203],"mapped",[102]],[[120204,120204],"mapped",[103]],[[120205,120205],"mapped",[104]],[[120206,120206],"mapped",[105]],[[120207,120207],"mapped",[106]],[[120208,120208],"mapped",[107]],[[120209,120209],"mapped",[108]],[[120210,120210],"mapped",[109]],[[120211,120211],"mapped",[110]],[[120212,120212],"mapped",[111]],[[120213,120213],"mapped",[112]],[[120214,120214],"mapped",[113]],[[120215,120215],"mapped",[114]],[[120216,120216],"mapped",[115]],[[120217,120217],"mapped",[116]],[[120218,120218],"mapped",[117]],[[120219,120219],"mapped",[118]],[[120220,120220],"mapped",[119]],[[120221,120221],"mapped",[120]],[[120222,120222],"mapped",[121]],[[120223,120223],"mapped",[122]],[[120224,120224],"mapped",[97]],[[120225,120225],"mapped",[98]],[[120226,120226],"mapped",[99]],[[120227,120227],"mapped",[100]],[[120228,120228],"mapped",[101]],[[120229,120229],"mapped",[102]],[[120230,120230],"mapped",[103]],[[120231,120231],"mapped",[104]],[[120232,120232],"mapped",[105]],[[120233,120233],"mapped",[106]],[[120234,120234],"mapped",[107]],[[120235,120235],"mapped",[108]],[[120236,120236],"mapped",[109]],[[120237,120237],"mapped",[110]],[[120238,120238],"mapped",[111]],[[120239,120239],"mapped",[112]],[[120240,120240],"mapped",[113]],[[120241,120241],"mapped",[114]],[[120242,120242],"mapped",[115]],[[120243,120243],"mapped",[116]],[[120244,120244],"mapped",[117]],[[120245,120245],"mapped",[118]],[[120246,120246],"mapped",[119]],[[120247,120247],"mapped",[120]],[[120248,120248],"mapped",[121]],[[120249,120249],"mapped",[122]],[[120250,120250],"mapped",[97]],[[120251,120251],"mapped",[98]],[[120252,120252],"mapped",[99]],[[120253,120253],"mapped",[100]],[[120254,120254],"mapped",[101]],[[120255,120255],"mapped",[102]],[[120256,120256],"mapped",[103]],[[120257,120257],"mapped",[104]],[[120258,120258],"mapped",[105]],[[120259,120259],"mapped",[106]],[[120260,120260],"mapped",[107]],[[120261,120261],"mapped",[108]],[[120262,120262],"mapped",[109]],[[120263,120263],"mapped",[110]],[[120264,120264],"mapped",[111]],[[120265,120265],"mapped",[112]],[[120266,120266],"mapped",[113]],[[120267,120267],"mapped",[114]],[[120268,120268],"mapped",[115]],[[120269,120269],"mapped",[116]],[[120270,120270],"mapped",[117]],[[120271,120271],"mapped",[118]],[[120272,120272],"mapped",[119]],[[120273,120273],"mapped",[120]],[[120274,120274],"mapped",[121]],[[120275,120275],"mapped",[122]],[[120276,120276],"mapped",[97]],[[120277,120277],"mapped",[98]],[[120278,120278],"mapped",[99]],[[120279,120279],"mapped",[100]],[[120280,120280],"mapped",[101]],[[120281,120281],"mapped",[102]],[[120282,120282],"mapped",[103]],[[120283,120283],"mapped",[104]],[[120284,120284],"mapped",[105]],[[120285,120285],"mapped",[106]],[[120286,120286],"mapped",[107]],[[120287,120287],"mapped",[108]],[[120288,120288],"mapped",[109]],[[120289,120289],"mapped",[110]],[[120290,120290],"mapped",[111]],[[120291,120291],"mapped",[112]],[[120292,120292],"mapped",[113]],[[120293,120293],"mapped",[114]],[[120294,120294],"mapped",[115]],[[120295,120295],"mapped",[116]],[[120296,120296],"mapped",[117]
],[[120297,120297],"mapped",[118]],[[120298,120298],"mapped",[119]],[[120299,120299],"mapped",[120]],[[120300,120300],"mapped",[121]],[[120301,120301],"mapped",[122]],[[120302,120302],"mapped",[97]],[[120303,120303],"mapped",[98]],[[120304,120304],"mapped",[99]],[[120305,120305],"mapped",[100]],[[120306,120306],"mapped",[101]],[[120307,120307],"mapped",[102]],[[120308,120308],"mapped",[103]],[[120309,120309],"mapped",[104]],[[120310,120310],"mapped",[105]],[[120311,120311],"mapped",[106]],[[120312,120312],"mapped",[107]],[[120313,120313],"mapped",[108]],[[120314,120314],"mapped",[109]],[[120315,120315],"mapped",[110]],[[120316,120316],"mapped",[111]],[[120317,120317],"mapped",[112]],[[120318,120318],"mapped",[113]],[[120319,120319],"mapped",[114]],[[120320,120320],"mapped",[115]],[[120321,120321],"mapped",[116]],[[120322,120322],"mapped",[117]],[[120323,120323],"mapped",[118]],[[120324,120324],"mapped",[119]],[[120325,120325],"mapped",[120]],[[120326,120326],"mapped",[121]],[[120327,120327],"mapped",[122]],[[120328,120328],"mapped",[97]],[[120329,120329],"mapped",[98]],[[120330,120330],"mapped",[99]],[[120331,120331],"mapped",[100]],[[120332,120332],"mapped",[101]],[[120333,120333],"mapped",[102]],[[120334,120334],"mapped",[103]],[[120335,120335],"mapped",[104]],[[120336,120336],"mapped",[105]],[[120337,120337],"mapped",[106]],[[120338,120338],"mapped",[107]],[[120339,120339],"mapped",[108]],[[120340,120340],"mapped",[109]],[[120341,120341],"mapped",[110]],[[120342,120342],"mapped",[111]],[[120343,120343],"mapped",[112]],[[120344,120344],"mapped",[113]],[[120345,120345],"mapped",[114]],[[120346,120346],"mapped",[115]],[[120347,120347],"mapped",[116]],[[120348,120348],"mapped",[117]],[[120349,120349],"mapped",[118]],[[120350,120350],"mapped",[119]],[[120351,120351],"mapped",[120]],[[120352,120352],"mapped",[121]],[[120353,120353],"mapped",[122]],[[120354,120354],"mapped",[97]],[[120355,120355],"mapped",[98]],[[120356,120356],"mapped",[99]],[[120357,120357],"mapped",[100]],[[120358,120358],"mapped",[101]],[[120359,120359],"mapped",[102]],[[120360,120360],"mapped",[103]],[[120361,120361],"mapped",[104]],[[120362,120362],"mapped",[105]],[[120363,120363],"mapped",[106]],[[120364,120364],"mapped",[107]],[[120365,120365],"mapped",[108]],[[120366,120366],"mapped",[109]],[[120367,120367],"mapped",[110]],[[120368,120368],"mapped",[111]],[[120369,120369],"mapped",[112]],[[120370,120370],"mapped",[113]],[[120371,120371],"mapped",[114]],[[120372,120372],"mapped",[115]],[[120373,120373],"mapped",[116]],[[120374,120374],"mapped",[117]],[[120375,120375],"mapped",[118]],[[120376,120376],"mapped",[119]],[[120377,120377],"mapped",[120]],[[120378,120378],"mapped",[121]],[[120379,120379],"mapped",[122]],[[120380,120380],"mapped",[97]],[[120381,120381],"mapped",[98]],[[120382,120382],"mapped",[99]],[[120383,120383],"mapped",[100]],[[120384,120384],"mapped",[101]],[[120385,120385],"mapped",[102]],[[120386,120386],"mapped",[103]],[[120387,120387],"mapped",[104]],[[120388,120388],"mapped",[105]],[[120389,120389],"mapped",[106]],[[120390,120390],"mapped",[107]],[[120391,120391],"mapped",[108]],[[120392,120392],"mapped",[109]],[[120393,120393],"mapped",[110]],[[120394,120394],"mapped",[111]],[[120395,120395],"mapped",[112]],[[120396,120396],"mapped",[113]],[[120397,120397],"mapped",[114]],[[120398,120398],"mapped",[115]],[[120399,120399],"mapped",[116]],[[120400,120400],"mapped",[117]],[[120401,120401],"mapped",[118]],[[120402,120402],"mapped",[119]],[[120403,120403],"mapped",[120]],[[120404,120404],"mapped",[121]],[
[120405,120405],"mapped",[122]],[[120406,120406],"mapped",[97]],[[120407,120407],"mapped",[98]],[[120408,120408],"mapped",[99]],[[120409,120409],"mapped",[100]],[[120410,120410],"mapped",[101]],[[120411,120411],"mapped",[102]],[[120412,120412],"mapped",[103]],[[120413,120413],"mapped",[104]],[[120414,120414],"mapped",[105]],[[120415,120415],"mapped",[106]],[[120416,120416],"mapped",[107]],[[120417,120417],"mapped",[108]],[[120418,120418],"mapped",[109]],[[120419,120419],"mapped",[110]],[[120420,120420],"mapped",[111]],[[120421,120421],"mapped",[112]],[[120422,120422],"mapped",[113]],[[120423,120423],"mapped",[114]],[[120424,120424],"mapped",[115]],[[120425,120425],"mapped",[116]],[[120426,120426],"mapped",[117]],[[120427,120427],"mapped",[118]],[[120428,120428],"mapped",[119]],[[120429,120429],"mapped",[120]],[[120430,120430],"mapped",[121]],[[120431,120431],"mapped",[122]],[[120432,120432],"mapped",[97]],[[120433,120433],"mapped",[98]],[[120434,120434],"mapped",[99]],[[120435,120435],"mapped",[100]],[[120436,120436],"mapped",[101]],[[120437,120437],"mapped",[102]],[[120438,120438],"mapped",[103]],[[120439,120439],"mapped",[104]],[[120440,120440],"mapped",[105]],[[120441,120441],"mapped",[106]],[[120442,120442],"mapped",[107]],[[120443,120443],"mapped",[108]],[[120444,120444],"mapped",[109]],[[120445,120445],"mapped",[110]],[[120446,120446],"mapped",[111]],[[120447,120447],"mapped",[112]],[[120448,120448],"mapped",[113]],[[120449,120449],"mapped",[114]],[[120450,120450],"mapped",[115]],[[120451,120451],"mapped",[116]],[[120452,120452],"mapped",[117]],[[120453,120453],"mapped",[118]],[[120454,120454],"mapped",[119]],[[120455,120455],"mapped",[120]],[[120456,120456],"mapped",[121]],[[120457,120457],"mapped",[122]],[[120458,120458],"mapped",[97]],[[120459,120459],"mapped",[98]],[[120460,120460],"mapped",[99]],[[120461,120461],"mapped",[100]],[[120462,120462],"mapped",[101]],[[120463,120463],"mapped",[102]],[[120464,120464],"mapped",[103]],[[120465,120465],"mapped",[104]],[[120466,120466],"mapped",[105]],[[120467,120467],"mapped",[106]],[[120468,120468],"mapped",[107]],[[120469,120469],"mapped",[108]],[[120470,120470],"mapped",[109]],[[120471,120471],"mapped",[110]],[[120472,120472],"mapped",[111]],[[120473,120473],"mapped",[112]],[[120474,120474],"mapped",[113]],[[120475,120475],"mapped",[114]],[[120476,120476],"mapped",[115]],[[120477,120477],"mapped",[116]],[[120478,120478],"mapped",[117]],[[120479,120479],"mapped",[118]],[[120480,120480],"mapped",[119]],[[120481,120481],"mapped",[120]],[[120482,120482],"mapped",[121]],[[120483,120483],"mapped",[122]],[[120484,120484],"mapped",[305]],[[120485,120485],"mapped",[567]],[[120486,120487],"disallowed"],[[120488,120488],"mapped",[945]],[[120489,120489],"mapped",[946]],[[120490,120490],"mapped",[947]],[[120491,120491],"mapped",[948]],[[120492,120492],"mapped",[949]],[[120493,120493],"mapped",[950]],[[120494,120494],"mapped",[951]],[[120495,120495],"mapped",[952]],[[120496,120496],"mapped",[953]],[[120497,120497],"mapped",[954]],[[120498,120498],"mapped",[955]],[[120499,120499],"mapped",[956]],[[120500,120500],"mapped",[957]],[[120501,120501],"mapped",[958]],[[120502,120502],"mapped",[959]],[[120503,120503],"mapped",[960]],[[120504,120504],"mapped",[961]],[[120505,120505],"mapped",[952]],[[120506,120506],"mapped",[963]],[[120507,120507],"mapped",[964]],[[120508,120508],"mapped",[965]],[[120509,120509],"mapped",[966]],[[120510,120510],"mapped",[967]],[[120511,120511],"mapped",[968]],[[120512,120512],"mapped",[969]],[[120513,120513],"mapped",[8711]],[[
120514,120514],"mapped",[945]],[[120515,120515],"mapped",[946]],[[120516,120516],"mapped",[947]],[[120517,120517],"mapped",[948]],[[120518,120518],"mapped",[949]],[[120519,120519],"mapped",[950]],[[120520,120520],"mapped",[951]],[[120521,120521],"mapped",[952]],[[120522,120522],"mapped",[953]],[[120523,120523],"mapped",[954]],[[120524,120524],"mapped",[955]],[[120525,120525],"mapped",[956]],[[120526,120526],"mapped",[957]],[[120527,120527],"mapped",[958]],[[120528,120528],"mapped",[959]],[[120529,120529],"mapped",[960]],[[120530,120530],"mapped",[961]],[[120531,120532],"mapped",[963]],[[120533,120533],"mapped",[964]],[[120534,120534],"mapped",[965]],[[120535,120535],"mapped",[966]],[[120536,120536],"mapped",[967]],[[120537,120537],"mapped",[968]],[[120538,120538],"mapped",[969]],[[120539,120539],"mapped",[8706]],[[120540,120540],"mapped",[949]],[[120541,120541],"mapped",[952]],[[120542,120542],"mapped",[954]],[[120543,120543],"mapped",[966]],[[120544,120544],"mapped",[961]],[[120545,120545],"mapped",[960]],[[120546,120546],"mapped",[945]],[[120547,120547],"mapped",[946]],[[120548,120548],"mapped",[947]],[[120549,120549],"mapped",[948]],[[120550,120550],"mapped",[949]],[[120551,120551],"mapped",[950]],[[120552,120552],"mapped",[951]],[[120553,120553],"mapped",[952]],[[120554,120554],"mapped",[953]],[[120555,120555],"mapped",[954]],[[120556,120556],"mapped",[955]],[[120557,120557],"mapped",[956]],[[120558,120558],"mapped",[957]],[[120559,120559],"mapped",[958]],[[120560,120560],"mapped",[959]],[[120561,120561],"mapped",[960]],[[120562,120562],"mapped",[961]],[[120563,120563],"mapped",[952]],[[120564,120564],"mapped",[963]],[[120565,120565],"mapped",[964]],[[120566,120566],"mapped",[965]],[[120567,120567],"mapped",[966]],[[120568,120568],"mapped",[967]],[[120569,120569],"mapped",[968]],[[120570,120570],"mapped",[969]],[[120571,120571],"mapped",[8711]],[[120572,120572],"mapped",[945]],[[120573,120573],"mapped",[946]],[[120574,120574],"mapped",[947]],[[120575,120575],"mapped",[948]],[[120576,120576],"mapped",[949]],[[120577,120577],"mapped",[950]],[[120578,120578],"mapped",[951]],[[120579,120579],"mapped",[952]],[[120580,120580],"mapped",[953]],[[120581,120581],"mapped",[954]],[[120582,120582],"mapped",[955]],[[120583,120583],"mapped",[956]],[[120584,120584],"mapped",[957]],[[120585,120585],"mapped",[958]],[[120586,120586],"mapped",[959]],[[120587,120587],"mapped",[960]],[[120588,120588],"mapped",[961]],[[120589,120590],"mapped",[963]],[[120591,120591],"mapped",[964]],[[120592,120592],"mapped",[965]],[[120593,120593],"mapped",[966]],[[120594,120594],"mapped",[967]],[[120595,120595],"mapped",[968]],[[120596,120596],"mapped",[969]],[[120597,120597],"mapped",[8706]],[[120598,120598],"mapped",[949]],[[120599,120599],"mapped",[952]],[[120600,120600],"mapped",[954]],[[120601,120601],"mapped",[966]],[[120602,120602],"mapped",[961]],[[120603,120603],"mapped",[960]],[[120604,120604],"mapped",[945]],[[120605,120605],"mapped",[946]],[[120606,120606],"mapped",[947]],[[120607,120607],"mapped",[948]],[[120608,120608],"mapped",[949]],[[120609,120609],"mapped",[950]],[[120610,120610],"mapped",[951]],[[120611,120611],"mapped",[952]],[[120612,120612],"mapped",[953]],[[120613,120613],"mapped",[954]],[[120614,120614],"mapped",[955]],[[120615,120615],"mapped",[956]],[[120616,120616],"mapped",[957]],[[120617,120617],"mapped",[958]],[[120618,120618],"mapped",[959]],[[120619,120619],"mapped",[960]],[[120620,120620],"mapped",[961]],[[120621,120621],"mapped",[952]],[[120622,120622],"mapped",[963]],[[120623,120623],"mappe
d",[964]],[[120624,120624],"mapped",[965]],[[120625,120625],"mapped",[966]],[[120626,120626],"mapped",[967]],[[120627,120627],"mapped",[968]],[[120628,120628],"mapped",[969]],[[120629,120629],"mapped",[8711]],[[120630,120630],"mapped",[945]],[[120631,120631],"mapped",[946]],[[120632,120632],"mapped",[947]],[[120633,120633],"mapped",[948]],[[120634,120634],"mapped",[949]],[[120635,120635],"mapped",[950]],[[120636,120636],"mapped",[951]],[[120637,120637],"mapped",[952]],[[120638,120638],"mapped",[953]],[[120639,120639],"mapped",[954]],[[120640,120640],"mapped",[955]],[[120641,120641],"mapped",[956]],[[120642,120642],"mapped",[957]],[[120643,120643],"mapped",[958]],[[120644,120644],"mapped",[959]],[[120645,120645],"mapped",[960]],[[120646,120646],"mapped",[961]],[[120647,120648],"mapped",[963]],[[120649,120649],"mapped",[964]],[[120650,120650],"mapped",[965]],[[120651,120651],"mapped",[966]],[[120652,120652],"mapped",[967]],[[120653,120653],"mapped",[968]],[[120654,120654],"mapped",[969]],[[120655,120655],"mapped",[8706]],[[120656,120656],"mapped",[949]],[[120657,120657],"mapped",[952]],[[120658,120658],"mapped",[954]],[[120659,120659],"mapped",[966]],[[120660,120660],"mapped",[961]],[[120661,120661],"mapped",[960]],[[120662,120662],"mapped",[945]],[[120663,120663],"mapped",[946]],[[120664,120664],"mapped",[947]],[[120665,120665],"mapped",[948]],[[120666,120666],"mapped",[949]],[[120667,120667],"mapped",[950]],[[120668,120668],"mapped",[951]],[[120669,120669],"mapped",[952]],[[120670,120670],"mapped",[953]],[[120671,120671],"mapped",[954]],[[120672,120672],"mapped",[955]],[[120673,120673],"mapped",[956]],[[120674,120674],"mapped",[957]],[[120675,120675],"mapped",[958]],[[120676,120676],"mapped",[959]],[[120677,120677],"mapped",[960]],[[120678,120678],"mapped",[961]],[[120679,120679],"mapped",[952]],[[120680,120680],"mapped",[963]],[[120681,120681],"mapped",[964]],[[120682,120682],"mapped",[965]],[[120683,120683],"mapped",[966]],[[120684,120684],"mapped",[967]],[[120685,120685],"mapped",[968]],[[120686,120686],"mapped",[969]],[[120687,120687],"mapped",[8711]],[[120688,120688],"mapped",[945]],[[120689,120689],"mapped",[946]],[[120690,120690],"mapped",[947]],[[120691,120691],"mapped",[948]],[[120692,120692],"mapped",[949]],[[120693,120693],"mapped",[950]],[[120694,120694],"mapped",[951]],[[120695,120695],"mapped",[952]],[[120696,120696],"mapped",[953]],[[120697,120697],"mapped",[954]],[[120698,120698],"mapped",[955]],[[120699,120699],"mapped",[956]],[[120700,120700],"mapped",[957]],[[120701,120701],"mapped",[958]],[[120702,120702],"mapped",[959]],[[120703,120703],"mapped",[960]],[[120704,120704],"mapped",[961]],[[120705,120706],"mapped",[963]],[[120707,120707],"mapped",[964]],[[120708,120708],"mapped",[965]],[[120709,120709],"mapped",[966]],[[120710,120710],"mapped",[967]],[[120711,120711],"mapped",[968]],[[120712,120712],"mapped",[969]],[[120713,120713],"mapped",[8706]],[[120714,120714],"mapped",[949]],[[120715,120715],"mapped",[952]],[[120716,120716],"mapped",[954]],[[120717,120717],"mapped",[966]],[[120718,120718],"mapped",[961]],[[120719,120719],"mapped",[960]],[[120720,120720],"mapped",[945]],[[120721,120721],"mapped",[946]],[[120722,120722],"mapped",[947]],[[120723,120723],"mapped",[948]],[[120724,120724],"mapped",[949]],[[120725,120725],"mapped",[950]],[[120726,120726],"mapped",[951]],[[120727,120727],"mapped",[952]],[[120728,120728],"mapped",[953]],[[120729,120729],"mapped",[954]],[[120730,120730],"mapped",[955]],[[120731,120731],"mapped",[956]],[[120732,120732],"mapped",[957]],[[120733,1
20733],"mapped",[958]],[[120734,120734],"mapped",[959]],[[120735,120735],"mapped",[960]],[[120736,120736],"mapped",[961]],[[120737,120737],"mapped",[952]],[[120738,120738],"mapped",[963]],[[120739,120739],"mapped",[964]],[[120740,120740],"mapped",[965]],[[120741,120741],"mapped",[966]],[[120742,120742],"mapped",[967]],[[120743,120743],"mapped",[968]],[[120744,120744],"mapped",[969]],[[120745,120745],"mapped",[8711]],[[120746,120746],"mapped",[945]],[[120747,120747],"mapped",[946]],[[120748,120748],"mapped",[947]],[[120749,120749],"mapped",[948]],[[120750,120750],"mapped",[949]],[[120751,120751],"mapped",[950]],[[120752,120752],"mapped",[951]],[[120753,120753],"mapped",[952]],[[120754,120754],"mapped",[953]],[[120755,120755],"mapped",[954]],[[120756,120756],"mapped",[955]],[[120757,120757],"mapped",[956]],[[120758,120758],"mapped",[957]],[[120759,120759],"mapped",[958]],[[120760,120760],"mapped",[959]],[[120761,120761],"mapped",[960]],[[120762,120762],"mapped",[961]],[[120763,120764],"mapped",[963]],[[120765,120765],"mapped",[964]],[[120766,120766],"mapped",[965]],[[120767,120767],"mapped",[966]],[[120768,120768],"mapped",[967]],[[120769,120769],"mapped",[968]],[[120770,120770],"mapped",[969]],[[120771,120771],"mapped",[8706]],[[120772,120772],"mapped",[949]],[[120773,120773],"mapped",[952]],[[120774,120774],"mapped",[954]],[[120775,120775],"mapped",[966]],[[120776,120776],"mapped",[961]],[[120777,120777],"mapped",[960]],[[120778,120779],"mapped",[989]],[[120780,120781],"disallowed"],[[120782,120782],"mapped",[48]],[[120783,120783],"mapped",[49]],[[120784,120784],"mapped",[50]],[[120785,120785],"mapped",[51]],[[120786,120786],"mapped",[52]],[[120787,120787],"mapped",[53]],[[120788,120788],"mapped",[54]],[[120789,120789],"mapped",[55]],[[120790,120790],"mapped",[56]],[[120791,120791],"mapped",[57]],[[120792,120792],"mapped",[48]],[[120793,120793],"mapped",[49]],[[120794,120794],"mapped",[50]],[[120795,120795],"mapped",[51]],[[120796,120796],"mapped",[52]],[[120797,120797],"mapped",[53]],[[120798,120798],"mapped",[54]],[[120799,120799],"mapped",[55]],[[120800,120800],"mapped",[56]],[[120801,120801],"mapped",[57]],[[120802,120802],"mapped",[48]],[[120803,120803],"mapped",[49]],[[120804,120804],"mapped",[50]],[[120805,120805],"mapped",[51]],[[120806,120806],"mapped",[52]],[[120807,120807],"mapped",[53]],[[120808,120808],"mapped",[54]],[[120809,120809],"mapped",[55]],[[120810,120810],"mapped",[56]],[[120811,120811],"mapped",[57]],[[120812,120812],"mapped",[48]],[[120813,120813],"mapped",[49]],[[120814,120814],"mapped",[50]],[[120815,120815],"mapped",[51]],[[120816,120816],"mapped",[52]],[[120817,120817],"mapped",[53]],[[120818,120818],"mapped",[54]],[[120819,120819],"mapped",[55]],[[120820,120820],"mapped",[56]],[[120821,120821],"mapped",[57]],[[120822,120822],"mapped",[48]],[[120823,120823],"mapped",[49]],[[120824,120824],"mapped",[50]],[[120825,120825],"mapped",[51]],[[120826,120826],"mapped",[52]],[[120827,120827],"mapped",[53]],[[120828,120828],"mapped",[54]],[[120829,120829],"mapped",[55]],[[120830,120830],"mapped",[56]],[[120831,120831],"mapped",[57]],[[120832,121343],"valid",[],"NV8"],[[121344,121398],"valid"],[[121399,121402],"valid",[],"NV8"],[[121403,121452],"valid"],[[121453,121460],"valid",[],"NV8"],[[121461,121461],"valid"],[[121462,121475],"valid",[],"NV8"],[[121476,121476],"valid"],[[121477,121483],"valid",[],"NV8"],[[121484,121498],"disallowed"],[[121499,121503],"valid"],[[121504,121504],"disallowed"],[[121505,121519],"valid"],[[121520,124927],"disallowed"],[[124928,125124],"valid
"],[[125125,125126],"disallowed"],[[125127,125135],"valid",[],"NV8"],[[125136,125142],"valid"],[[125143,126463],"disallowed"],[[126464,126464],"mapped",[1575]],[[126465,126465],"mapped",[1576]],[[126466,126466],"mapped",[1580]],[[126467,126467],"mapped",[1583]],[[126468,126468],"disallowed"],[[126469,126469],"mapped",[1608]],[[126470,126470],"mapped",[1586]],[[126471,126471],"mapped",[1581]],[[126472,126472],"mapped",[1591]],[[126473,126473],"mapped",[1610]],[[126474,126474],"mapped",[1603]],[[126475,126475],"mapped",[1604]],[[126476,126476],"mapped",[1605]],[[126477,126477],"mapped",[1606]],[[126478,126478],"mapped",[1587]],[[126479,126479],"mapped",[1593]],[[126480,126480],"mapped",[1601]],[[126481,126481],"mapped",[1589]],[[126482,126482],"mapped",[1602]],[[126483,126483],"mapped",[1585]],[[126484,126484],"mapped",[1588]],[[126485,126485],"mapped",[1578]],[[126486,126486],"mapped",[1579]],[[126487,126487],"mapped",[1582]],[[126488,126488],"mapped",[1584]],[[126489,126489],"mapped",[1590]],[[126490,126490],"mapped",[1592]],[[126491,126491],"mapped",[1594]],[[126492,126492],"mapped",[1646]],[[126493,126493],"mapped",[1722]],[[126494,126494],"mapped",[1697]],[[126495,126495],"mapped",[1647]],[[126496,126496],"disallowed"],[[126497,126497],"mapped",[1576]],[[126498,126498],"mapped",[1580]],[[126499,126499],"disallowed"],[[126500,126500],"mapped",[1607]],[[126501,126502],"disallowed"],[[126503,126503],"mapped",[1581]],[[126504,126504],"disallowed"],[[126505,126505],"mapped",[1610]],[[126506,126506],"mapped",[1603]],[[126507,126507],"mapped",[1604]],[[126508,126508],"mapped",[1605]],[[126509,126509],"mapped",[1606]],[[126510,126510],"mapped",[1587]],[[126511,126511],"mapped",[1593]],[[126512,126512],"mapped",[1601]],[[126513,126513],"mapped",[1589]],[[126514,126514],"mapped",[1602]],[[126515,126515],"disallowed"],[[126516,126516],"mapped",[1588]],[[126517,126517],"mapped",[1578]],[[126518,126518],"mapped",[1579]],[[126519,126519],"mapped",[1582]],[[126520,126520],"disallowed"],[[126521,126521],"mapped",[1590]],[[126522,126522],"disallowed"],[[126523,126523],"mapped",[1594]],[[126524,126529],"disallowed"],[[126530,126530],"mapped",[1580]],[[126531,126534],"disallowed"],[[126535,126535],"mapped",[1581]],[[126536,126536],"disallowed"],[[126537,126537],"mapped",[1610]],[[126538,126538],"disallowed"],[[126539,126539],"mapped",[1604]],[[126540,126540],"disallowed"],[[126541,126541],"mapped",[1606]],[[126542,126542],"mapped",[1587]],[[126543,126543],"mapped",[1593]],[[126544,126544],"disallowed"],[[126545,126545],"mapped",[1589]],[[126546,126546],"mapped",[1602]],[[126547,126547],"disallowed"],[[126548,126548],"mapped",[1588]],[[126549,126550],"disallowed"],[[126551,126551],"mapped",[1582]],[[126552,126552],"disallowed"],[[126553,126553],"mapped",[1590]],[[126554,126554],"disallowed"],[[126555,126555],"mapped",[1594]],[[126556,126556],"disallowed"],[[126557,126557],"mapped",[1722]],[[126558,126558],"disallowed"],[[126559,126559],"mapped",[1647]],[[126560,126560],"disallowed"],[[126561,126561],"mapped",[1576]],[[126562,126562],"mapped",[1580]],[[126563,126563],"disallowed"],[[126564,126564],"mapped",[1607]],[[126565,126566],"disallowed"],[[126567,126567],"mapped",[1581]],[[126568,126568],"mapped",[1591]],[[126569,126569],"mapped",[1610]],[[126570,126570],"mapped",[1603]],[[126571,126571],"disallowed"],[[126572,126572],"mapped",[1605]],[[126573,126573],"mapped",[1606]],[[126574,126574],"mapped",[1587]],[[126575,126575],"mapped",[1593]],[[126576,126576],"mapped",[1601]],[[126577,126577],"mapped",[1589]]
,[[126578,126578],"mapped",[1602]],[[126579,126579],"disallowed"],[[126580,126580],"mapped",[1588]],[[126581,126581],"mapped",[1578]],[[126582,126582],"mapped",[1579]],[[126583,126583],"mapped",[1582]],[[126584,126584],"disallowed"],[[126585,126585],"mapped",[1590]],[[126586,126586],"mapped",[1592]],[[126587,126587],"mapped",[1594]],[[126588,126588],"mapped",[1646]],[[126589,126589],"disallowed"],[[126590,126590],"mapped",[1697]],[[126591,126591],"disallowed"],[[126592,126592],"mapped",[1575]],[[126593,126593],"mapped",[1576]],[[126594,126594],"mapped",[1580]],[[126595,126595],"mapped",[1583]],[[126596,126596],"mapped",[1607]],[[126597,126597],"mapped",[1608]],[[126598,126598],"mapped",[1586]],[[126599,126599],"mapped",[1581]],[[126600,126600],"mapped",[1591]],[[126601,126601],"mapped",[1610]],[[126602,126602],"disallowed"],[[126603,126603],"mapped",[1604]],[[126604,126604],"mapped",[1605]],[[126605,126605],"mapped",[1606]],[[126606,126606],"mapped",[1587]],[[126607,126607],"mapped",[1593]],[[126608,126608],"mapped",[1601]],[[126609,126609],"mapped",[1589]],[[126610,126610],"mapped",[1602]],[[126611,126611],"mapped",[1585]],[[126612,126612],"mapped",[1588]],[[126613,126613],"mapped",[1578]],[[126614,126614],"mapped",[1579]],[[126615,126615],"mapped",[1582]],[[126616,126616],"mapped",[1584]],[[126617,126617],"mapped",[1590]],[[126618,126618],"mapped",[1592]],[[126619,126619],"mapped",[1594]],[[126620,126624],"disallowed"],[[126625,126625],"mapped",[1576]],[[126626,126626],"mapped",[1580]],[[126627,126627],"mapped",[1583]],[[126628,126628],"disallowed"],[[126629,126629],"mapped",[1608]],[[126630,126630],"mapped",[1586]],[[126631,126631],"mapped",[1581]],[[126632,126632],"mapped",[1591]],[[126633,126633],"mapped",[1610]],[[126634,126634],"disallowed"],[[126635,126635],"mapped",[1604]],[[126636,126636],"mapped",[1605]],[[126637,126637],"mapped",[1606]],[[126638,126638],"mapped",[1587]],[[126639,126639],"mapped",[1593]],[[126640,126640],"mapped",[1601]],[[126641,126641],"mapped",[1589]],[[126642,126642],"mapped",[1602]],[[126643,126643],"mapped",[1585]],[[126644,126644],"mapped",[1588]],[[126645,126645],"mapped",[1578]],[[126646,126646],"mapped",[1579]],[[126647,126647],"mapped",[1582]],[[126648,126648],"mapped",[1584]],[[126649,126649],"mapped",[1590]],[[126650,126650],"mapped",[1592]],[[126651,126651],"mapped",[1594]],[[126652,126703],"disallowed"],[[126704,126705],"valid",[],"NV8"],[[126706,126975],"disallowed"],[[126976,127019],"valid",[],"NV8"],[[127020,127023],"disallowed"],[[127024,127123],"valid",[],"NV8"],[[127124,127135],"disallowed"],[[127136,127150],"valid",[],"NV8"],[[127151,127152],"disallowed"],[[127153,127166],"valid",[],"NV8"],[[127167,127167],"valid",[],"NV8"],[[127168,127168],"disallowed"],[[127169,127183],"valid",[],"NV8"],[[127184,127184],"disallowed"],[[127185,127199],"valid",[],"NV8"],[[127200,127221],"valid",[],"NV8"],[[127222,127231],"disallowed"],[[127232,127232],"disallowed"],[[127233,127233],"disallowed_STD3_mapped",[48,44]],[[127234,127234],"disallowed_STD3_mapped",[49,44]],[[127235,127235],"disallowed_STD3_mapped",[50,44]],[[127236,127236],"disallowed_STD3_mapped",[51,44]],[[127237,127237],"disallowed_STD3_mapped",[52,44]],[[127238,127238],"disallowed_STD3_mapped",[53,44]],[[127239,127239],"disallowed_STD3_mapped",[54,44]],[[127240,127240],"disallowed_STD3_mapped",[55,44]],[[127241,127241],"disallowed_STD3_mapped",[56,44]],[[127242,127242],"disallowed_STD3_mapped",[57,44]],[[127243,127244],"valid",[],"NV8"],[[127245,127247],"disallowed"],[[127248,127248],"disallowed
_STD3_mapped",[40,97,41]],[[127249,127249],"disallowed_STD3_mapped",[40,98,41]],[[127250,127250],"disallowed_STD3_mapped",[40,99,41]],[[127251,127251],"disallowed_STD3_mapped",[40,100,41]],[[127252,127252],"disallowed_STD3_mapped",[40,101,41]],[[127253,127253],"disallowed_STD3_mapped",[40,102,41]],[[127254,127254],"disallowed_STD3_mapped",[40,103,41]],[[127255,127255],"disallowed_STD3_mapped",[40,104,41]],[[127256,127256],"disallowed_STD3_mapped",[40,105,41]],[[127257,127257],"disallowed_STD3_mapped",[40,106,41]],[[127258,127258],"disallowed_STD3_mapped",[40,107,41]],[[127259,127259],"disallowed_STD3_mapped",[40,108,41]],[[127260,127260],"disallowed_STD3_mapped",[40,109,41]],[[127261,127261],"disallowed_STD3_mapped",[40,110,41]],[[127262,127262],"disallowed_STD3_mapped",[40,111,41]],[[127263,127263],"disallowed_STD3_mapped",[40,112,41]],[[127264,127264],"disallowed_STD3_mapped",[40,113,41]],[[127265,127265],"disallowed_STD3_mapped",[40,114,41]],[[127266,127266],"disallowed_STD3_mapped",[40,115,41]],[[127267,127267],"disallowed_STD3_mapped",[40,116,41]],[[127268,127268],"disallowed_STD3_mapped",[40,117,41]],[[127269,127269],"disallowed_STD3_mapped",[40,118,41]],[[127270,127270],"disallowed_STD3_mapped",[40,119,41]],[[127271,127271],"disallowed_STD3_mapped",[40,120,41]],[[127272,127272],"disallowed_STD3_mapped",[40,121,41]],[[127273,127273],"disallowed_STD3_mapped",[40,122,41]],[[127274,127274],"mapped",[12308,115,12309]],[[127275,127275],"mapped",[99]],[[127276,127276],"mapped",[114]],[[127277,127277],"mapped",[99,100]],[[127278,127278],"mapped",[119,122]],[[127279,127279],"disallowed"],[[127280,127280],"mapped",[97]],[[127281,127281],"mapped",[98]],[[127282,127282],"mapped",[99]],[[127283,127283],"mapped",[100]],[[127284,127284],"mapped",[101]],[[127285,127285],"mapped",[102]],[[127286,127286],"mapped",[103]],[[127287,127287],"mapped",[104]],[[127288,127288],"mapped",[105]],[[127289,127289],"mapped",[106]],[[127290,127290],"mapped",[107]],[[127291,127291],"mapped",[108]],[[127292,127292],"mapped",[109]],[[127293,127293],"mapped",[110]],[[127294,127294],"mapped",[111]],[[127295,127295],"mapped",[112]],[[127296,127296],"mapped",[113]],[[127297,127297],"mapped",[114]],[[127298,127298],"mapped",[115]],[[127299,127299],"mapped",[116]],[[127300,127300],"mapped",[117]],[[127301,127301],"mapped",[118]],[[127302,127302],"mapped",[119]],[[127303,127303],"mapped",[120]],[[127304,127304],"mapped",[121]],[[127305,127305],"mapped",[122]],[[127306,127306],"mapped",[104,118]],[[127307,127307],"mapped",[109,118]],[[127308,127308],"mapped",[115,100]],[[127309,127309],"mapped",[115,115]],[[127310,127310],"mapped",[112,112,118]],[[127311,127311],"mapped",[119,99]],[[127312,127318],"valid",[],"NV8"],[[127319,127319],"valid",[],"NV8"],[[127320,127326],"valid",[],"NV8"],[[127327,127327],"valid",[],"NV8"],[[127328,127337],"valid",[],"NV8"],[[127338,127338],"mapped",[109,99]],[[127339,127339],"mapped",[109,100]],[[127340,127343],"disallowed"],[[127344,127352],"valid",[],"NV8"],[[127353,127353],"valid",[],"NV8"],[[127354,127354],"valid",[],"NV8"],[[127355,127356],"valid",[],"NV8"],[[127357,127358],"valid",[],"NV8"],[[127359,127359],"valid",[],"NV8"],[[127360,127369],"valid",[],"NV8"],[[127370,127373],"valid",[],"NV8"],[[127374,127375],"valid",[],"NV8"],[[127376,127376],"mapped",[100,106]],[[127377,127386],"valid",[],"NV8"],[[127387,127461],"disallowed"],[[127462,127487],"valid",[],"NV8"],[[127488,127488],"mapped",[12411,12363]],[[127489,127489],"mapped",[12467,12467]],[[127490,127490],"mapped",[12469]],[[127491,1275
03],"disallowed"],[[127504,127504],"mapped",[25163]],[[127505,127505],"mapped",[23383]],[[127506,127506],"mapped",[21452]],[[127507,127507],"mapped",[12487]],[[127508,127508],"mapped",[20108]],[[127509,127509],"mapped",[22810]],[[127510,127510],"mapped",[35299]],[[127511,127511],"mapped",[22825]],[[127512,127512],"mapped",[20132]],[[127513,127513],"mapped",[26144]],[[127514,127514],"mapped",[28961]],[[127515,127515],"mapped",[26009]],[[127516,127516],"mapped",[21069]],[[127517,127517],"mapped",[24460]],[[127518,127518],"mapped",[20877]],[[127519,127519],"mapped",[26032]],[[127520,127520],"mapped",[21021]],[[127521,127521],"mapped",[32066]],[[127522,127522],"mapped",[29983]],[[127523,127523],"mapped",[36009]],[[127524,127524],"mapped",[22768]],[[127525,127525],"mapped",[21561]],[[127526,127526],"mapped",[28436]],[[127527,127527],"mapped",[25237]],[[127528,127528],"mapped",[25429]],[[127529,127529],"mapped",[19968]],[[127530,127530],"mapped",[19977]],[[127531,127531],"mapped",[36938]],[[127532,127532],"mapped",[24038]],[[127533,127533],"mapped",[20013]],[[127534,127534],"mapped",[21491]],[[127535,127535],"mapped",[25351]],[[127536,127536],"mapped",[36208]],[[127537,127537],"mapped",[25171]],[[127538,127538],"mapped",[31105]],[[127539,127539],"mapped",[31354]],[[127540,127540],"mapped",[21512]],[[127541,127541],"mapped",[28288]],[[127542,127542],"mapped",[26377]],[[127543,127543],"mapped",[26376]],[[127544,127544],"mapped",[30003]],[[127545,127545],"mapped",[21106]],[[127546,127546],"mapped",[21942]],[[127547,127551],"disallowed"],[[127552,127552],"mapped",[12308,26412,12309]],[[127553,127553],"mapped",[12308,19977,12309]],[[127554,127554],"mapped",[12308,20108,12309]],[[127555,127555],"mapped",[12308,23433,12309]],[[127556,127556],"mapped",[12308,28857,12309]],[[127557,127557],"mapped",[12308,25171,12309]],[[127558,127558],"mapped",[12308,30423,12309]],[[127559,127559],"mapped",[12308,21213,12309]],[[127560,127560],"mapped",[12308,25943,12309]],[[127561,127567],"disallowed"],[[127568,127568],"mapped",[24471]],[[127569,127569],"mapped",[21487]],[[127570,127743],"disallowed"],[[127744,127776],"valid",[],"NV8"],[[127777,127788],"valid",[],"NV8"],[[127789,127791],"valid",[],"NV8"],[[127792,127797],"valid",[],"NV8"],[[127798,127798],"valid",[],"NV8"],[[127799,127868],"valid",[],"NV8"],[[127869,127869],"valid",[],"NV8"],[[127870,127871],"valid",[],"NV8"],[[127872,127891],"valid",[],"NV8"],[[127892,127903],"valid",[],"NV8"],[[127904,127940],"valid",[],"NV8"],[[127941,127941],"valid",[],"NV8"],[[127942,127946],"valid",[],"NV8"],[[127947,127950],"valid",[],"NV8"],[[127951,127955],"valid",[],"NV8"],[[127956,127967],"valid",[],"NV8"],[[127968,127984],"valid",[],"NV8"],[[127985,127991],"valid",[],"NV8"],[[127992,127999],"valid",[],"NV8"],[[128000,128062],"valid",[],"NV8"],[[128063,128063],"valid",[],"NV8"],[[128064,128064],"valid",[],"NV8"],[[128065,128065],"valid",[],"NV8"],[[128066,128247],"valid",[],"NV8"],[[128248,128248],"valid",[],"NV8"],[[128249,128252],"valid",[],"NV8"],[[128253,128254],"valid",[],"NV8"],[[128255,128255],"valid",[],"NV8"],[[128256,128317],"valid",[],"NV8"],[[128318,128319],"valid",[],"NV8"],[[128320,128323],"valid",[],"NV8"],[[128324,128330],"valid",[],"NV8"],[[128331,128335],"valid",[],"NV8"],[[128336,128359],"valid",[],"NV8"],[[128360,128377],"valid",[],"NV8"],[[128378,128378],"disallowed"],[[128379,128419],"valid",[],"NV8"],[[128420,128420],"disallowed"],[[128421,128506],"valid",[],"NV8"],[[128507,128511],"valid",[],"NV8"],[[128512,128512],"valid",[],"NV8"],[[128513,128528],"v
alid",[],"NV8"],[[128529,128529],"valid",[],"NV8"],[[128530,128532],"valid",[],"NV8"],[[128533,128533],"valid",[],"NV8"],[[128534,128534],"valid",[],"NV8"],[[128535,128535],"valid",[],"NV8"],[[128536,128536],"valid",[],"NV8"],[[128537,128537],"valid",[],"NV8"],[[128538,128538],"valid",[],"NV8"],[[128539,128539],"valid",[],"NV8"],[[128540,128542],"valid",[],"NV8"],[[128543,128543],"valid",[],"NV8"],[[128544,128549],"valid",[],"NV8"],[[128550,128551],"valid",[],"NV8"],[[128552,128555],"valid",[],"NV8"],[[128556,128556],"valid",[],"NV8"],[[128557,128557],"valid",[],"NV8"],[[128558,128559],"valid",[],"NV8"],[[128560,128563],"valid",[],"NV8"],[[128564,128564],"valid",[],"NV8"],[[128565,128576],"valid",[],"NV8"],[[128577,128578],"valid",[],"NV8"],[[128579,128580],"valid",[],"NV8"],[[128581,128591],"valid",[],"NV8"],[[128592,128639],"valid",[],"NV8"],[[128640,128709],"valid",[],"NV8"],[[128710,128719],"valid",[],"NV8"],[[128720,128720],"valid",[],"NV8"],[[128721,128735],"disallowed"],[[128736,128748],"valid",[],"NV8"],[[128749,128751],"disallowed"],[[128752,128755],"valid",[],"NV8"],[[128756,128767],"disallowed"],[[128768,128883],"valid",[],"NV8"],[[128884,128895],"disallowed"],[[128896,128980],"valid",[],"NV8"],[[128981,129023],"disallowed"],[[129024,129035],"valid",[],"NV8"],[[129036,129039],"disallowed"],[[129040,129095],"valid",[],"NV8"],[[129096,129103],"disallowed"],[[129104,129113],"valid",[],"NV8"],[[129114,129119],"disallowed"],[[129120,129159],"valid",[],"NV8"],[[129160,129167],"disallowed"],[[129168,129197],"valid",[],"NV8"],[[129198,129295],"disallowed"],[[129296,129304],"valid",[],"NV8"],[[129305,129407],"disallowed"],[[129408,129412],"valid",[],"NV8"],[[129413,129471],"disallowed"],[[129472,129472],"valid",[],"NV8"],[[129473,131069],"disallowed"],[[131070,131071],"disallowed"],[[131072,173782],"valid"],[[173783,173823],"disallowed"],[[173824,177972],"valid"],[[177973,177983],"disallowed"],[[177984,178205],"valid"],[[178206,178207],"disallowed"],[[178208,183969],"valid"],[[183970,194559],"disallowed"],[[194560,194560],"mapped",[20029]],[[194561,194561],"mapped",[20024]],[[194562,194562],"mapped",[20033]],[[194563,194563],"mapped",[131362]],[[194564,194564],"mapped",[20320]],[[194565,194565],"mapped",[20398]],[[194566,194566],"mapped",[20411]],[[194567,194567],"mapped",[20482]],[[194568,194568],"mapped",[20602]],[[194569,194569],"mapped",[20633]],[[194570,194570],"mapped",[20711]],[[194571,194571],"mapped",[20687]],[[194572,194572],"mapped",[13470]],[[194573,194573],"mapped",[132666]],[[194574,194574],"mapped",[20813]],[[194575,194575],"mapped",[20820]],[[194576,194576],"mapped",[20836]],[[194577,194577],"mapped",[20855]],[[194578,194578],"mapped",[132380]],[[194579,194579],"mapped",[13497]],[[194580,194580],"mapped",[20839]],[[194581,194581],"mapped",[20877]],[[194582,194582],"mapped",[132427]],[[194583,194583],"mapped",[20887]],[[194584,194584],"mapped",[20900]],[[194585,194585],"mapped",[20172]],[[194586,194586],"mapped",[20908]],[[194587,194587],"mapped",[20917]],[[194588,194588],"mapped",[168415]],[[194589,194589],"mapped",[20981]],[[194590,194590],"mapped",[20995]],[[194591,194591],"mapped",[13535]],[[194592,194592],"mapped",[21051]],[[194593,194593],"mapped",[21062]],[[194594,194594],"mapped",[21106]],[[194595,194595],"mapped",[21111]],[[194596,194596],"mapped",[13589]],[[194597,194597],"mapped",[21191]],[[194598,194598],"mapped",[21193]],[[194599,194599],"mapped",[21220]],[[194600,194600],"mapped",[21242]],[[194601,194601],"mapped",[21253]],[[194602,194602],"mapped",[21254]],[[
194603,194603],"mapped",[21271]],[[194604,194604],"mapped",[21321]],[[194605,194605],"mapped",[21329]],[[194606,194606],"mapped",[21338]],[[194607,194607],"mapped",[21363]],[[194608,194608],"mapped",[21373]],[[194609,194611],"mapped",[21375]],[[194612,194612],"mapped",[133676]],[[194613,194613],"mapped",[28784]],[[194614,194614],"mapped",[21450]],[[194615,194615],"mapped",[21471]],[[194616,194616],"mapped",[133987]],[[194617,194617],"mapped",[21483]],[[194618,194618],"mapped",[21489]],[[194619,194619],"mapped",[21510]],[[194620,194620],"mapped",[21662]],[[194621,194621],"mapped",[21560]],[[194622,194622],"mapped",[21576]],[[194623,194623],"mapped",[21608]],[[194624,194624],"mapped",[21666]],[[194625,194625],"mapped",[21750]],[[194626,194626],"mapped",[21776]],[[194627,194627],"mapped",[21843]],[[194628,194628],"mapped",[21859]],[[194629,194630],"mapped",[21892]],[[194631,194631],"mapped",[21913]],[[194632,194632],"mapped",[21931]],[[194633,194633],"mapped",[21939]],[[194634,194634],"mapped",[21954]],[[194635,194635],"mapped",[22294]],[[194636,194636],"mapped",[22022]],[[194637,194637],"mapped",[22295]],[[194638,194638],"mapped",[22097]],[[194639,194639],"mapped",[22132]],[[194640,194640],"mapped",[20999]],[[194641,194641],"mapped",[22766]],[[194642,194642],"mapped",[22478]],[[194643,194643],"mapped",[22516]],[[194644,194644],"mapped",[22541]],[[194645,194645],"mapped",[22411]],[[194646,194646],"mapped",[22578]],[[194647,194647],"mapped",[22577]],[[194648,194648],"mapped",[22700]],[[194649,194649],"mapped",[136420]],[[194650,194650],"mapped",[22770]],[[194651,194651],"mapped",[22775]],[[194652,194652],"mapped",[22790]],[[194653,194653],"mapped",[22810]],[[194654,194654],"mapped",[22818]],[[194655,194655],"mapped",[22882]],[[194656,194656],"mapped",[136872]],[[194657,194657],"mapped",[136938]],[[194658,194658],"mapped",[23020]],[[194659,194659],"mapped",[23067]],[[194660,194660],"mapped",[23079]],[[194661,194661],"mapped",[23000]],[[194662,194662],"mapped",[23142]],[[194663,194663],"mapped",[14062]],[[194664,194664],"disallowed"],[[194665,194665],"mapped",[23304]],[[194666,194667],"mapped",[23358]],[[194668,194668],"mapped",[137672]],[[194669,194669],"mapped",[23491]],[[194670,194670],"mapped",[23512]],[[194671,194671],"mapped",[23527]],[[194672,194672],"mapped",[23539]],[[194673,194673],"mapped",[138008]],[[194674,194674],"mapped",[23551]],[[194675,194675],"mapped",[23558]],[[194676,194676],"disallowed"],[[194677,194677],"mapped",[23586]],[[194678,194678],"mapped",[14209]],[[194679,194679],"mapped",[23648]],[[194680,194680],"mapped",[23662]],[[194681,194681],"mapped",[23744]],[[194682,194682],"mapped",[23693]],[[194683,194683],"mapped",[138724]],[[194684,194684],"mapped",[23875]],[[194685,194685],"mapped",[138726]],[[194686,194686],"mapped",[23918]],[[194687,194687],"mapped",[23915]],[[194688,194688],"mapped",[23932]],[[194689,194689],"mapped",[24033]],[[194690,194690],"mapped",[24034]],[[194691,194691],"mapped",[14383]],[[194692,194692],"mapped",[24061]],[[194693,194693],"mapped",[24104]],[[194694,194694],"mapped",[24125]],[[194695,194695],"mapped",[24169]],[[194696,194696],"mapped",[14434]],[[194697,194697],"mapped",[139651]],[[194698,194698],"mapped",[14460]],[[194699,194699],"mapped",[24240]],[[194700,194700],"mapped",[24243]],[[194701,194701],"mapped",[24246]],[[194702,194702],"mapped",[24266]],[[194703,194703],"mapped",[172946]],[[194704,194704],"mapped",[24318]],[[194705,194706],"mapped",[140081]],[[194707,194707],"mapped",[33281]],[[194708,194709],"mapped",[24354]],[[194710,194710],"
mapped",[14535]],[[194711,194711],"mapped",[144056]],[[194712,194712],"mapped",[156122]],[[194713,194713],"mapped",[24418]],[[194714,194714],"mapped",[24427]],[[194715,194715],"mapped",[14563]],[[194716,194716],"mapped",[24474]],[[194717,194717],"mapped",[24525]],[[194718,194718],"mapped",[24535]],[[194719,194719],"mapped",[24569]],[[194720,194720],"mapped",[24705]],[[194721,194721],"mapped",[14650]],[[194722,194722],"mapped",[14620]],[[194723,194723],"mapped",[24724]],[[194724,194724],"mapped",[141012]],[[194725,194725],"mapped",[24775]],[[194726,194726],"mapped",[24904]],[[194727,194727],"mapped",[24908]],[[194728,194728],"mapped",[24910]],[[194729,194729],"mapped",[24908]],[[194730,194730],"mapped",[24954]],[[194731,194731],"mapped",[24974]],[[194732,194732],"mapped",[25010]],[[194733,194733],"mapped",[24996]],[[194734,194734],"mapped",[25007]],[[194735,194735],"mapped",[25054]],[[194736,194736],"mapped",[25074]],[[194737,194737],"mapped",[25078]],[[194738,194738],"mapped",[25104]],[[194739,194739],"mapped",[25115]],[[194740,194740],"mapped",[25181]],[[194741,194741],"mapped",[25265]],[[194742,194742],"mapped",[25300]],[[194743,194743],"mapped",[25424]],[[194744,194744],"mapped",[142092]],[[194745,194745],"mapped",[25405]],[[194746,194746],"mapped",[25340]],[[194747,194747],"mapped",[25448]],[[194748,194748],"mapped",[25475]],[[194749,194749],"mapped",[25572]],[[194750,194750],"mapped",[142321]],[[194751,194751],"mapped",[25634]],[[194752,194752],"mapped",[25541]],[[194753,194753],"mapped",[25513]],[[194754,194754],"mapped",[14894]],[[194755,194755],"mapped",[25705]],[[194756,194756],"mapped",[25726]],[[194757,194757],"mapped",[25757]],[[194758,194758],"mapped",[25719]],[[194759,194759],"mapped",[14956]],[[194760,194760],"mapped",[25935]],[[194761,194761],"mapped",[25964]],[[194762,194762],"mapped",[143370]],[[194763,194763],"mapped",[26083]],[[194764,194764],"mapped",[26360]],[[194765,194765],"mapped",[26185]],[[194766,194766],"mapped",[15129]],[[194767,194767],"mapped",[26257]],[[194768,194768],"mapped",[15112]],[[194769,194769],"mapped",[15076]],[[194770,194770],"mapped",[20882]],[[194771,194771],"mapped",[20885]],[[194772,194772],"mapped",[26368]],[[194773,194773],"mapped",[26268]],[[194774,194774],"mapped",[32941]],[[194775,194775],"mapped",[17369]],[[194776,194776],"mapped",[26391]],[[194777,194777],"mapped",[26395]],[[194778,194778],"mapped",[26401]],[[194779,194779],"mapped",[26462]],[[194780,194780],"mapped",[26451]],[[194781,194781],"mapped",[144323]],[[194782,194782],"mapped",[15177]],[[194783,194783],"mapped",[26618]],[[194784,194784],"mapped",[26501]],[[194785,194785],"mapped",[26706]],[[194786,194786],"mapped",[26757]],[[194787,194787],"mapped",[144493]],[[194788,194788],"mapped",[26766]],[[194789,194789],"mapped",[26655]],[[194790,194790],"mapped",[26900]],[[194791,194791],"mapped",[15261]],[[194792,194792],"mapped",[26946]],[[194793,194793],"mapped",[27043]],[[194794,194794],"mapped",[27114]],[[194795,194795],"mapped",[27304]],[[194796,194796],"mapped",[145059]],[[194797,194797],"mapped",[27355]],[[194798,194798],"mapped",[15384]],[[194799,194799],"mapped",[27425]],[[194800,194800],"mapped",[145575]],[[194801,194801],"mapped",[27476]],[[194802,194802],"mapped",[15438]],[[194803,194803],"mapped",[27506]],[[194804,194804],"mapped",[27551]],[[194805,194805],"mapped",[27578]],[[194806,194806],"mapped",[27579]],[[194807,194807],"mapped",[146061]],[[194808,194808],"mapped",[138507]],[[194809,194809],"mapped",[146170]],[[194810,194810],"mapped",[27726]],[[194811,194811],"mapped"
,[146620]],[[194812,194812],"mapped",[27839]],[[194813,194813],"mapped",[27853]],[[194814,194814],"mapped",[27751]],[[194815,194815],"mapped",[27926]],[[194816,194816],"mapped",[27966]],[[194817,194817],"mapped",[28023]],[[194818,194818],"mapped",[27969]],[[194819,194819],"mapped",[28009]],[[194820,194820],"mapped",[28024]],[[194821,194821],"mapped",[28037]],[[194822,194822],"mapped",[146718]],[[194823,194823],"mapped",[27956]],[[194824,194824],"mapped",[28207]],[[194825,194825],"mapped",[28270]],[[194826,194826],"mapped",[15667]],[[194827,194827],"mapped",[28363]],[[194828,194828],"mapped",[28359]],[[194829,194829],"mapped",[147153]],[[194830,194830],"mapped",[28153]],[[194831,194831],"mapped",[28526]],[[194832,194832],"mapped",[147294]],[[194833,194833],"mapped",[147342]],[[194834,194834],"mapped",[28614]],[[194835,194835],"mapped",[28729]],[[194836,194836],"mapped",[28702]],[[194837,194837],"mapped",[28699]],[[194838,194838],"mapped",[15766]],[[194839,194839],"mapped",[28746]],[[194840,194840],"mapped",[28797]],[[194841,194841],"mapped",[28791]],[[194842,194842],"mapped",[28845]],[[194843,194843],"mapped",[132389]],[[194844,194844],"mapped",[28997]],[[194845,194845],"mapped",[148067]],[[194846,194846],"mapped",[29084]],[[194847,194847],"disallowed"],[[194848,194848],"mapped",[29224]],[[194849,194849],"mapped",[29237]],[[194850,194850],"mapped",[29264]],[[194851,194851],"mapped",[149000]],[[194852,194852],"mapped",[29312]],[[194853,194853],"mapped",[29333]],[[194854,194854],"mapped",[149301]],[[194855,194855],"mapped",[149524]],[[194856,194856],"mapped",[29562]],[[194857,194857],"mapped",[29579]],[[194858,194858],"mapped",[16044]],[[194859,194859],"mapped",[29605]],[[194860,194861],"mapped",[16056]],[[194862,194862],"mapped",[29767]],[[194863,194863],"mapped",[29788]],[[194864,194864],"mapped",[29809]],[[194865,194865],"mapped",[29829]],[[194866,194866],"mapped",[29898]],[[194867,194867],"mapped",[16155]],[[194868,194868],"mapped",[29988]],[[194869,194869],"mapped",[150582]],[[194870,194870],"mapped",[30014]],[[194871,194871],"mapped",[150674]],[[194872,194872],"mapped",[30064]],[[194873,194873],"mapped",[139679]],[[194874,194874],"mapped",[30224]],[[194875,194875],"mapped",[151457]],[[194876,194876],"mapped",[151480]],[[194877,194877],"mapped",[151620]],[[194878,194878],"mapped",[16380]],[[194879,194879],"mapped",[16392]],[[194880,194880],"mapped",[30452]],[[194881,194881],"mapped",[151795]],[[194882,194882],"mapped",[151794]],[[194883,194883],"mapped",[151833]],[[194884,194884],"mapped",[151859]],[[194885,194885],"mapped",[30494]],[[194886,194887],"mapped",[30495]],[[194888,194888],"mapped",[30538]],[[194889,194889],"mapped",[16441]],[[194890,194890],"mapped",[30603]],[[194891,194891],"mapped",[16454]],[[194892,194892],"mapped",[16534]],[[194893,194893],"mapped",[152605]],[[194894,194894],"mapped",[30798]],[[194895,194895],"mapped",[30860]],[[194896,194896],"mapped",[30924]],[[194897,194897],"mapped",[16611]],[[194898,194898],"mapped",[153126]],[[194899,194899],"mapped",[31062]],[[194900,194900],"mapped",[153242]],[[194901,194901],"mapped",[153285]],[[194902,194902],"mapped",[31119]],[[194903,194903],"mapped",[31211]],[[194904,194904],"mapped",[16687]],[[194905,194905],"mapped",[31296]],[[194906,194906],"mapped",[31306]],[[194907,194907],"mapped",[31311]],[[194908,194908],"mapped",[153980]],[[194909,194910],"mapped",[154279]],[[194911,194911],"disallowed"],[[194912,194912],"mapped",[16898]],[[194913,194913],"mapped",[154539]],[[194914,194914],"mapped",[31686]],[[194915,194915],"mapped",
[31689]],[[194916,194916],"mapped",[16935]],[[194917,194917],"mapped",[154752]],[[194918,194918],"mapped",[31954]],[[194919,194919],"mapped",[17056]],[[194920,194920],"mapped",[31976]],[[194921,194921],"mapped",[31971]],[[194922,194922],"mapped",[32000]],[[194923,194923],"mapped",[155526]],[[194924,194924],"mapped",[32099]],[[194925,194925],"mapped",[17153]],[[194926,194926],"mapped",[32199]],[[194927,194927],"mapped",[32258]],[[194928,194928],"mapped",[32325]],[[194929,194929],"mapped",[17204]],[[194930,194930],"mapped",[156200]],[[194931,194931],"mapped",[156231]],[[194932,194932],"mapped",[17241]],[[194933,194933],"mapped",[156377]],[[194934,194934],"mapped",[32634]],[[194935,194935],"mapped",[156478]],[[194936,194936],"mapped",[32661]],[[194937,194937],"mapped",[32762]],[[194938,194938],"mapped",[32773]],[[194939,194939],"mapped",[156890]],[[194940,194940],"mapped",[156963]],[[194941,194941],"mapped",[32864]],[[194942,194942],"mapped",[157096]],[[194943,194943],"mapped",[32880]],[[194944,194944],"mapped",[144223]],[[194945,194945],"mapped",[17365]],[[194946,194946],"mapped",[32946]],[[194947,194947],"mapped",[33027]],[[194948,194948],"mapped",[17419]],[[194949,194949],"mapped",[33086]],[[194950,194950],"mapped",[23221]],[[194951,194951],"mapped",[157607]],[[194952,194952],"mapped",[157621]],[[194953,194953],"mapped",[144275]],[[194954,194954],"mapped",[144284]],[[194955,194955],"mapped",[33281]],[[194956,194956],"mapped",[33284]],[[194957,194957],"mapped",[36766]],[[194958,194958],"mapped",[17515]],[[194959,194959],"mapped",[33425]],[[194960,194960],"mapped",[33419]],[[194961,194961],"mapped",[33437]],[[194962,194962],"mapped",[21171]],[[194963,194963],"mapped",[33457]],[[194964,194964],"mapped",[33459]],[[194965,194965],"mapped",[33469]],[[194966,194966],"mapped",[33510]],[[194967,194967],"mapped",[158524]],[[194968,194968],"mapped",[33509]],[[194969,194969],"mapped",[33565]],[[194970,194970],"mapped",[33635]],[[194971,194971],"mapped",[33709]],[[194972,194972],"mapped",[33571]],[[194973,194973],"mapped",[33725]],[[194974,194974],"mapped",[33767]],[[194975,194975],"mapped",[33879]],[[194976,194976],"mapped",[33619]],[[194977,194977],"mapped",[33738]],[[194978,194978],"mapped",[33740]],[[194979,194979],"mapped",[33756]],[[194980,194980],"mapped",[158774]],[[194981,194981],"mapped",[159083]],[[194982,194982],"mapped",[158933]],[[194983,194983],"mapped",[17707]],[[194984,194984],"mapped",[34033]],[[194985,194985],"mapped",[34035]],[[194986,194986],"mapped",[34070]],[[194987,194987],"mapped",[160714]],[[194988,194988],"mapped",[34148]],[[194989,194989],"mapped",[159532]],[[194990,194990],"mapped",[17757]],[[194991,194991],"mapped",[17761]],[[194992,194992],"mapped",[159665]],[[194993,194993],"mapped",[159954]],[[194994,194994],"mapped",[17771]],[[194995,194995],"mapped",[34384]],[[194996,194996],"mapped",[34396]],[[194997,194997],"mapped",[34407]],[[194998,194998],"mapped",[34409]],[[194999,194999],"mapped",[34473]],[[195000,195000],"mapped",[34440]],[[195001,195001],"mapped",[34574]],[[195002,195002],"mapped",[34530]],[[195003,195003],"mapped",[34681]],[[195004,195004],"mapped",[34600]],[[195005,195005],"mapped",[34667]],[[195006,195006],"mapped",[34694]],[[195007,195007],"disallowed"],[[195008,195008],"mapped",[34785]],[[195009,195009],"mapped",[34817]],[[195010,195010],"mapped",[17913]],[[195011,195011],"mapped",[34912]],[[195012,195012],"mapped",[34915]],[[195013,195013],"mapped",[161383]],[[195014,195014],"mapped",[35031]],[[195015,195015],"mapped",[35038]],[[195016,195016],"mapped",[
17973]],[[195017,195017],"mapped",[35066]],[[195018,195018],"mapped",[13499]],[[195019,195019],"mapped",[161966]],[[195020,195020],"mapped",[162150]],[[195021,195021],"mapped",[18110]],[[195022,195022],"mapped",[18119]],[[195023,195023],"mapped",[35488]],[[195024,195024],"mapped",[35565]],[[195025,195025],"mapped",[35722]],[[195026,195026],"mapped",[35925]],[[195027,195027],"mapped",[162984]],[[195028,195028],"mapped",[36011]],[[195029,195029],"mapped",[36033]],[[195030,195030],"mapped",[36123]],[[195031,195031],"mapped",[36215]],[[195032,195032],"mapped",[163631]],[[195033,195033],"mapped",[133124]],[[195034,195034],"mapped",[36299]],[[195035,195035],"mapped",[36284]],[[195036,195036],"mapped",[36336]],[[195037,195037],"mapped",[133342]],[[195038,195038],"mapped",[36564]],[[195039,195039],"mapped",[36664]],[[195040,195040],"mapped",[165330]],[[195041,195041],"mapped",[165357]],[[195042,195042],"mapped",[37012]],[[195043,195043],"mapped",[37105]],[[195044,195044],"mapped",[37137]],[[195045,195045],"mapped",[165678]],[[195046,195046],"mapped",[37147]],[[195047,195047],"mapped",[37432]],[[195048,195048],"mapped",[37591]],[[195049,195049],"mapped",[37592]],[[195050,195050],"mapped",[37500]],[[195051,195051],"mapped",[37881]],[[195052,195052],"mapped",[37909]],[[195053,195053],"mapped",[166906]],[[195054,195054],"mapped",[38283]],[[195055,195055],"mapped",[18837]],[[195056,195056],"mapped",[38327]],[[195057,195057],"mapped",[167287]],[[195058,195058],"mapped",[18918]],[[195059,195059],"mapped",[38595]],[[195060,195060],"mapped",[23986]],[[195061,195061],"mapped",[38691]],[[195062,195062],"mapped",[168261]],[[195063,195063],"mapped",[168474]],[[195064,195064],"mapped",[19054]],[[195065,195065],"mapped",[19062]],[[195066,195066],"mapped",[38880]],[[195067,195067],"mapped",[168970]],[[195068,195068],"mapped",[19122]],[[195069,195069],"mapped",[169110]],[[195070,195071],"mapped",[38923]],[[195072,195072],"mapped",[38953]],[[195073,195073],"mapped",[169398]],[[195074,195074],"mapped",[39138]],[[195075,195075],"mapped",[19251]],[[195076,195076],"mapped",[39209]],[[195077,195077],"mapped",[39335]],[[195078,195078],"mapped",[39362]],[[195079,195079],"mapped",[39422]],[[195080,195080],"mapped",[19406]],[[195081,195081],"mapped",[170800]],[[195082,195082],"mapped",[39698]],[[195083,195083],"mapped",[40000]],[[195084,195084],"mapped",[40189]],[[195085,195085],"mapped",[19662]],[[195086,195086],"mapped",[19693]],[[195087,195087],"mapped",[40295]],[[195088,195088],"mapped",[172238]],[[195089,195089],"mapped",[19704]],[[195090,195090],"mapped",[172293]],[[195091,195091],"mapped",[172558]],[[195092,195092],"mapped",[172689]],[[195093,195093],"mapped",[40635]],[[195094,195094],"mapped",[19798]],[[195095,195095],"mapped",[40697]],[[195096,195096],"mapped",[40702]],[[195097,195097],"mapped",[40709]],[[195098,195098],"mapped",[40719]],[[195099,195099],"mapped",[40726]],[[195100,195100],"mapped",[40763]],[[195101,195101],"mapped",[173568]],[[195102,196605],"disallowed"],[[196606,196607],"disallowed"],[[196608,262141],"disallowed"],[[262142,262143],"disallowed"],[[262144,327677],"disallowed"],[[327678,327679],"disallowed"],[[327680,393213],"disallowed"],[[393214,393215],"disallowed"],[[393216,458749],"disallowed"],[[458750,458751],"disallowed"],[[458752,524285],"disallowed"],[[524286,524287],"disallowed"],[[524288,589821],"disallowed"],[[589822,589823],"disallowed"],[[589824,655357],"disallowed"],[[655358,655359],"disallowed"],[[655360,720893],"disallowed"],[[720894,720895],"disallowed"],[[720896,786429],"disallowe
d"],[[786430,786431],"disallowed"],[[786432,851965],"disallowed"],[[851966,851967],"disallowed"],[[851968,917501],"disallowed"],[[917502,917503],"disallowed"],[[917504,917504],"disallowed"],[[917505,917505],"disallowed"],[[917506,917535],"disallowed"],[[917536,917631],"disallowed"],[[917632,917759],"disallowed"],[[917760,917999],"ignored"],[[918000,983037],"disallowed"],[[983038,983039],"disallowed"],[[983040,1048573],"disallowed"],[[1048574,1048575],"disallowed"],[[1048576,1114109],"disallowed"],[[1114110,1114111],"disallowed"]] \ No newline at end of file diff --git a/dist/node_modules/tr46/package.json b/dist/node_modules/tr46/package.json new file mode 100644 index 0000000..b6826da --- /dev/null +++ b/dist/node_modules/tr46/package.json @@ -0,0 +1,31 @@ +{ + "name": "tr46", + "version": "0.0.3", + "description": "An implementation of the Unicode TR46 spec", + "main": "index.js", + "scripts": { + "test": "mocha", + "pretest": "node scripts/getLatestUnicodeTests.js", + "prepublish": "node scripts/generateMappingTable.js" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/Sebmaster/tr46.js.git" + }, + "keywords": [ + "unicode", + "tr46", + "url", + "whatwg" + ], + "author": "Sebastian Mayr ", + "license": "MIT", + "bugs": { + "url": "https://github.com/Sebmaster/tr46.js/issues" + }, + "homepage": "https://github.com/Sebmaster/tr46.js#readme", + "devDependencies": { + "mocha": "^2.2.5", + "request": "^2.57.0" + } +} diff --git a/dist/node_modules/tunnel/.idea/encodings.xml b/dist/node_modules/tunnel/.idea/encodings.xml new file mode 100644 index 0000000..97626ba --- /dev/null +++ b/dist/node_modules/tunnel/.idea/encodings.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/dist/node_modules/tunnel/.idea/modules.xml b/dist/node_modules/tunnel/.idea/modules.xml new file mode 100644 index 0000000..27bf888 --- /dev/null +++ b/dist/node_modules/tunnel/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/dist/node_modules/tunnel/.idea/node-tunnel.iml b/dist/node_modules/tunnel/.idea/node-tunnel.iml new file mode 100644 index 0000000..24643cc --- /dev/null +++ b/dist/node_modules/tunnel/.idea/node-tunnel.iml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/dist/node_modules/tunnel/.idea/vcs.xml b/dist/node_modules/tunnel/.idea/vcs.xml new file mode 100644 index 0000000..94a25f7 --- /dev/null +++ b/dist/node_modules/tunnel/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/dist/node_modules/tunnel/.idea/workspace.xml b/dist/node_modules/tunnel/.idea/workspace.xml new file mode 100644 index 0000000..1a318c8 --- /dev/null +++ b/dist/node_modules/tunnel/.idea/workspace.xml @@ -0,0 +1,797 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + max + onconne + + + + + + + + + + + + + false + + false + false + true + + + true + DEFINITION_ORDER + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +